#!/usr/bin/env python3
"""loaih - build and publish LibreOffice AppImages.

This module drives the whole AppImage production cycle: resolving the
requested version, downloading the upstream .deb tarballs, unpacking them
into an AppDir, running appimagetool, checksumming and publishing the
results to a storage path.
"""

import glob
import os
import re
import shlex
import shutil
import subprocess
import sys
import tempfile
import urllib.request


class Build(object):
    """Builds, checks and publishes the AppImage(s) for one queried version."""

    # Languages bundled by the 'standard' language selection.
    LANGSTD = [ 'ar', 'de', 'en-GB', 'es', 'fr', 'it', 'ja', 'ko', 'pt', 'pt-BR', 'ru', 'zh-CN', 'zh-TW' ]
    # Languages bundled by the 'basic' selection.
    LANGBASIC = [ 'en-GB' ]
    # Architectures a build round may cover.
    ARCHSTD = [ u'x86', u'x86_64' ]

    def __init__(self, query, arch):
        """Build all versions that can be found in the indicated repo.

        query: a version number ('7.3.2') or a branch name ('daily',
               'prerelease', 'fresh', 'still').
        arch:  iterable of architectures to process (subset of ARCHSTD).
        """
        self.query = query
        self.arch = arch

        # Deferred import: resolving a version pulls in lxml/packaging via
        # loaih.versions; keeping it function-local lets this module be
        # imported in minimal environments.
        import loaih.versions as versions

        # Getting versions and so on
        v = versions.BuildVersion(self.query)
        self.version = v.version
        self.short_version = str.join('.', self.version.split('.')[0:2])
        # A branch (non-numeric) query also produces generalized symlinks
        # later on; see generalize_and_link().
        self.branch_version = None
        if not '.' in self.query:
            self.branch_version = self.query
        self.url = v.basedirurl

        # Other default values
        self.language = 'basic'
        self.offline_help = False
        self.portable = False
        self.updatable = True
        self.sign = True
        self.storage_path = '/mnt/appimage'
        self.download_path = '/var/tmp/downloads'

        # Specific build version
        self.appversion = ''
        self.appimagefilename = {}
        self.zsyncfilename = {}

        # Creating a temporary build directory
        self.builddir = tempfile.mkdtemp()
        self.tarballs = {}
        self.built = { u'x86': False, u'x86_64': False }

        # Preparing the default for the relative path on the storage for
        # different versions.
        # The path will be evaluated as part of the check() function, as it
        # is understood the storage_path can be changed before that phase.
        self.relative_path = []
        self.full_path = ''
        self.baseurl = ''


    def calculate(self):
        """Calculate exclusions and other variables (names, paths)."""
        # AppName: development streams ship under a distinct product name.
        self.appname = 'LibreOffice' if not self.query == 'daily' and not self.query == 'prerelease' else 'LibreOfficeDev'

        # Calculating languagepart ('.it', '.en-US-it', ...)
        self.languagepart = "."
        if ',' in self.language:
            self.languagepart += self.language.replace(',', '-')
        else:
            self.languagepart += self.language

        # Calculating help part
        self.helppart = '.help' if self.offline_help else ''

        # Building the required names
        for arch in Build.ARCHSTD:
            self.appimagefilename[arch] = self.__gen_appimagefilename__(self.version, arch)
            self.zsyncfilename[arch] = self.appimagefilename[arch] + '.zsync'

        # Mandate to the private function to calculate the full_path available
        # for the storage and the checks.
        self.__calculate_full_path__()


    def __gen_appimagefilename__(self, version, arch):
        """Generalize the construction of the name of the app."""
        self.appversion = version + self.languagepart + self.helppart
        return self.appname + f'-{self.appversion}-{arch}.AppImage'


    def __calculate_full_path__(self):
        """Calculate relative path of the build, based on internal other variables."""
        if len(self.relative_path) == 0:
            if self.query == 'daily':
                self.relative_path.append('daily')
            # FIX: was 'primageerelease' (search/replace artifact); prerelease
            # builds were never routed to the prerelease/ subfolder.
            elif self.query == 'prerelease':
                self.relative_path.append('prerelease')

            # Not the same check, an additional one
            if self.portable:
                self.relative_path.append('portable')

        fullpath_arr = self.storage_path.split('/')
        # Joining relative path only if it is not null
        if len(self.relative_path) > 0:
            fullpath_arr.extend(self.relative_path)
        self.full_path = re.sub(r"/+", '/', str.join('/', fullpath_arr))


    def check(self):
        """Checking if the requested AppImage has been already built."""
        # Make sure names/paths are computed before searching the storage.
        if not len(self.appimagefilename) == 2:
            self.calculate()

        for arch in self.arch:
            print(f"Searching for {self.appimagefilename[arch]}")
            res = subprocess.run(shlex.split(f"find {self.full_path} -name {self.appimagefilename[arch]}"), capture_output=True, env={ "LC_ALL": "C" }, text=True, encoding='utf-8')

            if "No such file or directory" in res.stderr:
                # Folder is not existent: so the version was not built
                # Build stays false, and we go to the next arch
                continue

            if res.stdout and len(res.stdout.strip("\n")) > 0:
                # All good, the command was executed fine.
                print(f"Build for {self.version} found.")
                self.built[arch] = True

            if self.built[arch]:
                print(f"The requested AppImage already exists on storage for {arch}. I'll skip downloading, building and moving the results.")


    def download(self):
        """Downloads the contents of the URL as it was a folder."""
        # Deferred import: lxml is only needed when parsing remote listings.
        from lxml import etree

        print(f"Started downloads for {self.version}. Please wait.")
        for arch in self.arch:
            # Checking if a valid path has been provided
            if self.url[arch] == '-':
                print(f"No build has been provided for the requested AppImage for {arch}. Continue with other options.")
                # Faking already built it so to skip other checks.
                self.built[arch] = True
                continue

            if self.built[arch]:
                print(f"A build for {arch} was already found. Skipping specific packages.")
                continue

            # Identifying downloads: keep only the Debian tarballs.
            contents = etree.HTML(urllib.request.urlopen(self.url[arch]).read()).xpath("//td/a")
            self.tarballs[arch] = [ x.text for x in contents if x.text.endswith('tar.gz') and 'deb' in x.text ]

            # Create and change directory to the download location
            os.makedirs(self.download_path, exist_ok = True)
            os.chdir(self.download_path)
            for archive in self.tarballs[arch]:
                # If the archive is already there, do not do anything.
                if os.path.exists(archive):
                    continue

                # Download the archive; best-effort, a miss is only reported.
                try:
                    urllib.request.urlretrieve(self.url[arch] + archive, archive)
                except Exception:
                    # Narrowed from a bare 'except:': same best-effort
                    # behavior, but no longer swallows KeyboardInterrupt.
                    print(f"Failed to download {archive}.")

        print(f"Finished downloads for {self.version}.")

    def build(self):
        """Building all the versions."""

        for arch in self.arch:
            if self.built[arch]:
                # Already built for arch or path not available. User has already been warned.
                continue

            # Preparation tasks
            self.appnamedir = os.path.join(self.builddir, self.appname)
            os.makedirs(self.appnamedir, exist_ok=True)
            # And then cd to the appname folder.
            os.chdir(self.appnamedir)
            # Download appimagetool from github
            appimagetoolurl = f"https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-{arch}.AppImage"
            urllib.request.urlretrieve(appimagetoolurl, 'appimagetool')
            os.chmod('appimagetool', 0o755)

            # Build the requested version.
            self.__unpackbuild__(arch)


    def __unpackbuild__(self, arch):
        """Unpack the selected tarballs and assemble the AppImage for arch."""
        # We start with the main tarball, then add language/help packs.
        buildtarballs = [ self.tarballs[arch][0] ]

        # Let's process standard languages and append results to the
        # buildtarball
        if self.language == 'basic':
            if self.offline_help:
                buildtarballs.extend([ x for x in self.tarballs[arch] if 'pack_en-GB' in x ])
            else:
                buildtarballs.extend([ x for x in self.tarballs[arch] if 'langpack_en-GB' in x])
        elif self.language == 'standard':
            for lang in Build.LANGSTD:
                if self.offline_help:
                    buildtarballs.extend([ x for x in self.tarballs[arch] if ('pack_' + lang) in x ])
                else:
                    buildtarballs.extend([ x for x in self.tarballs[arch] if ('langpack_' + lang) in x ])
        elif self.language == 'full':
            if self.offline_help:
                # We need also all help. Let's replace buildtarball with the
                # whole bunch
                buildtarballs = self.tarballs[arch]
            else:
                buildtarballs.extend([ x for x in self.tarballs[arch] if 'langpack' in x ])
        else:
            # Looping for each language in self.language
            for lang in self.language.split(","):
                if self.offline_help:
                    buildtarballs.extend([ x for x in self.tarballs[arch] if ('pack' + lang) in x ])
                else:
                    buildtarballs.extend([ x for x in self.tarballs[arch] if ('langpack' + lang) in x ])

        os.chdir(self.appnamedir)

        # Unpacking the tarballs
        for archive in buildtarballs:
            subprocess.run(shlex.split(f"tar xzf {self.download_path}/{archive}"))

        # create appimagedir
        self.appimagedir = os.path.join(self.builddir, self.appname, self.appname + '.AppDir')
        os.makedirs(self.appimagedir, exist_ok = True)

        # At this point, let's decompress the deb packages
        subprocess.run(shlex.split(r"find .. -iname '*.deb' -exec dpkg -x {} . \;"), cwd=self.appimagedir)

        if self.portable:
            subprocess.run(shlex.split(r"find . -type f -iname 'bootstraprc' -exec sed -i 's|^UserInstallation=.*|UserInstallation=\$SYSUSERCONFIG/libreoffice/%s|g' {} \+" % self.short_version), cwd=self.appimagedir)

        # Changing desktop file
        subprocess.run(shlex.split(r"find . -iname startcenter.desktop -exec cp {} . \;"), cwd=self.appimagedir)
        # FIX: dropped the stray "> startcenter.desktop" shell-redirect
        # remnant; shlex passed '>' to sed as a bogus extra file operand.
        subprocess.run(shlex.split("sed --in-place 's:^Name=.*$:Name=%s:' startcenter.desktop" % self.appname), cwd=self.appimagedir)

        subprocess.run(shlex.split(r"find . -name '*startcenter.png' -path '*hicolor*48x48*' -exec cp {} . \;"), cwd=self.appimagedir)

        # Find the name of the binary called in the desktop file.
        binaryname = ''
        with open(os.path.join(self.appimagedir, 'startcenter.desktop'), 'r') as d:
            for line in d:
                if re.match(r'^Exec', line):
                    binaryname = line.split('=')[-1].split(' ')[0]
                    # Stop at the first match.
                    break

        bindir = os.path.join(self.appimagedir, 'usr', 'bin')
        os.makedirs(bindir, exist_ok = True)
        subprocess.run(shlex.split(r"find ../../opt -iname soffice -path '*program*' -exec ln -sf {} ./%s \;" % binaryname), cwd=bindir)

        # Download AppRun from github
        apprunurl = f"https://github.com/AppImage/AppImageKit/releases/download/continuous/AppRun-{arch}"
        dest = os.path.join(self.appimagedir, 'AppRun')
        urllib.request.urlretrieve(apprunurl, dest)
        os.chmod(dest, 0o755)

        # Dealing with extra options
        buildopts = []
        if self.sign:
            buildopts.append('--sign')

        # adding zsync build if updatable
        if self.updatable:
            buildopts.append(f"-u 'zsync|{self.zsyncfilename[arch]}'")

        buildopts_str = str.join(' ', buildopts)
        # Build the number-specific build
        subprocess.run(shlex.split(f"{self.appnamedir}/appimagetool {buildopts_str} -v ./{self.appname}.AppDir/"), env={ "VERSION": self.appversion })

        print(f"Built AppImage version {self.appversion}")

        # Cleanup phase, before new run.
        for deb in glob.glob(self.appnamedir + '/*.deb'):
            os.remove(deb)
        subprocess.run(shlex.split(r"find . -mindepth 1 -maxdepth 1 -type d -exec rm -rf {} \+"))


    def checksums(self):
        """Create checksums of the built versions."""
        # Skip checksum if initially the build was already found in the storage directory
        if all(self.built.values()):
            return

        os.chdir(self.appnamedir)
        for arch in self.arch:
            for item in [ self.appimagefilename[arch], self.zsyncfilename[arch] ]:
                # For any built arch, find out if a file exists.
                self.__create_checksum__(item)


    def __create_checksum__(self, file):
        """Internal function to create checksum file."""
        checksum = subprocess.run(shlex.split(f"md5sum {file}"), capture_output=True, text=True, encoding='utf-8')
        if checksum.stdout:
            with open(f"{file}.md5", 'w') as c:
                c.write(checksum.stdout)

    def publish(self):
        """Moves built versions to definitive storage."""
        if all(self.built.values()):
            # All files are already present in the full_path
            return

        os.chdir(self.appnamedir)
        # Forcing creation of subfolders, in case there is a new build
        os.makedirs(self.full_path, exist_ok = True)
        for file in glob.glob("*.AppImage*"):
            subprocess.run(shlex.split(f"cp -f {file} {self.full_path}"))


    def generalize_and_link(self):
        """Creates the needed generalized files if needed."""
        # If called with a pointed version, no generalize and link necessary.
        if not self.branch_version:
            return
        appimagefilename = {}
        zsyncfilename = {}

        # Creating versions for short version and query text
        versions = [ self.short_version, self.branch_version ]
        for arch in Build.ARCHSTD:
            # If already built, do not do anything.
            if self.built[arch]:
                continue

            os.chdir(self.full_path)
            # if the appimage for the reported arch is not found, skip to next
            # arch
            if not os.path.exists(self.appimagefilename[arch]):
                continue

            # Doing it both for short_name and for branchname
            for version in versions:
                appimagefilename[arch] = self.appname + '-' + version + self.languagepart + self.helppart + f'-{arch}.AppImage'
                zsyncfilename[arch] = appimagefilename[arch] + '.zsync'

                # Create the symlink
                print(f"Creating {appimagefilename[arch]} and checksums.")
                if os.path.exists(appimagefilename[arch]):
                    os.unlink(appimagefilename[arch])
                os.symlink(self.appimagefilename[arch], appimagefilename[arch])
                # Create the checksum for the AppImage
                self.__create_checksum__(appimagefilename[arch])
                # Do not continue if no zsync file is provided.
                if not self.updatable:
                    continue

                print(f"Creating zsync file for version {version}.")
                if os.path.exists(zsyncfilename[arch]):
                    os.unlink(zsyncfilename[arch])
                shutil.copyfile(self.zsyncfilename[arch], zsyncfilename[arch])
                # Editing the zsyncfile so updates point at the generalized name
                subprocess.run(shlex.split(f"sed --in-place 's/^Filename:.*$/Filename: {appimagefilename[arch]}/' {zsyncfilename[arch]}"))
                self.__create_checksum__(zsyncfilename[arch])


    def __del__(self):
        """Destructor: clean up the temporary build directory."""
        shutil.rmtree(self.builddir)
'en-GB', 'es', 'fr', 'it', 'ja', 'ko', 'pt', 'pt-BR', 'ru', 'zh-CN', 'zh-TW' ] - LANGBASIC = [ 'en-GB' ] - ARCHSTD = [ u'x86', u'x86_64' ] - - def __init__(self, query, arch, version = None): - super().__init__(query, version) - self.arch = arch - self.short_version = str.join('.', self.version.split('.')[0:2]) - self.branch_version = None - if not '.' in self.query: - self.branch_version = self.query - self.url = self.basedirurl - - # Other default values - self.language = 'basic' - self.offline_help = False - self.portable = False - self.updatable = True - self.sign = True - self.storage_path = '/mnt/appimage' - self.download_path = '/var/tmp/downloads' - - # Specific build version - self.appversion = '' - self.appimagefilename = {} - self.zsyncfilename = {} - - # Creating a tempfile - self.builddir = tempfile.mkdtemp() - self.tarballs = {} - self.built = { u'x86': False, u'x86_64': False } - - # Preparing the default for the relative path on the storage for - # different versions. - # The path will evaluated as part of the check() function, as it is - # understood the storage_path can be changed before that phase. - self.relative_path = [] - self.full_path = '' - self.baseurl = '' - - def calculate(self): - """Calculate exclusions and other variables.""" - # AppName - self.appname = 'LibreOffice' if not self.query == 'daily' and not self.query == 'prerelease' else 'LibreOfficeDev' - - # Calculating languagepart - self.languagepart = "." - if ',' in self.language: - self.languagepart += self.language.replace(',', '-') - else: - self.languagepart += self.language - - # Calculating help part - self.helppart = '.help' if self.offline_help else '' - - # Building the required names - for arch in Build.ARCHSTD: - self.appimagefilename[arch] = self.__gen_appimagefilename__(self.version, arch) - self.zsyncfilename[arch] = self.appimagefilename[arch] + '.zsync' - - # Mandate to the private function to calculate the full_path available - # for the storage and the checks. 
- self.__calculate_full_path__() - - - def __gen_appimagefilename__(self, version, arch): - """Generalize the construction of the name of the app.""" - self.appversion = version + self.languagepart + self.helppart - return self.appname + f'-{self.appversion}-{arch}.AppImage' - - - def __calculate_full_path__(self): - """Calculate relative path of the build, based on internal other variables.""" - if len(self.relative_path) == 0: - if self.query == 'daily': - self.relative_path.append('daily') - elif self.query == 'prerelease': - self.relative_path.append('prerelease') - - # Not the same check, an additional one - if self.portable: - self.relative_path.append('portable') - - fullpath_arr = self.storage_path.split('/') - # Joining relative path only if it is not null - if len(self.relative_path) > 0: - fullpath_arr.extend(self.relative_path) - self.full_path = re.sub(r"/+", '/', str.join('/', fullpath_arr)) - - - def check(self): - """Checking if the requested AppImage has been already built.""" - if not len(self.appimagefilename) == 2: - self.calculate() - - for arch in self.arch: - print(f"Searching for {self.appimagefilename[arch]}") - res = subprocess.run(shlex.split(f"find {self.full_path} -name {self.appimagefilename[arch]}"), capture_output=True, env={ "LC_ALL": "C" }, text=True, encoding='utf-8') - - if "No such file or directory" in res.stderr: - # Folder is not existent: so the version was not built - # Build stays false, and we go to the next arch - continue - - if res.stdout and len(res.stdout.strip("\n")) > 0: - # All good, the command was executed fine. - print(f"Build for {self.version} found.") - self.built[arch] = True - - if self.built[arch]: - print(f"The requested AppImage already exists on storage for {arch}. I'll skip downloading, building and moving the results.") - - - def download(self): - """Downloads the contents of the URL as it was a folder.""" - print(f"Started downloads for {self.version}. 
Please wait.") - for arch in self.arch: - # Checking if a valid path has been provided - if self.url[arch] == '-': - print(f"No build has been provided for the requested AppImage for {arch}. Continue with other options.") - # Faking already built it so to skip other checks. - self.built[arch] = True - continue - - if self.built[arch]: - print(f"A build for {arch} was already found. Skipping specific packages.") - continue - - # Identifying downloads - contents = etree.HTML(urllib.request.urlopen(self.url[arch]).read()).xpath("//td/a") - self.tarballs[arch] = [ x.text for x in contents if x.text.endswith('tar.gz') and 'deb' in x.text ] - tarballs = self.tarballs[arch] - maintarball = tarballs[0] - - # Create and change directory to the download location - os.makedirs(self.download_path, exist_ok = True) - os.chdir(self.download_path) - for archive in tarballs: - # If the archive is already there, do not do anything. - if os.path.exists(archive): - continue - - # Download the archive - try: - urllib.request.urlretrieve(self.url[arch] + archive, archive) - except: - print(f"Failed to download {archive}.") - - print(f"Finished downloads for {self.version}.") - - def build(self): - """Building all the versions.""" - - for arch in self.arch: - if self.built[arch]: - # Already built for arch or path not available. User has already been warned. - continue - - # Preparation tasks - self.appnamedir = os.path.join(self.builddir, self.appname) - os.makedirs(self.appnamedir, exist_ok=True) - # And then cd to the appname folder. - os.chdir(self.appnamedir) - # Download appimagetool from github - appimagetoolurl = f"https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-{arch}.AppImage" - urllib.request.urlretrieve(appimagetoolurl, 'appimagetool') - os.chmod('appimagetool', 0o755) - - # Build the requested version. 
- self.__unpackbuild__(arch) - - - def __unpackbuild__(self, arch): - # We start by filtering out tarballs from the list - buildtarballs = [ self.tarballs[arch][0] ] - - # Let's process standard languages and append results to the - # buildtarball - if self.language == 'basic': - if self.offline_help: - buildtarballs.extend([ x for x in self.tarballs[arch] if 'pack_en-GB' in x ]) - else: - buildtarballs.extend([ x for x in self.tarballs[arch] if 'langpack_en-GB' in x]) - elif self.language == 'standard': - for lang in Build.LANGSTD: - if self.offline_help: - buildtarballs.extend([ x for x in self.tarballs[arch] if ('pack_' + lang) in x ]) - else: - buildtarballs.extend([ x for x in self.tarballs[arch] if ('langpack_' + lang) in x ]) - elif self.language == 'full': - if self.offline_help: - # We need also all help. Let's replace buildtarball with the - # whole bunch - buildtarballs = self.tarballs[arch] - else: - buildtarballs.extend([ x for x in self.tarballs[arch] if 'langpack' in x ]) - else: - # Looping for each language in self.language - for lang in self.language.split(","): - if self.offline_help: - buildtarballs.extend([ x for x in self.tarballs[arch] if ('pack' + lang) in x ]) - else: - buildtarballs.extend([ x for x in self.tarballs[arch] if ('langpack' + lang) in x ]) - - os.chdir(self.appnamedir) - - # Unpacking the tarballs - for archive in buildtarballs: - subprocess.run(shlex.split(f"tar xzf {self.download_path}/{archive}")) - - # create appimagedir - self.appimagedir = os.path.join(self.builddir, self.appname, self.appname + '.AppDir') - os.makedirs(self.appimagedir, exist_ok = True) - - # At this point, let's decompress the deb packages - subprocess.run(shlex.split("find .. -iname '*.deb' -exec dpkg -x {} . \;"), cwd=self.appimagedir) - - if self.portable: - subprocess.run(shlex.split("find . 
-type f -iname 'bootstraprc' -exec sed -i 's|^UserInstallation=.*|UserInstallation=\$SYSUSERCONFIG/libreoffice/%s|g' {} \+" % self.short_version), cwd=self.appimagedir) - - # Changing desktop file - subprocess.run(shlex.split("find . -iname startcenter.desktop -exec cp {} . \;"), cwd=self.appimagedir) - subprocess.run(shlex.split("sed --in-place 's:^Name=.*$:Name=%s:' startcenter.desktop > startcenter.desktop" % self.appname), cwd=self.appimagedir) - - subprocess.run(shlex.split("find . -name '*startcenter.png' -path '*hicolor*48x48*' -exec cp {} . \;"), cwd=self.appimagedir) - - # Find the name of the binary called in the desktop file. - binaryname = '' - with open(os.path.join(self.appimagedir, 'startcenter.desktop'), 'r') as d: - a = d.readlines() - for line in a: - if re.match(r'^Exec', line): - binaryname = line.split('=')[-1].split(' ')[0] - # Esci al primo match - break - #binary_exec = subprocess.run(shlex.split(r"awk 'BEGIN { FS = \"=\" } /^Exec/ { print $2; exit }' startcenter.desktop | awk '{ print $1 }'"), cwd=self.appimagedir, text=True, encoding='utf-8') - #binaryname = binary_exec.stdout.strip("\n") - - bindir=os.path.join(self.appimagedir, 'usr', 'bin') - os.makedirs(bindir, exist_ok = True) - subprocess.run(shlex.split("find ../../opt -iname soffice -path '*program*' -exec ln -sf {} ./%s \;" % binaryname), cwd=bindir) - - # Download AppRun from github - apprunurl = f"https://github.com/AppImage/AppImageKit/releases/download/continuous/AppRun-{arch}" - dest = os.path.join(self.appimagedir, 'AppRun') - urllib.request.urlretrieve(apprunurl, dest) - os.chmod(dest, 0o755) - - # Dealing with extra options - buildopts = [] - if self.sign: - buildopts.append('--sign') - - # adding zsync build if updatable - if self.updatable: - buildopts.append(f"-u 'zsync|{self.zsyncfilename[arch]}'") - - buildopts_str = str.join(' ', buildopts) - # Build the number-specific build - subprocess.run(shlex.split(f"{self.appnamedir}/appimagetool {buildopts_str} -v 
./{self.appname}.AppDir/"), env={ "VERSION": self.appversion }) - - print(f"Built AppImage version {self.appversion}") - - # Cleanup phase, before new run. - for deb in glob.glob(self.appnamedir + '/*.deb'): - os.remove(deb) - subprocess.run(shlex.split("find . -mindepth 1 -maxdepth 1 -type d -exec rm -rf {} \+")) - - - def checksums(self): - """Create checksums of the built versions.""" - # Skip checksum if initally the build was already found in the storage directory - if all(self.built.values()): - return - - os.chdir(self.appnamedir) - for arch in self.arch: - for item in [ self.appimagefilename[arch], self.zsyncfilename[arch] ]: - # For any built arch, find out if a file exist. - self.__create_checksum__(item) - - - def __create_checksum__(self, file): - """Internal function to create checksum file.""" - checksum = subprocess.run(shlex.split(f"md5sum {file}"), capture_output=True, text=True, encoding='utf-8') - if checksum.stdout: - with open(f"{file}.md5", 'w') as c: - c.write(checksum.stdout) - - def publish(self): - """Moves built versions to definitive storage.""" - if all(self.built.values()): - # All files are already present in the full_path - return - - os.chdir(self.appnamedir) - # Forcing creation of subfolders, in case there is a new build - os.makedirs(self.full_path, exist_ok = True) - for file in glob.glob("*.AppImage*"): - subprocess.run(shlex.split(f"cp -f {file} {self.full_path}")) - - - def generalize_and_link(self): - """Creates the needed generalized files if needed.""" - # If called with a pointed version, no generalize and link necessary. - if not self.branch_version: - return - appimagefilename = {} - zsyncfilename = {} - - # Creating versions for short version and query text - versions = [ self.short_version, self.branch_version ] - for arch in Build.ARCHSTD: - # If already built, do not do anything. 
- if self.built[arch]: - continue - - os.chdir(self.full_path) - # if the appimage for the reported arch is not found, skip to next - # arch - if not os.path.exists(self.appimagefilename[arch]): - continue - - # Doing it both for short_name and for branchname - for version in versions: - appimagefilename[arch] = self.appname + '-' + version + self.languagepart + self.helppart + f'-{arch}.AppImage' - zsyncfilename[arch] = appimagefilename[arch] + '.zsync' - - # Create the symlink - print(f"Creating {appimagefilename[arch]} and checksums.") - if os.path.exists(appimagefilename[arch]): - os.unlink(appimagefilename[arch]) - os.symlink(self.appimagefilename[arch], appimagefilename[arch]) - # Create the checksum for the AppImage - self.__create_checksum__(appimagefilename[arch]) - # Do not continue if no zsync file is provided. - if not self.updatable: - continue - - print(f"Creating zsync file for version {version}.") - if os.path.exists(zsyncfilename[arch]): - os.unlink(zsyncfilename[arch]) - shutil.copyfile(self.zsyncfilename[arch], zsyncfilename[arch]) - # Editing the zsyncfile - subprocess.run(shlex.split(f"sed --in-place 's/^Filename:.*$/Filename: {appimagefilename[arch]}/' {zsyncfilename[arch]}")) - self.__create_checksum__(zsyncfilename[arch]) - - - def __del__(self): - """Destructor""" - # Cleaning up build directory - shutil.rmtree(self.builddir) diff --git a/loaih/versions.py b/loaih/versions.py new file mode 100644 index 0000000..201e531 --- /dev/null +++ b/loaih/versions.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python +# encoding: utf-8 + +import urllib.request +from lxml import etree +from packaging.version import parse as parse_version + +class BuildVersion(object): + DOWNLOADPAGE = "https://www.libreoffice.org/download/download/" + ARCHIVE = "https://downloadarchive.documentfoundation.org/libreoffice/old/" + RELEASE = "https://download.documentfoundation.org/libreoffice/stable/" + DAILY = 
"https://dev-builds.libreoffice.org/daily/master/Linux-rpm_deb-x86_64@tb87-TDF/" + PRERELEASE = "https://dev-builds.libreoffice.org/pre-releases/deb/x86_64/" + + def __init__(self, query): + self.query = query + self.version = '' + self.basedirurl = {} + + # Parsing the query input. + if '.' in self.query: + # Numbered self.version. Let's check it is a 4 dotted release + if len(self.query.split('.')) == 4: + self.version = self.query + else: + # If not 4 dotted, let's search for the 4 dotted version + self.version = self.__getlatestrel(self.query) + + self.basedirurl = self.__getbaseurl(self.version) + else: + # String self.versions. + a = self.__getbranchrel(self.query) + self.version = a['version'] + self.basedirurl = a['basedirurl'] + + def __getlatestrel(self, basever): + """Search in downloadarchive for the latest version matching baseversion.""" + versionlist = etree.HTML(urllib.request.urlopen(BuildVersion.ARCHIVE).read()).xpath('//td/a') + # Getting a more polished matching list + cleanlist = list(dict.fromkeys([x.text.strip('/') for x in versionlist if x.text.startswith(basever)])) + + # Sorting, then returning the last version + return sorted(cleanlist)[-1] + + def __getbranchrel(self, branch): + """Based on branch names, get the release number.""" + basedirurl = {} + version = '' + if branch == 'daily': + # The daily builds can be mostly distinguished by the day of build + # (official version is constant. + + # The last built version is the next-to-last version [-2] on the page. 
+ fulldailypath = etree.HTML(urllib.request.urlopen(BuildVersion.DAILY).read()).xpath('//td/a')[-2].text + dailyversion = fulldailypath.split('_')[0].replace('-', '') + version + newurl = str.join('/', [ BuildVersion.DAILY, fulldailypath, '' ]) + + basedirurl = { u'x86_64': newurl, u'x86': '-' } + version = etree.HTML(urllib.request.urlopen(newurl).read()).xpath('//td/a')[1].text.split('_')[1] + + return { 'version': version + '-' + dailyversion, 'basedirurl': basedirurl } + + if branch == 'prerelease': + version = etree.HTML(urllib.request.urlopen(BuildVersion.PRERELEASE).read()).xpath('//td/a')[1].text.split('_')[1] + basedirurl = { u'x86': '-', u'x86_64': BuildVersion.PRERELEASE } + + return { 'version': version, 'basedirurl': basedirurl } + + # Stable releases. + # Old approach - Doesn't really work because RelEng can screw order. + #versions = etree.HTML(urllib.request.urlopen(BuildVersion.RELEASE).read()).xpath('//td/a') + #index = 1 + #if branch == 'still': + # index = -2 + #elif branch == 'fresh': + # index = -1 + #version = self.__getlatestrel(versions[index].text.strip('/')) + + # Now I'll rely on DownloadPage + versions = etree.HTML(urllib.request.urlopen(BuildVersion.DOWNLOADPAGE).read()).xpath('//span[@class="dl_version_number"]') + index = 0 + if branch == 'still': + index = 1 + elif branch == 'fresh': + index = 0 + version = self.__getlatestrel(versions[index].text) + + return { 'version': version, 'basedirurl': self.__getbaseurl(version) } + + def __getbaseurl(self, version): + """Returns the links based on the numeric version.""" + basedirurl = {} + url = BuildVersion.ARCHIVE + '/' + version + '/deb/' + + # x86 binaries are not anymore offered after 6.3.0. 
+ if parse_version(version) < parse_version('6.3.0'): + basedirurl[u'x86'] = url + 'x86/' + else: + basedirurl[u'x86'] = '-' + + basedirurl[u'x86_64'] = url + 'x86_64/' + + return basedirurl diff --git a/prerelease.yml b/prerelease.yml deleted file mode 100644 index 402f385..0000000 --- a/prerelease.yml +++ /dev/null @@ -1,67 +0,0 @@ ---- -data: - repo: /mnt/appimage - download: /var/tmp/downloads - force: no - sign: yes - -builds: - - query: prerelease - language: basic - offline_help: no - portable: no - - - query: prerelease - language: basic - offline_help: yes - portable: no - - - query: prerelease - language: basic - offline_help: no - portable: yes - - - query: prerelease - language: basic - offline_help: yes - portable: yes - - - query: prerelease - language: standard - offline_help: no - portable: no - - - query: prerelease - language: standard - offline_help: yes - portable: no - - - query: prerelease - language: standard - offline_help: no - portable: yes - - - query: prerelease - language: standard - offline_help: yes - portable: yes - - - query: prerelease - language: full - offline_help: no - portable: no - - - query: prerelease - language: full - offline_help: yes - portable: no - - - query: prerelease - language: full - offline_help: no - portable: yes - - - query: prerelease - language: full - offline_help: yes - portable: yes diff --git a/loaih/script.py b/scripts/loaih-build similarity index 56% rename from loaih/script.py rename to scripts/loaih-build index 40ddf36..94cda81 100644 --- a/loaih/script.py +++ b/scripts/loaih-build @@ -3,35 +3,9 @@ import click import yaml -import loaih, loaih.build -import re, sys, json +import loaih -@click.group() -def cli(): - pass - -@cli.command() -@click.option('-j', '--json', 'jsonout', default=False, is_flag=True, help="Output format in json.") -@click.argument('query') -def getversion(query, jsonout): - b = [] - queries = [] - if ',' in query: - queries.extend(query.split(',')) - else: - 
queries.append(query) - - for q in queries: - b.extend(loaih.Base.collectedbuilds(q)) - - if len(b) > 0: - if jsonout: - click.echo(json.dumps([x.todict() for x in b])) - else: - for v in b: - click.echo(v) - -@cli.command() +@click.command() @click.option('-a', '--arch', 'arch', type=click.Choice(['x86', 'x86_64', 'all'], case_sensitive=False), default='all', help="Build the AppImage for a specific architecture. If there is no specific options, the process will build for both architectures (if available). Default: all") @click.option('-c/-C', '--check/--no-check', 'check', default=True, help="Check in the final storage if the queried version is existent. Default: check") @click.option('-d', '--download-path', 'download_path', default = '/var/tmp/downloads', type=str, help="Path to the download folder. Default: /var/tmp/downloads") @@ -61,50 +35,22 @@ def build(arch, language, offline, portable, updatable, download_path, repo_path # generic default. for build in config['builds']: # Loop a run for each build. 
- collection = loaih.build.Collection(build['query'], arches) + obj = loaih.Build(build['query'], arches) - for obj in collection: - # Configuration phase - obj.language = build['language'] - obj.offline_help = build['offline_help'] - obj.portable = build['portable'] - obj.updatable = True - obj.storage_path = config['data']['repo'] if 'repo' in config['data'] and config['data']['repo'] else '/srv/http/appimage.sys42.eu' - obj.download_path = config['data']['download'] if 'download' in config['data'] and config['data']['download'] else '/var/tmp/downloads' - - if 'sign' in config['data'] and config['data']['sign']: - obj.sign = True - - # Build phase - obj.calculate() - if not 'force' in config['data'] or not config['data']['force']: - obj.check() - - obj.download() - obj.build() - obj.checksums() - obj.publish() - obj.generalize_and_link() - del obj - - else: - collection = loaih.build.Collection(query, arches) - for obj in collection: # Configuration phase - obj.language = language - obj.offline_help = offline - obj.portable = portable - obj.updatable = updatable - obj.storage_path = repo_path - obj.download_path = download_path + obj.language = build['language'] + obj.offline_help = build['offline_help'] + obj.portable = build['portable'] + obj.updatable = True + obj.storage_path = config['data']['repo'] if 'repo' in config['data'] and config['data']['repo'] else '/srv/http/appimage.sys42.eu' + obj.download_path = config['data']['download'] if 'download' in config['data'] and config['data']['download'] else '/var/tmp/downloads' - if sign: + if 'sign' in config['data'] and config['data']['sign']: obj.sign = True - # Running phase + # Build phase obj.calculate() - - if check: + if not 'force' in config['data'] or not config['data']['force']: obj.check() obj.download() @@ -113,3 +59,33 @@ def build(arch, language, offline, portable, updatable, download_path, repo_path obj.publish() obj.generalize_and_link() del obj + + else: + obj = loaih.Build(query, arches) + + # 
Configuration phase + obj.language = language + obj.offline_help = offline + obj.portable = portable + obj.updatable = updatable + obj.storage_path = repo_path + obj.download_path = download_path + + if sign: + obj.sign = True + + # Running phase + obj.calculate() + + if check: + obj.check() + + obj.download() + obj.build() + obj.checksums() + obj.publish() + obj.generalize_and_link() + del obj + +if __name__ == '__main__': + build() diff --git a/scripts/loaih-getversion b/scripts/loaih-getversion new file mode 100644 index 0000000..59d03ae --- /dev/null +++ b/scripts/loaih-getversion @@ -0,0 +1,34 @@ +#!/usr/bin/env python +# encoding: utf-8 + +import click +from loaih.versions import BuildVersion +import re, sys, json + +@click.command() +@click.option('-o', '--output', default = 'rundeck', type=click.Choice(['rundeck', 'json', 'text' ], case_sensitive=False), help="Output format, defaulting to Rundeck Key/Value data format. Options: rundeck,json,text") +@click.argument('query') +def getversion(query, output): + b = BuildVersion(query) + + if output.lower() == 'rundeck': + print("""RUNDECK:DATA: query = {query} +RUNDECK:DATA: version = {version} +RUNDECK:DATA: x86 = {x86_url} +RUNDECK:DATA: x86_64 = {x86_64_url}""".format(query = query, version = b.version, x86_url = b.basedirurl['x86'], x86_64_url = b.basedirurl['x86_64'])) + elif output.lower() == 'json': + output = { + 'query': query, + 'version': b.version, + 'basedirurl': b.basedirurl + } + print(json.dumps(output)) + else: + print("""query: {query} +version: {version} +x86: {x86_url} +x86_64: {x86_64_url}""".format(query = query, version = b.version, x86_url = b.basedirurl['x86'], x86_64_url = b.basedirurl['x86_64'])) + + +if __name__ == '__main__': + getversion() diff --git a/setup.py b/setup.py index c41059f..4ee9309 100644 --- a/setup.py +++ b/setup.py @@ -6,16 +6,12 @@ from setuptools import setup,find_packages setup( name="loaih", - version="1.2.0", + version="1.1.0", description="LOAIH - LibreOffice 
AppImage Helpers, help build a LibreOffice AppImage", author="Emiliano Vavassori", author_email="syntaxerrormmm@libreoffice.org", packages=find_packages(exclude=['contrib', 'docs', 'tests']), - entry_points={ - 'console_scripts': [ - 'loaih = loaih.script:cli', - ], - }, + scripts=[ 'scripts/loaih-getversion', 'scripts/loaih-build' ], install_requires=[ 'click', ], license='MIT', url='https://git.libreitalia.org/LibreItalia/loappimage-helpers/',