2022-04-30 17:46:46 +02:00
|
|
|
#!/usr/bin/env python3
|
2023-01-05 19:58:45 +01:00
|
|
|
# encoding: utf-8
|
|
|
|
"""Classes and functions to build an AppImage."""
|
|
|
|
|
|
|
|
import os
|
|
|
|
import glob
|
|
|
|
import subprocess
|
|
|
|
import shutil
|
|
|
|
import re
|
|
|
|
import shlex
|
|
|
|
import tempfile
|
2022-04-30 17:46:46 +02:00
|
|
|
import urllib.request
|
2023-01-07 00:15:42 +01:00
|
|
|
import hashlib
|
2022-04-30 17:46:46 +02:00
|
|
|
from lxml import etree
|
2023-01-05 19:58:45 +01:00
|
|
|
import loaih
|
2022-04-30 17:46:46 +02:00
|
|
|
|
2022-04-30 23:31:06 +02:00
|
|
|
class Collection(list):
    """Aggregates metadata on a collection of builds."""

    def __init__(self, query, arch=None):
        """Build a list of versions to check/build for this round.

        :param query: version or branch query forwarded to loaih.
        :param arch: list of architectures to build. Defaults to
            ['x86', 'x86_64']. The default is applied inside the body
            (None sentinel) to avoid the shared mutable default
            argument pitfall of the original signature.
        """
        super().__init__()
        if arch is None:
            arch = ['x86', 'x86_64']

        # One Build object per version matched by the query.
        self.extend([
            Build(query, arch, version)
            for version in loaih.Base.collectedbuilds(query)
        ])
|
2022-04-30 23:31:06 +02:00
|
|
|
|
|
|
|
class Build(loaih.RemoteBuild):
    """Builds a single version."""

    # Languages included when self.language == 'standard'.
    LANGSTD = [ 'ar', 'de', 'en-GB', 'es', 'fr', 'it', 'ja', 'ko', 'pt',
                'pt-BR', 'ru', 'zh-CN', 'zh-TW' ]
    # Languages included when self.language == 'basic'.
    LANGBASIC = [ 'en-GB' ]
    # Architectures for which file names are generated by default.
    ARCHSTD = [ 'x86', 'x86_64' ]
|
2022-04-30 17:46:46 +02:00
|
|
|
|
2022-04-30 23:31:06 +02:00
|
|
|
def __init__(self, query, arch, version = None):
|
|
|
|
super().__init__(query, version)
|
2022-04-30 17:46:46 +02:00
|
|
|
self.arch = arch
|
|
|
|
self.short_version = str.join('.', self.version.split('.')[0:2])
|
|
|
|
self.branch_version = None
|
|
|
|
if not '.' in self.query:
|
|
|
|
self.branch_version = self.query
|
2022-04-30 23:31:06 +02:00
|
|
|
self.url = self.basedirurl
|
2022-04-30 17:46:46 +02:00
|
|
|
|
|
|
|
# Other default values
|
|
|
|
self.language = 'basic'
|
|
|
|
self.offline_help = False
|
|
|
|
self.portable = False
|
|
|
|
self.updatable = True
|
|
|
|
self.sign = True
|
2023-01-05 01:07:12 +01:00
|
|
|
self.remoterepo = False
|
|
|
|
self.remote_host = ''
|
|
|
|
self.remote_path = ''
|
2022-04-30 17:46:46 +02:00
|
|
|
self.storage_path = '/mnt/appimage'
|
|
|
|
self.download_path = '/var/tmp/downloads'
|
2023-01-05 19:58:45 +01:00
|
|
|
self.appnamedir = ''
|
2022-04-30 17:46:46 +02:00
|
|
|
|
|
|
|
# Specific build version
|
2023-01-05 19:58:45 +01:00
|
|
|
self.appname = 'LibreOffice'
|
2022-04-30 17:46:46 +02:00
|
|
|
self.appversion = ''
|
2023-01-05 19:58:45 +01:00
|
|
|
self.appimagedir = ''
|
2022-04-30 17:46:46 +02:00
|
|
|
self.appimagefilename = {}
|
|
|
|
self.zsyncfilename = {}
|
|
|
|
|
2023-01-05 19:58:45 +01:00
|
|
|
# Other variables by build
|
|
|
|
self.languagepart = '.' + self.language
|
|
|
|
self.helppart = ''
|
|
|
|
|
2022-04-30 17:46:46 +02:00
|
|
|
# Creating a tempfile
|
|
|
|
self.builddir = tempfile.mkdtemp()
|
|
|
|
self.tarballs = {}
|
2023-01-05 19:58:45 +01:00
|
|
|
self.built = { 'x86': False, 'x86_64': False }
|
|
|
|
|
2022-04-30 17:46:46 +02:00
|
|
|
# Preparing the default for the relative path on the storage for
|
|
|
|
# different versions.
|
|
|
|
# The path will evaluated as part of the check() function, as it is
|
|
|
|
# understood the storage_path can be changed before that phase.
|
|
|
|
self.relative_path = []
|
|
|
|
self.full_path = ''
|
|
|
|
self.baseurl = ''
|
|
|
|
|
|
|
|
def calculate(self):
|
|
|
|
"""Calculate exclusions and other variables."""
|
2023-01-07 00:59:38 +01:00
|
|
|
|
|
|
|
print("--- Calculate Phase ---")
|
|
|
|
|
2022-04-30 17:46:46 +02:00
|
|
|
# AppName
|
2023-01-05 19:58:45 +01:00
|
|
|
if self.query in { 'prerelease', 'daily' }:
|
|
|
|
self.appname = 'LibreOfficeDev'
|
2022-04-30 17:46:46 +02:00
|
|
|
|
|
|
|
# Calculating languagepart
|
|
|
|
self.languagepart = "."
|
|
|
|
if ',' in self.language:
|
|
|
|
self.languagepart += self.language.replace(',', '-')
|
|
|
|
else:
|
|
|
|
self.languagepart += self.language
|
|
|
|
|
|
|
|
# Calculating help part
|
2023-01-05 19:58:45 +01:00
|
|
|
if self.offline_help:
|
|
|
|
self.helppart = '.help'
|
2022-04-30 17:46:46 +02:00
|
|
|
|
|
|
|
# Building the required names
|
|
|
|
for arch in Build.ARCHSTD:
|
|
|
|
self.appimagefilename[arch] = self.__gen_appimagefilename__(self.version, arch)
|
|
|
|
self.zsyncfilename[arch] = self.appimagefilename[arch] + '.zsync'
|
|
|
|
|
|
|
|
# Mandate to the private function to calculate the full_path available
|
|
|
|
# for the storage and the checks.
|
|
|
|
self.__calculate_full_path__()
|
|
|
|
|
|
|
|
|
|
|
|
def __gen_appimagefilename__(self, version, arch):
|
|
|
|
"""Generalize the construction of the name of the app."""
|
|
|
|
self.appversion = version + self.languagepart + self.helppart
|
|
|
|
return self.appname + f'-{self.appversion}-{arch}.AppImage'
|
|
|
|
|
|
|
|
|
|
|
|
def __calculate_full_path__(self):
|
|
|
|
"""Calculate relative path of the build, based on internal other variables."""
|
|
|
|
if len(self.relative_path) == 0:
|
|
|
|
if self.query == 'daily':
|
|
|
|
self.relative_path.append('daily')
|
2022-04-30 23:31:06 +02:00
|
|
|
elif self.query == 'prerelease':
|
2023-01-05 01:07:12 +01:00
|
|
|
self.relative_path.append('prerelease')
|
2022-04-30 17:46:46 +02:00
|
|
|
|
|
|
|
# Not the same check, an additional one
|
|
|
|
if self.portable:
|
|
|
|
self.relative_path.append('portable')
|
|
|
|
|
|
|
|
fullpath_arr = self.storage_path.split('/')
|
|
|
|
# Joining relative path only if it is not null
|
|
|
|
if len(self.relative_path) > 0:
|
|
|
|
fullpath_arr.extend(self.relative_path)
|
|
|
|
self.full_path = re.sub(r"/+", '/', str.join('/', fullpath_arr))
|
|
|
|
|
|
|
|
|
|
|
|
def check(self):
|
|
|
|
"""Checking if the requested AppImage has been already built."""
|
2023-01-07 00:59:38 +01:00
|
|
|
|
|
|
|
print("--- Check Phase ---")
|
|
|
|
|
2023-01-05 01:07:12 +01:00
|
|
|
if len(self.appimagefilename) != 2:
|
2022-04-30 17:46:46 +02:00
|
|
|
self.calculate()
|
|
|
|
|
|
|
|
for arch in self.arch:
|
|
|
|
|
2023-01-05 01:07:12 +01:00
|
|
|
# First, check if by metadata the repo is remote or not.
|
|
|
|
if self.remoterepo or 'http' in self.storage_path:
|
|
|
|
self.remoterepo = True
|
|
|
|
# Remote storage. I have to query a remote site to know if it
|
|
|
|
# was already built.
|
|
|
|
name = self.appimagefilename[arch]
|
2023-01-07 02:03:31 +01:00
|
|
|
if len(self.relative_path) == 0:
|
2023-01-05 01:24:37 +01:00
|
|
|
path_arr = [ self.storage_path, '' ]
|
2023-01-07 02:03:31 +01:00
|
|
|
elif len(self.relative_path) == 1:
|
|
|
|
path_arr = [ self.storage_path, self.relative_path[0], '' ]
|
|
|
|
else:
|
|
|
|
path_arr = self.relative_path.insert(0, self.storage_path)
|
|
|
|
|
2023-01-05 01:30:06 +01:00
|
|
|
path = str.join('/', path_arr)
|
|
|
|
print(f"DEBUG - Name: {name}, URL: {path}")
|
2023-01-05 19:58:45 +01:00
|
|
|
matching = []
|
2023-01-07 02:23:34 +01:00
|
|
|
try:
|
|
|
|
with urllib.request.urlopen(path) as url:
|
|
|
|
matching = etree.HTML(url.read()).xpath(
|
|
|
|
f"//a[contains(@href, '{name}')]/@href"
|
|
|
|
)
|
|
|
|
|
|
|
|
if len(matching) > 0:
|
|
|
|
# Already built.
|
|
|
|
self.built[arch] = True
|
|
|
|
|
|
|
|
except urllib.error.HTTPError:
|
|
|
|
# The URL specified do not exist. So it is to build.
|
|
|
|
pass
|
2022-04-30 17:46:46 +02:00
|
|
|
|
2023-01-05 01:07:12 +01:00
|
|
|
else:
|
|
|
|
# Repo is local
|
|
|
|
print(f"Searching for {self.appimagefilename[arch]}")
|
2023-01-05 19:58:45 +01:00
|
|
|
command = f"find {self.full_path} -name {self.appimagefilename[arch]}"
|
|
|
|
res = subprocess.run(shlex.split(command),
|
|
|
|
capture_output=True,
|
|
|
|
env={ "LC_ALL": "C" },
|
|
|
|
text=True, encoding='utf-8', check=True)
|
2023-01-05 01:07:12 +01:00
|
|
|
|
|
|
|
if "No such file or directory" in res.stderr:
|
|
|
|
# Folder is not existent: so the version was not built
|
|
|
|
# Build stays false, and we go to the next arch
|
|
|
|
continue
|
|
|
|
|
|
|
|
if res.stdout and len(res.stdout.strip("\n")) > 0:
|
|
|
|
# All good, the command was executed fine.
|
|
|
|
print(f"Build for {self.version} found.")
|
|
|
|
self.built[arch] = True
|
2022-04-30 17:46:46 +02:00
|
|
|
|
|
|
|
if self.built[arch]:
|
2023-01-05 01:07:12 +01:00
|
|
|
print(f"The requested AppImage already exists on storage for {arch}. I'll skip downloading, building and moving the results.")
|
2022-04-30 17:46:46 +02:00
|
|
|
|
|
|
|
|
|
|
|
    def download(self):
        """Downloads the contents of the URL as it was a folder.

        For every requested arch, scrapes the remote directory listing,
        records the deb tarball names in self.tarballs[arch], and fetches
        each tarball into self.download_path (skipping ones already there).
        """
        print("--- Download Phase ---")
        print(f"Started downloads for {self.version}. Please wait.")
        for arch in self.arch:
            # Checking if a valid path has been provided
            if self.url[arch] == '-':
                print(f"No build has been provided for the requested AppImage for {arch}. Continue with other options.")
                # Faking already built it so to skip other checks.
                self.built[arch] = True
                continue

            if self.built[arch]:
                print(f"A build for {arch} was already found. Skipping specific packages.")
                continue

            # Identifying downloads: scrape the <td><a> links of the
            # remote folder listing.
            contents = []
            with urllib.request.urlopen(self.url[arch]) as url:
                contents = etree.HTML(url.read()).xpath("//td/a")

            # Only keep the Debian-package tarballs.
            self.tarballs[arch] = [ x.text
                for x in contents
                if x.text.endswith('tar.gz') and 'deb' in x.text
            ]
            tarballs = self.tarballs[arch]

            # Create and change directory to the download location
            os.makedirs(self.download_path, exist_ok = True)
            os.chdir(self.download_path)
            for archive in tarballs:
                # If the archive is already there, do not do anything.
                if os.path.exists(archive):
                    continue

                # Download the archive; a failure is reported but does not
                # abort the remaining downloads (best-effort).
                try:
                    urllib.request.urlretrieve(self.url[arch] + archive, archive)
                except Exception as error:
                    print(f"Failed to download {archive}: {error}.")

        print(f"Finished downloads for {self.version}.")
|
|
|
|
|
|
|
|
def build(self):
|
|
|
|
"""Building all the versions."""
|
|
|
|
|
2023-01-07 00:59:38 +01:00
|
|
|
print("--- Building Phase ---")
|
|
|
|
|
2022-04-30 17:46:46 +02:00
|
|
|
for arch in self.arch:
|
|
|
|
if self.built[arch]:
|
|
|
|
# Already built for arch or path not available. User has already been warned.
|
|
|
|
continue
|
|
|
|
|
|
|
|
# Preparation tasks
|
|
|
|
self.appnamedir = os.path.join(self.builddir, self.appname)
|
|
|
|
os.makedirs(self.appnamedir, exist_ok=True)
|
|
|
|
# And then cd to the appname folder.
|
|
|
|
os.chdir(self.appnamedir)
|
|
|
|
# Download appimagetool from github
|
|
|
|
appimagetoolurl = f"https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-{arch}.AppImage"
|
|
|
|
urllib.request.urlretrieve(appimagetoolurl, 'appimagetool')
|
|
|
|
os.chmod('appimagetool', 0o755)
|
|
|
|
|
|
|
|
# Build the requested version.
|
|
|
|
self.__unpackbuild__(arch)
|
|
|
|
|
|
|
|
|
|
|
|
def __unpackbuild__(self, arch):
|
|
|
|
# We start by filtering out tarballs from the list
|
2023-01-05 19:58:45 +01:00
|
|
|
buildtarballs = [ self.tarballs[arch][0] ]
|
2022-04-30 17:46:46 +02:00
|
|
|
|
|
|
|
# Let's process standard languages and append results to the
|
|
|
|
# buildtarball
|
|
|
|
if self.language == 'basic':
|
|
|
|
if self.offline_help:
|
|
|
|
buildtarballs.extend([ x for x in self.tarballs[arch] if 'pack_en-GB' in x ])
|
|
|
|
else:
|
|
|
|
buildtarballs.extend([ x for x in self.tarballs[arch] if 'langpack_en-GB' in x])
|
|
|
|
elif self.language == 'standard':
|
|
|
|
for lang in Build.LANGSTD:
|
|
|
|
if self.offline_help:
|
|
|
|
buildtarballs.extend([ x for x in self.tarballs[arch] if ('pack_' + lang) in x ])
|
|
|
|
else:
|
|
|
|
buildtarballs.extend([ x for x in self.tarballs[arch] if ('langpack_' + lang) in x ])
|
|
|
|
elif self.language == 'full':
|
|
|
|
if self.offline_help:
|
|
|
|
# We need also all help. Let's replace buildtarball with the
|
|
|
|
# whole bunch
|
|
|
|
buildtarballs = self.tarballs[arch]
|
|
|
|
else:
|
|
|
|
buildtarballs.extend([ x for x in self.tarballs[arch] if 'langpack' in x ])
|
|
|
|
else:
|
|
|
|
# Looping for each language in self.language
|
|
|
|
for lang in self.language.split(","):
|
|
|
|
if self.offline_help:
|
2023-01-05 19:58:45 +01:00
|
|
|
buildtarballs.extend([ x for x in self.tarballs[arch]
|
|
|
|
if 'pack' + lang in x ])
|
2022-04-30 17:46:46 +02:00
|
|
|
else:
|
2023-01-05 19:58:45 +01:00
|
|
|
buildtarballs.extend([ x for x in self.tarballs[arch]
|
|
|
|
if 'langpack' + lang in x ])
|
|
|
|
|
2022-04-30 17:46:46 +02:00
|
|
|
os.chdir(self.appnamedir)
|
|
|
|
|
|
|
|
# Unpacking the tarballs
|
|
|
|
for archive in buildtarballs:
|
2023-01-05 19:58:45 +01:00
|
|
|
subprocess.run(shlex.split(
|
|
|
|
f"tar xzf {self.download_path}/{archive}"), check=True)
|
2022-04-30 17:46:46 +02:00
|
|
|
|
|
|
|
# create appimagedir
|
|
|
|
self.appimagedir = os.path.join(self.builddir, self.appname, self.appname + '.AppDir')
|
|
|
|
os.makedirs(self.appimagedir, exist_ok = True)
|
|
|
|
|
|
|
|
# At this point, let's decompress the deb packages
|
2023-01-05 19:58:45 +01:00
|
|
|
subprocess.run(shlex.split(
|
|
|
|
r"find .. -iname '*.deb' -exec dpkg -x {} . \;"
|
|
|
|
), cwd=self.appimagedir, check=True)
|
2022-04-30 17:46:46 +02:00
|
|
|
|
|
|
|
if self.portable:
|
2023-01-05 19:58:45 +01:00
|
|
|
subprocess.run(shlex.split(
|
|
|
|
r"find . -type f -iname 'bootstraprc' " +
|
|
|
|
r"-exec sed -i 's|^UserInstallation=.*|" +
|
|
|
|
r"UserInstallation=\$SYSUSERCONFIG/libreoffice/%s|g' {} \+" % self.short_version
|
|
|
|
), cwd=self.appimagedir, check=True)
|
2022-04-30 17:46:46 +02:00
|
|
|
|
|
|
|
# Changing desktop file
|
2023-01-05 19:58:45 +01:00
|
|
|
subprocess.run(shlex.split(
|
|
|
|
r"find . -iname startcenter.desktop -exec cp {} . \;"
|
|
|
|
), cwd=self.appimagedir, check=True)
|
|
|
|
|
|
|
|
subprocess.run(shlex.split(
|
2023-01-06 23:23:30 +01:00
|
|
|
f"sed --in-place \'s:^Name=.*$:Name={self.appname}:\' " +
|
2023-01-06 23:39:29 +01:00
|
|
|
r"startcenter.desktop"
|
2023-01-06 23:29:27 +01:00
|
|
|
), cwd=self.appimagedir, check=False)
|
2022-04-30 17:46:46 +02:00
|
|
|
|
2023-01-05 19:58:45 +01:00
|
|
|
subprocess.run(shlex.split(
|
|
|
|
r"find . -name '*startcenter.png' -path '*hicolor*48x48*' " +
|
|
|
|
r"-exec cp {} . \;"
|
|
|
|
), cwd=self.appimagedir, check=True)
|
2022-04-30 17:46:46 +02:00
|
|
|
|
|
|
|
# Find the name of the binary called in the desktop file.
|
|
|
|
binaryname = ''
|
2023-01-05 19:58:45 +01:00
|
|
|
with open(
|
|
|
|
os.path.join(self.appimagedir, 'startcenter.desktop'),
|
|
|
|
'r', encoding="utf-8"
|
|
|
|
) as desktopfile:
|
|
|
|
for line in desktopfile.readlines():
|
2022-04-30 17:46:46 +02:00
|
|
|
if re.match(r'^Exec', line):
|
|
|
|
binaryname = line.split('=')[-1].split(' ')[0]
|
|
|
|
# Esci al primo match
|
|
|
|
break
|
2023-01-05 19:58:45 +01:00
|
|
|
|
2022-04-30 17:46:46 +02:00
|
|
|
#binary_exec = subprocess.run(shlex.split(r"awk 'BEGIN { FS = \"=\" } /^Exec/ { print $2; exit }' startcenter.desktop | awk '{ print $1 }'"), cwd=self.appimagedir, text=True, encoding='utf-8')
|
|
|
|
#binaryname = binary_exec.stdout.strip("\n")
|
|
|
|
|
|
|
|
bindir=os.path.join(self.appimagedir, 'usr', 'bin')
|
|
|
|
os.makedirs(bindir, exist_ok = True)
|
2023-01-05 19:58:45 +01:00
|
|
|
subprocess.run(shlex.split(
|
|
|
|
r"find ../../opt -iname soffice -path '*program*' " +
|
|
|
|
r"-exec ln -sf {} ./%s \;" % binaryname
|
|
|
|
), cwd=bindir, check=True)
|
2022-04-30 17:46:46 +02:00
|
|
|
|
|
|
|
# Download AppRun from github
|
2023-01-05 19:58:45 +01:00
|
|
|
apprunurl = r"https://github.com/AppImage/AppImageKit/releases/"
|
|
|
|
apprunurl += f"download/continuous/AppRun-{arch}"
|
2022-04-30 17:46:46 +02:00
|
|
|
dest = os.path.join(self.appimagedir, 'AppRun')
|
|
|
|
urllib.request.urlretrieve(apprunurl, dest)
|
|
|
|
os.chmod(dest, 0o755)
|
|
|
|
|
|
|
|
# Dealing with extra options
|
|
|
|
buildopts = []
|
|
|
|
if self.sign:
|
|
|
|
buildopts.append('--sign')
|
|
|
|
|
|
|
|
# adding zsync build if updatable
|
|
|
|
if self.updatable:
|
|
|
|
buildopts.append(f"-u 'zsync|{self.zsyncfilename[arch]}'")
|
|
|
|
|
|
|
|
buildopts_str = str.join(' ', buildopts)
|
|
|
|
# Build the number-specific build
|
2023-01-05 19:58:45 +01:00
|
|
|
subprocess.run(shlex.split(
|
|
|
|
f"{self.appnamedir}/appimagetool {buildopts_str} -v " +
|
|
|
|
f"./{self.appname}.AppDir/"
|
|
|
|
), env={ "VERSION": self.appversion }, check=True)
|
|
|
|
|
2022-04-30 17:46:46 +02:00
|
|
|
print(f"Built AppImage version {self.appversion}")
|
|
|
|
|
|
|
|
# Cleanup phase, before new run.
|
|
|
|
for deb in glob.glob(self.appnamedir + '/*.deb'):
|
|
|
|
os.remove(deb)
|
2023-01-05 19:58:45 +01:00
|
|
|
subprocess.run(shlex.split(
|
|
|
|
r"find . -mindepth 1 -maxdepth 1 -type d -exec rm -rf {} \+"
|
|
|
|
), check=True)
|
2022-04-30 17:46:46 +02:00
|
|
|
|
|
|
|
|
|
|
|
def checksums(self):
|
|
|
|
"""Create checksums of the built versions."""
|
|
|
|
# Skip checksum if initally the build was already found in the storage directory
|
2023-01-07 00:59:38 +01:00
|
|
|
|
|
|
|
print("--- Checksum Phase ---")
|
|
|
|
|
2023-01-05 01:56:33 +01:00
|
|
|
if all(self.built[arch] for arch in self.arch):
|
2022-04-30 17:46:46 +02:00
|
|
|
return
|
|
|
|
|
|
|
|
os.chdir(self.appnamedir)
|
|
|
|
for arch in self.arch:
|
2023-01-07 01:03:21 +01:00
|
|
|
if not self.built[arch]:
|
|
|
|
# Here's the contrary. A newly built package has not yet been
|
|
|
|
# marked as built.
|
2023-01-07 00:24:35 +01:00
|
|
|
for item in [ self.appimagefilename[arch], self.zsyncfilename[arch] ]:
|
2023-01-07 01:03:21 +01:00
|
|
|
itempath = os.path.join(self.appnamedir, item)
|
|
|
|
if os.path.exists(itempath):
|
|
|
|
# For any built arch, find out if a file exist.
|
|
|
|
print(f"DEBUG: checkumming {item}.")
|
|
|
|
self.__create_checksum__(item)
|
2022-04-30 17:46:46 +02:00
|
|
|
|
|
|
|
|
|
|
|
def __create_checksum__(self, file):
|
|
|
|
"""Internal function to create checksum file."""
|
2023-01-07 00:15:42 +01:00
|
|
|
|
2023-01-07 01:25:32 +01:00
|
|
|
checksum = subprocess.run(f"md5sum {file}", shell=True,
|
2023-01-07 00:52:21 +01:00
|
|
|
capture_output=True, text=True, encoding='utf-8', check=True,
|
|
|
|
cwd=self.appnamedir)
|
2023-01-07 00:15:42 +01:00
|
|
|
|
2023-01-07 00:30:56 +01:00
|
|
|
if checksum.stdout:
|
|
|
|
with open(f"{file}.md5", 'w', encoding='utf-8') as checkfile:
|
2023-01-07 00:52:21 +01:00
|
|
|
print(f"DEBUG: writing checksum for {file}.")
|
2023-01-07 00:30:56 +01:00
|
|
|
checkfile.write(checksum.stdout)
|
2023-01-07 00:15:42 +01:00
|
|
|
|
2022-04-30 17:46:46 +02:00
|
|
|
|
|
|
|
def publish(self):
|
|
|
|
"""Moves built versions to definitive storage."""
|
2023-01-07 00:52:21 +01:00
|
|
|
|
2023-01-07 01:03:21 +01:00
|
|
|
print("--- Publish Phase ---")
|
2023-01-07 00:52:21 +01:00
|
|
|
|
2023-01-05 01:56:33 +01:00
|
|
|
if all(self.built[arch] for arch in self.arch):
|
2022-04-30 17:46:46 +02:00
|
|
|
# All files are already present in the full_path
|
|
|
|
return
|
|
|
|
|
|
|
|
os.chdir(self.appnamedir)
|
2023-01-05 01:07:12 +01:00
|
|
|
# Two cases here: local and remote storage_path.
|
|
|
|
if self.remoterepo:
|
|
|
|
# Remote first.
|
|
|
|
# Build destination directory
|
2023-01-07 01:37:17 +01:00
|
|
|
if len(self.relative_path) == 0:
|
2023-01-05 01:07:12 +01:00
|
|
|
remotepath = str.join('/', [ self.remote_path, '' ])
|
2023-01-07 01:37:17 +01:00
|
|
|
elif len(self.relative_path) == 1:
|
|
|
|
remotepath = str.join('/', [ self.remote_path, self.relative_path[0], '' ])
|
|
|
|
else:
|
|
|
|
remotepath = str.join('/', self.relative_path.insert(0, self.remote_path))
|
2023-01-05 01:07:12 +01:00
|
|
|
try:
|
2023-01-06 23:55:30 +01:00
|
|
|
subprocess.run(
|
2023-01-05 19:58:45 +01:00
|
|
|
r"rsync -rlIvz --munge-links *.AppImage* " +
|
2023-01-06 23:55:30 +01:00
|
|
|
f"{self.remote_host}:{remotepath}",
|
|
|
|
cwd=self.appnamedir, shell=True, check=True
|
|
|
|
)
|
2023-01-05 01:07:12 +01:00
|
|
|
finally:
|
|
|
|
pass
|
|
|
|
|
|
|
|
else:
|
|
|
|
# Local
|
|
|
|
# Forcing creation of subfolders, in case there is a new build
|
|
|
|
os.makedirs(self.full_path, exist_ok = True)
|
|
|
|
for file in glob.glob("*.AppImage*"):
|
2023-01-05 19:58:45 +01:00
|
|
|
subprocess.run(shlex.split(
|
|
|
|
f"cp -f {file} {self.full_path}"
|
|
|
|
), check=True)
|
2022-04-30 17:46:46 +02:00
|
|
|
|
|
|
|
|
2023-01-05 01:14:28 +01:00
|
|
|
    def generalize_and_link(self, chdir = 'default'):
        """Creates the needed generalized files if needed.

        For branch builds (e.g. 'fresh'), creates generically-named
        symlinks (short version and branch name) pointing at the pointed
        AppImage, plus matching checksums and adjusted zsync files.

        :param chdir: directory to work in; the literal string 'default'
            means self.full_path.
        """
        print("--- Generalize and Link Phase ---")

        # If called with a pointed version, no generalize and link necessary.
        if not self.branch_version:
            return

        # If a prerelease or a daily version, either.
        if self.query in { 'daily', 'prerelease' }:
            return

        if chdir == 'default':
            chdir = self.full_path

        appimagefilename = {}
        zsyncfilename = {}

        # Creating versions for short version and query text
        versions = [ self.short_version, self.branch_version ]
        for arch in Build.ARCHSTD:
            # If already built, do not do anything.
            if self.built[arch]:
                continue

            os.chdir(chdir)
            # if the appimage for the reported arch is not found, skip to next
            # arch
            if not os.path.exists(self.appimagefilename[arch]):
                continue

            # Doing it both for short_name and for branchname
            for version in versions:
                # Generalized name: appname-version[.lang][.help]-arch.
                appimagefilename[arch] = self.appname + '-' + version
                appimagefilename[arch] += self.languagepart + self.helppart
                appimagefilename[arch] += f'-{arch}.AppImage'
                zsyncfilename[arch] = appimagefilename[arch] + '.zsync'

                # Create the symlink (replacing a stale one if present)
                print(f"Creating {appimagefilename[arch]} and checksums.")
                if os.path.exists(appimagefilename[arch]):
                    os.unlink(appimagefilename[arch])
                os.symlink(self.appimagefilename[arch], appimagefilename[arch])
                # Create the checksum for the AppImage
                self.__create_checksum__(appimagefilename[arch])
                # Do not continue if no zsync file is provided.
                if not self.updatable:
                    continue

                print(f"Creating zsync file for version {version}.")
                if os.path.exists(zsyncfilename[arch]):
                    os.unlink(zsyncfilename[arch])
                # zsync must be a real copy (not a link): its Filename
                # header is rewritten below to the generalized name.
                shutil.copyfile(self.zsyncfilename[arch], zsyncfilename[arch])
                # Editing the zsyncfile
                subprocess.run(shlex.split(
                    r"sed --in-place 's/^Filename:.*$/Filename: " +
                    f"{appimagefilename[arch]}/' {zsyncfilename[arch]}"
                ), check=True)
                self.__create_checksum__(zsyncfilename[arch])
|
|
|
|
|
|
|
|
|
|
|
|
def __del__(self):
|
|
|
|
"""Destructor"""
|
|
|
|
# Cleaning up build directory
|
|
|
|
shutil.rmtree(self.builddir)
|