From 28ab4402b96ee0bbdb4f19f2a33c00eec83ccd97 Mon Sep 17 00:00:00 2001 From: Paul Spooren Date: Fri, 23 Sep 2022 18:04:28 +0200 Subject: [PATCH 1/7] wip: add imagebuilder Signed-off-by: Paul Spooren --- asu/build.py | 444 +++++++++---------------------------- asu/common.py | 47 ---- asu/imagebuilder.py | 388 ++++++++++++++++++++++++++++++++ pyproject.toml | 5 +- tests/conftest.py | 4 +- tests/test_api.py | 15 +- tests/test_common.py | 24 -- tests/test_imagebuilder.py | 122 ++++++++++ 8 files changed, 624 insertions(+), 425 deletions(-) create mode 100644 asu/imagebuilder.py create mode 100644 tests/test_imagebuilder.py diff --git a/asu/build.py b/asu/build.py index 0cedee8b..7c08dfa6 100644 --- a/asu/build.py +++ b/asu/build.py @@ -1,23 +1,58 @@ -import json import logging -import re -import subprocess from datetime import datetime -from pathlib import Path -from shutil import copyfile, rmtree +from shutil import rmtree -import requests from rq import get_current_job -from .common import ( - fingerprint_pubkey_usign, - get_file_hash, - get_packages_hash, - verify_usign, -) +from .common import get_packages_hash +from .imagebuilder import ImageBuilder log = logging.getLogger("rq.worker") -log.setLevel(logging.DEBUG) + + +def set_stats(job, req): + job.connection.hincrby( + "stats:builds", + "#".join( + [req["branch_data"]["name"], req["version"], req["target"], req["profile"]] + ), + ) + + +def cleanup_imagebuilders(job, req): + now_timestamp = int(datetime.now().timestamp()) + + # Set last build timestamp for current target/subtarget to now + job.connection.hset( + f"worker:{job.worker_name}:last_build", req["target"], now_timestamp + ) + + # Iterate over all targets of the worker and remove the once inactive for a week + for target_subtarget, last_build_timestamp in job.connection.hgetall( + f"worker:{job.worker_name}:last_build" + ).items(): + target_subtarget = target_subtarget.decode() + + log.debug("now_timestamp %s %s", target_subtarget, now_timestamp) + log.debug( + "last_build_timestamp %s %s", + target_subtarget, + last_build_timestamp.decode(), + ) + + if now_timestamp - int(last_build_timestamp.decode()) > 60 * 60 * 24: + log.info("Removing unused ImageBuilder for %s", target_subtarget) + job.connection.hdel( + f"worker:{job.worker_name}:last_build", target_subtarget + ) + if (req["cache_path"] / target_subtarget).exists(): + rmtree(req["cache_path"] / target_subtarget) + for suffix in [".stamp", ".sha256sums", ".sha256sums.sig"]: + (req["cache_path"] / target_subtarget).with_suffix(suffix).unlink( + missing_ok=True + ) + else: + log.debug("Keeping ImageBuilder for %s", target_subtarget) def build(req: dict): @@ -43,220 +78,53 @@ def report_error(msg): job.save_meta() log.debug(f"Building {req}") - target, subtarget = req["target"].split("/") - cache = req.get("cache_path", Path.cwd()) / "cache" / req["version"] - cache_workdir = cache / target / subtarget - sums_file = Path(cache / target / f"{subtarget}.sha256sums") - sig_file = Path(cache / target / f"{subtarget}.sha256sums.sig") - - def setup_ib(): - """Setup ImageBuilder based on `req` - - This function downloads and verifies the ImageBuilder archive. Existing - setups are automatically updated if newer version are available - upstream. 
- """ - log.debug("Setting up ImageBuilder") - if (cache_workdir).is_dir(): - rmtree(cache_workdir) - - download_file("sha256sums.sig", sig_file) - download_file("sha256sums", sums_file) - - log.debug("Signatures downloaded" + sig_file.read_text()) - - if not verify_usign(sig_file, sums_file, req["branch_data"]["pubkey"]): - report_error("Bad signature of ImageBuilder archive") - - ib_search = re.search( - r"^(.{64}) \*(openwrt-imagebuilder-.+?\.Linux-x86_64\.tar\.xz)$", - sums_file.read_text(), - re.MULTILINE, - ) - - if not ib_search: - report_error("Missing Checksum") - - ib_hash, ib_archive = ib_search.groups() - - job.meta["imagebuilder_status"] = "download_imagebuilder" - job.save_meta() - download_file(ib_archive) + ib = ImageBuilder( + version=req["version"], + target=req["target"], + upstream_url=req["upstream_url"], + custom_public_key=req["branch_data"]["pubkey"], + cache=req["cache_path"], + ) - if ib_hash != get_file_hash(cache / target / ib_archive): - report_error("Bad Checksum") + log.info(f"Building {req}") - (cache_workdir).mkdir(parents=True, exist_ok=True) + err = ib.setup() - job.meta["imagebuilder_status"] = "unpack_imagebuilder" + if err: + job.meta["stdout"] = ib.stdout + job.meta["stderr"] = ib.stderr + job.meta["build_cmd"] = ib.build_cmd job.save_meta() + raise err - extract_archive = subprocess.run( - ["tar", "--strip-components=1", "-xf", ib_archive, "-C", subtarget], - cwd=cache / target, - ) - - if extract_archive.returncode: - report_error("Failed to unpack ImageBuilder archive") - - log.debug(f"Extracted TAR {ib_archive}") - - (cache / target / ib_archive).unlink() - - for key in req["branch_data"].get("extra_keys", []): - fingerprint = fingerprint_pubkey_usign(key) - (cache_workdir / "keys" / fingerprint).write_text( - f"untrusted comment: ASU extra key {fingerprint}\n{key}" - ) - - repos_path = cache_workdir / "repositories.conf" - repos = repos_path.read_text() - - extra_repos = req["branch_data"].get("extra_repos") - if extra_repos: - log.debug("Found extra repos") - for name, repo in extra_repos.items(): - repos += f"\nsrc/gz {name} {repo}" - - repos_path.write_text(repos) - log.debug(f"Repos:\n{repos}") - - if (Path.cwd() / "seckey").exists(): - # link key-build to imagebuilder - (cache_workdir / "key-build").symlink_to(Path.cwd() / "seckey") - if (Path.cwd() / "pubkey").exists(): - # link key-build.pub to imagebuilder - (cache_workdir / "key-build.pub").symlink_to(Path.cwd() / "pubkey") - if (Path.cwd() / "newcert").exists(): - # link key-build.ucert to imagebuilder - (cache_workdir / "key-build.ucert").symlink_to(Path.cwd() / "newcert") - - def download_file(filename: str, dest: str = None): - """Download file from upstream target path - - The URL points automatically to the targets folder upstream - - Args: - filename (str): File in upstream target folder - dest (str): Optional path to store the file, default to target - cache folder - """ - log.debug(f"Downloading {filename}") - r = requests.get( - req["upstream_url"] - + "/" - + req["branch_data"]["path"].format(version=req["version"]) - + "/targets/" - + req["target"] - + "/" - + filename - ) - - with open(dest or (cache / target / filename), "wb") as f: - f.write(r.content) - - (cache / target).mkdir(parents=True, exist_ok=True) - - stamp_file = cache / target / f"{subtarget}.stamp" - - sig_file_headers = requests.head( - req["upstream_url"] - + "/" - + req["branch_data"]["path"].format(version=req["version"]) - + "/targets/" - + req["target"] - + "/sha256sums.sig" - ).headers - 
log.debug(f"sig_file_headers: \n{sig_file_headers}") - - origin_modified = sig_file_headers.get("last-modified") - log.info("Origin %s", origin_modified) - - if stamp_file.is_file(): - local_modified = stamp_file.read_text() - log.info("Local %s", local_modified) - else: - local_modified = "" - - if origin_modified != local_modified: - log.debug("New ImageBuilder upstream available") - setup_ib() - stamp_file.write_text(origin_modified) - - if not (cache_workdir / ".config.orig").exists(): - # backup original configuration to keep default filesystems - copyfile( - cache_workdir / ".config", - cache_workdir / ".config.orig", - ) - - info_run = subprocess.run( - ["make", "info"], text=True, capture_output=True, cwd=cache_workdir - ) - - version_code = re.search('Current Revision: "(r.+)"', info_run.stdout).group(1) + log.debug("Config at %s", ib.workdir / ".config") if "version_code" in req: - if version_code != req.get("version_code"): + if ib.version_code != req.get("version_code"): report_error( - f"Received inncorrect version {version_code} (requested {req['version_code']})" + f"Received inncorrect version {ib.version_code} " + f"(requested {req['version_code']})" ) - default_packages = set( - re.search(r"Default Packages: (.*)\n", info_run.stdout).group(1).split() - ) - profile_packages = set( - re.search( - r"{}:\n .+\n Packages: (.*?)\n".format(req["profile"]), - info_run.stdout, - re.MULTILINE, - ) - .group(1) - .split() - ) - if req.get("diff_packages", False): - remove_packages = (default_packages | profile_packages) - req["packages"] + remove_packages = (ib.default_packages | ib.profile_packages) - req["packages"] req["packages"] = req["packages"] | set(map(lambda p: f"-{p}", remove_packages)) + else: + req["packages"] = [] job.meta["imagebuilder_status"] = "calculate_packages_hash" job.save_meta() - manifest_run = subprocess.run( - [ - "make", - "manifest", - f"PROFILE={req['profile']}", - f"PACKAGES={' '.join(sorted(req.get('packages', [])))}", - "STRIP_ABI=1", - ], - text=True, - cwd=cache_workdir, - capture_output=True, - ) - - job.meta["stdout"] = manifest_run.stdout - job.meta["stderr"] = manifest_run.stderr - job.save_meta() - - if manifest_run.returncode: - if "Package size mismatch" in manifest_run.stderr: - rmtree(cache_workdir) - return build(req) - else: - print(manifest_run.stdout) - print(manifest_run.stderr) - report_error("Impossible package selection") - - manifest = dict(map(lambda pv: pv.split(" - "), manifest_run.stdout.splitlines())) + manifest = ib.manifest(req["profile"], req["packages"]) for package, version in req.get("packages_versions", {}).items(): if package not in manifest: report_error(f"Impossible package selection: {package} not in manifest") if version != manifest[package]: report_error( - f"Impossible package selection: {package} version not as requested: {version} vs. {manifest[package]}" + f"Impossible package selection: {package} version not as requested: " + f"{version} vs. 
{manifest[package]}" ) manifest_packages = manifest.keys() @@ -266,163 +134,53 @@ def download_file(filename: str, dest: str = None): packages_hash = get_packages_hash(manifest_packages) log.debug(f"Packages Hash {packages_hash}") - bin_dir = req["request_hash"] - - (req["store_path"] / bin_dir).mkdir(parents=True, exist_ok=True) - - log.debug("Created store path: %s", req["store_path"] / bin_dir) - - if req.get("filesystem"): - config_path = cache_workdir / ".config" - config = config_path.read_text() - - for filesystem in ["squashfs", "ext4fs", "ubifs", "jffs2"]: - # this implementation uses `startswith` since a running device thinks - # it's running `ext4` while really there is `ext4fs` running - if not filesystem.startswith(req.get("filesystem", filesystem)): - log.debug(f"Disable {filesystem}") - config = config.replace( - f"CONFIG_TARGET_ROOTFS_{filesystem.upper()}=y", - f"# CONFIG_TARGET_ROOTFS_{filesystem.upper()} is not set", - ) - else: - log.debug(f"Enable {filesystem}") - config = config.replace( - f"# CONFIG_TARGET_ROOTFS_{filesystem.upper()} is not set", - f"CONFIG_TARGET_ROOTFS_{filesystem.upper()}=y", - ) - - config_path.write_text(config) - else: - log.debug("Enable default filesystems") - copyfile( - cache_workdir / ".config.orig", - cache_workdir / ".config", - ) - - build_cmd = [ - "make", - "image", - f"PROFILE={req['profile']}", - f"PACKAGES={' '.join(sorted(req.get('packages', [])))}", - f"EXTRA_IMAGE_NAME={packages_hash}", - f"BIN_DIR={req['store_path'] / bin_dir}", - ] + ib.bin_dir = req["store_path"] / req["request_hash"] + ib.bin_dir.mkdir(parents=True, exist_ok=True) - log.debug("Build command: %s", build_cmd) + log.debug("Build command: %s", ib.build_cmd) job.meta["imagebuilder_status"] = "building_image" job.save_meta() - if req.get("defaults"): - defaults_file = ( - Path(req["store_path"]) / bin_dir / "files/etc/uci-defaults/99-asu-defaults" - ) - defaults_file.parent.mkdir(parents=True) - defaults_file.write_text(req["defaults"]) - build_cmd.append(f"FILES={req['store_path'] / bin_dir / 'files'}") - - log.debug(f"Running {' '.join(build_cmd)}") + log.debug(f"Running {' '.join(ib.build_cmd)}") - image_build = subprocess.run( - build_cmd, - text=True, - cwd=cache_workdir, - capture_output=True, + ib.build( + req["profile"], + req["packages"], + packages_hash, + defaults=req.get("defaults"), + filesystem=req.get("filesystem"), ) - job.meta["stdout"] = image_build.stdout - job.meta["stderr"] = image_build.stderr - job.meta["build_cmd"] = build_cmd + job.meta["stdout"] = ib.stdout + job.meta["stderr"] = ib.stderr + job.meta["build_cmd"] = ib.build_cmd job.save_meta() - if image_build.returncode: - report_error("Error while building firmware. 
See stdout/stderr") - - if "is too big" in image_build.stderr: - report_error("Selected packages exceed device storage") - - kernel_build_dir_run = subprocess.run( - ["make", "val.KERNEL_BUILD_DIR"], - text=True, - cwd=cache_workdir, - capture_output=True, - ) - - if kernel_build_dir_run.returncode: - report_error("Couldn't determine KERNEL_BUILD_DIR") - - kernel_build_dir_tmp = Path(kernel_build_dir_run.stdout.strip()) / "tmp" - - if kernel_build_dir_tmp.exists(): - log.info("Removing KDIR_TMP at %s", kernel_build_dir_tmp) - rmtree(kernel_build_dir_tmp) - else: - log.warning("KDIR_TMP missing at %s", kernel_build_dir_tmp) - - json_file = Path(req["store_path"] / bin_dir / "profiles.json") - - if not json_file.is_file(): + if not ib.profiles_json: report_error("No JSON file found") - json_content = json.loads(json_file.read_text()) - - if req["profile"] not in json_content["profiles"]: + if req["profile"] not in ib.profiles_json["profiles"]: report_error("Profile not found in JSON file") - now_timestamp = int(datetime.now().timestamp()) - - json_content.update({"manifest": manifest}) - json_content.update(json_content["profiles"][req["profile"]]) - json_content["id"] = req["profile"] - json_content["bin_dir"] = str(bin_dir) - json_content.pop("profiles") - json_content["build_at"] = datetime.utcfromtimestamp( - int(json_content.get("source_date_epoch", 0)) + ib.profiles_json.update({"manifest": manifest}) + ib.profiles_json.update(ib.profiles_json["profiles"][req["profile"]]) + ib.profiles_json["id"] = req["profile"] + ib.profiles_json["bin_dir"] = str(ib.bin_dir) + ib.profiles_json.pop("profiles") + ib.profiles_json["build_at"] = datetime.utcfromtimestamp( + int(ib.profiles_json.get("source_date_epoch", 0)) ).strftime("%Y-%m-%dT%H:%M:%S.%fZ") - json_content["detail"] = "done" - - log.debug("JSON content %s", json_content) + ib.profiles_json["detail"] = "done" - job.connection.sadd(f"builds:{version_code}:{req['target']}", req["request_hash"]) + log.debug("JSON content %s", ib.profiles_json) - job.connection.hincrby( - "stats:builds", - "#".join( - [req["branch_data"]["name"], req["version"], req["target"], req["profile"]] - ), - ) - - # Set last build timestamp for current target/subtarget to now - job.connection.hset( - f"worker:{job.worker_name}:last_build", req["target"], now_timestamp + job.connection.sadd( + f"builds:{ib.version_code}:{req['target']}", req["request_hash"] ) - # Iterate over all targets/subtargets of the worker and remove the once inactive for a week - for target_subtarget, last_build_timestamp in job.connection.hgetall( - f"worker:{job.worker_name}:last_build" - ).items(): - target_subtarget = target_subtarget.decode() + set_stats(job, req) - log.debug("now_timestamp %s %s", target_subtarget, now_timestamp) - log.debug( - "last_build_timestamp %s %s", - target_subtarget, - last_build_timestamp.decode(), - ) - - if now_timestamp - int(last_build_timestamp.decode()) > 60 * 60 * 24: - log.info("Removing unused ImageBuilder for %s", target_subtarget) - job.connection.hdel( - f"worker:{job.worker_name}:last_build", target_subtarget - ) - if (cache / target_subtarget).exists(): - rmtree(cache / target_subtarget) - for suffix in [".stamp", ".sha256sums", ".sha256sums.sig"]: - (cache / target_subtarget).with_suffix(suffix).unlink( - missing_ok=True - ) - else: - log.debug("Keeping ImageBuilder for %s", target_subtarget) + cleanup_imagebuilders(job, req) - return json_content + return ib.profiles_json diff --git a/asu/common.py b/asu/common.py index 90213772..a7869308 100644 
--- a/asu/common.py +++ b/asu/common.py @@ -1,10 +1,6 @@ -import base64 import hashlib import json -import struct -from pathlib import Path -import nacl.signing import requests from flask import current_app @@ -129,46 +125,3 @@ def get_packages_hash(packages: list) -> str: str: hash of `req` """ return get_str_hash(" ".join(sorted(list(set(packages)))), 12) - - -def fingerprint_pubkey_usign(pubkey: str) -> str: - """Return fingerprint of signify/usign public key - - Args: - pubkey (str): signify/usign public key - - Returns: - str: string containing the fingerprint - """ - keynum = base64.b64decode(pubkey.splitlines()[-1])[2:10] - return "".join(format(x, "02x") for x in keynum) - - -def verify_usign(sig_file: Path, msg_file: Path, pub_key: str) -> bool: - """Verify a signify/usign signature - - This implementation uses pynacl - - Args: - sig_file (Path): signature file - msg_file (Path): message file to be verified - pub_key (str): public key to use for verification - - Returns: - bool: Sucessfull verification - - Todo: - Currently ignores keynum and pkalg - - """ - pkalg, keynum, pubkey = struct.unpack("!2s8s32s", base64.b64decode(pub_key)) - sig = base64.b64decode(sig_file.read_text().splitlines()[-1]) - - pkalg, keynum, sig = struct.unpack("!2s8s64s", sig) - - verify_key = nacl.signing.VerifyKey(pubkey, encoder=nacl.encoding.RawEncoder) - try: - verify_key.verify(msg_file.read_bytes(), sig) - return True - except nacl.exceptions.CryptoError: - return False diff --git a/asu/imagebuilder.py b/asu/imagebuilder.py new file mode 100644 index 00000000..dc87e1c7 --- /dev/null +++ b/asu/imagebuilder.py @@ -0,0 +1,388 @@ +import base64 +import hashlib +import json +import re +import struct +from datetime import datetime +from pathlib import Path +from shutil import copyfile, rmtree +from subprocess import run + +import nacl.signing +import requests +from urlpath import URL + + +def verify_usign(signature: str, message: str, public_key: str) -> bool: + """Verify a signify/usign signature + + This implementation uses pynacl + + Args: + sig (str): signature content in bytes + msg (str): message content in bytes + pub_key (str): public key to use for verification + + Returns: + bool: Sucessfull verification + + Todo: + Currently ignores keynum and pkalg + + """ + _pkalg, _keynum, pubkey = struct.unpack("!2s8s32s", base64.b64decode(public_key)) + sig = base64.b64decode(signature.splitlines()[-1]) + + _pkalg, _keynum, sig = struct.unpack("!2s8s64s", sig) + + verify_key = nacl.signing.VerifyKey(pubkey, encoder=nacl.encoding.RawEncoder) + try: + verify_key.verify(bytes(message, "utf-8"), sig) + return True + except nacl.exceptions.CryptoError: + return False + + +def fingerprint_pubkey_usign(pubkey: str) -> str: + """Return fingerprint of signify/usign public key + + Args: + pubkey (str): signify/usign public key + + Returns: + str: string containing the fingerprint + """ + keynum = base64.b64decode(pubkey.splitlines()[-1])[2:10] + return "".join(format(x, "02x") for x in keynum) + + +def get_file_hash(path: Path) -> str: + """Return sha256sum of given path + + Args: + path (str): path to file + + Returns: + str: hash of file + """ + BLOCK_SIZE = 65536 + + h = hashlib.sha256() + with open(str(path), "rb") as f: + fb = f.read(BLOCK_SIZE) + while len(fb) > 0: + h.update(fb) + fb = f.read(BLOCK_SIZE) + + return h.hexdigest() + + +class ImageBuilder(object): + def __init__( + self, + distro="openwrt", + version="21.02.3", + target="x86/64", + cache=Path.cwd() / "cache", + bin_dir=Path.cwd() / "bin", + 
upstream_url="https://downloads.openwrt.org", + keys=Path.cwd(), + files=None, + custom_public_key=None, + ): + self.distro = distro + self.version = version + self.target = target.lower() + self.cache = Path(cache) + self.upstream_url = URL(upstream_url) + self.keys = Path(keys) + self.workdir = self.cache / self.version / self.target + self.sha256sums = None + self.sha256sums_sig = None + self.version_code = "" + self.default_packages = set() + self.profile_packages = set() + self.bin_dir = bin_dir + self.files = files or self.bin_dir + self.custom_public_key = custom_public_key + self.stdout = "" + self.stderr = "" + self.build_cmd = [] + self.profiles_json = None + + @property + def public_key(self): + if self.custom_public_key: + return self.custom_public_key + + if self.version == "SNAPSHOT": + return "RWS1BD5w+adc3j2Hqg9+b66CvLR7NlHbsj7wjNVj0XGt/othDgIAOJS+" + elif self.version.startswith("21.02"): + return "RWQviwuY4IMGvwLfs6842A0m4EZU1IjczTxKMSk3BQP8DAQLHBwdQiaU" + else: + return None + + @property + def version_folder(self): + if self.version != "SNAPSHOT": + return f"releases/{self.version}" + else: + return "snapshots" + + def get_sha256sums(self): + if not self.sha256sums: + self.sha256sums = self._download_file("sha256sums").text + + return self.sha256sums + + def get_sha256sums_sig(self): + if not self.sha256sums_sig: + self.sha256sums_sig = self._download_file("sha256sums.sig").content + + return self.sha256sums_sig + + def _download_header(self, filename): + return requests.head(self.imagebuilder_url / filename).headers + + def _download_file(self, filename, path: Path = None): + file_request = requests.get(self.imagebuilder_url / filename) + file_request.raise_for_status() + + if path: + path.write_bytes(file_request.content) + return True + else: + return file_request + + @property + def imagebuilder_url(self): + return self.upstream_url / self.version_folder / "targets" / self.target + + def is_outdated(self): + makefile = self.workdir / "Makefile" + if not makefile.exists(): + return True + + remote_stamp = datetime.strptime( + self._download_header("sha256sums.sig").get("last-modified"), + "%a, %d %b %Y %H:%M:%S %Z", + ) + + local_stamp = datetime.fromtimestamp(makefile.stat().st_mtime) + + if remote_stamp > local_stamp: + return True + + return False + + def _get_archive_sum_name(self): + return re.search( + r"^(.{64}) \*(openwrt-imagebuilder-.+?\.Linux-x86_64\.tar\.xz)$", + self.get_sha256sums(), + re.MULTILINE, + ).groups() + + @property + def config(self): + config_path = self.workdir / ".config" + if config_path.exists(): + return config_path + else: + return None + + @property + def archive_name(self): + return self._get_archive_sum_name()[1] + + @property + def archive_sum(self): + return self._get_archive_sum_name()[0] + + def valid_signature(self): + return verify_usign( + self.get_sha256sums_sig(), self.get_sha256sums(), self.public_key + ) + + def valid_checksum(self): + return self.archive_sum == get_file_hash(self.cache / self.archive_name) + + def download(self): + self.cache.mkdir(exist_ok=True, parents=True) + + return self._download_file( + self.archive_name, + self.cache / self.archive_name, + ) + + def unpack(self): + self.workdir.mkdir(parents=True, exist_ok=True) + run( + [ + "tar", + "--strip-components=1", + "-xf", + self.cache / self.archive_name, + ], + cwd=self.workdir, + ) + + (self.cache / self.archive_name).unlink() + + copyfile( + self.workdir / ".config", + self.workdir / ".config.orig", + ) + + return True + + def copy_keys(self): + for 
suffix in ["", ".pub", ".ucert"]: + file = (self.keys / "key-build").with_suffix(suffix) + if file.exists(): + (self.workdir / file.name).symlink_to(file) + + def setup(self, check_online=False): + if not self.is_outdated(): + return None + + if not self.valid_signature(): + return "Invalid signature" + + if not self.download(): + return "Failed to download" + + if not self.valid_checksum(): + return "Bad checksum of archive" + + if not self.unpack(): + return "Failed to unpack" + + self.parse_info() + + def info(self): + return run(["make", "info"], text=True, capture_output=True, cwd=self.workdir) + + def parse_info(self): + info_run = self.info() + + self.version_code = re.search( + 'Current Revision: "(r.+)"', info_run.stdout + ).group(1) + + self.default_packages = set( + re.search(r"Default Packages: (.*)\n", info_run.stdout).group(1).split() + ) + + self.profile_packages = set( + re.search( + r"(.*?):\n .+\n Packages: (.*?)\n", + info_run.stdout, + re.MULTILINE, + ) + .group(1) + .split() + ) + + def _packages(self, packages): + return sorted(list(set(packages))) + + def _make(self, cmd: list): + return run(cmd, text=True, cwd=self.workdir, capture_output=True) + + def cleanup(self): + kernel_build_dir_run = self._make(["make", "val.KERNEL_BUILD_DIR"]) + + kernel_build_dir_tmp = Path(kernel_build_dir_run.stdout.strip()) / "tmp" + + if kernel_build_dir_tmp.exists(): + # log.info("Removing KDIR_TMP at %s", kernel_build_dir_tmp) + rmtree(kernel_build_dir_tmp) + else: + pass + # log.warning("KDIR_TMP missing at %s", kernel_build_dir_tmp) + + def manifest(self, profile, packages): + manifest_run = self._make( + [ + "make", + "manifest", + f"PROFILE={profile}", + f"PACKAGES={' '.join(self._packages(packages))}", + "STRIP_ABI=1", + ] + ) + + self.stdout = manifest_run.stdout + self.stderr = manifest_run.stderr + + if manifest_run.returncode: + raise ValueError("Package selection caused error") + + return dict(map(lambda pv: pv.split(" - "), manifest_run.stdout.splitlines())) + + def set_filesystem(self, filesystem): + config = self.config.read_text() + + for available_filesystem in ["squashfs", "ext4fs", "ubifs", "jffs2"]: + # this implementation uses `startswith` since a running device thinks + # it's running `ext4` while really there is `ext4fs` running + if not available_filesystem.startswith(filesystem): + # log.debug(f"Disable {available_filesystem}") + config = config.replace( + f"CONFIG_TARGET_ROOTFS_{available_filesystem.upper()}=y", + f"# CONFIG_TARGET_ROOTFS_{available_filesystem.upper()} is not set", + ) + else: + # log.debug(f"Enable {available_filesystem}") + config = config.replace( + f"# CONFIG_TARGET_ROOTFS_{available_filesystem.upper()} is not set", + f"CONFIG_TARGET_ROOTFS_{available_filesystem.upper()}=y", + ) + + self.config.write_text(config) + + def build( + self, profile, packages, extra_image_name="", defaults="", filesystem=None + ): + if filesystem: + self.set_filesystem(filesystem) + else: + copyfile( + self.workdir / ".config.orig", + self.workdir / ".config", + ) + + self.build_cmd = [ + "make", + "image", + f"PROFILE={profile}", + f"PACKAGES={' '.join(self._packages(packages))}", + f"EXTRA_IMAGE_NAME={extra_image_name}", + f"BIN_DIR={self.bin_dir}", + ] + + defaults_file = self.files / "files/etc/uci-defaults/99-asu-defaults" + + if defaults: + defaults_file.parent.mkdir(parents=True) + defaults_file.write_text(defaults) + self.build_cmd.append(f"FILES={self.files / 'files'}") + else: + defaults_file.unlink(missing_ok=True) + + build_run = 
self._make(self.build_cmd) + + self.stdout = build_run.stdout + self.stderr = build_run.stderr + + if build_run.returncode: + raise ValueError("Error while building firmware. See stdout/stderr") + + if "is too big" in build_run.stderr: + raise ValueError("Selected packages exceed device storage") + + profiles_json_path = self.bin_dir / "profiles.json" + if profiles_json_path.exists(): + self.profiles_json = json.loads(profiles_json_path.read_text()) + + self.cleanup() diff --git a/pyproject.toml b/pyproject.toml index 67cdf109..c9742c62 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,10 @@ requests = "^2.27.1" rq = "^1.10.1" connexion = {extras = ["swagger-ui"], version = "^2.12.0"} prometheus-client = "^0.13.1" -gunicorn = "^20.1.0" +pytest-cov = "^3.0.0" +urlpath = "^1.2.0" +setuptools = "^65.4.1" + [tool.poetry.dev-dependencies] pytest = "^6.2.5" diff --git a/tests/conftest.py b/tests/conftest.py index f1cc3689..76de8bdd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -81,7 +81,7 @@ def app(test_path, redis_server): "JSON_PATH": test_path + "/json", "REDIS_CONN": redis_server, "STORE_PATH": test_path + "/store", - "CACHE_PATH": test_path, + "CACHE_PATH": test_path + "/cache", "TESTING": True, "UPSTREAM_URL": "http://localhost:8001", "BRANCHES": { @@ -213,7 +213,7 @@ def httpserver_listen_address(): @pytest.fixture def upstream(httpserver): - base_url = "/snapshots/targets/testtarget/testsubtarget" + base_url = "/releases/TESTVERSION/targets/testtarget/testsubtarget" upstream_path = Path("./tests/upstream/snapshots/targets/testtarget/testsubtarget/") expected_file_requests = [ "sha256sums.sig", diff --git a/tests/test_api.py b/tests/test_api.py index 72ba9557..21e492b6 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -31,7 +31,7 @@ def test_api_build_filesystem_ext4(app, upstream): assert response.json.get("request_hash") == "daae6bc8045962aa86c8e9d885dae499" config = ( - app.config["CACHE_PATH"] / "cache/TESTVERSION/testtarget/testsubtarget/.config" + app.config["CACHE_PATH"] / "TESTVERSION/testtarget/testsubtarget/.config" ).read_text() assert "# CONFIG_TARGET_ROOTFS_SQUASHFS is not set" in config assert "CONFIG_TARGET_ROOTFS_EXT4FS=y" in config @@ -52,7 +52,7 @@ def test_api_build_filesystem_squashfs(app, upstream): assert response.status == "200 OK" assert response.json.get("request_hash") == "40cc1368f667923f3414914a2ccecc89" config = ( - app.config["CACHE_PATH"] / "cache/TESTVERSION/testtarget/testsubtarget/.config" + app.config["CACHE_PATH"] / "TESTVERSION/testtarget/testsubtarget/.config" ).read_text() assert "# CONFIG_TARGET_ROOTFS_EXT4FS is not set" in config assert "CONFIG_TARGET_ROOTFS_SQUASHFS=y" in config @@ -73,7 +73,7 @@ def test_api_build_filesystem_empty(app, upstream): assert response.status == "200 OK" assert response.json.get("request_hash") == "33377fbd91c50c4236343f1dfd67f9ae" config = ( - app.config["CACHE_PATH"] / "cache/TESTVERSION/testtarget/testsubtarget/.config" + app.config["CACHE_PATH"] / "TESTVERSION/testtarget/testsubtarget/.config" ).read_text() assert "CONFIG_TARGET_ROOTFS_EXT4FS=y" in config assert "CONFIG_TARGET_ROOTFS_SQUASHFS=y" in config @@ -96,8 +96,7 @@ def test_api_build_filesystem_reset(app, upstream): assert ( "# CONFIG_TARGET_ROOTFS_SQUASHFS is not set" in ( - app.config["CACHE_PATH"] - / "cache/TESTVERSION/testtarget/testsubtarget/.config" + app.config["CACHE_PATH"] / "TESTVERSION/testtarget/testsubtarget/.config" ).read_text() ) @@ -115,8 +114,7 @@ def test_api_build_filesystem_reset(app, upstream): assert ( 
"# CONFIG_TARGET_ROOTFS_SQUASHFS is not set" not in ( - app.config["CACHE_PATH"] - / "cache/TESTVERSION/testtarget/testsubtarget/.config" + app.config["CACHE_PATH"] / "TESTVERSION/testtarget/testsubtarget/.config" ).read_text() ) @@ -475,6 +473,7 @@ def test_api_build_bad_packages(client): assert response.json.get("detail") == "Unsupported package(s): test4" assert response.status == "422 UNPROCESSABLE ENTITY" + def test_api_build_package_to_remove_diff_packages_false(client, upstream): response = client.post( "/api/v1/build", @@ -504,7 +503,7 @@ def test_api_build_cleanup(app, upstream): assert response.status == "200 OK" assert not ( app.config["CACHE_PATH"] - / "cache/TESTVERSION/testtarget/testsubtarget" + / "TESTVERSION/testtarget/testsubtarget" / "pseudo_kernel_build_dir/tmp/" / "fake_trash" ).exists() diff --git a/tests/test_common.py b/tests/test_common.py index a528243e..492f637c 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -44,27 +44,3 @@ def test_get_request_hash_diff_packages(): } assert get_request_hash(request) == "caaa8f25efadb5456f8fd32b5a4ba032" - - -def test_fingerprint_pubkey_usign(): - pub_key = "RWSrHfFmlHslUcLbXFIRp+eEikWF9z1N77IJiX5Bt/nJd1a/x+L+SU89" - assert fingerprint_pubkey_usign(pub_key) == "ab1df166947b2551" - - -def test_verify_usign(): - sig = b"\nRWSrHfFmlHslUQ9dCB1AJr/PoIIbBJJKtofZ5frLOuG03SlwAwgU1tYOaJs2eVGdo1C8S9LNcMBLPIfDDCWSdrLK3WJ6JV6HNQM=" - msg_fd, msg_path = tempfile.mkstemp() - sig_fd, sig_path = tempfile.mkstemp() - os.write(msg_fd, b"test\n") - os.write(sig_fd, sig) - - pub_key = "RWSrHfFmlHslUcLbXFIRp+eEikWF9z1N77IJiX5Bt/nJd1a/x+L+SU89" - pub_key_bad = "rWSrHfFmlHslUcLbXFIRp+eEikWF9z1N77IJiX5Bt/nJd1a/x+L+SXXX" - - assert verify_usign(Path(sig_path), Path(msg_path), pub_key) - assert not verify_usign(Path(sig_path), Path(msg_path), pub_key_bad) - - os.close(msg_fd) - os.close(sig_fd) - os.unlink(msg_path) - os.unlink(sig_path) diff --git a/tests/test_imagebuilder.py b/tests/test_imagebuilder.py new file mode 100644 index 00000000..15758d32 --- /dev/null +++ b/tests/test_imagebuilder.py @@ -0,0 +1,122 @@ +import os +import tempfile +from pathlib import Path + +import pytest +from urlpath import URL + +from asu.imagebuilder import ImageBuilder, fingerprint_pubkey_usign, verify_usign + + +def test_imagebuilder_url_21023(): + ib = ImageBuilder() + assert ib.imagebuilder_url == URL( + "https://downloads.openwrt.org/releases/21.02.3/targets/x86/64" + ) + + +def test_imagebuilder_url_2102_SNAPSHOT(): + ib = ImageBuilder(version="21.02-SNAPSHOT") + assert ib.imagebuilder_url == URL( + "https://downloads.openwrt.org/releases/21.02-SNAPSHOT/targets/x86/64" + ) + + +def test_imagebuilder_url_snapshot(): + ib = ImageBuilder(version="SNAPSHOT") + assert ib.imagebuilder_url == URL( + "https://downloads.openwrt.org/snapshots/targets/x86/64" + ) + + +def test_get_sha256sums(): + ib = ImageBuilder(version="21.02.3") + assert ib.get_sha256sums().splitlines()[0].endswith("*config.buildinfo") + + +def test_archive(): + ib = ImageBuilder(version="21.02.3") + assert ib._get_archive_sum_name() == ( + "4f6e8c06471f92db0d9cf0168da7213291bb7d1da2197a307528152e02e658ae", + "openwrt-imagebuilder-21.02.3-x86-64.Linux-x86_64.tar.xz", + ) + + +def test_archive_name(): + ib = ImageBuilder(version="21.02.3") + assert ib.archive_name == "openwrt-imagebuilder-21.02.3-x86-64.Linux-x86_64.tar.xz" + + +def test_archive_sum(): + ib = ImageBuilder(version="21.02.3") + assert ( + ib.archive_sum + == "4f6e8c06471f92db0d9cf0168da7213291bb7d1da2197a307528152e02e658ae" + 
) + + +# def test_download_21_02_3(): +# ib = ImageBuilder(version="21.02.3", upstream_url="downloads.cdn.openwrt.org") +# ib.download() +# assert (ib.cache / ib.archive_name).exists() + + +# def test_download_snapshot(): +# ib = ImageBuilder(version="SNAPSHOT", upstream_url="downloads.cdn.openwrt.org") +# ib.download() +# assert (ib.cache / ib.archive_name).exists() + + +def test_verify_signature_snapshot(): + ib = ImageBuilder(version="SNAPSHOT") + assert ib.valid_signature() + + +def test_verify_signature_21_02_3(): + ib = ImageBuilder(version="21.02.3") + assert ib.valid_signature() + + +def test_verify_signature_99_99_99(): + ib = ImageBuilder(version="99_99_99") + with pytest.raises(Exception) as exc_info: + ib.valid_signature() + + assert ( + str(exc_info.value) + == "404 Client Error: Not Found for url: https://downloads.openwrt.org/releases/99_99_99/targets/x86/64/sha256sums.sig" + ) + + +def test_is_outdated(tmpdir): + ib = ImageBuilder(version="21.02.3", cache=tmpdir) + + assert ib.is_outdated() + + ib.workdir.mkdir(parents=True, exist_ok=True) + (ib.workdir / "Makefile").touch() + os.utime(str(ib.workdir / "Makefile"), (0, 0)) + assert ib.is_outdated() + + os.utime(str(ib.workdir / "Makefile"), (1650340906, 1650340906)) + assert not ib.is_outdated() + + +# def test_setup(tmpdir): +# ib = ImageBuilder(version="21.02.3", cache=tmpdir) +# assert ib.setup() is None + + +def test_fingerprint_pubkey_usign(): + pub_key = "RWSrHfFmlHslUcLbXFIRp+eEikWF9z1N77IJiX5Bt/nJd1a/x+L+SU89" + assert fingerprint_pubkey_usign(pub_key) == "ab1df166947b2551" + + +def test_verify_usign(): + sig = "RWSrHfFmlHslUQ9dCB1AJr/PoIIbBJJKtofZ5frLOuG03SlwAwgU1tYOaJs2eVGdo1C8S9LNcMBLPIfDDCWSdrLK3WJ6JV6HNQM=" + + pub_key = "RWSrHfFmlHslUcLbXFIRp+eEikWF9z1N77IJiX5Bt/nJd1a/x+L+SU89" + pub_key_bad = "rWSrHfFmlHslUcLbXFIRp+eEikWF9z1N77IJiX5Bt/nJd1a/x+L+SXXX" + + assert verify_usign(sig, "test\n", pub_key) + assert not verify_usign(sig, "test\n", pub_key_bad) From 2076c2954f9983706b51e3e32da4a701ddfc479f Mon Sep 17 00:00:00 2001 From: Paul Spooren Date: Thu, 13 Oct 2022 17:53:10 +0200 Subject: [PATCH 2/7] CI debug Signed-off-by: Paul Spooren --- tests/test_imagebuilder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_imagebuilder.py b/tests/test_imagebuilder.py index 15758d32..b11e5c53 100644 --- a/tests/test_imagebuilder.py +++ b/tests/test_imagebuilder.py @@ -98,7 +98,7 @@ def test_is_outdated(tmpdir): os.utime(str(ib.workdir / "Makefile"), (0, 0)) assert ib.is_outdated() - os.utime(str(ib.workdir / "Makefile"), (1650340906, 1650340906)) + os.utime(str(ib.workdir / "Makefile"), (2650340906, 2650340906)) assert not ib.is_outdated() From 5e2313a4824806d4ffc726ccb15d3a4b6384e369 Mon Sep 17 00:00:00 2001 From: Paul Spooren Date: Sun, 6 Nov 2022 19:14:48 +0100 Subject: [PATCH 3/7] wip docker --- asu/api.py | 2 +- asu/build.py | 34 ++++++++++++---------- asu/imagebuilder.py | 59 ++++++++++++++++++++++++++++---------- pyproject.toml | 11 +++---- tests/test_imagebuilder.py | 5 ++++ 5 files changed, 75 insertions(+), 36 deletions(-) diff --git a/asu/api.py b/asu/api.py index b0e060f5..433e6697 100644 --- a/asu/api.py +++ b/asu/api.py @@ -122,7 +122,7 @@ def validate_request(req): """ - if "defaults" in req and not current_app.config["ALLOW_DEFAULTS"]: + if req.get("defaults") and not current_app.config["ALLOW_DEFAULTS"]: return ( {"detail": "Handling `defaults` not enabled on server", "status": 400}, 400, diff --git a/asu/build.py b/asu/build.py index 7c08dfa6..1c05a81d 100644 --- 
a/asu/build.py +++ b/asu/build.py @@ -10,7 +10,7 @@ log = logging.getLogger("rq.worker") -def set_stats(job, req): +def set_stats(job, ib, req): job.connection.hincrby( "stats:builds", "#".join( @@ -18,6 +18,22 @@ def set_stats(job, req): ), ) + job.connection.sadd( + f"builds:{ib.version_code}:{req['target']}", req["request_hash"] + ) + + +def create_build_json(ib, req, manifest): + ib.profiles_json.update({"manifest": manifest}) + ib.profiles_json.update(ib.profiles_json["profiles"][req["profile"]]) + ib.profiles_json["id"] = req["profile"] + ib.profiles_json["bin_dir"] = str(ib.bin_dir) + ib.profiles_json.pop("profiles") + ib.profiles_json["build_at"] = datetime.utcfromtimestamp( + int(ib.profiles_json.get("source_date_epoch", 0)) + ).strftime("%Y-%m-%dT%H:%M:%S.%fZ") + ib.profiles_json["detail"] = "done" + def cleanup_imagebuilders(job, req): now_timestamp = int(datetime.now().timestamp()) @@ -163,23 +179,11 @@ def report_error(msg): if req["profile"] not in ib.profiles_json["profiles"]: report_error("Profile not found in JSON file") - ib.profiles_json.update({"manifest": manifest}) - ib.profiles_json.update(ib.profiles_json["profiles"][req["profile"]]) - ib.profiles_json["id"] = req["profile"] - ib.profiles_json["bin_dir"] = str(ib.bin_dir) - ib.profiles_json.pop("profiles") - ib.profiles_json["build_at"] = datetime.utcfromtimestamp( - int(ib.profiles_json.get("source_date_epoch", 0)) - ).strftime("%Y-%m-%dT%H:%M:%S.%fZ") - ib.profiles_json["detail"] = "done" + create_build_json(ib, req, manifest) log.debug("JSON content %s", ib.profiles_json) - job.connection.sadd( - f"builds:{ib.version_code}:{req['target']}", req["request_hash"] - ) - - set_stats(job, req) + set_stats(job, ib, req) cleanup_imagebuilders(job, req) diff --git a/asu/imagebuilder.py b/asu/imagebuilder.py index dc87e1c7..9926b65b 100644 --- a/asu/imagebuilder.py +++ b/asu/imagebuilder.py @@ -89,6 +89,7 @@ def __init__( keys=Path.cwd(), files=None, custom_public_key=None, + use_docker=True, ): self.distro = distro self.version = version @@ -110,6 +111,15 @@ def __init__( self.build_cmd = [] self.profiles_json = None + if use_docker: + import docker + + self.docker = docker.DockerClient( + base_url="unix:///Users/user/.colima/default/docker.sock" + ) + else: + self.docker = None + @property def public_key(self): if self.custom_public_key: @@ -289,6 +299,17 @@ def _packages(self, packages): def _make(self, cmd: list): return run(cmd, text=True, cwd=self.workdir, capture_output=True) + def _docker(self, cmd: list): + return self.docker.containers.run( + image="openwrt/imagebuilder", + command=" ".join(cmd), + volumes={ + str(self.workdir): {"bind": str(self.workdir), "mode": "ro"}, + str(self.bin_dir): {"bind": str(self.bin_dir), "mode": "rw"}, + }, + working_dir=str(self.workdir), + ) + def cleanup(self): kernel_build_dir_run = self._make(["make", "val.KERNEL_BUILD_DIR"]) @@ -301,24 +322,32 @@ def cleanup(self): pass # log.warning("KDIR_TMP missing at %s", kernel_build_dir_tmp) - def manifest(self, profile, packages): - manifest_run = self._make( - [ - "make", - "manifest", - f"PROFILE={profile}", - f"PACKAGES={' '.join(self._packages(packages))}", - "STRIP_ABI=1", - ] - ) + def manifest(self, profile: str, packages: list): + manifest_cmd = [ + "make", + "manifest", + f"PROFILE={profile}", + f"PACKAGES={' '.join(self._packages(packages))}", + "STRIP_ABI=1", + ] + + if self.docker: + self.stdout = self._docker(manifest_cmd) + + try: + pass + except docker.errors.ContainerError: + raise ValueError("Package selection caused 
error") + else: + manifest_run = self._make(manifest_cmd) - self.stdout = manifest_run.stdout - self.stderr = manifest_run.stderr + self.stdout = manifest_run.stdout + self.stderr = manifest_run.stderr - if manifest_run.returncode: - raise ValueError("Package selection caused error") + if manifest_run.returncode: + raise ValueError("Package selection caused error") - return dict(map(lambda pv: pv.split(" - "), manifest_run.stdout.splitlines())) + return dict(map(lambda pv: pv.split(" - "), self.stdout.splitlines())) def set_filesystem(self, filesystem): config = self.config.read_text() diff --git a/pyproject.toml b/pyproject.toml index c9742c62..2b065e2b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ readme = "README.md" include = ["asu/branches.yml"] [tool.poetry.dependencies] -python = "^3.7" +python = "^3.7.2" PyNaCl = "^1.5.0" redis = "^4.1.1" requests = "^2.27.1" @@ -18,16 +18,17 @@ prometheus-client = "^0.13.1" pytest-cov = "^3.0.0" urlpath = "^1.2.0" setuptools = "^65.4.1" +docker = "^6.0.0" - -[tool.poetry.dev-dependencies] -pytest = "^6.2.5" +[tool.poetry.group.dev.dependencies] +pytest = "^7.1.3" pytest-httpserver = "^1.0.3" fakeredis = "^1.7.1" -flake8 = "^4.0.1" black = "^22.1.0" coverage = "^6.3.2" isort = "^5.10.1" +bandit = "^1.7.4" +pylint = "^2.15.4" [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/tests/test_imagebuilder.py b/tests/test_imagebuilder.py index b11e5c53..b60b0e73 100644 --- a/tests/test_imagebuilder.py +++ b/tests/test_imagebuilder.py @@ -120,3 +120,8 @@ def test_verify_usign(): assert verify_usign(sig, "test\n", pub_key) assert not verify_usign(sig, "test\n", pub_key_bad) + +def test_manifest(): + ib = ImageBuilder(version="21.02.3") + # ib.setup() + ib.manifest("generic", []) From 08e10b89eb9a3912a5416c27c5e501e592893856 Mon Sep 17 00:00:00 2001 From: Paul Spooren Date: Sat, 18 Feb 2023 02:34:00 +0100 Subject: [PATCH 4/7] first image build inside docker container Signed-off-by: Paul Spooren --- asu/api.py | 3 +- asu/imagebuilder.py | 73 +++++++++++++++++++++++++++++---------------- pyproject.toml | 3 +- 3 files changed, 51 insertions(+), 28 deletions(-) diff --git a/asu/api.py b/asu/api.py index 433e6697..90778ab6 100644 --- a/asu/api.py +++ b/asu/api.py @@ -280,8 +280,7 @@ def api_v1_build_post(): return response, status req["store_path"] = current_app.config["STORE_PATH"] - if current_app.config.get("CACHE_PATH"): - req["cache_path"] = current_app.config.get("CACHE_PATH") + req["cache_path"] = current_app.config.get("CACHE_PATH", None) req["upstream_url"] = current_app.config["UPSTREAM_URL"] req["branch_data"] = current_app.config["BRANCHES"][req["branch"]] req["request_hash"] = request_hash diff --git a/asu/imagebuilder.py b/asu/imagebuilder.py index 9926b65b..2d4e7f5a 100644 --- a/asu/imagebuilder.py +++ b/asu/imagebuilder.py @@ -11,6 +11,9 @@ import nacl.signing import requests from urlpath import URL +import logging + +logging.getLogger().setLevel(logging.DEBUG) def verify_usign(signature: str, message: str, public_key: str) -> bool: @@ -94,7 +97,10 @@ def __init__( self.distro = distro self.version = version self.target = target.lower() - self.cache = Path(cache) + if cache: + self.cache = Path(cache) + else: + self.cache = Path.cwd() / "cache" self.upstream_url = URL(upstream_url) self.keys = Path(keys) self.workdir = self.cache / self.version / self.target @@ -217,6 +223,7 @@ def valid_checksum(self): return self.archive_sum == get_file_hash(self.cache / self.archive_name) def download(self): + 
logging.info(f"Download { self.version}/{self.target}") self.cache.mkdir(exist_ok=True, parents=True) return self._download_file( @@ -252,6 +259,9 @@ def copy_keys(self): (self.workdir / file.name).symlink_to(file) def setup(self, check_online=False): + if self.docker: + return None + if not self.is_outdated(): return None @@ -297,19 +307,34 @@ def _packages(self, packages): return sorted(list(set(packages))) def _make(self, cmd: list): - return run(cmd, text=True, cwd=self.workdir, capture_output=True) + make_run = run(cmd, text=True, cwd=self.workdir, capture_output=True) + self.stdout = make_run.stdout + self.stderr = make_run.stderr + return make_run.returncode def _docker(self, cmd: list): - return self.docker.containers.run( - image="openwrt/imagebuilder", + container = self.docker.containers.run( + # image=f"openwrt/imagebuilder", + image=f"openwrt/imagebuilder:{ self.target.replace('/', '-') }-{ self.version.lower() }", command=" ".join(cmd), + detach=True, volumes={ - str(self.workdir): {"bind": str(self.workdir), "mode": "ro"}, + # f"{self.workdir}/.config": {"bind": f"{self.workdir}/.config", "mode": "ro"}, + f"{self.workdir}/files/": { + "bind": f"{self.workdir}/files/", + "mode": "ro", + }, str(self.bin_dir): {"bind": str(self.bin_dir), "mode": "rw"}, }, - working_dir=str(self.workdir), + # working_dir=str(self.workdir), ) + returncode = container.wait()["StatusCode"] + self.stdout = container.logs(stdout=True, stderr=False).decode("utf-8") + self.stderr = container.logs(stdout=False, stderr=True).decode("utf-8") + container.remove() + return returncode + def cleanup(self): kernel_build_dir_run = self._make(["make", "val.KERNEL_BUILD_DIR"]) @@ -322,7 +347,7 @@ def cleanup(self): pass # log.warning("KDIR_TMP missing at %s", kernel_build_dir_tmp) - def manifest(self, profile: str, packages: list): + def manifest(self, profile: str, packages: list) -> dict: manifest_cmd = [ "make", "manifest", @@ -332,20 +357,17 @@ def manifest(self, profile: str, packages: list): ] if self.docker: - self.stdout = self._docker(manifest_cmd) + returncode = self._docker(manifest_cmd) - try: - pass - except docker.errors.ContainerError: - raise ValueError("Package selection caused error") else: - manifest_run = self._make(manifest_cmd) + returncode = self._make(manifest_cmd) - self.stdout = manifest_run.stdout - self.stderr = manifest_run.stderr + logging.debug(self.stderr) + logging.debug(self.stdout) + logging.debug(returncode) - if manifest_run.returncode: - raise ValueError("Package selection caused error") + if returncode: + raise ValueError("Package selection caused error") return dict(map(lambda pv: pv.split(" - "), self.stdout.splitlines())) @@ -391,27 +413,28 @@ def build( ] defaults_file = self.files / "files/etc/uci-defaults/99-asu-defaults" + defaults_file.parent.mkdir(parents=True) if defaults: - defaults_file.parent.mkdir(parents=True) defaults_file.write_text(defaults) self.build_cmd.append(f"FILES={self.files / 'files'}") else: defaults_file.unlink(missing_ok=True) - build_run = self._make(self.build_cmd) - - self.stdout = build_run.stdout - self.stderr = build_run.stderr + if self.docker: + returncode = self._docker(self.build_cmd) + else: + returncode = self._make(self.build_cmd) - if build_run.returncode: + if returncode: raise ValueError("Error while building firmware. 
See stdout/stderr") - if "is too big" in build_run.stderr: + if "is too big" in self.stderr: raise ValueError("Selected packages exceed device storage") profiles_json_path = self.bin_dir / "profiles.json" if profiles_json_path.exists(): self.profiles_json = json.loads(profiles_json_path.read_text()) - self.cleanup() + if not self.docker: + self.cleanup() diff --git a/pyproject.toml b/pyproject.toml index 2b065e2b..d56ce472 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ readme = "README.md" include = ["asu/branches.yml"] [tool.poetry.dependencies] -python = "^3.7.2" +python = "^3.8.1" PyNaCl = "^1.5.0" redis = "^4.1.1" requests = "^2.27.1" @@ -29,6 +29,7 @@ coverage = "^6.3.2" isort = "^5.10.1" bandit = "^1.7.4" pylint = "^2.15.4" +flake8 = "^6.0.0" [build-system] requires = ["poetry-core>=1.0.0"] From 1422c7ccddc336c77c6356cc5d962bf26a61165a Mon Sep 17 00:00:00 2001 From: Paul Spooren Date: Sat, 18 Feb 2023 02:52:11 +0100 Subject: [PATCH 5/7] api: allow to prefix packages with a + When adding a + to the beginning of a package name it will modify the order of packages and thereby fix dependency issues of OPKG. This is more of a advanced user feature. Signed-off-by: Paul Spooren --- asu/api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asu/api.py b/asu/api.py index 90778ab6..fc234e52 100644 --- a/asu/api.py +++ b/asu/api.py @@ -84,7 +84,7 @@ def validate_packages(req): else: tr.add(p) - req["packages"] = tr + req["packages"] = list(map(lambda x: x.removeprefix("+"), sorted(tr))) # store request packages temporary in Redis and create a diff temp = str(uuid4()) From 1f4250f5313930013e8380076c690bbae40398b5 Mon Sep 17 00:00:00 2001 From: Paul Spooren Date: Sun, 12 Mar 2023 18:25:20 +0100 Subject: [PATCH 6/7] podman --- asu/api.py | 2 +- asu/build.py | 5 +- asu/imagebuilder.py | 95 ++++++++++++++++++++++++++------------ pyproject.toml | 2 +- tests/test_imagebuilder.py | 1 + 5 files changed, 72 insertions(+), 33 deletions(-) diff --git a/asu/api.py b/asu/api.py index fc234e52..1255fe16 100644 --- a/asu/api.py +++ b/asu/api.py @@ -288,7 +288,7 @@ def api_v1_build_post(): job = get_queue().enqueue( build, req, - job_id=request_hash, + # job_id=request_hash, result_ttl=result_ttl, failure_ttl=failure_ttl, job_timeout="10m", diff --git a/asu/build.py b/asu/build.py index 1c05a81d..7a5c0c4c 100644 --- a/asu/build.py +++ b/asu/build.py @@ -80,6 +80,8 @@ def build(req: dict): request (dict): Contains all properties of requested image """ + job = get_current_job() + def report_error(msg): log.warning(f"Error: {msg}") job.meta["detail"] = f"Error: {msg}" @@ -87,9 +89,8 @@ def report_error(msg): raise if not req["store_path"].is_dir(): - report_error("Store path missing") + report_error(f"Store path missing: {req['store_path']}") - job = get_current_job() job.meta["detail"] = "init" job.save_meta() diff --git a/asu/imagebuilder.py b/asu/imagebuilder.py index 2d4e7f5a..0d81e87d 100644 --- a/asu/imagebuilder.py +++ b/asu/imagebuilder.py @@ -92,7 +92,7 @@ def __init__( keys=Path.cwd(), files=None, custom_public_key=None, - use_docker=True, + use_podman=True, ): self.distro = distro self.version = version @@ -117,14 +117,16 @@ def __init__( self.build_cmd = [] self.profiles_json = None - if use_docker: - import docker + if use_podman: + from podman import PodmanClient - self.docker = docker.DockerClient( - base_url="unix:///Users/user/.colima/default/docker.sock" + self.podman = PodmanClient( + # base_url="unix:///Users/user/.colima/default/podman.sock" + # 
base_url="unix:///Users/user/podman.sock" + base_url="unix:///run/user/1000/podman/podman.sock" ) else: - self.docker = None + self.podman = None @property def public_key(self): @@ -145,6 +147,10 @@ def version_folder(self): else: return "snapshots" + # create function that returns root of number + def root(self, x, n): + return x ** (1 / n) + def get_sha256sums(self): if not self.sha256sums: self.sha256sums = self._download_file("sha256sums").text @@ -152,12 +158,21 @@ def get_sha256sums(self): return self.sha256sums def get_sha256sums_sig(self): + """Return sha256sums.sig file + + :return: sha256sums.sig file""" if not self.sha256sums_sig: self.sha256sums_sig = self._download_file("sha256sums.sig").content return self.sha256sums_sig def _download_header(self, filename): + """Return header of file + + :param filename: filename to download + :return: header of file + """ + print(self.imagebuilder_url ) return requests.head(self.imagebuilder_url / filename).headers def _download_file(self, filename, path: Path = None): @@ -186,6 +201,8 @@ def is_outdated(self): local_stamp = datetime.fromtimestamp(makefile.stat().st_mtime) + logging.debug(f"{local_stamp} vs {remote_stamp}") + if remote_stamp > local_stamp: return True @@ -236,6 +253,7 @@ def unpack(self): run( [ "tar", + "--modification-time", "--strip-components=1", "-xf", self.cache / self.archive_name, @@ -259,8 +277,8 @@ def copy_keys(self): (self.workdir / file.name).symlink_to(file) def setup(self, check_online=False): - if self.docker: - return None + # if self.podman: + # return None if not self.is_outdated(): return None @@ -312,26 +330,46 @@ def _make(self, cmd: list): self.stderr = make_run.stderr return make_run.returncode - def _docker(self, cmd: list): - container = self.docker.containers.run( + def _podman(self, cmd: list): + # self.podman.containers.pull(f"openwrt/imagebuilder") + + self.podman.images.pull("openwrt/imagebuilder", tag=f"{ self.target.replace('/', '-') }-{ self.version.lower() }") + print(str(self.workdir)) + print(str(self.bin_dir)) + container = self.podman.containers.run( # image=f"openwrt/imagebuilder", image=f"openwrt/imagebuilder:{ self.target.replace('/', '-') }-{ self.version.lower() }", - command=" ".join(cmd), + command=cmd, detach=True, - volumes={ - # f"{self.workdir}/.config": {"bind": f"{self.workdir}/.config", "mode": "ro"}, - f"{self.workdir}/files/": { - "bind": f"{self.workdir}/files/", - "mode": "ro", - }, - str(self.bin_dir): {"bind": str(self.bin_dir), "mode": "rw"}, - }, + # overlay_volumes=[ + # { "destination": str(self.workdir), "soruce": str(self.workdir)} + # ], + mounts=[ + { + "type": "bind", + "source": str(self.bin_dir), + "target": str(self.bin_dir), + "read_only": False, + }, + ], + # volumes={ + # str(self.bin_dir): {"bind": str(self.bin_dir), "mode": "rw"}, + # }, + # f"{self.workdir}/.config": {"bind": f"{self.workdir}/.config" }, + # f"{self.workdir}/files/": { + # "bind": f"{self.workdir}/files/", + # "mode": "ro", + # }, + # str(self.bin_dir): {"bind": f"/home/build/openwrt/bin/targets/{self.target}/", "mode": "rw"}, + # "./": {"bind": str(self.bin_dir), "mode": "rw"}, + # }, # working_dir=str(self.workdir), ) - returncode = container.wait()["StatusCode"] - self.stdout = container.logs(stdout=True, stderr=False).decode("utf-8") - self.stderr = container.logs(stdout=False, stderr=True).decode("utf-8") + returncode = container.wait() + print(returncode) + self.stdout = b"\n".join(container.logs(stdout=True, stderr=False)).decode("utf-8") + self.stderr = 
b"\n".join(container.logs(stdout=False, stderr=True)).decode("utf-8") container.remove() return returncode @@ -356,8 +394,8 @@ def manifest(self, profile: str, packages: list) -> dict: "STRIP_ABI=1", ] - if self.docker: - returncode = self._docker(manifest_cmd) + if self.podman: + returncode = self._podman(manifest_cmd) else: returncode = self._make(manifest_cmd) @@ -409,11 +447,10 @@ def build( f"PROFILE={profile}", f"PACKAGES={' '.join(self._packages(packages))}", f"EXTRA_IMAGE_NAME={extra_image_name}", - f"BIN_DIR={self.bin_dir}", ] defaults_file = self.files / "files/etc/uci-defaults/99-asu-defaults" - defaults_file.parent.mkdir(parents=True) + defaults_file.parent.mkdir(parents=True, exist_ok=True) if defaults: defaults_file.write_text(defaults) @@ -421,8 +458,8 @@ def build( else: defaults_file.unlink(missing_ok=True) - if self.docker: - returncode = self._docker(self.build_cmd) + if self.podman: + returncode = self._podman(self.build_cmd) else: returncode = self._make(self.build_cmd) @@ -436,5 +473,5 @@ def build( if profiles_json_path.exists(): self.profiles_json = json.loads(profiles_json_path.read_text()) - if not self.docker: + if not self.podman: self.cleanup() diff --git a/pyproject.toml b/pyproject.toml index d56ce472..0cb2fac3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,7 @@ prometheus-client = "^0.13.1" pytest-cov = "^3.0.0" urlpath = "^1.2.0" setuptools = "^65.4.1" -docker = "^6.0.0" +podman = "^4.4.0" [tool.poetry.group.dev.dependencies] pytest = "^7.1.3" diff --git a/tests/test_imagebuilder.py b/tests/test_imagebuilder.py index b60b0e73..5fb61121 100644 --- a/tests/test_imagebuilder.py +++ b/tests/test_imagebuilder.py @@ -121,6 +121,7 @@ def test_verify_usign(): assert verify_usign(sig, "test\n", pub_key) assert not verify_usign(sig, "test\n", pub_key_bad) + def test_manifest(): ib = ImageBuilder(version="21.02.3") # ib.setup() From 92911fd59d9e0b19d445200bf992fe6b89a7a307 Mon Sep 17 00:00:00 2001 From: Paul Spooren Date: Fri, 31 Mar 2023 22:31:33 +0200 Subject: [PATCH 7/7] wip2 --- asu/imagebuilder.py | 45 ++++++++++++++++++++++++++------------------- 1 file changed, 26 insertions(+), 19 deletions(-) diff --git a/asu/imagebuilder.py b/asu/imagebuilder.py index 0d81e87d..c1e9262d 100644 --- a/asu/imagebuilder.py +++ b/asu/imagebuilder.py @@ -159,7 +159,7 @@ def get_sha256sums(self): def get_sha256sums_sig(self): """Return sha256sums.sig file - + :return: sha256sums.sig file""" if not self.sha256sums_sig: self.sha256sums_sig = self._download_file("sha256sums.sig").content @@ -168,11 +168,11 @@ def get_sha256sums_sig(self): def _download_header(self, filename): """Return header of file - + :param filename: filename to download :return: header of file """ - print(self.imagebuilder_url ) + print(self.imagebuilder_url) return requests.head(self.imagebuilder_url / filename).headers def _download_file(self, filename, path: Path = None): @@ -333,7 +333,10 @@ def _make(self, cmd: list): def _podman(self, cmd: list): # self.podman.containers.pull(f"openwrt/imagebuilder") - self.podman.images.pull("openwrt/imagebuilder", tag=f"{ self.target.replace('/', '-') }-{ self.version.lower() }") + self.podman.images.pull( + "openwrt/imagebuilder", + tag=f"{ self.target.replace('/', '-') }-{ self.version.lower() }", + ) print(str(self.workdir)) print(str(self.bin_dir)) container = self.podman.containers.run( @@ -345,31 +348,35 @@ def _podman(self, cmd: list): # { "destination": str(self.workdir), "soruce": str(self.workdir)} # ], mounts=[ - { - "type": "bind", - "source": 
str(self.bin_dir), - "target": str(self.bin_dir), - "read_only": False, - }, + { + "type": "bind", + "source": str(self.bin_dir), + "target": str(self.bin_dir), + "read_only": False, + }, ], # volumes={ # str(self.bin_dir): {"bind": str(self.bin_dir), "mode": "rw"}, # }, - # f"{self.workdir}/.config": {"bind": f"{self.workdir}/.config" }, - # f"{self.workdir}/files/": { - # "bind": f"{self.workdir}/files/", - # "mode": "ro", - # }, - # str(self.bin_dir): {"bind": f"/home/build/openwrt/bin/targets/{self.target}/", "mode": "rw"}, - # "./": {"bind": str(self.bin_dir), "mode": "rw"}, + # f"{self.workdir}/.config": {"bind": f"{self.workdir}/.config" }, + # f"{self.workdir}/files/": { + # "bind": f"{self.workdir}/files/", + # "mode": "ro", + # }, + # str(self.bin_dir): {"bind": f"/home/build/openwrt/bin/targets/{self.target}/", "mode": "rw"}, + # "./": {"bind": str(self.bin_dir), "mode": "rw"}, # }, # working_dir=str(self.workdir), ) returncode = container.wait() print(returncode) - self.stdout = b"\n".join(container.logs(stdout=True, stderr=False)).decode("utf-8") - self.stderr = b"\n".join(container.logs(stdout=False, stderr=True)).decode("utf-8") + self.stdout = b"\n".join(container.logs(stdout=True, stderr=False)).decode( + "utf-8" + ) + self.stderr = b"\n".join(container.logs(stdout=False, stderr=True)).decode( + "utf-8" + ) container.remove() return returncode
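
Note on usage: the series introduces the ImageBuilder class but the only consumer shown is asu/build.py. The sketch below mirrors that call sequence (setup, manifest, build) as a quick orientation. The version, target, profile and local paths are example inputs only, use_podman=False simply falls back to running make directly instead of delegating to a container, and since the commits are still marked "wip" this should be read as an illustration of the intended API rather than a tested recipe.

    from pathlib import Path

    from asu.imagebuilder import ImageBuilder

    # Example inputs; any supported release/target/profile is driven the same way.
    ib = ImageBuilder(
        version="21.02.3",
        target="x86/64",
        cache=Path("/tmp/asu-cache"),   # ImageBuilder archives are unpacked below this path
        bin_dir=Path("/tmp/asu-bin"),   # build output (images, profiles.json) lands here
        use_podman=False,               # run `make` locally instead of in a podman container
    )
    ib.bin_dir.mkdir(parents=True, exist_ok=True)

    # setup() downloads, verifies and unpacks the upstream ImageBuilder archive.
    # It returns None on success or an error string such as "Invalid signature".
    err = ib.setup()
    if err:
        raise RuntimeError(err)

    packages = ["vim", "tmux"]

    # manifest() resolves the package selection and returns {package: version};
    # it raises ValueError if the selection is impossible.
    manifest = ib.manifest("generic", packages)
    print(manifest)

    # build() runs `make image`; on success ib.profiles_json holds the parsed
    # profiles.json found in ib.bin_dir (None if the build produced none).
    ib.build("generic", packages, extra_image_name="example")

    print(ib.profiles_json)
    print(ib.stdout, ib.stderr)   # captured make/container output for debugging

In asu/build.py the same sequence is driven from the request dictionary, with ib.bin_dir pointing at the per-request store path and the packages hash passed as extra_image_name.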