From 919d10d381490a5de658a45a0d365349a048f893 Mon Sep 17 00:00:00 2001 From: Paul Spooren Date: Sat, 8 Apr 2023 01:12:26 +0200 Subject: [PATCH] build: use podman for image creation Signed-off-by: Paul Spooren --- .github/workflows/test.yml | 2 + asu/api.py | 61 +-- asu/build.py | 440 ++++++------------ asu/common.py | 142 ++++++ asu/janitor.py | 12 +- asu/metrics.py | 2 +- asu/openapi.yml | 158 ++++--- misc/config.py | 11 +- pyproject.toml | 1 + tests/conftest.py | 42 +- tests/test_api.py | 238 +++------- tests/test_common.py | 37 +- tests/test_janitor.py | 4 +- tests/test_stats.py | 36 +- ...sttarget-testsubtarget.Linux-x86_64.tar.xz | Bin 2475 -> 0 bytes .../Makefile | 1 + .../testtarget/testsubtarget/sha256sums | 1 - .../testtarget/testsubtarget/sha256sums.sig | 2 - .../testsubtarget/update_imagebuilder.sh | 6 - 19 files changed, 506 insertions(+), 690 deletions(-) delete mode 100644 tests/upstream/snapshots/targets/testtarget/testsubtarget/openwrt-imagebuilder-testtarget-testsubtarget.Linux-x86_64.tar.xz delete mode 100644 tests/upstream/snapshots/targets/testtarget/testsubtarget/sha256sums delete mode 100644 tests/upstream/snapshots/targets/testtarget/testsubtarget/sha256sums.sig delete mode 100755 tests/upstream/snapshots/targets/testtarget/testsubtarget/update_imagebuilder.sh diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 1e348396..67bc8a22 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -48,6 +48,8 @@ jobs: - name: Test with pytest run: | + podman system service --time=0 unix://tmp/podman.sock & + export CONTAINER_HOST="unix:///tmp/podman.sock" poetry run coverage run -m pytest --runslow poetry run coverage xml diff --git a/asu/api.py b/asu/api.py index fb923838..401e986c 100644 --- a/asu/api.py +++ b/asu/api.py @@ -1,5 +1,3 @@ -from uuid import uuid4 - from flask import Blueprint, current_app, g, jsonify, redirect, request from rq import Connection, Queue @@ -64,50 +62,6 @@ def api_v1_overview(): return redirect("/json/v1/overview.json") -def validate_packages(req): - if req.get("packages_versions") and not req.get("packages"): - req["packages"] = req["packages_versions"].keys() - - if not req.get("packages"): - return - - req["packages"] = set(req["packages"]) - {"kernel", "libc", "libgcc"} - - r = get_redis() - - # translate packages to remove their ABI version for 19.07.x compatibility - tr = set() - for p in req["packages"]: - p_tr = r.hget("mapping-abi", p) - if p_tr: - tr.add(p_tr.decode()) - else: - tr.add(p) - - req["packages"] = tr - - # store request packages temporary in Redis and create a diff - temp = str(uuid4()) - pipeline = r.pipeline(True) - pipeline.sadd(temp, *set(map(lambda p: p.strip("-"), req["packages"]))) - pipeline.expire(temp, 5) - pipeline.sdiff( - temp, - f"packages:{req['branch']}:{req['version']}:{req['target']}", - f"packages:{req['branch']}:{req['arch']}", - ) - unknown_packages = list(map(lambda p: p.decode(), pipeline.execute()[-1])) - - if unknown_packages: - return ( - { - "detail": f"Unsupported package(s): {', '.join(unknown_packages)}", - "status": 422, - }, - 422, - ) - - def validate_request(req): """Validate an image request and return found errors with status code @@ -154,6 +108,11 @@ def validate_request(req): 400, ) + if req.get("packages_versions") and not req.get("packages"): + req["packages"] = set(req["packages_versions"].keys()) + else: + req["packages"] = set(req.get("packages", [])) + r = get_redis() current_app.logger.debug("Profile before mapping " + req["profile"]) @@ 
-194,10 +153,6 @@ def validate_request(req): 400, ) - package_problems = validate_packages(req) - if package_problems: - return package_problems - return ({}, None) @@ -208,7 +163,7 @@ def return_job_v1(job): response.update(job.meta) if job.is_failed: - response.update({"status": 500}) + response.update({"status": 500, "error": job.latest_result().exc_string}) elif job.is_queued: response.update( @@ -282,10 +237,8 @@ def api_v1_build_post(): return response, status req["store_path"] = current_app.config["STORE_PATH"] - if current_app.config.get("CACHE_PATH"): - req["cache_path"] = current_app.config.get("CACHE_PATH") - req["upstream_url"] = current_app.config["UPSTREAM_URL"] req["branch_data"] = current_app.config["BRANCHES"][req["branch"]] + req["repository_allow_list"] = current_app.config["REPOSITORY_ALLOW_LIST"] req["request_hash"] = request_hash job = get_queue().enqueue( diff --git a/asu/build.py b/asu/build.py index bebd23ca..b039d157 100644 --- a/asu/build.py +++ b/asu/build.py @@ -1,26 +1,28 @@ import json import logging import re -import subprocess from datetime import datetime from pathlib import Path -from shutil import copyfile, rmtree -import requests +from podman import PodmanClient from rq import get_current_job -from .common import ( +from asu.common import ( + check_manifest, + diff_packages, fingerprint_pubkey_usign, - get_file_hash, + get_container_version_tag, get_packages_hash, - verify_usign, + parse_manifest, + report_error, + run_container, ) log = logging.getLogger("rq.worker") log.setLevel(logging.DEBUG) -def build(req: dict): +def build(req: dict, job=None): """Build image request and setup ImageBuilders automatically The `request` dict contains properties of the requested image. @@ -28,202 +30,121 @@ def build(req: dict): Args: request (dict): Contains all properties of requested image """ + req["store_path"].mkdir(parents=True, exist_ok=True) + log.debug(f"Store path: {req['store_path']}") - def report_error(msg): - log.warning(f"Error: {msg}") - job.meta["detail"] = f"Error: {msg}" - job.save_meta() - raise - - if not req["store_path"].is_dir(): - report_error("Store path missing") - - job = get_current_job() + job = job or get_current_job() job.meta["detail"] = "init" job.save_meta() log.debug(f"Building {req}") - target, subtarget = req["target"].split("/") - cache = req.get("cache_path", Path.cwd()) / "cache" / req["version"] - cache_workdir = cache / target / subtarget - sums_file = Path(cache / target / f"{subtarget}.sha256sums") - sig_file = Path(cache / target / f"{subtarget}.sha256sums.sig") - - def setup_ib(): - """Setup ImageBuilder based on `req` - - This function downloads and verifies the ImageBuilder archive. Existing - setups are automatically updated if newer version are available - upstream. 
- """ - log.debug("Setting up ImageBuilder") - if (cache_workdir).is_dir(): - rmtree(cache_workdir) - - download_file("sha256sums.sig", sig_file) - download_file("sha256sums", sums_file) - - log.debug("Signatures downloaded" + sig_file.read_text()) - - if not verify_usign(sig_file, sums_file, req["branch_data"]["pubkey"]): - report_error("Bad signature of ImageBuilder archive") - - ib_search = re.search( - r"^(.{64}) \*([a-z]+-imagebuilder-.+?\.Linux-x86_64\.tar\.xz)$", - sums_file.read_text(), - re.MULTILINE, - ) - - if not ib_search: - report_error("Missing Checksum") - - ib_hash, ib_archive = ib_search.groups() - - job.meta["imagebuilder_status"] = "download_imagebuilder" - job.save_meta() - - download_file(ib_archive) - - if ib_hash != get_file_hash(cache / target / ib_archive): - report_error("Bad Checksum") - (cache_workdir).mkdir(parents=True, exist_ok=True) + podman = PodmanClient().from_env() + log.debug(f"Podman version: {podman.version()}") - job.meta["imagebuilder_status"] = "unpack_imagebuilder" - job.save_meta() - - extract_archive = subprocess.run( - ["tar", "--strip-components=1", "-xf", ib_archive, "-C", subtarget], - cwd=cache / target, - ) - - if extract_archive.returncode: - report_error("Failed to unpack ImageBuilder archive") - - log.debug(f"Extracted TAR {ib_archive}") - - (cache / target / ib_archive).unlink() - - for key in req["branch_data"].get("extra_keys", []): - fingerprint = fingerprint_pubkey_usign(key) - (cache_workdir / "keys" / fingerprint).write_text( - f"untrusted comment: ASU extra key {fingerprint}\n{key}" - ) + container_version_tag = get_container_version_tag(req["version"]) + log.debug( + f"Container version: {container_version_tag} (requested {req['version']})" + ) - repos_path = cache_workdir / "repositories.conf" - repos = repos_path.read_text() - - extra_repos = req["branch_data"].get("extra_repos") - if extra_repos: - log.debug("Found extra repos") - for name, repo in extra_repos.items(): - repos += f"\nsrc/gz {name} {repo}" - - repos_path.write_text(repos) - log.debug(f"Repos:\n{repos}") - - if (Path.cwd() / "seckey").exists(): - # link key-build to imagebuilder - (cache_workdir / "key-build").symlink_to(Path.cwd() / "seckey") - if (Path.cwd() / "pubkey").exists(): - # link key-build.pub to imagebuilder - (cache_workdir / "key-build.pub").symlink_to(Path.cwd() / "pubkey") - if (Path.cwd() / "newcert").exists(): - # link key-build.ucert to imagebuilder - (cache_workdir / "key-build.ucert").symlink_to(Path.cwd() / "newcert") - - def download_file(filename: str, dest: str = None): - """Download file from upstream target path - - The URL points automatically to the targets folder upstream - - Args: - filename (str): File in upstream target folder - dest (str): Optional path to store the file, default to target - cache folder - """ - log.debug(f"Downloading {filename}") - r = requests.get( - req["upstream_url"] - + "/" - + req["branch_data"]["path"].format(version=req["version"]) - + "/targets/" - + req["target"] - + "/" - + filename - ) + BASE_CONTAINER = "ghcr.io/openwrt/imagebuilder" + image = ( + f"{BASE_CONTAINER}:{req['target'].replace('/', '-')}-{container_version_tag}" + ) - with open(dest or (cache / target / filename), "wb") as f: - f.write(r.content) - - (cache / target).mkdir(parents=True, exist_ok=True) - - stamp_file = cache / target / f"{subtarget}.stamp" - - sig_file_headers = requests.head( - req["upstream_url"] - + "/" - + req["branch_data"]["path"].format(version=req["version"]) - + "/targets/" - + req["target"] - + 
"/sha256sums.sig" - ).headers - log.debug(f"sig_file_headers: \n{sig_file_headers}") - - origin_modified = sig_file_headers.get("last-modified") - log.info("Origin %s", origin_modified) - - if stamp_file.is_file(): - local_modified = stamp_file.read_text() - log.info("Local %s", local_modified) - else: - local_modified = "" - - if origin_modified != local_modified: - log.debug("New ImageBuilder upstream available") - setup_ib() - stamp_file.write_text(origin_modified) - - if not (cache_workdir / ".config.orig").exists(): - # backup original configuration to keep default filesystems - copyfile( - cache_workdir / ".config", - cache_workdir / ".config.orig", - ) + log.info(f"Pulling {image}...") + podman.images.pull(image) + log.info(f"Pulling {image}... done") - info_run = subprocess.run( - ["make", "info"], text=True, capture_output=True, cwd=cache_workdir + returncode, job.meta["stdout"], job.meta["stderr"] = run_container( + image, ["make", "info"] ) - version_code = re.search('Current Revision: "(r.+)"', info_run.stdout).group(1) + job.save_meta() + + version_code = re.search('Current Revision: "(r.+)"', job.meta["stdout"]).group(1) if "version_code" in req: if version_code != req.get("version_code"): report_error( - f"Received inncorrect version {version_code} (requested {req['version_code']})" + job, + f"Received inncorrect version {version_code} (requested {req['version_code']})", ) default_packages = set( - re.search(r"Default Packages: (.*)\n", info_run.stdout).group(1).split() + re.search(r"Default Packages: (.*)\n", job.meta["stdout"]).group(1).split() ) + log.debug(f"Default packages: {default_packages}") + profile_packages = set( re.search( r"{}:\n .+\n Packages: (.*?)\n".format(req["profile"]), - info_run.stdout, + job.meta["stdout"], re.MULTILINE, ) .group(1) .split() ) - if req.get("diff_packages", False): - remove_packages = (default_packages | profile_packages) - req["packages"] - req["packages"] = req["packages"] | set(map(lambda p: f"-{p}", remove_packages)) + if req.get("diff_packages"): + req["packages"] = diff_packages( + req["packages"], default_packages | profile_packages + ) + log.debug(f"Diffed packages: {req['packages']}") job.meta["imagebuilder_status"] = "calculate_packages_hash" job.save_meta() - manifest_run = subprocess.run( + mounts = [] + + bin_dir = req["request_hash"] + (req["store_path"] / bin_dir / "keys").mkdir(parents=True, exist_ok=True) + log.debug("Created store path: %s", req["store_path"] / bin_dir) + + if "repository_keys" in req: + log.debug("Found extra keys") + + for key in req.get("repository_keys"): + fingerprint = fingerprint_pubkey_usign(key) + log.debug(f"Found key {fingerprint}") + + (req["store_path"] / bin_dir / "keys" / fingerprint).write_text( + f"untrusted comment: {fingerprint}\n{key}" + ) + + mounts.append( + { + "type": "bind", + "source": str(req["store_path"] / bin_dir / "keys" / fingerprint), + "target": "/builder/keys/" + fingerprint, + "read_only": True, + }, + ) + + if "repositories" in req: + log.debug("Found extra repos") + repositories = "" + for name, repo in req.get("repositories").items(): + if repo.startswith(tuple(req["repository_allow_list"])): + repositories += f"src/gz {name} {repo}\n" + else: + report_error(job, f"Repository {repo} not allowed") + + repositories += "src imagebuilder file:packages\noption check_signature" + + (req["store_path"] / bin_dir / "repositories.conf").write_text(repositories) + + mounts.append( + { + "type": "bind", + "source": str(req["store_path"] / bin_dir / "repositories.conf"), + 
"target": "/builder/repositories.conf", + "read_only": True, + }, + ) + + returncode, job.meta["stdout"], job.meta["stderr"] = run_container( + image, [ "make", "manifest", @@ -231,148 +152,84 @@ def download_file(filename: str, dest: str = None): f"PACKAGES={' '.join(sorted(req.get('packages', [])))}", "STRIP_ABI=1", ], - text=True, - cwd=cache_workdir, - capture_output=True, + mounts=mounts, ) - job.meta["stdout"] = manifest_run.stdout - job.meta["stderr"] = manifest_run.stderr job.save_meta() - if manifest_run.returncode: - if "Package size mismatch" in manifest_run.stderr: - rmtree(cache_workdir) - return build(req) - else: - print(manifest_run.stdout) - print(manifest_run.stderr) - report_error("Impossible package selection") - - manifest = dict(map(lambda pv: pv.split(" - "), manifest_run.stdout.splitlines())) - - for package, version in req.get("packages_versions", {}).items(): - if package not in manifest: - report_error(f"Impossible package selection: {package} not in manifest") - if version != manifest[package]: - report_error( - f"Impossible package selection: {package} version not as requested: {version} vs. {manifest[package]}" - ) - - manifest_packages = manifest.keys() - - log.debug(f"Manifest Packages: {manifest_packages}") - - packages_hash = get_packages_hash(manifest_packages) - log.debug(f"Packages Hash {packages_hash}") + if returncode: + report_error(job, "Impossible package selection") - bin_dir = req["request_hash"] + manifest = parse_manifest(job.meta["stdout"]) + log.debug(f"Manifest: {manifest}") - (req["store_path"] / bin_dir).mkdir(parents=True, exist_ok=True) + # Check if all requested packages are in the manifest + if err := check_manifest(manifest, req.get("packages_versions", {})): + report_error(job, err) - log.debug("Created store path: %s", req["store_path"] / bin_dir) + packages_hash = get_packages_hash(manifest.keys()) + log.debug(f"Packages Hash: {packages_hash}") - if req.get("filesystem"): - config_path = cache_workdir / ".config" - config = config_path.read_text() - - for filesystem in ["squashfs", "ext4fs", "ubifs", "jffs2"]: - # this implementation uses `startswith` since a running device thinks - # it's running `ext4` while really there is `ext4fs` running - if not filesystem.startswith(req.get("filesystem", filesystem)): - log.debug(f"Disable {filesystem}") - config = config.replace( - f"CONFIG_TARGET_ROOTFS_{filesystem.upper()}=y", - f"# CONFIG_TARGET_ROOTFS_{filesystem.upper()} is not set", - ) - else: - log.debug(f"Enable {filesystem}") - config = config.replace( - f"# CONFIG_TARGET_ROOTFS_{filesystem.upper()} is not set", - f"CONFIG_TARGET_ROOTFS_{filesystem.upper()}=y", - ) - - config_path.write_text(config) - else: - log.debug("Enable default filesystems") - copyfile( - cache_workdir / ".config.orig", - cache_workdir / ".config", - ) - - build_cmd = [ + job.meta["build_cmd"] = [ "make", "image", f"PROFILE={req['profile']}", f"PACKAGES={' '.join(sorted(req.get('packages', [])))}", f"EXTRA_IMAGE_NAME={packages_hash}", - f"BIN_DIR={req['store_path'] / bin_dir}", + f"BIN_DIR=/builder/{bin_dir}", ] + + # Check if custom rootfs size is requested if rootfs_size_mb := req.get("rootfs_size_mb"): - build_cmd.append(f"ROOTFS_PARTSIZE={rootfs_size_mb}") + job.meta["build_cmd"].append(f"ROOTFS_PARTSIZE={rootfs_size_mb}") - log.debug("Build command: %s", build_cmd) + log.debug("Build command: %s", job.meta["build_cmd"]) job.meta["imagebuilder_status"] = "building_image" job.save_meta() if req.get("defaults"): + log.debug("Found defaults") defaults_file 
= ( - Path(req["store_path"]) / bin_dir / "files/etc/uci-defaults/99-asu-defaults" + req["store_path"] / bin_dir / "files/etc/uci-defaults/99-asu-defaults" ) defaults_file.parent.mkdir(parents=True) defaults_file.write_text(req["defaults"]) - build_cmd.append(f"FILES={req['store_path'] / bin_dir / 'files'}") - - log.debug(f"Running {' '.join(build_cmd)}") + job.meta["build_cmd"].append(f"FILES={req['store_path'] / bin_dir / 'files'}") + mounts.append( + { + "type": "bind", + "source": str(req["store_path"] / bin_dir / "files"), + "target": str(req["store_path"] / bin_dir / "files"), + "read_only": True, + }, + ) - image_build = subprocess.run( - build_cmd, - text=True, - cwd=cache_workdir, - capture_output=True, + returncode, job.meta["stdout"], job.meta["stderr"] = run_container( + image, + job.meta["build_cmd"], + mounts=mounts, + copy=["/builder/" + bin_dir, req["store_path"]], ) - job.meta["stdout"] = image_build.stdout - job.meta["stderr"] = image_build.stderr - job.meta["build_cmd"] = build_cmd job.save_meta() - if image_build.returncode: - report_error("Error while building firmware. See stdout/stderr") - - if "is too big" in image_build.stderr: - report_error("Selected packages exceed device storage") - - kernel_build_dir_run = subprocess.run( - ["make", "val.KERNEL_BUILD_DIR"], - text=True, - cwd=cache_workdir, - capture_output=True, - ) - - if kernel_build_dir_run.returncode: - report_error("Couldn't determine KERNEL_BUILD_DIR") + if returncode: + report_error(job, "Error while building firmware. See stdout/stderr") - kernel_build_dir_tmp = Path(kernel_build_dir_run.stdout.strip()) / "tmp" - - if kernel_build_dir_tmp.exists(): - log.info("Removing KDIR_TMP at %s", kernel_build_dir_tmp) - rmtree(kernel_build_dir_tmp) - else: - log.warning("KDIR_TMP missing at %s", kernel_build_dir_tmp) + if "is too big" in job.meta["stderr"]: + report_error(job, "Selected packages exceed device storage") json_file = Path(req["store_path"] / bin_dir / "profiles.json") if not json_file.is_file(): - report_error("No JSON file found") + report_error(job, "No JSON file found") json_content = json.loads(json_file.read_text()) + # Check if profile is in JSON file if req["profile"] not in json_content["profiles"]: - report_error("Profile not found in JSON file") - - now_timestamp = int(datetime.now().timestamp()) + report_error(job, "Profile not found in JSON file") json_content.update({"manifest": manifest}) json_content.update(json_content["profiles"][req["profile"]]) @@ -388,43 +245,10 @@ def download_file(filename: str, dest: str = None): job.connection.sadd(f"builds:{version_code}:{req['target']}", req["request_hash"]) + # Increment stats job.connection.hincrby( "stats:builds", - "#".join( - [req["branch_data"]["name"], req["version"], req["target"], req["profile"]] - ), - ) - - # Set last build timestamp for current target/subtarget to now - job.connection.hset( - f"worker:{job.worker_name}:last_build", req["target"], now_timestamp + "#".join([req["version"], req["target"], req["profile"]]), ) - # Iterate over all targets/subtargets of the worker and remove the once inactive for a week - for target_subtarget, last_build_timestamp in job.connection.hgetall( - f"worker:{job.worker_name}:last_build" - ).items(): - target_subtarget = target_subtarget.decode() - - log.debug("now_timestamp %s %s", target_subtarget, now_timestamp) - log.debug( - "last_build_timestamp %s %s", - target_subtarget, - last_build_timestamp.decode(), - ) - - if now_timestamp - int(last_build_timestamp.decode()) > 60 * 60 * 24: - 
log.info("Removing unused ImageBuilder for %s", target_subtarget) - job.connection.hdel( - f"worker:{job.worker_name}:last_build", target_subtarget - ) - if (cache / target_subtarget).exists(): - rmtree(cache / target_subtarget) - for suffix in [".stamp", ".sha256sums", ".sha256sums.sig"]: - (cache / target_subtarget).with_suffix(suffix).unlink( - missing_ok=True - ) - else: - log.debug("Keeping ImageBuilder for %s", target_subtarget) - return json_content diff --git a/asu/common.py b/asu/common.py index 3191ef52..f929719f 100644 --- a/asu/common.py +++ b/asu/common.py @@ -1,12 +1,17 @@ import base64 import hashlib import json +import logging import struct from pathlib import Path +from re import match +from shutil import unpack_archive +from tempfile import NamedTemporaryFile import nacl.signing import requests from flask import current_app +from podman import PodmanClient def get_redis(): @@ -112,6 +117,8 @@ def get_request_hash(req: dict) -> str: req.get("filesystem", ""), get_str_hash(req.get("defaults", "")), str(req.get("rootfs_size_mb", "")), + str(req.get("repository_keys", "")), + str(req.get("repositories", "")), ] ), 32, @@ -174,3 +181,138 @@ def verify_usign(sig_file: Path, msg_file: Path, pub_key: str) -> bool: return True except nacl.exceptions.CryptoError: return False + + +def get_container_version_tag(version: str) -> str: + if match(r"^\d+\.\d+\.\d+$", version): + logging.debug("Version is a release version") + version: str = "v" + version + else: + logging.info(f"Version {version} is a branch") + if version == "SNAPSHOT": + version: str = "master" + else: + version: str = "openwrt-" + version.rstrip("-SNAPSHOT") + + return version + + +def diff_packages(requested_packages: set, default_packages: set): + """Return a list of packages to install and remove + + Args: + requested_packages (set): Set of requested packages + default_packages (set): Set of default packages + + Returns: + set: Set of packages to install and remove""" + remove_packages = default_packages - requested_packages + return requested_packages | set( + map(lambda p: f"-{p}".replace("--", "-"), remove_packages) + ) + + +def run_container(image, command, mounts=[], copy=[]): + """Run a container and return the returncode, stdout and stderr + + Args: + image (str): Image to run + command (list): Command to run + mounts (list, optional): List of mounts. Defaults to []. 
+ + Returns: + tuple: (returncode, stdout, stderr) + """ + podman = PodmanClient().from_env() + + logging.info(f"Running {image} {command} {mounts}") + container = podman.containers.run( + image=image, + command=command, + detach=True, + mounts=mounts, + userns_mode="keep-id", + cap_drop=["all"], + no_new_privileges=True, + privileged=False, + ) + + returncode = container.wait() + + # Podman 4.x changed the way logs are returned + if podman.version()["Version"].startswith("3"): + delimiter = b"\n" + else: + delimiter = b"" + + stdout = delimiter.join(container.logs(stdout=True, stderr=False)).decode("utf-8") + stderr = delimiter.join(container.logs(stdout=False, stderr=True)).decode("utf-8") + + logging.debug(f"returncode: {returncode}") + logging.debug(f"stdout: {stdout}") + logging.debug(f"stderr: {stderr}") + + if copy: + logging.debug(f"Copying {copy[0]} from container to {copy[1]}") + container_tar, _ = container.get_archive(copy[0]) + logging.debug(f"Container tar: {container_tar}") + + host_tar = NamedTemporaryFile(delete=True) + logging.debug(f"Host tar: {host_tar}") + + host_tar.write(b"".join(container_tar)) + + logging.debug(f"Copied {container_tar} to {host_tar}") + + unpack_archive( + host_tar.name, + copy[1], + "tar", + ) + logging.debug(f"Unpacked {host_tar} to {copy[1]}") + + host_tar.close() + logging.debug(f"Closed {host_tar}") + + container.remove(v=True) + + return returncode, stdout, stderr + + +def report_error(job, msg): + logging.warning(f"Error: {msg}") + job.meta["detail"] = f"Error: {msg}" + job.save_meta() + raise + + +def parse_manifest(manifest_content: str): + """Parse a manifest file and return a dictionary + + Args: + manifest (str): Manifest file content + + Returns: + dict: Dictionary of packages and versions + """ + return dict(map(lambda pv: pv.split(" - "), manifest_content.splitlines())) + + +def check_manifest(manifest, packages_versions): + """Validate a manifest file + + Args: + manifest (str): Manifest file content + packages_versions (dict): Dictionary of packages and versions + + Returns: + str: Error message or None + """ + for package, version in packages_versions.items(): + if package not in manifest: + return f"Impossible package selection: {package} not in manifest" + if version != manifest[package]: + return ( + f"Impossible package selection: {package} version not as requested: " + f"{version} vs. 
{manifest[package]}" + ) diff --git a/asu/janitor.py b/asu/janitor.py index 70f12817..81041204 100644 --- a/asu/janitor.py +++ b/asu/janitor.py @@ -417,20 +417,12 @@ def update_meta_json(): (current_app.config["JSON_PATH"] / "overview.json").write_text( json.dumps( - current_app.config["OVERVIEW"], - indent=2, - sort_keys=False, - default=str + current_app.config["OVERVIEW"], indent=2, sort_keys=False, default=str ) ) (current_app.config["JSON_PATH"] / "branches.json").write_text( - json.dumps( - list(branches.values()), - indent=2, - sort_keys=False, - default=str - ) + json.dumps(list(branches.values()), indent=2, sort_keys=False, default=str) ) (current_app.config["JSON_PATH"] / "latest.json").write_text( diff --git a/asu/metrics.py b/asu/metrics.py index 359cf5b3..643e3048 100644 --- a/asu/metrics.py +++ b/asu/metrics.py @@ -9,7 +9,7 @@ def collect(self): stats_builds = CounterMetricFamily( "builds", "Total number of built images", - labels=["branch", "version", "target", "profile"], + labels=["version", "target", "profile"], ) for build, count in self.connection.hgetall("stats:builds").items(): stats_builds.add_metric(build.decode().split("#"), count) diff --git a/asu/openapi.yml b/asu/openapi.yml index 44ec6d9d..3ca66506 100644 --- a/asu/openapi.yml +++ b/asu/openapi.yml @@ -13,8 +13,8 @@ externalDocs: description: README.md url: https://github.com/aparcar/asu/blob/master/README.md servers: -- url: https://asu.aparcar.org - description: Running instance of ASU + - url: https://asu.aparcar.org + description: Running instance of ASU paths: /api/v1/overview: get: @@ -30,7 +30,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/JsonSchemaOverview' + $ref: "#/components/schemas/JsonSchemaOverview" /api/v1/build: post: summary: Request a custom firmware image @@ -56,22 +56,22 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/BuildRequest' + $ref: "#/components/schemas/BuildRequest" responses: "200": - $ref: '#/components/responses/ResponseSuccessfull' + $ref: "#/components/responses/ResponseSuccessfull" "202": - $ref: '#/components/responses/ResponseActive' + $ref: "#/components/responses/ResponseActive" "400": - $ref: '#/components/responses/ResponseError' + $ref: "#/components/responses/ResponseError" "422": - $ref: '#/components/responses/ResponseBadPackage' + $ref: "#/components/responses/ResponseBadPackage" "500": - $ref: '#/components/responses/ResponseError' + $ref: "#/components/responses/ResponseError" /api/v1/build/{request_hash}: get: @@ -83,62 +83,62 @@ paths: Ideally clients requests status updates no more then every 5 seconds. operationId: asu.api.api_v1_build_get parameters: - - name: request_hash - in: path - description: | - The hashed request is responded after a successful build request at - `/api/v1/build`. - required: true - style: simple - explode: false - schema: - type: string + - name: request_hash + in: path + description: | + The hashed request is responded after a successful build request at + `/api/v1/build`. 
+ required: true + style: simple + explode: false + schema: + type: string responses: "200": - $ref: '#/components/responses/ResponseSuccessfull' + $ref: "#/components/responses/ResponseSuccessfull" "202": - $ref: '#/components/responses/ResponseActive' + $ref: "#/components/responses/ResponseActive" "404": - $ref: '#/components/responses/ResponseError' + $ref: "#/components/responses/ResponseError" /api/v1/revision/{version}/{target}/{subtarget}: get: summary: receive revision of current target operationId: asu.api.api_v1_revision parameters: - - name: version - in: path - description: Version in question - required: true - style: simple - explode: false - schema: - type: string - - name: target - in: path - description: Target used on device - required: true - style: simple - explode: false - schema: - type: string - - name: subtarget - in: path - description: Target used on device - required: true - style: simple - explode: false - schema: - type: string + - name: version + in: path + description: Version in question + required: true + style: simple + explode: false + schema: + type: string + - name: target + in: path + description: Target used on device + required: true + style: simple + explode: false + schema: + type: string + - name: subtarget + in: path + description: Target used on device + required: true + style: simple + explode: false + schema: + type: string responses: "200": description: Revision found content: application/json: schema: - $ref: '#/components/schemas/JsonSchemaRevision' + $ref: "#/components/schemas/JsonSchemaRevision" components: responses: @@ -147,7 +147,7 @@ components: content: application/json: schema: - $ref: '#/components/schemas/BuildResponseSuccess' + $ref: "#/components/schemas/BuildResponseSuccess" ResponseActive: description: | @@ -159,30 +159,30 @@ components: type: integer description: Current position in build queue content: - application/json: - schema: - $ref: '#/components/schemas/BuildResponseActive' + application/json: + schema: + $ref: "#/components/schemas/BuildResponseActive" ResponseError: description: Invalid build request content: application/json: schema: - $ref: '#/components/schemas/BuildResponseError' + $ref: "#/components/schemas/BuildResponseError" ResponseBadPackage: description: Unknown package(s) in request content: application/json: schema: - $ref: '#/components/schemas/BuildResponseError' + $ref: "#/components/schemas/BuildResponseError" schemas: BuildRequest: required: - - profile - - target - - version + - profile + - target + - version type: object additionalProperties: false properties: @@ -229,8 +229,8 @@ components: packages: type: array example: - - vim - - tmux + - vim + - tmux items: type: string description: | @@ -281,6 +281,27 @@ components: boot. This feature might be dropped in the future. Size is limited to 10kB and can not be exceeded. 
maxLength: 20480 + repository_keys: + type: array + example: + - RWRNAX5vHtXWFmt+n5di7XX8rTu0w+c8X7Ihv4oCyD6tzsUwmH0A6kO0 + items: + type: string + description: | + List of signify/usign keys for repositories + repositories: + type: object + additionalProperties: + type: string + example: + openwrt_core: https://downloads.openwrt.org/snapshots/targets/x86/64/packages + openwrt_base: https://downloads.openwrt.org/snapshots/packages/x86_64/base + openwrt_luci: https://downloads.openwrt.org/snapshots/packages/x86_64/luci + openwrt_packages: https://downloads.openwrt.org/snapshots/packages/x86_64/packages + openwrt_routing: https://downloads.openwrt.org/snapshots/packages/x86_64/routing + openwrt_telephony: https://downloads.openwrt.org/snapshots/packages/x86_64/telephony + description: | + List of repositories to load packages from example: version: 19.07.8 @@ -308,14 +329,12 @@ components: status: type: integer example: 500 - description: - Always the same as the responding HTTP status code. + description: Always the same as the responding HTTP status code. enqueued_at: type: string format: date-time example: "2021-08-15T09:59:27.754430Z" - description: - Time and date of the build request. + description: Time and date of the build request. request_hash: type: string example: "5992c73895fb" @@ -342,8 +361,7 @@ components: type: string format: date-time example: "2021-08-15T09:59:27.754430Z" - description: - Time and date of the build request. + description: Time and date of the build request. request_hash: type: string example: "5992c73895fb" @@ -353,8 +371,7 @@ components: status: type: integer example: 202 - description: - Always the same as the responding HTTP status code. + description: Always the same as the responding HTTP status code. queue_position: type: integer example: 2 @@ -447,8 +464,8 @@ components: oneOf: - type: object required: - - model - - vendor + - model + - vendor properties: vendor: type: string @@ -530,8 +547,7 @@ components: type: string format: date example: "2021-08-15" - description: - Date of branch end of life + description: Date of branch end of life path: type: string example: "releases/{version}" @@ -580,7 +596,7 @@ components: branches: type: object additionalProperties: - $ref: '#/components/schemas/JsonSchemaBranch' + $ref: "#/components/schemas/JsonSchemaBranch" server: type: object properties: diff --git a/misc/config.py b/misc/config.py index 794f1677..f1ce1fcb 100644 --- a/misc/config.py +++ b/misc/config.py @@ -10,9 +10,6 @@ # where to store created images STORE_PATH = Path.cwd() / "public/store/" -# where to store ImageBuilders. 
Do not set when multiple workers run -CACHE_PATH = None - # where to store JSON files JSON_PATH = Path.cwd() / "public/json/v1/" @@ -22,6 +19,14 @@ # manual mapping of package ABI changes MAPPING_ABI = {"libubus20191227": "libubus"} +# External repositories to allow +REPOSITORY_ALLOW_LIST = [ + "http://downloads.openwrt.org", + "https://downloads.openwrt.org", + "http://feed.libremesh.org", + "https://feed.libremesh.org", +] + # connection string for Redis # REDIS_CONN = Redis(host=redis_host, port=redis_port, password=redis_password) diff --git a/pyproject.toml b/pyproject.toml index 7701034f..30f6f135 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,6 +16,7 @@ rq = "^1.13.0" connexion = {extras = ["swagger-ui"], version = "^2.14.2"} prometheus-client = "^0.13.1" gunicorn = "^20.1.0" +podman = "^4.4.1" [tool.poetry.dev-dependencies] pytest = "^6.2.5" diff --git a/tests/conftest.py b/tests/conftest.py index f1cc3689..df9fe6e9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,5 @@ import shutil import tempfile -from pathlib import Path import prometheus_client import pytest @@ -31,24 +30,22 @@ def pytest_collection_modifyitems(config, items): def redis_load_mock_data(redis): redis.sadd( - "packages:TESTVERSION:TESTVERSION:testtarget/testsubtarget", + "packages:1.2:1.2.3:testtarget/testsubtarget", "test1", "test2", "test3", "valid_new_package", ) - redis.sadd( - "profiles:TESTVERSION:TESTVERSION:testtarget/testsubtarget", "testprofile" - ) + redis.sadd("profiles:1.2:1.2.3:testtarget/testsubtarget", "testprofile") redis.sadd("profiles:SNAPSHOT:SNAPSHOT:ath79/generic", "tplink_tl-wdr4300-v1") redis.sadd("packages:SNAPSHOT:SNAPSHOT:ath79/generic", "vim", "tmux") redis.sadd("packages:SNAPSHOT:SNAPSHOT:x86/64", "vim", "tmux") redis.hset( - "mapping:TESTVERSION:TESTVERSION:testtarget/testsubtarget", + "mapping:1.2:1.2.3:testtarget/testsubtarget", mapping={"testvendor,testprofile": "testprofile"}, ) - redis.sadd("targets:TESTVERSION", "testtarget/testsubtarget") + redis.sadd("targets:1.2", "testtarget/testsubtarget") redis.sadd("targets:SNAPSHOT", "ath79/generic", "x86/64") redis.sadd("targets:21.02", "testtarget/testsubtarget") redis.hset("mapping-abi", mapping={"test1-1": "test1"}) @@ -84,6 +81,7 @@ def app(test_path, redis_server): "CACHE_PATH": test_path, "TESTING": True, "UPSTREAM_URL": "http://localhost:8001", + "REPOSITORY_ALLOW_LIST": [], "BRANCHES": { "SNAPSHOT": { "name": "SNAPSHOT", @@ -99,11 +97,11 @@ def app(test_path, redis_server): "extra_repos": {}, "extra_keys": [], }, - "TESTVERSION": { - "name": "TESTVERSION", + "1.2": { + "name": "1.2", "enabled": True, "snapshot": True, - "versions": ["TESTVERSION"], + "versions": ["1.2.3"], "git_branch": "master", "path": "snapshots", "path_packages": "snapshots/packages", @@ -201,30 +199,6 @@ def client(app): return app.test_client() -@pytest.fixture -def runner(app): - return app.test_cli_runner() - - @pytest.fixture(scope="session") def httpserver_listen_address(): return ("127.0.0.1", 8001) - - -@pytest.fixture -def upstream(httpserver): - base_url = "/snapshots/targets/testtarget/testsubtarget" - upstream_path = Path("./tests/upstream/snapshots/targets/testtarget/testsubtarget/") - expected_file_requests = [ - "sha256sums.sig", - "sha256sums", - "openwrt-imagebuilder-testtarget-testsubtarget.Linux-x86_64.tar.xz", - ] - - for f in expected_file_requests: - httpserver.expect_request(f"{base_url}/{f}").respond_with_data( - (upstream_path / f).read_bytes(), - headers={"Last-Modified": "Thu, 19 Mar 2020 20:27:41 GMT"}, - 
) - - httpserver.check_assertions() diff --git a/tests/test_api.py b/tests/test_api.py index 8ab0e083..d9ac5e1b 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,179 +1,119 @@ import pytest -def test_api_build(client, upstream): +def test_api_build(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", packages=["test1", "test2"], ), ) assert response.status == "200 OK" - assert response.json.get("request_hash") == "df1dfbb6f6deca36b389e4b2917cb8f0" + assert response.json.get("manifest").get("test1") == "1.0" -def test_api_build_filesystem_ext4(app, upstream): - client = app.test_client() +def test_api_build_version_code(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", + version_code="r12647-cb44ab4f5d", target="testtarget/testsubtarget", profile="testprofile", packages=["test1", "test2"], - filesystem="ext4", ), ) assert response.status == "200 OK" - assert response.json.get("request_hash") == "34df61de58ef879888f91d75ccd381f2" - - config = ( - app.config["CACHE_PATH"] / "cache/TESTVERSION/testtarget/testsubtarget/.config" - ).read_text() - assert "# CONFIG_TARGET_ROOTFS_SQUASHFS is not set" in config - assert "CONFIG_TARGET_ROOTFS_EXT4FS=y" in config -def test_api_build_filesystem_squashfs(app, upstream): - client = app.test_client() +def test_api_build_rootfs_size(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", packages=["test1", "test2"], - filesystem="squashfs", + rootfs_size_mb=100, ), ) assert response.status == "200 OK" - assert response.json.get("request_hash") == "8f9718015c027664b0a8245e39f21d09" - config = ( - app.config["CACHE_PATH"] / "cache/TESTVERSION/testtarget/testsubtarget/.config" - ).read_text() - assert "# CONFIG_TARGET_ROOTFS_EXT4FS is not set" in config - assert "CONFIG_TARGET_ROOTFS_SQUASHFS=y" in config + assert response.json.get("build_cmd")[6] == "ROOTFS_PARTSIZE=100" -def test_api_build_filesystem_empty(app, upstream): - client = app.test_client() +def test_api_build_version_code_bad(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", + version_code="some-bad-version-code", target="testtarget/testsubtarget", profile="testprofile", packages=["test1", "test2"], - filesystem="", ), ) - assert response.status == "200 OK" - assert response.json.get("request_hash") == "df1dfbb6f6deca36b389e4b2917cb8f0" - config = ( - app.config["CACHE_PATH"] / "cache/TESTVERSION/testtarget/testsubtarget/.config" - ).read_text() - assert "CONFIG_TARGET_ROOTFS_EXT4FS=y" in config - assert "CONFIG_TARGET_ROOTFS_SQUASHFS=y" in config + assert response.status == "500 INTERNAL SERVER ERROR" -def test_api_build_filesystem_reset(app, upstream): - client = app.test_client() +def test_api_build_diff_packages(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", packages=["test1", "test2"], - filesystem="ext4", + diff_packages=True, ), ) assert response.status == "200 OK" - assert response.json.get("request_hash") == "34df61de58ef879888f91d75ccd381f2" + # TODO shorten for testing assert ( - "# CONFIG_TARGET_ROOTFS_SQUASHFS is not set" - in ( - app.config["CACHE_PATH"] - / "cache/TESTVERSION/testtarget/testsubtarget/.config" - ).read_text() - ) - - 
response = client.post( - "/api/v1/build", - json=dict( - version="TESTVERSION", - target="testtarget/testsubtarget", - profile="testprofile", - packages=["test1", "test2"], - ), - ) - assert response.status == "200 OK" - assert response.json.get("request_hash") == "df1dfbb6f6deca36b389e4b2917cb8f0" - assert ( - "# CONFIG_TARGET_ROOTFS_SQUASHFS is not set" - not in ( - app.config["CACHE_PATH"] - / "cache/TESTVERSION/testtarget/testsubtarget/.config" - ).read_text() + response.json.get("build_cmd")[3] + == "PACKAGES=-base-files -busybox -dnsmasq -dropbear -firewall -fstools -ip6tables -iptables -kmod-ath9k -kmod-gpio-button-hotplug -kmod-ipt-offload -kmod-usb-chipidea2 -kmod-usb-storage -kmod-usb2 -libc -libgcc -logd -mtd -netifd -odhcp6c -odhcpd-ipv6only -opkg -ppp -ppp-mod-pppoe -swconfig -uboot-envtools -uci -uclient-fetch -urandom-seed -urngd -wpad-basic test1 test2" ) -def test_api_build_filesystem_bad(client, upstream): - response = client.post( - "/api/v1/build", - json=dict( - version="TESTVERSION", - target="testtarget/testsubtarget", - profile="testprofile", - packages=["test1", "test2"], - filesystem="bad", - ), - ) - assert response.status == "400 BAD REQUEST" - - def test_api_latest_default(client): response = client.get("/api/latest") assert response.status == "302 FOUND" -def test_api_build_mapping(client, upstream): +def test_api_build_mapping(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testvendor,testprofile", packages=["test1", "test2"], ), ) assert response.status == "200 OK" - assert response.json.get("request_hash") == "697a3aa34dcc7e2577a69960287c3b9b" -def test_api_build_mapping_abi(client, upstream): +def test_api_build_mapping_abi(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testvendor,testprofile", packages=["test1-1", "test2"], ), ) assert response.status == "200 OK" - assert response.json.get("request_hash") == "4c1e7161dd3f0c4ca2ba04a65c6bf0fb" def test_api_build_bad_target(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtargetbad", profile="testvendor,testprofile", packages=["test1", "test2"], @@ -185,43 +125,62 @@ def test_api_build_bad_target(client): ) -def test_api_build_get(client, upstream): +def test_api_build_get(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", packages=["test1", "test2"], ), ) - assert response.json["request_hash"] == "df1dfbb6f6deca36b389e4b2917cb8f0" - response = client.get("/api/v1/build/df1dfbb6f6deca36b389e4b2917cb8f0") + request_hash = response.json["request_hash"] + response = client.get(f"/api/v1/build/{request_hash}") assert response.status == "200 OK" - assert response.json.get("request_hash") == "df1dfbb6f6deca36b389e4b2917cb8f0" + assert response.json.get("request_hash") == request_hash -def test_api_build_packages_versions(client, upstream): +def test_api_build_packages_versions(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", packages_versions={"test1": "1.0", "test2": "2.0"}, ), ) - assert response.json["request_hash"] == "bb873a96483917da5b320a7a90b75985" - response = 
client.get("/api/v1/build/bb873a96483917da5b320a7a90b75985") + request_hash = response.json["request_hash"] + response = client.get(f"/api/v1/build/{request_hash}") assert response.status == "200 OK" - assert response.json.get("request_hash") == "bb873a96483917da5b320a7a90b75985" + assert response.json.get("request_hash") == request_hash -def test_api_build_packages_duplicate(client, upstream): +def test_api_build_packages_versions_bad(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", + target="testtarget/testsubtarget", + profile="testprofile", + packages_versions={"test1": "0.0", "test2": "2.0"}, + ), + ) + request_hash = response.json["request_hash"] + response = client.get(f"/api/v1/build/{request_hash}") + assert response.status == "500 INTERNAL SERVER ERROR" + assert ( + response.json.get("detail") + == "Error: Impossible package selection: test1 version not as requested: 0.0 vs. 1.0" + ) + + +def test_api_build_packages_duplicate(client): + response = client.post( + "/api/v1/build", + json=dict( + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", packages=["test1", "test2"], @@ -241,31 +200,29 @@ def test_api_build_get_no_post(client): assert response.status == "405 METHOD NOT ALLOWED" -def test_api_build_empty_packages_list(client, upstream): +def test_api_build_empty_packages_list(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", packages=[], ), ) assert response.status == "200 OK" - assert response.json.get("request_hash") == "c1175efc86abda8d1b03f38204e7dc02" -def test_api_build_withouth_packages_list(client, upstream): +def test_api_build_withouth_packages_list(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", ), ) assert response.status == "200 OK" - assert response.json.get("request_hash") == "c1175efc86abda8d1b03f38204e7dc02" def test_api_build_prerelease_snapshot(client): @@ -296,11 +253,11 @@ def test_api_build_prerelease_rc(client): assert response.json.get("detail") == "Unsupported profile: testprofile" -def test_api_build_bad_packages_str(client, upstream): +def test_api_build_bad_packages_str(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", packages="testpackage", @@ -393,13 +350,13 @@ def test_api_build_needed(client): assert response.json.get("title") == "Bad Request" response = client.post( "/api/v1/build", - json=dict(version="TESTVERSION", target="testtarget/testsubtarget"), + json=dict(version="1.2.3", target="testtarget/testsubtarget"), ) assert response.status == "400 BAD REQUEST" assert response.json.get("detail") == "'profile' is a required property" assert response.json.get("title") == "Bad Request" response = client.post( - "/api/v1/build", json=dict(version="TESTVERSION", profile="testprofile") + "/api/v1/build", json=dict(version="1.2.3", profile="testprofile") ) assert response.status == "400 BAD REQUEST" assert response.json.get("detail") == "'target' is a required property" @@ -412,7 +369,7 @@ def test_api_build_bad_distro(client): json=dict( distro="Foobar", target="testtarget/testsubtarget", - version="TESTVERSION", + version="1.2.3", profile="testprofile", packages=["test1", "test2"], ), @@ -453,7 +410,7 @@ def 
test_api_build_bad_profile(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="Foobar", packages=["test1", "test2"], @@ -463,74 +420,24 @@ def test_api_build_bad_profile(client): assert response.json.get("detail") == "Unsupported profile: Foobar" -def test_api_build_bad_packages(client): - response = client.post( - "/api/v1/build", - json=dict( - version="TESTVERSION", - target="testtarget/testsubtarget", - profile="testprofile", - packages=["test4"], - ), - ) - assert response.json.get("detail") == "Unsupported package(s): test4" - assert response.status == "422 UNPROCESSABLE ENTITY" - - -def test_api_build_package_to_remove_diff_packages_false(client, upstream): - response = client.post( - "/api/v1/build", - json=dict( - version="TESTVERSION", - target="testtarget/testsubtarget", - profile="testprofile", - packages=["test1", "test2", "package_to_remove"], - diff_packages=False, - ), - ) - assert response.status == "422 UNPROCESSABLE ENTITY" - - -def test_api_build_cleanup(app, upstream): - client = app.test_client() - response = client.post( - "/api/v1/build", - json=dict( - version="TESTVERSION", - target="testtarget/testsubtarget", - profile="testprofile", - packages=["test1", "test2"], - filesystem="ext4", - ), - ) - assert response.status == "200 OK" - assert not ( - app.config["CACHE_PATH"] - / "cache/TESTVERSION/testtarget/testsubtarget" - / "pseudo_kernel_build_dir/tmp/" - / "fake_trash" - ).exists() - - -def test_api_build_defaults_empty(client, upstream): +def test_api_build_defaults_empty(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", defaults="", ), ) assert response.status == "200 OK" - assert response.json.get("request_hash") == "c1175efc86abda8d1b03f38204e7dc02" -def test_api_build_defaults_filled_not_allowed(client, upstream): +def test_api_build_defaults_filled_not_allowed(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", defaults="echo", @@ -540,17 +447,16 @@ def test_api_build_defaults_filled_not_allowed(client, upstream): assert response.status == "400 BAD REQUEST" -def test_api_build_defaults_filled_allowed(app, upstream): +def test_api_build_defaults_filled_allowed(app): app.config["ALLOW_DEFAULTS"] = True client = app.test_client() response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", defaults="echo", ), ) assert response.status == "200 OK" - assert response.json.get("request_hash") == "95850740d931c460d77f8de35f298b9a" diff --git a/tests/test_common.py b/tests/test_common.py index af1fe31b..804a8f4b 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -31,20 +31,17 @@ def test_get_request_hash(): "package_hash": get_packages_hash(["test"]), } - assert get_request_hash(request) == "289a492f0ed178ab35cdd24f9b6b01cf" - -def test_get_request_hash_diff_packages(): - request = { - "distro": "test", - "version": "test", - "profile": "test", - "package_hash": get_packages_hash(["test"]), - "diff_packages": True, +def test_diff_packages(): + assert diff_packages({"test1"}, {"test1", "test2"}) == {"test1", "-test2"} + assert diff_packages({"test1"}, {"test1"}) == {"test1"} + assert diff_packages({"test1"}, {"test2", "test3"}) == {"test1", 
"-test2", "-test3"} + assert diff_packages({"test1"}, {"test2", "-test3"}) == { + "test1", + "-test2", + "-test3", } - assert get_request_hash(request) == "fe8893a4c872d14e7da222b0810bfd99" - def test_fingerprint_pubkey_usign(): pub_key = "RWSrHfFmlHslUcLbXFIRp+eEikWF9z1N77IJiX5Bt/nJd1a/x+L+SU89" @@ -68,3 +65,21 @@ def test_verify_usign(): os.close(sig_fd) os.unlink(msg_path) os.unlink(sig_path) + + +def test_get_version_container_tag(): + assert get_container_version_tag("1.0.0") == "v1.0.0" + assert get_container_version_tag("SNAPSHOT") == "master" + assert get_container_version_tag("1.0.0-SNAPSHOT") == "openwrt-1.0.0" + + +def test_check_manifest(): + assert check_manifest({"test": "1.0"}, {"test": "1.0"}) == None + assert ( + check_manifest({"test": "1.0"}, {"test": "2.0"}) + == "Impossible package selection: test version not as requested: 2.0 vs. 1.0" + ) + assert ( + check_manifest({"test": "1.0"}, {"test2": "1.0"}) + == "Impossible package selection: test2 not in manifest" + ) diff --git a/tests/test_janitor.py b/tests/test_janitor.py index e1fc5163..44413121 100644 --- a/tests/test_janitor.py +++ b/tests/test_janitor.py @@ -1,9 +1,7 @@ from pathlib import Path import pytest -from pytest_httpserver import HTTPServer -from asu.build import build from asu.janitor import * @@ -46,7 +44,7 @@ def test_update_meta_overview_json(app): with app.app_context(): update_meta_json() overview_json = json.loads((app.config["JSON_PATH"] / "overview.json").read_text()) - assert "package_changes" in overview_json["branches"]["TESTVERSION"] + assert "package_changes" in overview_json["branches"]["1.2"] def test_parse_packages_file(app, upstream): diff --git a/tests/test_stats.py b/tests/test_stats.py index 99c1cb82..4e96282b 100644 --- a/tests/test_stats.py +++ b/tests/test_stats.py @@ -1,58 +1,55 @@ from prometheus_client import REGISTRY -def test_stats_image_builds(client, upstream): +def test_stats_image_builds(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", packages=["test1", "test2"], ), ) assert response.status == "200 OK" - assert response.json.get("request_hash") == "df1dfbb6f6deca36b389e4b2917cb8f0" response = client.get("/metrics") print(response.get_data(as_text=True)) assert ( - 'builds_total{branch="TESTVERSION",profile="testprofile",target="testtarget/testsubtarget",version="TESTVERSION"} 1.0' + 'builds_total{profile="testprofile",target="testtarget/testsubtarget",version="1.2.3"} 1.0' in response.get_data(as_text=True) ) response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", packages=["test1"], ), ) assert response.status == "200 OK" - assert response.json.get("request_hash") == "39bfd960818b32759982b09989e62809" response = client.get("/metrics") print(response.get_data(as_text=True)) assert ( - 'builds_total{branch="TESTVERSION",profile="testprofile",target="testtarget/testsubtarget",version="TESTVERSION"} 2.0' + 'builds_total{profile="testprofile",target="testtarget/testsubtarget",version="1.2.3"} 2.0' in response.get_data(as_text=True) ) -def test_stats_cache(client, upstream): +def test_stats_cache(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", packages=["test1", "test2"], ), ) assert response.status == "200 OK" - assert response.json.get("request_hash") == 
"df1dfbb6f6deca36b389e4b2917cb8f0" response = client.get("/metrics") print(response.get_data(as_text=True)) @@ -61,25 +58,24 @@ def test_stats_cache(client, upstream): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", packages=["test1", "test2"], ), ) assert response.status == "200 OK" - assert response.json.get("request_hash") == "df1dfbb6f6deca36b389e4b2917cb8f0" response = client.get("/metrics") print(response.get_data(as_text=True)) assert "cache_hits 1.0" in response.get_data(as_text=True) -def test_stats_clients_luci(client, upstream): +def test_stats_clients_luci(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", packages=["test1", "test2"], @@ -95,11 +91,11 @@ def test_stats_clients_luci(client, upstream): ) -def test_stats_clients_unknown(client, upstream): +def test_stats_clients_unknown(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", packages=["test1", "test2"], @@ -113,11 +109,11 @@ def test_stats_clients_unknown(client, upstream): ) -def test_stats_clients_auc(client, upstream): +def test_stats_clients_auc(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", packages=["test1", "test2"], @@ -132,11 +128,11 @@ def test_stats_clients_auc(client, upstream): ) -def test_stats_clients_auc_possible_new_format(client, upstream): +def test_stats_clients_auc_possible_new_format(client): response = client.post( "/api/v1/build", json=dict( - version="TESTVERSION", + version="1.2.3", target="testtarget/testsubtarget", profile="testprofile", packages=["test1", "test2"], diff --git a/tests/upstream/snapshots/targets/testtarget/testsubtarget/openwrt-imagebuilder-testtarget-testsubtarget.Linux-x86_64.tar.xz b/tests/upstream/snapshots/targets/testtarget/testsubtarget/openwrt-imagebuilder-testtarget-testsubtarget.Linux-x86_64.tar.xz deleted file mode 100644 index 254ae6452288c4423e1f39e1a0d41478569811c3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2475 zcmV;c2~_qUiwFP;>>OhN1MOSskK4Ev&)4X$5C-{>bVG-iM0$V%&F0t*x|?h>vp|8O zFer+&wPQ=FjyX{5|K3N+hdi;z_DpxS*_s%{65o&SJs$5MsT8Hjuc`)RGc`6zn@v+w zp~lorqpGoK{A1lF-P_=YEN`#TwTwqGfAwVFf-x3R6#Cz|`^A)Zzw;6j&afacj|B^X z$RqY7jP{oBfYR2`H&CUmO?4LzVAkG2pZ8<=`<0a9{_|g%vZ%ABs4`Ozbdhfz9T<<8 z^Ih{#v7i`76o;5$O!$)!KgIwY=l{O*@8>)l@0PK1{zdHOpMcdzI3i&Gks$Oq|Bq4L zeERtQ!*`?e*Pp+8cRu?3>C^f9v(dZ1pY!);f4&JeQqTVO^Xs$kJ^lCA{#YJb9$^1J zstaSYso5!Gm;J|tEaHC*Z(#p1!Q^QFk5K|Y@u$I4U50~Ki`B4g0r-H}IK5>cKB{u`VZAw1N)&MXTn%LW3a7 zZE;T~u4xcls_Edb?>>Kg_rvJhpFjNY)@kS%8a*k(lV{JKmsjbFS7mM5v>08ODmT-S zx3^K6Rj*EdFIV~i`(K-uyJYOJe}WoTac{@u?<#1;oU;lLW{ikzJUMx2tfYMJ-dL}rp0*%|UEULzUQJKq3 zd+(dKNFO*uRQ+W z-?0A69lX0ZHAsj;GQfd*rtnPRIZ?GiKHj@$*M3A)gO;TL@#*7RI zB7q)fC@Wlz2-3Wssrp}@pTvmBAgzirF{*M^5c&r~A{N-JGFNIkomYYYL@5IPToSH; zr>&c!n0hrZ#RvnPpwuLSM^-xem818X6KHOQHA5N!8Ai8K5Y=W&1VxM>Y#3LTaL`~< zz}%K8(#32B`-?dYOZTUh)HF#cii50-8=@MrZ39DmcQ^IV- zO=PWAga;Q8-;uUsi76qyw%o?lu@2Sm`$~@m zt%0G-a~NiQv6Wt#Eumy0)?V{Xv@Aio(n#y9*~EFK^zKoaH6p&P z^f%oUQa6P*l|!0n+{a%7X}N^1YB*Vzyxh2Y+<%}rh!nI zUyQz!wvpqsM$SuZAeCjgxwhDX_Fn9EebrEjv0G4gu9Z1#Hd|YcTXA)vcfqdrlX|_6 z)OtG68~WDP$u^^OBs0<37E-a5MAxgK2g$TZX7w2PbDM2G5OCW2Pcon!945SvwRZLi 
[base85 binary delta for the deleted openwrt-imagebuilder-testtarget-testsubtarget.Linux-x86_64.tar.xz truncated]
-signify -S -m sha256sums -s ../../../../../keys/testkey.sec
-#usign -S -m sha256sums -s ../../../../../keys/testkey.sec
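
Usage sketch (not confirmed by the patch itself): how the new asu.common.run_container() helper could be exercised locally. The image tag follows the BASE_CONTAINER naming used in asu/build.py (ghcr.io/openwrt/imagebuilder:<target>-<subtarget>-<version tag>), the socket path mirrors the CI setup added to .github/workflows/test.yml, and the example assumes the image has already been pulled, since asu/build.py pulls it explicitly before calling the helper.

    # sketch, assuming a local podman socket and a pre-pulled imagebuilder image
    import os

    from asu.common import run_container

    # podman-py's from_env() honours CONTAINER_HOST, as set in the CI workflow
    os.environ.setdefault("CONTAINER_HOST", "unix:///tmp/podman.sock")

    returncode, stdout, stderr = run_container(
        "ghcr.io/openwrt/imagebuilder:x86-64-master",  # x86/64 target, SNAPSHOT -> "master"
        ["make", "info"],
    )
    print(returncode)
    print(stdout.splitlines()[:5])  # revision, default packages, profiles, ...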
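
Usage sketch for the API side: a build request exercising the new "repositories" and "repository_keys" fields added to asu/openapi.yml. The server URL, package names, repository URL and key are taken from the examples in that file; the x86/64 target and "generic" profile are assumptions, and the repository host must be on the server's REPOSITORY_ALLOW_LIST (downloads.openwrt.org is in the new misc/config.py default).

    # sketch, assuming a reachable ASU instance
    import requests

    response = requests.post(
        "https://asu.aparcar.org/api/v1/build",
        json={
            "version": "SNAPSHOT",
            "target": "x86/64",
            "profile": "generic",
            "packages": ["vim", "tmux"],
            "repositories": {
                "openwrt_packages": "https://downloads.openwrt.org/snapshots/packages/x86_64/packages",
            },
            "repository_keys": [
                "RWRNAX5vHtXWFmt+n5di7XX8rTu0w+c8X7Ihv4oCyD6tzsUwmH0A6kO0",
            ],
        },
    )
    # 200 with a request_hash on success, 202 while queued/building
    print(response.status_code, response.json().get("request_hash"))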