pipeline: `debs-to-repo` (v7, working with reprepro)

- pipeline: artifacts: use better description for artifacts (artifact_name+artifact_version instead of artifact_final_file_basename)
This commit is contained in:
Ricardo Pardini 2023-05-05 14:03:18 +02:00
parent 52fffdda75
commit eff56a7909
7 changed files with 403 additions and 6 deletions

View File

@ -66,6 +66,42 @@ function cli_json_info_run() {
return 0 # stop here.
fi
# debs-to-repo-download is also isolated from the rest. It does depend on the debs-to-repo-info, but that's prepared beforehand in a standard pipeline run.
if [[ "${ARMBIAN_COMMAND}" == "debs-to-repo-download" ]]; then
display_alert "Downloading debs" "debs-to-repo-download" "info"
declare DEBS_TO_REPO_INFO_FILE="${BASE_INFO_OUTPUT_DIR}/debs-to-repo-info.json"
if [[ ! -f "${DEBS_TO_REPO_INFO_FILE}" ]]; then
exit_with_error "debs-to-repo-download :: no ${DEBS_TO_REPO_INFO_FILE} file found; did you restore the pipeline artifacts correctly?"
fi
declare DEBS_OUTPUT_DIR="${DEST}/debs"
run_host_command_logged mkdir -pv "${DEBS_OUTPUT_DIR}"
run_host_command_logged "${PYTHON3_VARS[@]}" "${PYTHON3_INFO[BIN]}" "${INFO_TOOLS_DIR}"/download-debs.py "${DEBS_TO_REPO_INFO_FILE}" "${DEBS_OUTPUT_DIR}"
display_alert "Done with" "debs-to-repo-download" "ext"
return 0 # stop here.
fi
# debs-to-repo-reprepro is also isolated from the rest. It also depends on the debs-to-repo-info, but that's prepared beforehand in a standard pipeline run.
if [[ "${ARMBIAN_COMMAND}" == "debs-to-repo-reprepro" ]]; then
display_alert "Generating rerepro publishing script" "debs-to-repo-reprepro" "info"
declare DEBS_TO_REPO_INFO_FILE="${BASE_INFO_OUTPUT_DIR}/debs-to-repo-info.json"
if [[ ! -f "${DEBS_TO_REPO_INFO_FILE}" ]]; then
exit_with_error "debs-to-repo-reprepro :: no ${DEBS_TO_REPO_INFO_FILE} file found; did you restore the pipeline artifacts correctly?"
fi
declare OUTPUT_INFO_REPREPRO_DIR="${BASE_INFO_OUTPUT_DIR}/reprepro"
declare OUTPUT_INFO_REPREPRO_CONF_DIR="${OUTPUT_INFO_REPREPRO_DIR}/conf"
run_host_command_logged mkdir -pv "${OUTPUT_INFO_REPREPRO_DIR}" "${OUTPUT_INFO_REPREPRO_CONF_DIR}"
# Export params so Python can see them
export REPO_GPG_KEYID="${REPO_GPG_KEYID}"
run_host_command_logged "${PYTHON3_VARS[@]}" "${PYTHON3_INFO[BIN]}" "${INFO_TOOLS_DIR}"/repo-reprepro.py "${DEBS_TO_REPO_INFO_FILE}" "${OUTPUT_INFO_REPREPRO_DIR}" "${OUTPUT_INFO_REPREPRO_CONF_DIR}"
display_alert "Done with" "debs-to-repo-reprepro" "ext"
return 0 # stop here.
fi
### --- inventory --- ###
declare ALL_BOARDS_ALL_BRANCHES_INVENTORY_FILE="${BASE_INFO_OUTPUT_DIR}/all_boards_all_branches.json"
@ -168,6 +204,17 @@ function cli_json_info_run() {
### CI/CD Outputs.
# output stage: deploy debs to repo.
# Artifacts-to-repo output. Takes all artifacts, and produces info necessary for:
# 1) getting the artifact from OCI only (not build it)
# 2) getting the list of .deb's to be published to the repo for that artifact
display_alert "Generating deb-to-repo JSON output" "output-debs-to-repo-json" "info"
run_host_command_logged "${PYTHON3_VARS[@]}" "${PYTHON3_INFO[BIN]}" "${INFO_TOOLS_DIR}"/output-debs-to-repo-json.py "${BASE_INFO_OUTPUT_DIR}" "${OUTDATED_ARTIFACTS_IMAGES_FILE}"
if [[ "${ARMBIAN_COMMAND}" == "debs-to-repo-json" ]]; then
display_alert "Done with" "output-debs-to-repo-json" "ext"
return 0
fi
# Output stage: GHA simplest possible two-matrix workflow.
# A prepare job running this, prepares two matrixes:
# One for artifacts. One for images.

View File

@ -25,11 +25,16 @@ function armbian_register_commands() {
["config-dump-json"]="config_dump_json" # implemented in cli_config_dump_json_pre_run and cli_config_dump_json_run
["config-dump-no-json"]="config_dump_json" # implemented in cli_config_dump_json_pre_run and cli_config_dump_json_run
["inventory"]="json_info" # implemented in cli_json_info_pre_run and cli_json_info_run
["targets"]="json_info" # implemented in cli_json_info_pre_run and cli_json_info_run
["gha-matrix"]="json_info" # implemented in cli_json_info_pre_run and cli_json_info_run
["gha-workflow"]="json_info" # implemented in cli_json_info_pre_run and cli_json_info_run
["gha-template"]="json_info" # implemented in cli_json_info_pre_run and cli_json_info_run
["inventory"]="json_info" # implemented in cli_json_info_pre_run and cli_json_info_run
["targets"]="json_info" # implemented in cli_json_info_pre_run and cli_json_info_run
["debs-to-repo-json"]="json_info" # implemented in cli_json_info_pre_run and cli_json_info_run
["gha-matrix"]="json_info" # implemented in cli_json_info_pre_run and cli_json_info_run
["gha-workflow"]="json_info" # implemented in cli_json_info_pre_run and cli_json_info_run
["gha-template"]="json_info" # implemented in cli_json_info_pre_run and cli_json_info_run
# These probably should be in their own separate CLI commands file, but for now they're together in jsoninfo.
["debs-to-repo-download"]="json_info" # implemented in cli_json_info_pre_run and cli_json_info_run
["debs-to-repo-reprepro"]="json_info" # implemented in cli_json_info_pre_run and cli_json_info_run
["kernel-patches-to-git"]="patch_kernel" # implemented in cli_patch_kernel_pre_run and cli_patch_kernel_run

View File

@ -218,6 +218,7 @@ def armbian_run_command_and_parse_json_from_stdout(exec_cmd: list[str], params:
result = None
logs = []
try:
log.debug(f"Start calling Armbian command: {' '.join(exec_cmd)}")
result = subprocess.run(
exec_cmd,
stdout=subprocess.PIPE,

View File

@ -0,0 +1,97 @@
#!/usr/bin/env python3
#
# SPDX-License-Identifier: GPL-2.0
# Copyright (c) 2023 Ricardo Pardini <ricardo@pardini.net>
# This file is a part of the Armbian Build Framework https://github.com/armbian/build/
#
import json
import logging
import os
import subprocess
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from common import armbian_utils
# Prepare logging
armbian_utils.setup_logging()
log: logging.Logger = logging.getLogger("download-debs")
def download_using_armbian(exec_cmd: list[str], params: dict):
    """Run an Armbian compile.sh invocation that downloads debs.

    Returns a dict with the original params under "in", a "download_ok" boolean,
    and the parsed stderr log lines under "logs". Never raises on a failed
    command: a non-zero exit is reported via download_ok=False.
    """
    captured_logs = []
    try:
        log.debug(f"Start calling Armbian command: {' '.join(exec_cmd)}")
        completed = subprocess.run(
            exec_cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            check=True,  # raise CalledProcessError on non-zero exit
            universal_newlines=False,  # universal_newlines messes up bash encoding, don't use, instead decode utf8 manually;
            bufsize=-1,  # full buffering
            # Minimal environment: these are early (pre-param-parsing) switches read by
            # the Armbian bash code as ENV vars (not PARAMs).
            env={
                "ANSI_COLOR": "none",  # Do not use ANSI colors in logging output, don't write to log files
                "WRITE_EXTENSIONS_METADATA": "no",  # Not interested in ext meta here
                "ALLOW_ROOT": "yes",  # We're gonna be calling it as root, so allow it @TODO not the best option
                "PRE_PREPARED_HOST": "yes"  # presumably skips host preparation — TODO confirm (original comment was a copy-paste)
            }
        )
    except subprocess.CalledProcessError as failure:
        # decode utf8 manually, universal_newlines messes up bash encoding
        captured_logs = armbian_utils.parse_log_lines_from_stderr(failure.stderr)
        log.error(f"Error calling Armbian command: {' '.join(exec_cmd)}")
        log.error(f"Error details: params: {params} - return code: {failure.returncode} - stderr: {'; '.join(captured_logs)}")
        return {"in": params, "logs": captured_logs, "download_ok": False}
    # Success path: parse any stderr output into log lines for the caller.
    if completed is not None and completed.stderr:
        captured_logs = armbian_utils.parse_log_lines_from_stderr(completed.stderr)
    return {"in": params, "download_ok": True, "logs": captured_logs}
# This is called like this:
# /usr/bin/python3 /armbian/lib/tools/info/download-debs.py /armbian/output/info/debs-to-repo-info.json /armbian/output/debs
debs_info_json_path = sys.argv[1]  # input: debs-to-repo-info.json produced by output-debs-to-repo-json.py
debs_output_dir = sys.argv[2]  # directory where the downloaded .deb files should land

# Read the JSON: a list of artifacts, each carrying a "debs" map and a "download_invocation".
with open(debs_info_json_path) as f:
    artifact_debs = json.load(f)

log.info("Downloading debs...")

# Loop over the debs. If any deb of an artifact is missing on disk, queue that
# artifact's whole download invocation (one invocation fetches all of its debs).
missing_debs = []
missing_invocations = []
for artifact in artifact_debs:
    is_missing_deb = False
    for key in artifact["debs"]:
        deb = artifact["debs"][key]
        relative_deb_path = deb["relative_deb_path"]
        deb_path = os.path.join(debs_output_dir, relative_deb_path)
        if not os.path.isfile(deb_path):
            log.info(f"Missing deb: {deb_path}")
            missing_debs.append(deb_path)
            is_missing_deb = True
    if is_missing_deb:
        missing_invocations.append(artifact["download_invocation"])
log.info(f"Missing debs: {len(missing_debs)}")
log.info(f"Missing invocations: {len(missing_invocations)}")

# only actually invoke anything if we're in a container
# run ./compile.sh <invocation> for each missing invocation
# NOTE(review): the "Running:" line is logged even outside a container, where
# nothing is actually executed — confirm that is intended.
for invocation in missing_invocations:
    cmds = ["/armbian/compile.sh"] + invocation
    log.info(f"Running: {' '.join(cmds)}")
    if armbian_utils.get_from_env("ARMBIAN_RUNNING_IN_CONTAINER") == "yes":
        dl_info = download_using_armbian(cmds, {"missing": "deb"})
        log.info(f"Download info: {dl_info}")

View File

@ -0,0 +1,118 @@
#!/usr/bin/env python3
#
# SPDX-License-Identifier: GPL-2.0
# Copyright (c) 2023 Ricardo Pardini <ricardo@pardini.net>
# This file is a part of the Armbian Build Framework https://github.com/armbian/build/
#
import json
import logging
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from common import armbian_utils
# Prepare logging
armbian_utils.setup_logging()
log: logging.Logger = logging.getLogger("output-debs-to-repo-json")
def generate_deb_summary(info):
    """Build the list of deb-publishing descriptors from the artifacts info dict.

    Only "deb"/"deb-tar" artifacts are considered. For each, the parallel
    *_ARRAY maps are validated and merged into a per-key deb dict; all debs of
    one artifact must resolve to a single repo_target. Invalid artifacts are
    logged and skipped.
    """
    summary = []
    for artifact_id, artifact in info["artifacts"].items():
        out = artifact["out"]
        artifact_name = artifact['in']['artifact_name']
        desc = f"{artifact['out']['artifact_name']}={artifact['out']['artifact_version']}"
        artifact_type = out["artifact_type"]
        artifact_version = out["artifact_version"]
        # Only deb-producing artifacts are relevant to the repo.
        if artifact_type not in ("deb", "deb-tar"):
            continue
        debs_keys = out["artifact_map_debs_keys_ARRAY"]
        debs_values = out["artifact_map_debs_values_ARRAY"]
        packages_keys = out["artifact_map_packages_keys_ARRAY"]
        packages_values = out["artifact_map_packages_values_ARRAY"]
        # Sanity check: all those array should have the same amount of elements.
        if not (len(debs_keys) == len(debs_values) == len(packages_keys) == len(packages_values)):
            log.error(f"Error: artifact {artifact_id} has different amount of keys and values in the map: {artifact}")
            continue
        # Sanity check: the debs and packages maps must share the same key set, in order.
        if debs_keys != packages_keys:
            log.error(f"Error: artifact {artifact_id} has different keys in the map: {artifact}")
            continue
        all_debs: dict[str, dict] = {}
        for key, relative_deb_path, package_name in zip(debs_keys, debs_values, packages_values):
            # Guard against duplicated keys; first occurrence wins.
            if key in all_debs:
                log.error(f"Error: artifact {artifact_id} has duplicated key {key} in the map: {artifact}")
                continue
            # A slash in the relative path means the first path component names the repo target.
            if "/" in relative_deb_path:
                first_part = relative_deb_path.split("/")[0]
                repo_target = f'armbian-{first_part}'
            else:
                repo_target = "armbian"
            all_debs[key] = {"relative_deb_path": relative_deb_path, "package_name": package_name, "repo_target": repo_target}
        # Aggregate all repo_targets from their debs. There can be only one. Eg: each artifact can only be in one repo_target, no matter how many debs.
        repo_targets = {one_deb["repo_target"] for one_deb in all_debs.values()}
        if len(repo_targets) > 1:
            log.error(f"Error: artifact {artifact_id} has debs in different repo_targets: {artifact}")
            continue
        repo_target = repo_targets.pop()
        inputs = artifact["in"]["original_inputs"]
        # The invocation, in array format: "what do I run to download the debs" for this artifact. Args are NOT quoted.
        invocation = ["download-artifact"] + armbian_utils.map_to_armbian_params(inputs["vars"], False) + inputs["configs"]
        summary.append({
            "id": artifact_id, "desc": desc, "artifact_name": artifact_name, "artifact_type": artifact_type, "artifact_version": artifact_version,
            "repo_target": repo_target,
            "download_invocation": invocation,
            "debs": all_debs
        })
    return summary
# This is called like this:
# /usr/bin/python3 /armbian/lib/tools/info/output-debs-to-repo-json.py /armbian/output/info /armbian/output/info/outdated-artifacts-images.json
# first arg is the output directory (output/info)
info_output_dir = sys.argv[1]
output_json_file = os.path.join(info_output_dir, "debs-to-repo-info.json")
# second arg: the outdated-artifacts-images JSON produced earlier in the pipeline
outdated_artifacts_image_json_filepath = sys.argv[2]

# read the json file passed as second argument as a json object
with open(outdated_artifacts_image_json_filepath) as f:
    info = json.load(f)

# Reduce the artifacts info to the per-artifact deb-publishing summary.
artifact_debs = generate_deb_summary(info)

# dump the json to a debs-to-repo-info.json file in the output directory
with open(output_json_file, "w") as f:
    json.dump(artifact_debs, f, indent=4)
log.info(f"Done writing {output_json_file}")

View File

@ -112,7 +112,7 @@ def generate_matrix_artifacts(info):
artifact_name = artifact['in']['artifact_name']
desc = f"{artifact['out']['artifact_final_file_basename']}"
desc = f"{artifact['out']['artifact_name']}={artifact['out']['artifact_version']}"
inputs = artifact['in']['original_inputs']

View File

@ -0,0 +1,129 @@
#!/usr/bin/env python3
#
# SPDX-License-Identifier: GPL-2.0
# Copyright (c) 2023 Ricardo Pardini <ricardo@pardini.net>
# This file is a part of the Armbian Build Framework https://github.com/armbian/build/
#
import json
import logging
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from common import armbian_utils
# Prepare logging
armbian_utils.setup_logging()
log: logging.Logger = logging.getLogger("repo-reprepro")
# This is called like this:
# /usr/bin/python3 /armbian/lib/tools/info/repo-reprepro.py /armbian/output/info/debs-to-repo-info.json /armbian/output/info/reprepro /armbian/output/info/reprepro/conf
debs_info_json_path = sys.argv[1]  # input: debs-to-repo-info.json
reprepro_script_output_dir = sys.argv[2]  # output dir for the generated reprepro.sh
reprepro_conf_output_dir = sys.argv[3]  # output dir for the reprepro "conf" files
reprepro_conf_distributions_fn = os.path.join(reprepro_conf_output_dir, f"distributions")
reprepro_conf_options_fn = os.path.join(reprepro_conf_output_dir, f"options")
reprepro_output_script_fn = os.path.join(reprepro_script_output_dir, f"reprepro.sh")

# From the environment...
gpg_keyid = armbian_utils.get_from_env("REPO_GPG_KEYID")

# read the json file
with open(debs_info_json_path) as f:
    artifact_debs = json.load(f)

# Now aggregate all repo_targets and their artifacts.
# This will be used to generate the reprepro config file.
repo_targets: dict[str, list] = {}
for artifact in artifact_debs:
    one_repo_target = artifact["repo_target"]
    if one_repo_target not in repo_targets:
        repo_targets[one_repo_target] = []
    repo_targets[one_repo_target].append(artifact)

# For each target, accumulate one stanza of the shared "distributions" file.
log.info(f"Generating repo config...")
all_distro_lines: list[str] = []
for one_repo_target in repo_targets:
    distro_dict: dict[str, str] = {}
    distro_dict["Origin"] = f"Armbian origin {one_repo_target}"
    distro_dict["Label"] = f"Armbian label {one_repo_target}"
    distro_dict["Codename"] = f"{one_repo_target}"
    distro_dict["Suite"] = f"{one_repo_target}"
    distro_dict["Architectures"] = "amd64 armhf arm64 riscv64"
    distro_dict["Components"] = "main"
    distro_dict["Description"] = f"Apt repository for Armbian"
    # Sign only if a key id was passed via the environment.
    if (gpg_keyid is not None) and (gpg_keyid != ""):
        log.warning(f'Using REPO_GPG_KEYID from environment: {gpg_keyid}')
        distro_dict["SignWith"] = gpg_keyid
    else:
        log.warning(f"Didn't get REPO_GPG_KEYID from environment. Will not sign the repo.")
    for key in distro_dict:
        all_distro_lines.append(f"{key}: {distro_dict[key]}")
    all_distro_lines.append("")  # blank line separates stanzas

# create the reprepro distributions file covering all targets
with open(reprepro_conf_distributions_fn, "w") as f:
    for line in all_distro_lines:
        log.info(f"| {line}")
        f.write(f"{line}\n")
log.info(f"Wrote {reprepro_conf_distributions_fn}")

options: list[str] = []
options.append("verbose")
# options.append(f"basedir /armbian/output/repos/single-dir")
# create the reprepro options file
with open(reprepro_conf_options_fn, "w") as f:
    for option in options:
        f.write(f"{option}\n")
log.info(f"Wrote {reprepro_conf_options_fn}")

# Prepare the reprepro-invoking bash script; the ${...} vars below are expanded
# script-side at run time, not here.
bash_lines = [
    "#!/bin/bash",
    "set",  # NOTE(review): bare `set` dumps shell variables — confirm intended (vs `set -e`)
    'ls -laR "${INCOMING_DEBS_DIR}"'
]
# Copy the config files to the repo dir (from REPREPRO_INFO_DIR/conf to REPO_CONF_LOCATION script-side)
bash_lines.append('mkdir -p "${REPO_CONF_LOCATION}"')
bash_lines.append('cp -rv "${REPREPRO_INFO_DIR}/conf"/* "${REPO_CONF_LOCATION}"/')
for one_repo_target in repo_targets:
    artifacts = repo_targets[one_repo_target]
    log.info(f"Artifacts for target '{one_repo_target}': {len(artifacts)}")
    all_debs_to_include: list[str] = []
    # for each artifact
    for artifact in artifacts:
        # for each deb
        for key in artifact["debs"]:
            deb = artifact["debs"][key]
            relative_deb_path = deb["relative_deb_path"]
            all_debs_to_include.append(relative_deb_path)
    all_debs_to_include_quoted = ['"${INCOMING_DEBS_DIR}/' + x + '"' for x in all_debs_to_include]
    if len(all_debs_to_include) > 0:
        # add all debs to the repo
        cmds = ["reprepro", "-b", '"${REPO_LOCATION}"', "--component", "main", "includedeb", one_repo_target] + all_debs_to_include_quoted
        bash_lines.append(f"echo 'reprepro importing {len(all_debs_to_include_quoted)} debs for target {one_repo_target}...' ")
        bash_lines.append(" ".join(cmds))
# Always export at the end
export_cmds = ["reprepro", "-b", '"${REPO_LOCATION}"', "export"]
bash_lines.append(f"echo 'reprepro exporting...' ")
bash_lines.append(" ".join(export_cmds))

with open(reprepro_output_script_fn, "w") as f:
    for line in bash_lines:
        f.write(f"{line}\n")
log.info(f"Wrote {reprepro_output_script_fn}")