#!/usr/bin/env python3
#
# SPDX-License-Identifier: GPL-2.0
#
# Copyright (c) 2013-2023 Igor Pecovnik, igor@armbian.com
#
# This file is a part of the Armbian Build Framework
# https://github.com/armbian/build/
#

import concurrent.futures
import glob
import json
import logging
import multiprocessing
import os
import re
import subprocess
import sys
from pathlib import Path

# One or more whitespace characters (including line breaks), commas, or semicolons:
# the token separators accepted by parse_env_for_tokens() below.
REGEX_WHITESPACE_LINEBREAK_COMMA_SEMICOLON = r"[\s,;\n]+"

# A top-level (non-indented) UPPER_CASE variable assignment in a board config file:
# the value must be single- or double-quoted, and an optional `export ` or
# `declare -g ` prefix is tolerated (loosely, via a repeated character class).
# Captures the variable name and its unquoted value.
ARMBIAN_BOARD_CONFIG_REGEX_GENERIC = r"^(?!\s)(?:[export |declare \-g]?)+([A-Z0-9_]+)=(?:'|\")(.*)(?:'|\")"
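
# For illustration (hypothetical board-file lines, used only in this comment):
# what the generic regex captures for each accepted prefix form:
#
#   BOARD_NAME="Some Board"                  -> ("BOARD_NAME", "Some Board")
#   export BOOTCONFIG='some_defconfig'       -> ("BOOTCONFIG", "some_defconfig")
#   declare -g KERNEL_TARGET="current,edge"  -> ("KERNEL_TARGET", "current,edge")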

log: logging.Logger = logging.getLogger("armbian_utils")


def parse_env_for_tokens(env_name):
    result = []
    # Read the environment; if None, return an empty list.
    val = os.environ.get(env_name, None)
    if val is None:
        return result
    # Tokenize val: split by whitespace, line breaks, commas, and semicolons.
    tokens = re.split(REGEX_WHITESPACE_LINEBREAK_COMMA_SEMICOLON, val)
    # Trim whitespace from each token and drop empty tokens.
    return [token for token in [token.strip() for token in tokens] if token != ""]
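
# Example (hypothetical value): with TARGETS_FILTER_INCLUDE set to
# "BOARD:odroidhc4, BOARD:odroidn2", parse_env_for_tokens("TARGETS_FILTER_INCLUDE")
# returns ["BOARD:odroidhc4", "BOARD:odroidn2"]; commas, semicolons, and any
# whitespace all separate tokens.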


def get_from_env(env_name, default=None):
    # Fetch an environment variable; strip surrounding whitespace from any value found.
    value = os.environ.get(env_name, default)
    if value is not None:
        value = value.strip()
    return value


def get_from_env_or_bomb(env_name):
    # Like get_from_env(), but a missing or empty value is a fatal error.
    value = get_from_env(env_name)
    if value is None:
        raise Exception(f"{env_name} environment var not set")
    if value == "":
        raise Exception(f"{env_name} environment var is empty")
    return value


def yes_or_no_or_bomb(value):
    # Strictly parse "yes"/"no" into a boolean; anything else is a fatal error.
    if value == "yes":
        return True
    if value == "no":
        return False
    raise Exception(f"Expected yes or no, got {value}")


def show_incoming_environment():
    # Dump the incoming environment at debug level, one variable per line.
    log.debug("--ENV-- Environment:")
    for key in os.environ:
        log.debug(f"--ENV-- {key}={os.environ[key]}")


def is_debug():
    # LOG_DEBUG=yes in the environment turns on debug logging.
    return get_from_env("LOG_DEBUG") == "yes"
|
|
|
|
|
|
|
|
|
|
|
armbian-next: Python patching delusion, pt1 & pt2 & pt3
def setup_logging():
    try:
        import coloredlogs
        level = "INFO"
        if is_debug():
            level = "DEBUG"
        format = "%(message)s"
        styles = {
            'trace': {'color': 'white', 'bold': False},
            'debug': {'color': 'white', 'bold': False},
            'info': {'color': 'green', 'bold': True},
            'warning': {'color': 'yellow', 'bold': True},
            'error': {'color': 'red'},
            'critical': {'bold': True, 'color': 'red'}
        }
        coloredlogs.install(level=level, stream=sys.stderr, isatty=True, fmt=format, level_styles=styles)
    except ImportError:
        # coloredlogs is optional; fall back to plain stdlib logging if it is not installed.
        level = logging.INFO
        if is_debug():
            level = logging.DEBUG
        logging.basicConfig(level=level, stream=sys.stderr)

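
# A minimal usage sketch for the logging setup above (assumptions: `is_debug()` and the
# module-level `log` used throughout this file are defined elsewhere in it; the logger
# name below is hypothetical, for illustration only):
#
#   setup_logging()
#   log = logging.getLogger("armbian-info")
#   log.info("inventory starting")
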
def parse_json(json_contents_str):
    import json
    return json.loads(json_contents_str)

def to_yaml(gha_workflow):
    import yaml
    # explicit_start emits the leading "---"; the large width keeps long expressions on one line.
    return yaml.safe_dump(gha_workflow, explicit_start=True, default_flow_style=False, sort_keys=False, allow_unicode=True, indent=2, width=1000)

# Read the first line of the board file: it is the hardware description, in a pound comment.
# Also parse top-level variables such as 'KERNEL_TARGET="legacy,current,edge"'.
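# For illustration only: a hypothetical config/boards/someboard.conf is assumed to start
# with the pound-comment hardware description, followed by quoted UPPER_CASE variables
# (bare, `export`, or `declare -g`) -- the board name and values below are made up:
#
#   # Some Vendor X1 quad-core 4GB RAM eMMC GBE WiFi
#   BOARD_NAME="Some Board X1"
#   BOARDFAMILY="rockchip64"
#   BOARD_MAINTAINER="someuser"
#   KERNEL_TARGET="legacy,current,edge"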
def armbian_parse_board_file_for_static_info(board_file, board_id, core_or_userpatched):
    with open(board_file, 'r') as file_handle:
        file_lines = file_handle.readlines()

    # The first line is the hardware description; take it off the top.
    hw_desc_line = file_lines.pop(0)

    hw_desc_clean = None
    if hw_desc_line.startswith("# "):
        hw_desc_clean = hw_desc_line.strip("# ").strip("\n")

    # Parse generic top-level bash vars, with a horrendous regex.
    generic_vars = {}
    generic_var_matches = re.findall(ARMBIAN_BOARD_CONFIG_REGEX_GENERIC, "\n".join(file_lines), re.MULTILINE)
    for generic_var_match in generic_var_matches:
        generic_vars[generic_var_match[0]] = generic_var_match[1]

    # KERNEL_TARGET is a comma-separated list of the possible BRANCHes for this board.
    kernel_targets = []
    if "KERNEL_TARGET" in generic_vars:
        kernel_targets = generic_vars["KERNEL_TARGET"].split(",")
    if (len(kernel_targets) == 0) or (kernel_targets[0] == ""):
        log.warning(f"KERNEL_TARGET not found in '{board_file}'; syntax error? missing quotes? stray comma?")

    # BOARD_MAINTAINER is a space-separated list of GitHub usernames; empty is allowed,
    # but core boards without it get a warning.
    maintainers = []
    if "BOARD_MAINTAINER" in generic_vars:
        maintainers = generic_vars["BOARD_MAINTAINER"].split(" ")
        maintainers = list(filter(None, maintainers))
    else:
        if core_or_userpatched == "core":
            log.warning(f"BOARD_MAINTAINER not found in '{board_file}'; syntax error? missing quotes? stray space? missing info?")

    # HAS_VIDEO_OUTPUT="no" marks a board as headless; boards are assumed to have video by default.
    board_has_video = True
    if "HAS_VIDEO_OUTPUT" in generic_vars:
        if generic_vars["HAS_VIDEO_OUTPUT"] == "no":
            board_has_video = False

    if "BOARDFAMILY" not in generic_vars:
        log.warning(f"BOARDFAMILY not found in '{board_file}'; syntax error? missing quotes?")

    # Add some more vars that are not in the board file, so we have a complete
    # BOARD_TOP_LEVEL_VARS as well as the same info at the first level of the result.
    extras: list[dict[str, any]] = [
        {"name": "BOARD", "value": board_id},
        {"name": "BOARD_SUPPORT_LEVEL", "value": (Path(board_file).suffix)[1:]},  # the board file extension, minus the dot
        {"name": "BOARD_FILE_HARDWARE_DESC", "value": hw_desc_clean},
        {"name": "BOARD_POSSIBLE_BRANCHES", "value": kernel_targets},
        {"name": "BOARD_MAINTAINERS", "value": maintainers},
        {"name": "BOARD_HAS_VIDEO", "value": board_has_video},
        {"name": "BOARD_CORE_OR_USERPATCHED", "value": core_or_userpatched}
    ]

    # Merge the extras into the generic_vars dict...
    for extra in extras:
        generic_vars[extra["name"]] = extra["value"]

    ret = {"BOARD_TOP_LEVEL_VARS": generic_vars}
    # ...and also expose them at the top level of the returned dict.
    for extra in extras:
        ret[extra["name"]] = extra["value"]

    return ret
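
# For illustration (an assumption; the shape follows directly from the code above), parsing
# the hypothetical board file sketched earlier would return roughly:
#
#   {
#       "BOARD_TOP_LEVEL_VARS": {"BOARD_NAME": "Some Board X1", "BOARDFAMILY": "rockchip64",
#                                "BOARD": "someboard", ...},
#       "BOARD": "someboard",
#       "BOARD_SUPPORT_LEVEL": "conf",
#       "BOARD_FILE_HARDWARE_DESC": "Some Vendor X1 quad-core 4GB RAM eMMC GBE WiFi",
#       "BOARD_POSSIBLE_BRANCHES": ["legacy", "current", "edge"],
#       "BOARD_MAINTAINERS": ["someuser"],
#       "BOARD_HAS_VIDEO": True,
#       "BOARD_CORE_OR_USERPATCHED": "core"
#   }
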

def armbian_get_all_boards_list(boards_path):
    ret = {}
    for file in glob.glob(boards_path + "/*.*"):
        stem = Path(file).stem
        if stem != "README":
            ret[stem] = file
    return ret

def find_armbian_src_path():
    # Find the location of compile.sh, relative to this Python script (three levels up).
    this_script_full_path = os.path.realpath(__file__)
    log.debug(f"Real path to this script: '{this_script_full_path}'")

    armbian_src_path = os.path.realpath(os.path.join(os.path.dirname(this_script_full_path), "..", "..", ".."))
    log.debug(f"Real path to Armbian SRC '{armbian_src_path}'")

    compile_sh_full_path = os.path.realpath(os.path.join(armbian_src_path, "compile.sh"))
    log.debug(f"Real path to compile.sh '{compile_sh_full_path}'")

    # Make sure it exists
    if not os.path.exists(compile_sh_full_path):
        raise Exception("Can't find compile.sh")

    core_boards_path = os.path.realpath(os.path.join(armbian_src_path, "config", "boards"))
    log.debug(f"Real path to core boards '{core_boards_path}'")

    # Make sure it exists
    if not os.path.exists(core_boards_path):
        raise Exception("Can't find config/boards")

    # userspace stuff
    core_distributions_path = os.path.realpath(os.path.join(armbian_src_path, "config", "distributions"))
    log.debug(f"Real path to core distributions '{core_distributions_path}'")
    # Make sure it exists
    if not os.path.exists(core_distributions_path):
        raise Exception("Can't find config/distributions")

    core_desktop_path = os.path.realpath(os.path.join(armbian_src_path, "config", "desktop"))
    log.debug(f"Real path to core desktop '{core_desktop_path}'")
    # Make sure it exists
    if not os.path.exists(core_desktop_path):
        raise Exception("Can't find config/desktop")

    # userpatches boards are optional; just remember whether the path exists.
    userpatches_boards_path = os.path.realpath(os.path.join(armbian_src_path, "userpatches", "config", "boards"))
    log.debug(f"Real path to userpatches boards '{userpatches_boards_path}'")
    has_userpatches_path = os.path.exists(userpatches_boards_path)

    return {
        "armbian_src_path": armbian_src_path, "compile_sh_full_path": compile_sh_full_path, "core_boards_path": core_boards_path,
        "core_distributions_path": core_distributions_path, "core_desktop_path": core_desktop_path,
        "userpatches_boards_path": userpatches_boards_path, "has_userpatches_path": has_userpatches_path
    }
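
# Assumed source-tree layout, inferred from the path math above (not verified beyond the
# existence checks performed by the function):
#
#   <armbian_src_path>/compile.sh
#   <armbian_src_path>/config/boards/
#   <armbian_src_path>/config/distributions/<distro>/{name,support,architectures}
#   <armbian_src_path>/config/desktop/<distro>/environments/<env>/{support,architectures}
#   <armbian_src_path>/userpatches/config/boards/        (optional)
#   <armbian_src_path>/<dir>/<dir>/<dir>/this-script.py  (hence the three "..")
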

def read_one_distro_config_file(filename):
    # Read the contents of the filename passed in and return it as a string, trimmed.
    with open(filename, 'r') as file_handle:
        file_contents = file_handle.read()
    return file_contents.strip()


def split_commas_and_clean_into_list(string):
    ret = []
    for item in string.split(","):
        item = item.strip()
        if item != "":
            ret.append(item)
    return ret
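
# For example: split_commas_and_clean_into_list("arm64, armhf, ") returns
# ["arm64", "armhf"]; whitespace is trimmed and empty items are dropped.
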

def get_desktop_inventory_for_distro(distro, armbian_paths):
    ret = []
    desktops_path = armbian_paths["core_desktop_path"]
    envs_path_for_distro = os.path.join(desktops_path, distro, "environments")
    if not os.path.exists(envs_path_for_distro):
        log.warning(f"Can't find desktop environments for distro '{distro}' at '{envs_path_for_distro}'")
        return ret
    for env in os.listdir(envs_path_for_distro):
        one_env_path = os.path.join(envs_path_for_distro, env)
        if not os.path.isdir(one_env_path):
            continue
        log.debug(f"Processing desktop '{env}' for distro '{distro}'")
        support_file_path = os.path.join(one_env_path, "support")
        arches_file_path = os.path.join(one_env_path, "architectures")
        if not os.path.exists(support_file_path):
            log.warning(f"Can't find desktop support file for distro '{distro}' and environment '{env}' at '{support_file_path}'")
            continue
        if not os.path.exists(arches_file_path):
            log.warning(f"Can't find desktop arches file for distro '{distro}' and environment '{env}' at '{arches_file_path}'")
            continue

        env_main_info = {
            "id": env,
            "support": read_one_distro_config_file(support_file_path),
            "arches": split_commas_and_clean_into_list(read_one_distro_config_file(arches_file_path))
        }
        ret.append(env_main_info)

    return ret


def armbian_get_all_userspace_inventory():
    armbian_paths = find_armbian_src_path()
    distros_path = armbian_paths["core_distributions_path"]
    all_distros = []
    # find and loop over every directory in distros_path, including symlinks
    for distro in os.listdir(distros_path):
        one_distro_path = os.path.join(distros_path, distro)
        if not os.path.isdir(one_distro_path):
            continue
        log.debug(f"Processing distro '{distro}'")
        support_file_path = os.path.join(one_distro_path, "support")
        arches_file_path = os.path.join(one_distro_path, "architectures")
        name_file_path = os.path.join(one_distro_path, "name")
        distro_main_info = {
            "id": distro,
            "name": read_one_distro_config_file(name_file_path),
            "support": read_one_distro_config_file(support_file_path),
            "arches": split_commas_and_clean_into_list(read_one_distro_config_file(arches_file_path)),
            "desktops": get_desktop_inventory_for_distro(distro, armbian_paths)
        }
        all_distros.append(distro_main_info)

    return all_distros
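
# For illustration (an assumption; the field values are invented, the shape follows the
# code above), one entry of the returned list might look like:
#
#   {"id": "jammy", "name": "Jammy Jellyfish", "support": "supported",
#    "arches": ["arm64", "armhf"],
#    "desktops": [{"id": "xfce", "support": "supported", "arches": ["arm64", "armhf"]}]}
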
def armbian_get_all_boards_inventory():
|
|
|
|
|
armbian_paths = find_armbian_src_path()
|
|
|
|
|
core_boards = armbian_get_all_boards_list(armbian_paths["core_boards_path"])
|
|
|
|
|
|
|
|
|
|
# first, gather the board_info for every core board. if any fail, stop.
|
|
|
|
|
info_for_board = {}
|
|
|
|
|
for board in core_boards.keys():
|
pipeline: inventory all board vars; add `not-eos-with-video`; introduce `TARGETS_FILTER_INCLUDE`
> How to use:
>
> `./compile.sh inventory` - does just the board inventory; look for output in `output/info`
>
> `./compile.sh targets-dashboard` - does inventory, targets compositing, and images info; look for output in `output/info`, read the instructions output by the command if you want to load the OpenSearch dashboards.
>
> `./compile.sh targets` - does the full targets compositing and artifacts, look for output in `output/info`
>
> If you don't have a `userpatches/targets.yaml`, _one will be provided for you_ defaulting to Jammy minimal CLI
> and Jammy xfce desktop, for all boards in all branches. You can pass filters via `TARGETS_FILTER_INCLUDE=...` to narrow.
>
- board JSON inventory:
- more generic regex parsing of variables from board files:
- all top-level (non-indented) variables are parsed and included in the JSON board inventory
- this allows us to add new variables to the board files without having to update the parser
- variables can be bare, `export` or `declare -g`, but **_must_ be quoted** (single or double) and UPPER_CASE
- some special treatment for certain variables:
- `KERNEL_TARGET` is parsed as a _comma-separated_ list of valid BRANCH'es
- `BOARD_MAINTAINER` is parsed as _space-separated_ list of valid maintainer GH usernames as `BOARD_MAINTAINERS: [...]` in the JSON
- script complains if `BOARD_MAINTAINER` is not set in core boards. Empty is still allowed.
- `HAS_VIDEO_OUTPUT="no"` causes `BOARD_HAS_VIDEO: false` in the JSON (for desktop-only inventorying, see below)
- introduce `not-eos-with-video` in `items-from-inventory` at the targets compositor
- the same as `not-eos`, but with added `BOARD_HAS_VIDEO: true` filter, see above
- introduce `TARGETS_FILTER_INCLUDE` for targets compositor
- this filters the targets _after_ compositing (but before getting image info), based on the board inventory data
- it's a comma-separated list of `key:value` pairs, which are OR-ed together
- new virtual info `BOARD_SLASH_BRANCH` post-compositing inventory for filtering of a specific BOARD/BRANCH combo (e.g. `odroidhc4/edge`)
- some interesting possible filters:
- `TARGETS_FILTER_INCLUDE="BOARD:odroidhc4"`: _only_ build a single board, all branches. JIRA [AR-1806]
- `TARGETS_FILTER_INCLUDE="BOARD_SLASH_BRANCH:odroidhc4/current"`: _only_ build a single board/branch combo
- `TARGETS_FILTER_INCLUDE="BOARD:odroidhc4,BOARD:odroidn2"`: _only_ build _two_ boards, all branches.
- `TARGETS_FILTER_INCLUDE="BOARD_MAINTAINERS:rpardini"`: build all boards and branches where rpardini is a maintainer
- `TARGETS_FILTER_INCLUDE="BOARDFAMILY:rockchip64"`: build all boards and branches in the rockchip64 family
- image-info-only variables like `LINUXFAMILY` is **not** available for filtering at this stage
- rename `config/templates` `targets-all-cli.yaml` to `targets-default.yaml`
- this is used when no `userpatches/targets.yaml` is found
- new default includes all boards vs branches for non-EOS boards
- also desktop for all boards that _don't_ have `HAS_VIDEO_OUTPUT='no``
- introduce simplified `targets-dashboard` CLI:
- does only inventory, compositing, and image info, but not artifact reducing, etc.
- ignore desktop builds in the OpenSearch indexer
- update the OpenSearch Dashboards, including new information now available
- invert the logic used for `CLEAN_INFO` and `CLEAN_MATRIX`
- defaults to `yes` now, so new users/CI don't get hit by stale caches by default
- repo pipeline CLI stuff is usually run on saved/restored artifacts for `output/info`, so don't clean by default via the CLI
2023-07-25 14:50:22 +00:00
|
|
|
board_info = armbian_parse_board_file_for_static_info(core_boards[board], board, "core")
|
🔥 JSON info pipeline: v18
- pipeline: add `pipeline` context object to targets; use it to filter artifacts and images to build; warn about oci-name with multiple oci-tags
- pipeline: better error messages when info's fail; show some (out-of-order) progress messages during parallel info gathering
- pipeline: targets-compositor: add `not-eos` inventory
- TARGETS_FILENAME, log all OCI lookups
- SKIP_IMAGES
- IMAGES_ONLY_OUTDATED_ARTIFACTS
- no dash in chunk id in JSON
- pipeline: very initial chunking, using the same outputs
- pipeline: template targets, `items-from-inventory:` inventory expansion, CHECK_OCI=yes, CLEAN_MATRIX=yes, CLEAN_INFO=yes, many fixes
- cli: `inventory` / `targets` / `matrix` / `workflow`
- pipeline: workflow beginnings
- pipeline: general log cleanup + OCI stats / better miss handling
- pipeline: fixes/reorg
- pipeline: catch & log JSON parsing errors
- pipeline: gha matrix: use IMAGE_FILE_ID as job description
- pipeline (delusion): gha workflow output, based on old matrix code
- pipeline: better parsing and reporting of stderr log lines (under `ANSI_COLOR=none`)
- pipeline: mapper-oci-uptodate: use separate positive/negative cache dirs (GHA will only cache positives); cache negs for 5 minutes locally
- pipeline: output-gha-matrix artifacts + images
- pipeline: output-gha-matrix artifacts + images: "really" and fake 1-item matrix if empty
- pipeline: move files into subdir; update copyright & cleanup
- pipeline: refactor bash jsoninfo driver a bit
- pipeline: outdated-artifact-image-reducer
- pipeline: introduce `target_id` at the compositor, aggregate it at the reducer, carry it over in the artifact info mapper
- pipeline: mapper-oci-uptodate
- pipeline: info-gatherer-artifact, with PRE_PREPARED_HOST
- pipeline: refactor/rename info-gatherer-image.py
- pipeline: beginnings
2022-12-30 11:20:53 +00:00
|
|
|
# Core boards must have the KERNEL_TARGET defined.
|
|
|
|
|
if "BOARD_POSSIBLE_BRANCHES" not in board_info:
|
|
|
|
|
raise Exception(f"Core board '{board}' must have KERNEL_TARGET defined")
|
|
|
|
|
info_for_board[board] = board_info
|
|
|
|
|
|
|
|
|
|
# Now go for the userpatched boards. Those can be all-new, or they can be patches to existing boards.
|
|
|
|
|
if armbian_paths["has_userpatches_path"]:
|
|
|
|
|
userpatched_boards = armbian_get_all_boards_list(armbian_paths["userpatches_boards_path"])
|
|
|
|
|
for uboard_name in userpatched_boards.keys():
|
pipeline: inventory all board vars; add `not-eos-with-video`; introduce `TARGETS_FILTER_INCLUDE`
> How to use:
>
> `./compile.sh inventory` - does just the board inventory; look for output in `output/info`
>
> `./compile.sh targets-dashboard` - does inventory, targets compositing, and images info; look for output in `output/info`, and read the instructions output by the command if you want to load the OpenSearch dashboards.
>
> `./compile.sh targets` - does the full targets compositing and artifacts; look for output in `output/info`
>
> If you don't have a `userpatches/targets.yaml`, _one will be provided for you_, defaulting to Jammy minimal CLI
> and Jammy xfce desktop, for all boards in all branches. You can pass filters via `TARGETS_FILTER_INCLUDE=...` to narrow that down.
>
- board JSON inventory:
  - more generic regex parsing of variables from board files:
    - all top-level (non-indented) variables are parsed and included in the JSON board inventory
    - this allows us to add new variables to the board files without having to update the parser
    - variables can be bare, `export` or `declare -g`, but **_must_ be quoted** (single or double) and UPPER_CASE
  - some special treatment for certain variables:
    - `KERNEL_TARGET` is parsed as a _comma-separated_ list of valid BRANCHes
    - `BOARD_MAINTAINER` is parsed as a _space-separated_ list of valid maintainer GH usernames, emitted as `BOARD_MAINTAINERS: [...]` in the JSON
    - the script complains if `BOARD_MAINTAINER` is not set in core boards; an empty value is still allowed
    - `HAS_VIDEO_OUTPUT="no"` causes `BOARD_HAS_VIDEO: false` in the JSON (for desktop-only inventorying, see below)
- introduce `not-eos-with-video` in `items-from-inventory` at the targets compositor
  - the same as `not-eos`, but with an added `BOARD_HAS_VIDEO: true` filter, see above
- introduce `TARGETS_FILTER_INCLUDE` for the targets compositor
  - this filters the targets _after_ compositing (but before getting image info), based on the board inventory data
  - it's a comma-separated list of `key:value` pairs, which are OR-ed together (see the sketches after this list)
  - new virtual info `BOARD_SLASH_BRANCH` in the post-compositing inventory allows filtering of a specific BOARD/BRANCH combo (e.g. `odroidhc4/edge`)
  - some interesting possible filters:
    - `TARGETS_FILTER_INCLUDE="BOARD:odroidhc4"`: _only_ build a single board, all branches. JIRA [AR-1806]
    - `TARGETS_FILTER_INCLUDE="BOARD_SLASH_BRANCH:odroidhc4/current"`: _only_ build a single board/branch combo
    - `TARGETS_FILTER_INCLUDE="BOARD:odroidhc4,BOARD:odroidn2"`: _only_ build _two_ boards, all branches
    - `TARGETS_FILTER_INCLUDE="BOARD_MAINTAINERS:rpardini"`: build all boards and branches where rpardini is a maintainer
    - `TARGETS_FILTER_INCLUDE="BOARDFAMILY:rockchip64"`: build all boards and branches in the rockchip64 family
  - image-info-only variables like `LINUXFAMILY` are **not** available for filtering at this stage
- rename `config/templates` `targets-all-cli.yaml` to `targets-default.yaml`
  - this is used when no `userpatches/targets.yaml` is found
  - the new default includes all boards vs branches for non-EOS boards
  - also desktop for all boards that _don't_ have `HAS_VIDEO_OUTPUT='no'`
- introduce simplified `targets-dashboard` CLI:
  - does only inventory, compositing, and image info, but not artifact reducing, etc.
- ignore desktop builds in the OpenSearch indexer
- update the OpenSearch Dashboards, including new information now available
- invert the logic used for `CLEAN_INFO` and `CLEAN_MATRIX`
  - they default to `yes` now, so new users/CI don't get hit by stale caches by default
  - repo pipeline CLI stuff is usually run on saved/restored artifacts for `output/info`, so the CLI side does not clean by default
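
A couple of minimal sketches (illustrative only; names like `BOARD_VAR_RE`, `parse_filter` and `target_matches` are made up here, not the actual inventory/compositor code):

```python
import re

# Sketch of "generic regex parsing" of top-level, quoted, UPPER_CASE board-file variables:
BOARD_VAR_RE = re.compile(r"""^(?:export\s+|declare -g\s+)?([A-Z][A-Z0-9_]*)=(["'])(.*?)\2""", re.MULTILINE)
board_file = 'BOARD_NAME="Odroid HC4"\nexport KERNEL_TARGET="current,edge"\nHAS_VIDEO_OUTPUT=\'no\'\n'
parsed_vars = {m.group(1): m.group(3) for m in BOARD_VAR_RE.finditer(board_file)}
# -> {'BOARD_NAME': 'Odroid HC4', 'KERNEL_TARGET': 'current,edge', 'HAS_VIDEO_OUTPUT': 'no'}

# Sketch of the OR-ed `TARGETS_FILTER_INCLUDE` semantics:
def parse_filter(filter_str: str) -> list[tuple[str, str]]:
    pairs = [pair.split(":", 1) for pair in filter_str.split(",") if pair]
    return [(key, value) for key, value in pairs]

def target_matches(target: dict, filters: list[tuple[str, str]]) -> bool:
    # a target passes if ANY key:value pair matches its inventory data;
    # list-valued keys (e.g. BOARD_MAINTAINERS) would need a membership check instead
    return any(str(target.get(key)) == value for key, value in filters)

targets = [
    {"BOARD": "odroidhc4", "BOARD_SLASH_BRANCH": "odroidhc4/edge"},
    {"BOARD": "odroidn2", "BOARD_SLASH_BRANCH": "odroidn2/current"},
    {"BOARD": "rockpi-4b", "BOARD_SLASH_BRANCH": "rockpi-4b/current"},
]
filters = parse_filter("BOARD_SLASH_BRANCH:odroidhc4/edge,BOARD:odroidn2")
kept = [t for t in targets if target_matches(t, filters)]  # keeps the first two targets
```
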
2023-07-25 14:50:22 +00:00
        uboard = armbian_parse_board_file_for_static_info(userpatched_boards[uboard_name], uboard_name, "userpatched")
        is_new_board = uboard_name not in info_for_board
        if is_new_board:
            log.debug(f"Userpatched Board {uboard_name} is new")
            # New userpatched boards must have KERNEL_TARGET defined (parsed into BOARD_POSSIBLE_BRANCHES).
            if "BOARD_POSSIBLE_BRANCHES" not in uboard:
                raise Exception(f"NEW userpatched board '{uboard_name}' must have KERNEL_TARGET defined")
            info_for_board[uboard_name] = uboard
        else:
            log.debug(f"Userpatched Board {uboard_name} is already in core boards")
            # merge: userpatched values override the core board's values
            info_for_board[uboard_name] = {**info_for_board[uboard_name], **uboard}

return info_for_board
def map_to_armbian_params(map_params: dict[str, str], quote_params: bool = False) -> list[str]:
    ret = []
    for param in map_params:
        ret.append(param + "=" + map_params[param])
    if quote_params:
        ret = ["'" + param + "'" for param in ret]  # single-quote each param...
    return ret
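
# Example (illustrative):
#   map_to_armbian_params({"BOARD": "odroidhc4", "BRANCH": "current"})
#   -> ['BOARD=odroidhc4', 'BRANCH=current']
#   map_to_armbian_params({"BOARD": "odroidhc4"}, quote_params=True)
#   -> ["'BOARD=odroidhc4'"]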
def armbian_run_command_and_parse_json_from_stdout(exec_cmd: list[str], params: dict):
    result = None
    logs = []
    try:
log.debug(f"Start calling Armbian command: {' '.join(exec_cmd)}")
        result = subprocess.run(
            exec_cmd,
            stdout=subprocess.PIPE,
            check=True,
            universal_newlines=False,  # universal_newlines messes up bash's output encoding; don't use it, decode utf-8 manually instead
            bufsize=-1,  # full buffering
            # These are read early (pre-param-parsing) by the Armbian bash code, so pass them as ENV vars (not PARAMs).
            env={
                "CONFIG_DEFS_ONLY": "yes",  # Don't do anything; just output the config vars.
                "ANSI_COLOR": "none",  # Do not use ANSI colors in logging output, don't write to log files
                "WRITE_EXTENSIONS_METADATA": "no",  # Not interested in extensions metadata here
                "ALLOW_ROOT": "yes",  # We're gonna be calling it as root, so allow it @TODO not the best option
                "PRE_PREPARED_HOST": "yes"  # Assume the host is already prepared, skip host prep @TODO not the best option
            },
            stderr=subprocess.PIPE
        )
    except subprocess.CalledProcessError as e:
        # decode utf8 manually; universal_newlines messes up bash encoding
        logs = parse_log_lines_from_stderr(e.stderr)
        if e.returncode == 44:
            # special handling for exit_with_target_not_supported_error() in armbian core
            log.warning(f"Skipped target: {' '.join(exec_cmd)}")
            log.warning(f"Skipped target details: {'; '.join(logs[-5:])}")
            return {"in": params, "out": {}, "logs": logs, "config_ok": False, "target_not_supported": True}
        else:
            log.error(f"Error calling Armbian command: {' '.join(exec_cmd)}")
            log.error(f"Error details 1: params: {params}")
            log.error(f"Error details 2: code: {e.returncode} - {'; '.join(logs[-5:])}")
            return {"in": params, "out": {}, "logs": logs, "config_ok": False}
    if result is not None:
        if result.stderr:
            logs = parse_log_lines_from_stderr(result.stderr)

    # parse the result.stdout as JSON
    try:
        parsed = json.loads(result.stdout.decode("utf8"))
        info = {"in": params, "out": parsed, "config_ok": True}
        info["logs"] = logs
        return info
    except json.decoder.JSONDecodeError as e:
        log.error(f"Error parsing Armbian JSON: params: {params}, stderr: {'; '.join(logs[-5:])}")
        # return {"in": params, "out": {}, "logs": logs, "config_ok": False}
        raise e
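
# Shape of a successful return value (illustrative; "out" carries whatever JSON the Armbian command printed on stdout):
#   {"in": {...the params passed in...}, "out": {...parsed JSON...}, "logs": [...], "config_ok": True}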
def parse_log_lines_from_stderr(lines_stderr: bytes) -> list[str]:
    # decode utf-8 and split into lines
    lines = lines_stderr.decode("utf8").split("\n")
    # trim lines, remove empty ones
    logs = [line.strip() for line in lines if line.strip()]
    # split each line at the first occurrence of two colons ("::")
    result = []
    for line in logs:
        line = line.strip()
        if not line:
            continue
        parts = line.split("::", 1)
        if len(parts) != 2:
            # most probably something that leaked past the logging manager; grab it anyway
            result.append("[LEAKED]:" + line.strip())
            continue
        msg_type = parts[0].strip()  # 'msg_type' instead of 'type', to avoid shadowing the builtin
        msg = parts[1].strip()
        # for error/warning lines (type begins with "err", "warn" or "wrn"), drop some redundant noise
        if msg_type.startswith("err") or msg_type.startswith("warn") or msg_type.startswith("wrn"):
            if ("Exiting with error " in msg) or ("please wait for cleanups to finish" in msg):
                continue
        result.append(f"{msg_type}: {msg}")
    return result
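
# Example (illustrative):
#   parse_log_lines_from_stderr(b"wrn:: deprecated option\nsome leaked line\nerr:: Exiting with error 99\n")
#   -> ['wrn: deprecated option', '[LEAKED]:some leaked line']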
def gather_json_output_from_armbian(command: str, targets: list[dict]):
    armbian_paths = find_armbian_src_path()
    # now loop over the targets, gathering info for each
    every_info = []
    use_parallel: bool = True
    if use_parallel:
        counter = 0
        total = len(targets)
        # get the number of processor cores on this machine
        max_workers = multiprocessing.cpu_count() * 4  # use four times the number of cpu cores; that's the sweet spot here
log.info(f"Using {max_workers} workers for parallel processing.")
|
|
|
|
|
with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
|
|
|
|
|
every_future = []
|
|
|
|
|
for target in targets:
|
|
|
|
|
counter += 1
|
|
|
|
|
future = executor.submit(get_info_for_one_build, armbian_paths, command, target, counter, total)
|
|
|
|
|
every_future.append(future)
|
|
|
|
|
|
|
|
|
|
log.info(f"Submitted {len(every_future)} jobs to the parallel executor. Waiting for them to finish...")
|
|
|
|
|
executor.shutdown(wait=True)
|
|
|
|
|
log.info(f"All jobs finished!")
|
|
|
|
|
|
|
|
|
|
for future in every_future:
|
|
|
|
|
info = future.result()
|
|
|
|
|
if info is not None:
|
|
|
|
|
every_info.append(info)
|
|
|
|
|
else:
|
|
|
|
|
for target in targets:
|
|
|
|
|
info = get_info_for_one_build(armbian_paths, command, target)
|
|
|
|
|
if info is not None:
|
|
|
|
|
every_info.append(info)
|
|
|
|
|
|
|
|
|
|
return every_info
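
# Example (illustrative; the command name is a placeholder, and real targets come from the targets compositor):
#   targets = [{"vars": {"BOARD": "some-board", "BRANCH": "current"}, "configs": []}]
#   every_info = gather_json_output_from_armbian("some-info-command", targets)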
def get_info_for_one_build(armbian_paths: dict[str, str], command: str, params: dict, counter: int, total: int):
    try:
        try:
            sh: str = armbian_paths["compile_sh_full_path"]
            cmds: list[str] = ([sh] + [command] + map_to_armbian_params(params["vars"]) + params["configs"])
            parsed = armbian_run_command_and_parse_json_from_stdout(cmds, params)
            return parsed
        except BaseException as e:
            # deliberately broad catch: a failing target should yield an error record, not crash the worker process
            log.error(f"Failed to get info for build '{command}' '{params}': '{e}'", exc_info=True)
            return {"ARMBIAN_CONFIG_OK": False, "PYTHON_INFO_ERROR": str(e), "INPUT": params}
    finally:
        if counter % 10 == 0:
            log.info(f"Processed {counter} / {total} targets.")