# hassio-addons/.github/workflows/onpush_builder.yaml
# yamllint disable rule:line-length
---
name: Builder
# NOTE: `on` is read as a boolean key by generic YAML 1.1 parsers; GitHub's
# own loader handles it correctly, so yamllint's `truthy` rule is irrelevant here.
on:
  workflow_call:
  push:
    branches:
      - master
    paths:
      # Only rebuild when an add-on's config file (json/yaml/yml) changes.
      - "**/config.*"

jobs:
detect-changed-addons:
if: >-
${{
github.repository_owner == 'alexbelgium' &&
(github.event_name != 'push' || !contains(github.event.head_commit.message, 'nobuild'))
}}
runs-on: ubuntu-latest
outputs:
changedAddons: ${{ steps.find_addons.outputs.changed_addons }}
steps:
- name: Checkout repo
uses: actions/checkout@v6
with:
fetch-depth: 0
- name: Find add-on directories to process
id: find_addons
run: |
set -euo pipefail
if [ "${{ github.event_name }}" = "push" ]; then
before="${{ github.event.before }}"
if [ -n "$before" ] && [ "$before" != "0000000000000000000000000000000000000000" ]; then
git fetch --no-tags --depth=1 origin "$before" || true
changed_config_files=$(git diff --name-only "$before" "${{ github.sha }}" | grep -E '^[^/]+/config\.(json|ya?ml)$' || true)
else
changed_config_files=$(git diff-tree --no-commit-id --name-only -r "${{ github.sha }}" | grep -E '^[^/]+/config\.(json|ya?ml)$' || true)
fi
echo "Changed config files:"
printf '%s\n' "$changed_config_files"
changed_addons=$(printf '%s\n' "$changed_config_files" | awk -F/ 'NF { print $1 }' | sort -u | jq -R -s -c 'split("\n") | map(select(length > 0))')
else
changed_addons=$(find . -maxdepth 2 \( -name 'config.json' -o -name 'config.yaml' -o -name 'config.yml' \) -printf '%h\n' | sed 's#^\./##' | sort -u | jq -R -s -c 'split("\n") | map(select(length > 0))')
fi
echo "Changed add-ons: $changed_addons"
echo "changed_addons=${changed_addons:-[]}" >> "$GITHUB_OUTPUT"
prebuild-sanitize:
if: ${{ needs.detect-changed-addons.outputs.changedAddons != '' && needs.detect-changed-addons.outputs.changedAddons != '[]' }}
needs: detect-changed-addons
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: actions/checkout@v6
with:
fetch-depth: 0
- name: Sanitize text files and script permissions
env:
ADDONS_JSON: ${{ needs.detect-changed-addons.outputs.changedAddons }}
run: |
set -euo pipefail
UNICODE_SPACES_REGEX=$'[\u00A0\u2002\u2003\u2007\u2008\u2009\u202F\u205F\u3000\u200B]'
mapfile -t addons < <(jq -r '.[]' <<<"$ADDONS_JSON")
for addon in "${addons[@]}"; do
echo "Sanitizing ${addon}"
cd "$GITHUB_WORKSPACE/$addon"
while IFS= read -r -d '' file; do
mime_type=$(file --mime-type -b "$file")
[[ "$mime_type" != text/* ]] && continue
perl -i -CSD -pe "
s/${UNICODE_SPACES_REGEX}/ /g;
s/\r$//;
" "$file"
done < <(find . -type f -print0)
find . -type f -iname '*.sh' -exec chmod u+x {} \;
done
- name: Assert no mixed CRLF/LF remain
uses: ymwymw/check-mixed-line-endings@v2
- name: Commit sanitize changes
if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' }}
uses: EndBug/add-and-commit@v10
with:
commit: -u
message: "GitHub bot: sanitize (spaces + LF endings) & chmod [nobuild]"
default_author: github_actions
pull: --rebase --autostash
fetch: --tags --force
lint_config:
if: ${{ needs.detect-changed-addons.outputs.changedAddons != '' && needs.detect-changed-addons.outputs.changedAddons != '[]' }}
needs: [detect-changed-addons, prebuild-sanitize]
runs-on: ubuntu-latest
continue-on-error: true
strategy:
matrix:
addon: ${{ fromJSON(needs.detect-changed-addons.outputs.changedAddons) }}
steps:
- uses: actions/checkout@v6
- name: Run Home Assistant Add-on Lint
uses: frenck/action-addon-linter@v2
with:
path: "./${{ matrix.addon }}"
build:
if: ${{ needs.detect-changed-addons.outputs.changedAddons != '' && needs.detect-changed-addons.outputs.changedAddons != '[]' }}
needs: [detect-changed-addons, lint_config]
runs-on: ${{ matrix.runner }}
name: Build ${{ matrix.arch }} ${{ matrix.addon }} add-on
permissions:
contents: read
packages: write
strategy:
fail-fast: false
matrix:
addon: ${{ fromJSON(needs.detect-changed-addons.outputs.changedAddons) }}
arch: [amd64, aarch64]
include:
- arch: amd64
runner: ubuntu-24.04
- arch: aarch64
runner: ubuntu-24.04-arm
steps:
- uses: actions/checkout@v6
with:
persist-credentials: false
- name: Resolve symlinks in repository copy
run: |
set -euo pipefail
find . -type l | while read -r link; do
target=$(readlink -f "$link" || true)
if [ -z "$target" ]; then
echo "Skipping broken symlink: $link"
continue
fi
rm "$link"
if [ -d "$target" ]; then
mkdir -p "$link"
cp -a "$target/." "$link/"
else
cp "$target" "$link"
fi
done
- name: Copy templates into addon build context
env:
ADDON: ${{ matrix.addon }}
run: |
set -euo pipefail
TEMPLATES_DIR=".templates"
ADDON_DIR="./$ADDON"
# Copy all template scripts that Dockerfiles might reference
for script in ha_automodules.sh ha_autoapps.sh ha_entrypoint.sh bashio-standalone.sh ha_lsio.sh; do
if [ -f "$TEMPLATES_DIR/$script" ]; then
cp "$TEMPLATES_DIR/$script" "$ADDON_DIR/$script"
fi
done
- name: Install PyYAML
run: python3 -m pip install --disable-pip-version-check pyyaml
- name: Read add-on metadata
id: info
env:
ADDON: ${{ matrix.addon }}
ARCH: ${{ matrix.arch }}
REPOSITORY: ${{ github.repository }}
GITHUB_SHA_VALUE: ${{ github.sha }}
run: |
set -euo pipefail
python3 - <<'PY'
import json
import os
from datetime import datetime, timezone
from pathlib import Path
import yaml
addon = os.environ["ADDON"]
arch = os.environ["ARCH"]
repository = os.environ["REPOSITORY"]
github_sha = os.environ["GITHUB_SHA_VALUE"]
addon_dir = Path(addon)
output_path = Path(os.environ["GITHUB_OUTPUT"])
def load_file(path: Path):
text = path.read_text(encoding="utf-8")
if path.suffix == ".json":
return json.loads(text)
return yaml.safe_load(text) or {}
def first_existing(*names: str):
for name in names:
path = addon_dir / name
if path.exists():
return path
return None
config_path = first_existing("config.json", "config.yaml", "config.yml")
if config_path is None:
raise SystemExit(f"No config file found in {addon}")
build_path = first_existing("build.json", "build.yaml", "build.yml")
config = load_file(config_path)
build = load_file(build_path) if build_path else {}
build_from_map = build.get("build_from") or {}
arch_list = list(build_from_map.keys()) if build_from_map else list(config.get("arch") or [])
build_arch = arch in arch_list
image_raw = str(config.get("image") or "").strip()
image = image_raw.replace("{arch}", arch)
version = str(config.get("version") or "").strip()
name = str(config.get("name") or "").strip()
description = str(config.get("description") or "").strip()
url = str(config.get("url") or "").strip()
build_from = str(build_from_map.get(arch) or "").strip()
build_date = datetime.now(timezone.utc).replace(microsecond=0).isoformat()
dockerfile = addon_dir / f"Dockerfile.{arch}"
if dockerfile.exists():
dockerfile_path = str(dockerfile)
has_dockerfile = True
else:
dockerfile = addon_dir / "Dockerfile"
dockerfile_path = str(dockerfile)
has_dockerfile = dockerfile.exists()
labels = [
f"io.hass.name={name}",
f"io.hass.description={description}",
"io.hass.type=addon",
]
if url:
labels.append(f"io.hass.url={url}")
build_args = [
f"BUILD_ARCH={arch}",
f"BUILD_VERSION={version}",
f"BUILD_DATE={build_date}",
f"BUILD_DESCRIPTION={description}",
f"BUILD_NAME={name}",
f"BUILD_REF={github_sha}",
f"BUILD_REPOSITORY={repository}",
]
if build_from:
build_args.insert(2, f"BUILD_FROM={build_from}")
def write_output(key: str, value: str):
with output_path.open("a", encoding="utf-8") as fh:
print(f"{key}<<__EOF__", file=fh)
print(value, file=fh)
print("__EOF__", file=fh)
write_output("architectures", json.dumps(arch_list))
write_output("build_arch", "true" if build_arch else "false")
write_output("has_dockerfile", "true" if has_dockerfile else "false")
write_output("dockerfile", dockerfile_path)
write_output("image", image)
write_output("version", version)
write_output("name", name)
write_output("description", description)
write_output("url", url)
write_output("build_from", build_from)
write_output("build_date", build_date)
write_output("labels", "\n".join(labels))
write_output("build_args", "\n".join(build_args))
PY
- name: Explain skipped builds
if: steps.info.outputs.build_arch != 'true' || steps.info.outputs.has_dockerfile != 'true'
run: |
if [ "${{ steps.info.outputs.has_dockerfile }}" != 'true' ]; then
echo "No Dockerfile or Dockerfile.${{ matrix.arch }} found in ${{ matrix.addon }}, skipping build."
elif [ "${{ steps.info.outputs.build_arch }}" != 'true' ]; then
echo "${{ matrix.arch }} is not a valid architecture for ${{ matrix.addon }}, skipping build."
fi
- name: Build ${{ matrix.addon }} add-on
if: steps.info.outputs.build_arch == 'true' && steps.info.outputs.has_dockerfile == 'true'
uses: home-assistant/builder/actions/build-image@2026.03.2
with:
arch: ${{ matrix.arch }}
cache-gha-scope: ${{ matrix.addon }}-${{ matrix.arch }}
context: ./${{ matrix.addon }}
file: ${{ steps.info.outputs.dockerfile }}
image: ${{ steps.info.outputs.image }}
image-tags: |
${{ steps.info.outputs.version }}
latest
version: ${{ steps.info.outputs.version }}
push: "true"
cosign: "false"
container-registry-password: ${{ secrets.GITHUB_TOKEN }}
labels: ${{ steps.info.outputs.labels }}
build-args: ${{ steps.info.outputs.build_args }}
make-changelog:
if: >-
${{
github.event_name == 'push' &&
github.ref == 'refs/heads/master' &&
needs.detect-changed-addons.outputs.changedAddons != '' &&
needs.detect-changed-addons.outputs.changedAddons != '[]'
}}
needs: [detect-changed-addons, build]
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: actions/checkout@v6
with:
fetch-depth: 0
- name: Update changelog for minor versions
env:
ADDONS_JSON: ${{ needs.detect-changed-addons.outputs.changedAddons }}
run: |
set -euo pipefail
mapfile -t addons < <(jq -r '.[]' <<<"$ADDONS_JSON")
for addon in "${addons[@]}"; do
echo "Updating changelog for ${addon}"
cd "$GITHUB_WORKSPACE/$addon"
if [ -f config.yaml ]; then
version=$(sed -n 's/^version:[[:space:]]*//p' config.yaml | head -n 1)
elif [ -f config.yml ]; then
version=$(sed -n 's/^version:[[:space:]]*//p' config.yml | head -n 1)
elif [ -f config.json ]; then
version=$(sed -n 's/.*"version"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/p' config.json | head -n 1)
else
echo "No config file found in ${addon}" >&2
exit 1
fi
version=${version//\"/}
version=${version//\'/}
version=$(echo "$version" | xargs)
if [[ "$version" == *test* ]]; then
continue
fi
touch CHANGELOG.md
if ! grep -q "^## ${version} (" CHANGELOG.md; then
first_line=$(sed -n '/./p' CHANGELOG.md | head -n 1 || true)
if [[ -n "$first_line" && "$first_line" != -* ]]; then
sed -i '1i\- Minor bugs fixed' CHANGELOG.md
elif [[ -z "$first_line" ]]; then
printf '%s\n' '- Minor bugs fixed' > CHANGELOG.md
fi
sed -i "1i\## ${version} ($(date '+%d-%m-%Y'))" CHANGELOG.md
fi
done
- name: Commit changelog changes
uses: EndBug/add-and-commit@v10
with:
commit: -u
message: "GitHub bot: changelog [nobuild]"
default_author: github_actions
pull: --rebase --autostash
fetch: --force
push: --force
revert-on-failure:
if: >-
${{
failure() &&
github.event_name == 'push' &&
github.ref == 'refs/heads/master' &&
github.repository_owner == 'alexbelgium'
}}
needs: [detect-changed-addons, prebuild-sanitize, lint_config, build]
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Checkout repo
uses: actions/checkout@v6
with:
fetch-depth: 0
- name: Revert commits from this failed push
run: |
set -euo pipefail
git config --global user.name "GitHub Actions"
git config --global user.email "actions@github.com"
git fetch origin
git checkout master
git pull --ff-only origin master
before="${{ github.event.before }}"
if [ -n "$before" ] && [ "$before" != "0000000000000000000000000000000000000000" ]; then
mapfile -t commits < <(git rev-list "${before}..HEAD")
else
commits=("${{ github.sha }}")
fi
if [ "${#commits[@]}" -eq 0 ]; then
echo "Nothing to revert."
exit 0
fi
for commit in "${commits[@]}"; do
git revert --no-edit "$commit"
done
git push origin HEAD:master