diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 5e192fe6..da19c418 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -17,37 +17,52 @@ concurrency: cancel-in-progress: true jobs: - build: - runs-on: [self-hosted, spread-enabled] + build_release: + runs-on: ubuntu-latest + outputs: + snap_name: ${{ steps.release.outputs.snap_name }} + snap_artifact: ${{ steps.release.outputs.snap_artifact }} steps: - - name: Cleanup job workspace - id: cleanup-job-workspace - run: | - rm -rf "${{ github.workspace }}" - mkdir "${{ github.workspace }}" - - uses: actions/checkout@v4 - - - name: Build snap - run: | - spread -artifacts=./artifacts google-nested:tests/spread/build/ - find ./artifacts -type f -name "*.artifact" -exec cp {} "${{ github.workspace }}" \; - - - uses: actions/upload-artifact@v4 + - name: Build and release to edge channel + id: release + env: + LP_CREDENTIALS: ${{ secrets.LP_CREDENTIALS }} + uses: snapcore/system-snaps-cicd-tools/action-rebuild-base@main with: - name: core-snap - path: "${{ github.workspace }}/core22.artifact" - - - name: Discard spread workers - if: always() - run: | - shopt -s nullglob - for r in .spread-reuse.*.yaml; do - spread -discard -reuse-pid="$(echo "$r" | grep -o -E '[0-9]+')" - done + branch: ${{ github.event_name == 'pull_request' && github.ref || github.ref_name }} + publish: false + build_fips_release: + runs-on: ubuntu-latest + outputs: + snap_name: ${{ steps.release.outputs.snap_name }} + snap_artifact: ${{ steps.release.outputs.snap_artifact }} + steps: + - name: Build and release to edge channel + id: release + env: + LP_CREDENTIALS: ${{ secrets.LP_CREDENTIALS }} + uses: snapcore/system-snaps-cicd-tools/action-rebuild-base@main + with: + branch: ${{ github.event_name == 'pull_request' && github.ref || github.ref_name }} + fips: true + publish: false tests-main: runs-on: [self-hosted, spread-enabled] - needs: build + needs: [build_release, build_fips_release] + 
strategy: + fail-fast: false + matrix: + include: + - variant: standard + artifact_name: ${{ needs.build_release.outputs.snap_artifact }} + core_variant: '' + - variant: fips + artifact_name: ${{ needs.build_fips_release.outputs.snap_artifact }} + core_variant: fips + name: tests-main (${{ matrix.variant }}) + env: + CORE_VARIANT: ${{ matrix.core_variant }} steps: - name: Cleanup job workspace id: cleanup-job-workspace @@ -57,8 +72,8 @@ jobs: - uses: actions/checkout@v4 - uses: actions/download-artifact@v4 with: - name: core-snap - path: "${{ github.workspace }}/core22.artifact" + name: ${{ matrix.artifact_name }} + path: "${{ github.workspace }}" - name: Run tests uses: ./.github/actions/run-spread-tests @@ -72,9 +87,21 @@ jobs: for r in .spread-reuse.*.yaml; do spread -discard -reuse-pid="$(echo "$r" | grep -o -E '[0-9]+')" done + tests-snapd: runs-on: ubuntu-latest - needs: build + needs: [build_release, build_fips_release] + strategy: + fail-fast: false + matrix: + include: + - variant: standard + artifact_name: ${{ needs.build_release.outputs.snap_artifact }} + snap_name: ${{ needs.build_release.outputs.snap_name }} + - variant: fips + artifact_name: ${{ needs.build_fips_release.outputs.snap_artifact }} + snap_name: ${{ needs.build_fips_release.outputs.snap_name }} + name: tests-snapd (${{ matrix.variant }}) steps: - name: Cleanup job workspace id: cleanup-job-workspace @@ -91,7 +118,8 @@ jobs: path: snapd - uses: actions/download-artifact@v4 with: - name: core-snap + name: ${{ matrix.artifact_name }} + path: "${{ github.workspace }}" - name: Install spread run: curl -s https://storage.googleapis.com/snapd-spread-tests/spread/spread-amd64.tar.gz | sudo tar xzv -C /usr/bin @@ -101,6 +129,13 @@ jobs: run: | . 
"core-base/tests/lib/prepare-utils.sh" + # determine variant, but for standard builds we want to pass + # an empty string to the utils functions + variant="${{ matrix.variant }}" + if [ "$variant" = "standard" ]; then + variant="" + fi + echo "************* INSTALLING DEPS *************" install_core22_deps @@ -111,9 +146,9 @@ jobs: prepare_core22_cloudinit echo "************* BUILDING CORE22 IMAGE *************" - uc_snap="$(get_core_snap_name)" - mv core22.artifact "$uc_snap" - build_core22_image + uc_snap="$(get_core_snap_name "$variant")" + mv "${{ matrix.snap_name }}" "$uc_snap" + build_core22_image "$variant" echo "************* STARTING CORE22 VM *************" start_snapd_core_vm '${{ github.workspace }}' diff --git a/spread.yaml b/spread.yaml index 40dcd7d4..676eee5a 100644 --- a/spread.yaml +++ b/spread.yaml @@ -5,6 +5,7 @@ environment: PROJECT_PATH: $SETUPDIR PATH: $PATH:$PROJECT_PATH/tests/bin TESTSLIB: $PROJECT_PATH/tests/lib + BUILD_VARIANT: '$(HOST: echo "${CORE_VARIANT:-}")' SNAP_BRANCH: "edge" # stable/edge/beta UC_VERSION: 22 # TODO: are these vars needed still? @@ -132,7 +133,7 @@ suites: prepare: | # prepare common uc image setup by repacking snaps, etc - "$TESTSLIB"/prepare-uc.sh + "$TESTSLIB"/prepare-uc.sh "$BUILD_VARIANT" restore-each: | # delete the nested VM image after each task finishes so we don't use too much # disk space diff --git a/tests/lib/prepare-uc.sh b/tests/lib/prepare-uc.sh index bcf74bac..222c1314 100755 --- a/tests/lib/prepare-uc.sh +++ b/tests/lib/prepare-uc.sh @@ -3,6 +3,8 @@ set -e set -x +BUILD_VARIANT="${1:-}" + # include auxiliary functions from this script . "$TESTSLIB/prepare-utils.sh" @@ -116,16 +118,16 @@ snap pack --filename=upstream-snapd.snap "$snapddir" rm -r $snapddir # build the core22 snap if it has not been provided to us by CI -uc_snap="$(get_core_snap_name)" +uc_snap="$(get_core_snap_name "$BUILD_VARIANT")" if [ ! 
-f "$PROJECT_PATH/core${UC_VERSION}.artifact" ]; then - build_core22_snap "$PROJECT_PATH" + build_core22_snap "$PROJECT_PATH" "$BUILD_VARIANT" else # use provided core22 snap cp "$PROJECT_PATH/core${UC_VERSION}.artifact" "$uc_snap" fi # finally build the uc image -build_core22_image +build_core22_image "$BUILD_VARIANT" # setup some data we will inject into ubuntu-seed partition of the image above # that snapd.spread-tests-run-mode-tweaks.service will ingest diff --git a/tests/lib/prepare-utils.sh b/tests/lib/prepare-utils.sh index 7dc24fdd..f22185a7 100644 --- a/tests/lib/prepare-utils.sh +++ b/tests/lib/prepare-utils.sh @@ -115,8 +115,14 @@ start_snapd_core_vm() { } get_core_snap_name() { + local variant="${1:-}" + printf -v date '%(%Y%m%d)T' -1 - echo "core22_${date}_amd64.snap" + if [ "$variant" = "fips" ]; then + echo "core22-fips_${date}_amd64.snap" + else + echo "core22_${date}_amd64.snap" + fi } install_core22_deps() { @@ -191,10 +197,18 @@ prepare_core22_cloudinit() { build_core22_snap() { local project_dir="$1" local current_dir="$(pwd)" + local variant="${2:-}" # run snapcraft ( cd "$project_dir" + + # if it's the fips variant, rename the remote url to trigger + # the fips build + if [ "$variant" = "fips" ]; then + git remote set-url origin "$(git remote get-url origin | sed 's/$/-fips/')" + fi + sudo snapcraft --destructive-mode --verbose # copy the snap to the calling directory if they are not the same @@ -205,7 +219,8 @@ build_core22_snap() { } build_core22_image() { - local core_snap_name="$(get_core_snap_name)" + local variant="${1:-}" + local core_snap_name="$(get_core_snap_name "$variant")" ubuntu-image snap \ -i 8G \ --snap $core_snap_name \ diff --git a/tests/spread/build/build-fips-snap/task.yaml b/tests/spread/build/build-fips-snap/task.yaml new file mode 100644 index 00000000..dbf08c13 --- /dev/null +++ b/tests/spread/build/build-fips-snap/task.yaml @@ -0,0 +1,21 @@ +summary: Builds the core snap +manual: true + +artifacts: + - 
core22-fips.artifact + +prepare: | + # for various utilities + . "$TESTSLIB/prepare-utils.sh" + + # install dependencies + install_core22_deps + +execute: | + # for various utilities + . "$TESTSLIB/prepare-utils.sh" + + build_core22_snap "$PROJECT_PATH" "fips" + + SNAP_NAME="$(get_core_snap_name "fips")" + cp "$PROJECT_PATH/$SNAP_NAME" "core22-fips.artifact" diff --git a/tools/generate-changelog.py b/tools/generate-changelog.py index ca3edbfc..960b8b97 100755 --- a/tools/generate-changelog.py +++ b/tools/generate-changelog.py @@ -27,9 +27,9 @@ import debian.debian_support import gzip import os -import requests -import re import subprocess +import re +import requests import sys import yaml from collections import namedtuple @@ -40,13 +40,20 @@ # keep the list short to not increase the time it takes # to generate changelogs pkg_allowed_list = [ - 'apt', # is removed during hook - 'libapt-pkg6.0', # is removed as well - 'base-files', # unstable on local builds - 'ca-certificates', # unstable on local builds - 'distro-info-data' # unstable on local builds + 'dpkg', # is removed during hook + 'openssl' # contains a symlink which is broken currently ] +# List of packages with no valid changelog. We only have gnutls-bin for the +# moment. It does not have a valid changelog as +# /usr/share/doc/gnutls-bin/changelog.Debian.gz is a symlink to +# ../libgnutls-dane0/changelog.Debian.gz which is in turn a symlink to +# ../libgnutls30/changelog.Debian.gz. However, libgnutls-dane0 is not in the +# base (removed by 400-trim-pkcs-11.chroot) so the chain is broken. This is ok +# as anyway we will get the changes from libgnutls30. +pkg_no_changelog = ['gnutls-bin'] + + # Returns a dictionary from package name to version, using # the packages section. 
# manifest_p: path to manifest to load @@ -62,13 +69,6 @@ def packages_from_manifest(manifest_p): return pkg_dict -def is_fips(s): - m = re.search("[+~][Ff]ips[1-2\.]{1,3}", s) - if m is None: - return False - return True - - def package_name(pkg): t = pkg.split(':') return t[0] @@ -81,7 +81,7 @@ def get_changelog_from_file(docs_d, pkg): with gzip.open(chl_deb_path) as chl_fh: return chl_fh.read().decode('utf-8') elif os.path.exists(chl_path): - with gzip.open(chl_deb_path) as chl_fh: + with gzip.open(chl_path) as chl_fh: return chl_fh.read().decode('utf-8') else: raise FileNotFoundError(f"no supported changelog found for package {pkg}") @@ -89,12 +89,12 @@ def get_changelog_from_file(docs_d, pkg): def get_changelog_from_url(pkg, new_v, on_lp): url = 'https://changelogs.ubuntu.com/changelogs/binary/' - + print(f"failed to resolve changelog for {pkg} locally, downloading from official repo") safe_name = package_name(pkg) if not on_lp and safe_name not in pkg_allowed_list: raise Exception(f"{pkg} has not been whitelisted for changelog retrieval") - + if safe_name.startswith('lib'): url += safe_name[0:4] else: @@ -102,12 +102,21 @@ def get_changelog_from_url(pkg, new_v, on_lp): url += '/' + safe_name + '/' + new_v + '/changelog' changelog_r = requests.get(url) if changelog_r.status_code != requests.codes.ok: - raise Exception('No changelog found in ' + url + ' - status:' + - str(changelog_r.status_code)) + raise Exception(f'No changelog found in {url} - status: {changelog_r.status_code}') return changelog_r.text +# Exception thrown for packages with no local or remote changelog +class PackageNoChangelog(Exception): + pass + + +# Exception thrown for packages for which we do not find a previous change +class NoOldChange(Exception): + pass + + # Gets difference in changelog between old and new versions # Returns source package and the differences def get_changes_for_version(docs_d, pkg, old_v, new_v, indent, on_lp): @@ -117,11 +126,8 @@ def 
get_changes_for_version(docs_d, pkg, old_v, new_v, indent, on_lp): try: changelog = get_changelog_from_file(docs_d, pkg) except Exception: - # If the package is coming from the fips PPA, do not attempt - # to download from regular archive. - if is_fips(new_v): - print(f"failed to resolve FIPS changelog for {pkg}/{new_v}") - raise KeyError + if re.match(r'.*\+esm[0-9]*$', new_v) or package_name(pkg) in pkg_no_changelog: + raise PackageNoChangelog(f'package {pkg} does not have changelog') changelog = get_changelog_from_url(pkg, new_v, on_lp) source_pkg = changelog[0:changelog.find(' ')] @@ -147,7 +153,10 @@ def get_changes_for_version(docs_d, pkg, old_v, new_v, indent, on_lp): change_chunk += indent + line + '\n' if not found_version: - raise EOFError(f"{old_change_start} was not found in the changelog, aborting") + # It can happen if a binary package changes the source package it came + # from, for instance this has been seen for libatomic1 that was built + # from gcc-15 to gcc-16. + raise NoOldChange(f"{old_change_start} for {pkg} was not found in the changelog") return source_pkg, change_chunk @@ -167,24 +176,30 @@ def compare_manifests(old_manifest_p, new_manifest_p, docs_d, on_lp): try: old_v = old_packages[pkg] if old_v != new_v: - src, pkg_change = get_changes_for_version(docs_d, pkg, old_v, - new_v, ' ', on_lp) - if src not in src_pkgs: - src_pkgs[src] = SrcPkgData(old_v, new_v, pkg_change, [pkg]) - else: - src_pkgs[src].debs.append(pkg) + try: + src, pkg_change = get_changes_for_version(docs_d, pkg, old_v, + new_v, ' ', on_lp) + if src not in src_pkgs: + src_pkgs[src] = SrcPkgData(old_v, new_v, pkg_change, [pkg]) + else: + src_pkgs[src].debs.append(pkg) + except PackageNoChangelog as e: + print(e) + except NoOldChange as e: + print(e) + changes += f'{pkg} ({new_v}): new primed package\n\n' except KeyError: - changes += pkg + ' (' + new_v + '): new primed package\n\n' + changes += f'{pkg} ({new_v}): new primed package\n\n' for src_pkg, pkg_data in 
sorted(src_pkgs.items()): changes += ', '.join(pkg_data.debs) - changes += ' (built from ' + src_pkg + ') updated from ' - changes += pkg_data.old_v + ' to ' + pkg_data.new_v + ':\n\n' + changes += f' (built from {src_pkg}) updated from ' + changes += f'{pkg_data.old_v} to {pkg_data.new_v}:\n\n' changes += pkg_data.changes for pkg, old_v in sorted(old_packages.items()): if pkg not in new_packages: - changes += pkg + ': not primed anymore\n\n' + changes += f'{pkg}: not primed anymore\n\n' return changes @@ -193,7 +208,7 @@ def find_commit_in_changelog(clog_p) -> str: if clog_p == "" or not os.path.exists(clog_p): print(f"No previous changelog existed at {clog_p}, skipping changelog generation for local repo") return "" - + # expect commit in the first line with open(clog_p, "r") as f: line = f.readline().strip() @@ -217,12 +232,12 @@ def read_remote_git_url() -> str: def log_between_commits(name, start, end): try: return subprocess.check_output(['git', 'shortlog', '--pretty=short', f'{start}..{end}']).decode() - except: + except Exception: # if there is no path from start..end then this might fail, however this # should only happen if the branch has diverged so much that the previous # release commit does not exist in the current fork. In this case let us # notify that we could not generate the changelog - print(f"Failed to run 'git log' for the current repo starting at commit {start}, has branch diverged to much?") + print(f"Failed to run 'git log' for the current repo starting at commit {start}, has branch diverged too much?") return f'No detected changes for the {name} snap\n\n' @@ -250,7 +265,7 @@ def main(): # add a header that helps us audit where the current build is # sourced from. 
now = datetime.now() - changes = f'{now.strftime("%d/%m/%Y")}, commit {read_remote_git_url()}/tree/{ccommit}\n\n' + changes = f"{now.strftime('%d/%m/%Y')}, commit {read_remote_git_url()}/tree/{ccommit}\n\n" changes += f'[ Changes in the {args.name} snap ]\n\n' # Is there a previous commit? Then we get a log between them @@ -272,7 +287,6 @@ def main(): with open(old_changelog, "r") as f: changes += f.read() - # write the changelog with open(new_changelog, "w") as f: f.write(changes) return 0