90 changes: 90 additions & 0 deletions .github/actions/aws_s3_helper/action.yml
@@ -0,0 +1,90 @@
name: AWS S3 Helper
description: Upload and download files from AWS S3

inputs:
s3_bucket:
description: S3 Bucket Name
required: true
local_file:
description: Local file to upload, or in multi-upload mode a text file listing the paths to upload
required: false
default: ../artifacts/file_list.txt
download_file:
description: S3 key prefix to sync from (download mode)
required: false
default: ''
mode:
description: Mode of operation (single-upload/multi-upload/download)
required: true
default: single-upload

outputs:
presigned_url:
description: Pre-signed URL for the uploaded file (single-upload mode only)
value: ${{ steps.sync-data.outputs.presigned_url }}

runs:
using: "composite"
steps:
- name: Sync Data
id: sync-data
shell: bash
env:
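# github.head_ref is only set on pull_request events; other triggers fall back to github.run_id via the &&/|| ternary idiom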
UPLOAD_LOCATION: ${{ github.repository_owner }}/${{ github.event.repository.name }}/${{ github.workflow }}/${{ github.head_ref != '' && github.head_ref || github.run_id }}/
run: |
echo "::group::$(printf '__________ %-100s' 'Process' | tr ' ' _)"
case "${{ inputs.mode }}" in
multi-upload)
echo "Uploading files to S3 bucket..."
first_line=true
# Start the JSON object
echo "{" > ${{ github.workspace }}/presigned_urls.json
while IFS= read -r file; do
if [ -f "$file" ]; then
echo "Uploading $file..."
aws s3 cp "$file" s3://${{ inputs.s3_bucket }}/${{ env.UPLOAD_LOCATION }}
echo "Uploaded $file to s3://${{ inputs.s3_bucket }}/${{ env.UPLOAD_LOCATION }}"
echo "Creating Pre-signed URL for $file..."
filename=$(basename "$file")
presigned_url=$(aws s3 presign s3://${{ inputs.s3_bucket }}/${{ env.UPLOAD_LOCATION }}$filename --expires-in 3600)
if [ "$first_line" = true ]; then
first_line=false
else
echo "," >> ${{ github.workspace }}/presigned_urls.json
fi
# Append the pre-signed URL to the file
echo " \"${file}\": \"${presigned_url}\"" >> ${{ github.workspace }}/presigned_urls.json
echo "Pre-signed URL for $file: $presigned_url"
else
echo "Warning: $file does not exist or is not a regular file."
fi
done < "${{ inputs.local_file }}"
# Close the JSON object
echo "}" >> ${{ github.workspace }}/presigned_urls.json
;;
single-upload)
echo "Uploading single file to S3 bucket..."
aws s3 cp "${{ inputs.local_file }}" s3://${{ inputs.s3_bucket }}/${{ env.UPLOAD_LOCATION }}
echo "Uploaded ${{ inputs.local_file }} to s3://${{ inputs.s3_bucket }}/${{ env.UPLOAD_LOCATION }}"
echo "Creating Pre-signed URL for ${{ inputs.local_file }}..."
presigned_url=$(aws s3 presign s3://${{ inputs.s3_bucket }}/${{ env.UPLOAD_LOCATION }}${{ inputs.local_file }} --expires-in 3600)
echo "presigned_url=${presigned_url}" >> "$GITHUB_OUTPUT"
;;
download)
# Download the required files from S3
echo "Downloading files from S3 bucket..."
aws s3 sync s3://${{ inputs.s3_bucket }}/${{ inputs.download_file }} .
;;
*)
echo "Invalid mode. Use 'upload' or 'download'."
exit 1
;;
esac
echo "::endgroup::"

- name: Upload artifacts
if: ${{ inputs.mode == 'multi-upload' }}
uses: actions/upload-artifact@v4
with:
name: presigned_urls.json
path: ${{ github.workspace }}/presigned_urls.json
retention-days: 1
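For reference, a minimal sketch of a workflow step invoking this action in download mode (the key prefix here is illustrative, not taken from this PR):

- name: Download artifacts
uses: ./.github/actions/aws_s3_helper
with:
s3_bucket: qli-prd-kernel-gh-artifacts
mode: download
download_file: meta-qcom/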
37 changes: 37 additions & 0 deletions .github/actions/build/action.yml
@@ -0,0 +1,37 @@
name: Build workspace
description: Build the kernel workspace inside the kmake docker image

inputs:
docker_image:
description: Docker image
required: true
default: kmake-image:latest

runs:
using: "composite"
steps:
- name: Download artifacts
shell: bash
run: |
mkdir -p ../artifacts && \
wget -O ../artifacts/ramdisk.gz https://snapshots.linaro.org/member-builds/qcomlt/testimages/arm64/1379/initramfs-test-image-qemuarm64-20230321073831-1379.rootfs.cpio.gz && \
wget -O ../artifacts/systemd-boot-efi.deb http://ports.ubuntu.com/pool/universe/s/systemd/systemd-boot-efi_255.4-1ubuntu8_arm64.deb && \
dpkg-deb -xv ../artifacts/systemd-boot-efi.deb ../artifacts/systemd

- name: Make
shell: bash
run: |
docker run -i --rm \
--user $(id -u):$(id -g) \
--workdir="$PWD" \
-v "$(dirname $PWD)":"$(dirname $PWD)" \
${{ inputs.docker_image }} bash -c "
make O=../kobj defconfig
make O=../kobj -j$(nproc)
make O=../kobj -j$(nproc) dir-pkg INSTALL_MOD_STRIP=1
"

- name: Package DLKM into ramdisk
shell: bash
run: |
(cd ../kobj/tar-install ; find lib/modules | cpio -o -H newc -R +0:+0 | gzip -9 >> ../../artifacts/ramdisk.gz)
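The append (>>) works because the kernel accepts an initramfs made of multiple concatenated cpio archives, each independently compressed; the modules archive is laid after the rootfs archive and unpacked on top of it at boot. As a quick local smoke test, a sketch assuming qemu-system-aarch64 is available on the host (it is not installed by this PR):

qemu-system-aarch64 -M virt -cpu cortex-a57 -m 2G -nographic \
-kernel ../kobj/arch/arm64/boot/Image \
-initrd ../artifacts/ramdisk.gz \
-append "console=ttyAMA0"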
154 changes: 154 additions & 0 deletions .github/actions/lava_job_render/action.yml
@@ -0,0 +1,154 @@
name: LAVA job render
description: Render a LAVA job definition from the uploaded build artifacts
inputs:
docker_image:
description: Docker image
required: true
default: kmake-image:latest

runs:
using: "composite"
steps:
- name: Process presigned_urls.json
id: process_urls
uses: actions/github-script@v7
with:
script: |
const fs = require('fs');
const p = require('path');
// Helper function to find URL by filename
function findUrlByFilename(filename) {
for (const [path, url] of Object.entries(data)) {
if (path.endsWith(filename)) {
return url;
}
}
return null;
}
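// note: `data` is declared below; this helper is only invoked after data has been assigned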
const filePath = p.join(process.env.GITHUB_WORKSPACE, 'presigned_urls.json');
if (!fs.existsSync(filePath)) {
core.setFailed(`File not found: ${filePath}`);
return; // bail out before JSON.parse throws on a missing file
}
// Read the JSON file
const data = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
// Extract URLs into variables
const modulesTarUrl = findUrlByFilename('modules.tar.xz');
const imageUrl = findUrlByFilename('Image');
const vmlinuxUrl = findUrlByFilename('vmlinux');
const dtbUrl = findUrlByFilename('qcs6490-rb3gen2.dtb');
// Set outputs
core.setOutput('modules_url', modulesTarUrl);
core.setOutput('image_url', imageUrl);
core.setOutput('vmlinux_url', vmlinuxUrl);
core.setOutput('dtb_url', dtbUrl);
console.log(`Modules URL: ${modulesTarUrl}`);
console.log(`Image URL: ${imageUrl}`);
console.log(`Vmlinux URL: ${vmlinuxUrl}`);
console.log(`Dtb URL: ${dtbUrl}`);
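For context, the multi-upload mode of aws_s3_helper writes presigned_urls.json keyed by local path, so the object parsed above looks roughly like this (URLs elided):

{
"modules.tar.xz": "https://…",
"../kobj/arch/arm64/boot/Image": "https://…",
"../kobj/vmlinux": "https://…",
"../kobj/arch/arm64/boot/dts/qcom/qcs6490-rb3gen2.dtb": "https://…"
}

The keys carry relative paths, which is why findUrlByFilename matches with endsWith rather than exact lookups.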

- name: Create metadata.json
id: create_metadata
shell: bash
run: |
echo "Creating job definition"
# Create the job definition using the processed URLs
cd ../job_render
docker run -i --rm \
--user "$(id -u):$(id -g)" \
--workdir="$PWD" \
-v "$(dirname "$PWD")":"$(dirname "$PWD")" \
-e dtb_url="${{ steps.process_urls.outputs.dtb_url }}" \
${{ inputs.docker_image }} \
jq '.artifacts["dtbs/qcom/qcs6490-rb3gen2.dtb"] = env.dtb_url' data/metadata.json > temp.json && mv temp.json data/metadata.json

- name: Upload metadata.json
id: upload_metadata
uses: ./.github/actions/aws_s3_helper
with:
local_file: ../job_render/data/metadata.json
s3_bucket: qli-prd-kernel-gh-artifacts
mode: single-upload

- name: Create template json
shell: bash
run: |
echo "Creating job definition"
metadata_url="${{ steps.upload_metadata.outputs.presigned_url }}"
vmlinux_url="${{ steps.process_urls.outputs.vmlinux_url }}"
image_url="${{ steps.process_urls.outputs.image_url }}"
modules_url="${{ steps.process_urls.outputs.modules_url }}"
# Create the job definition using the processed URLs;
# a single jq pass sets all four artifact keys in one container run
cd ../job_render
docker run -i --rm \
--user "$(id -u):$(id -g)" \
--workdir="$PWD" \
-v "$(dirname "$PWD")":"$(dirname "$PWD")" \
-e metadata_url="$metadata_url" \
-e image_url="$image_url" \
-e vmlinux_url="$vmlinux_url" \
-e modules_url="$modules_url" \
${{ inputs.docker_image }} \
jq '.artifacts.metadata = env.metadata_url
| .artifacts.kernel = env.image_url
| .artifacts.vmlinux = env.vmlinux_url
| .artifacts.modules = env.modules_url' data/cloudData.json > temp.json && mv temp.json data/cloudData.json

- name: Update firmware and ramdisk
shell: bash
run: |
cd ../job_render
ramdisk_url="$(aws s3 presign s3://qli-prd-kernel-gh-artifacts/meta-qcom/initramfs-kerneltest-full-image-qcom-armv8a.cpio.gz --expires-in 7600)"
firmware_url="$(aws s3 presign s3://qli-prd-kernel-gh-artifacts/meta-qcom/initramfs-firmware-rb3gen2-image-qcom-armv8a.cpio.gz --expires-in 7600)"
# Inject both URLs in a single jq pass
docker run -i --rm \
--user "$(id -u):$(id -g)" \
--workdir="$PWD" \
-v "$(dirname "$PWD")":"$(dirname "$PWD")" \
-e ramdisk_url="$ramdisk_url" \
-e firmware_url="$firmware_url" \
${{ inputs.docker_image }} \
jq '.artifacts.ramdisk = env.ramdisk_url
| .artifacts.firmware = env.firmware_url' data/cloudData.json > temp.json && mv temp.json data/cloudData.json
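By the end of these steps, the artifacts object in data/cloudData.json should hold one pre-signed URL per key, roughly as below (values elided; jq leaves any other fields in the file untouched):

{
"artifacts": {
"metadata": "https://…",
"kernel": "https://…",
"vmlinux": "https://…",
"modules": "https://…",
"ramdisk": "https://…",
"firmware": "https://…"
}
}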

- name: Create lava_job_definition
shell: bash
run: |
cd ../job_render
mkdir -p renders
docker run -i --rm \
--user "$(id -u):$(id -g)" \
--workdir="$PWD" \
-v "$(dirname "$PWD")":"$(dirname "$PWD")" \
${{ inputs.docker_image }} \
sh -c 'export BOOT_METHOD=fastboot && \
export TARGET=qcs6490-rb3gen2 && \
export TARGET_DTB=qcs6490-rb3gen2 && \
python3 lava_Job_definition_generator.py --localjson ./data/cloudData.json'
24 changes: 24 additions & 0 deletions .github/actions/pull_docker_image/action.yml
@@ -0,0 +1,24 @@
name: Build kmake docker image
description: Clone and build the kmake-image docker image locally

inputs:
image:
description: The docker image to pull
required: true
default: kmake-image:latest

github_token:
description: The GitHub token to use for authentication
required: true

runs:
using: "composite"
steps:
- name: Clone kmake-image
shell: bash
run: |
git clone https://github.com/qualcomm-linux/kmake-image.git

- name: Build docker image
shell: bash
run: |
cd kmake-image
docker build . -t "${{ inputs.image }}"
75 changes: 75 additions & 0 deletions .github/workflows/build.yml
@@ -0,0 +1,75 @@
name: _build
on:
workflow_call:
inputs:
docker_image:
description: Docker image
type: string
required: true

jobs:
build:
runs-on:
group: GHA-Kernel-SelfHosted-RG
labels: [ self-hosted, kernel-prd-u2404-x64-large-od-ephem ]
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.ref }}
fetch-depth: 0

- name: Pull docker image
uses: ./.github/actions/pull_docker_image
with:
image: ${{ inputs.docker_image }}
github_token: ${{ secrets.GITHUB_TOKEN }}

- name: Build workspace
id: build_workspace
uses: ./.github/actions/build
with:
docker_image: ${{ inputs.docker_image }}

- name: Create file list for artifacts upload
run: |
touch ../artifacts/file_list.txt
tar -cJf modules.tar.xz ../kobj/tar-install/lib/modules/
echo "modules.tar.xz" >> ../artifacts/file_list.txt
echo "../kobj/arch/arm64/boot/Image" >> ../artifacts/file_list.txt
echo "../kobj/vmlinux" >> ../artifacts/file_list.txt
echo "../kobj/arch/arm64/boot/dts/qcom/qcs6490-rb3gen2.dtb" >> ../artifacts/file_list.txt

- name: Upload artifacts
uses: ./.github/actions/aws_s3_helper
with:
s3_bucket: qli-prd-kernel-gh-artifacts
local_file: ../artifacts/file_list.txt
mode: multi-upload

- name: Clean up
run: |
rm -rf ../artifacts
rm -rf ../kobj
rm -rf modules.tar.xz

- name: Update summary
if: success() || failure()
shell: bash
run: |
if [ "${{ steps.build_workspace.outcome }}" = "success" ]; then
echo "Build was successful"
summary=":heavy_check_mark: Build Success"
else
echo "Build failed"
summary=":x: Build Failed"
fi
{
echo '<details><summary><i>Build Summary</i></summary>'
echo ''
echo "$summary"
echo '</details>'
} >> "$GITHUB_STEP_SUMMARY"
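Since _build is exposed only via workflow_call, it runs when another workflow references it. A caller sketch (file name and trigger are illustrative):

name: ci
on:
pull_request:
jobs:
build:
uses: ./.github/workflows/build.yml
with:
docker_image: kmake-image:latest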