diff --git a/.github/actions/notify_testing_matrix/action.yml b/.github/actions/notify_testing_matrix/action.yml new file mode 100644 index 000000000..b94f33de8 --- /dev/null +++ b/.github/actions/notify_testing_matrix/action.yml @@ -0,0 +1,28 @@ +name: 'Notify Matrix Chat' +description: 'Send a message to a Matrix chat room' +inputs: + matrix_token: + description: 'The token to use to authenticate with the Matrix server' + required: true + subject: + description: 'Subject of the message' + default: 'no subject' + release_type: + description: 'Type of the release' + required: true + version: + description: 'The version of the release' + default: "version unknown" + repository: + description: 'The repository that triggered the action' + +runs: + using: 'composite' + steps: + - name: Send message to Matrix + uses: fadenb/matrix-chat-message@v0.0.6 + with: + homeserver: 'matrix.org' + token: ${{ inputs.matrix_token }} + channel: '!WjxEKjjINpyBRPFgxl:krbel.duckdns.org' + message: "Testing QA phase in repo ${{ inputs.repository }} of a ${{ inputs.release_type }} release (${{ inputs.version }}) started. 
Subject to test: ${{ inputs.subject }}" \ No newline at end of file diff --git a/.github/workflows/auto_translate.yml b/.github/workflows/auto_translate.yml new file mode 100644 index 000000000..3ee13785a --- /dev/null +++ b/.github/workflows/auto_translate.yml @@ -0,0 +1,58 @@ +name: Auto translate +on: + workflow_call: + inputs: + branch: + description: 'The branch to checkout and run the action in' + type: string + default: ${{ github.ref }} + action_branch: + description: 'The shared action branch to checkout' + type: string + default: main + python_version: + description: 'The python version to use' + type: string + default: "3.8" + locale_folder: + description: 'The base folder to look for localization files' + type: string + default: locale + + +jobs: + autotranslate: + env: + API_KEY: ${{secrets.DL_API_KEY}} + LOCALE_FOLDER: ${{ github.workspace }}/action/package/${{ inputs.locale_folder }} + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + token: ${{ secrets.GH_PAT }} + ref: ${{ inputs.branch }} + path: action/package/ + - name: Checkout Scripts Repo + uses: actions/checkout@v4 + with: + repository: OpenVoiceOS/.github + ref: ${{ inputs.action_branch }} + path: action/github/ + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: ${{ inputs.python_version }} + - name: Install Translate Tools + run: | + python -m pip install -r action/github/requirements/pip_translation.txt + - name: Auto Translate + run: | + python action/github/scripts/translate.py + - name: Commit to dev + uses: stefanzweifel/git-auto-commit-action@v5 + with: + commit_message: "fix(localisation): autotranslate common languages" + repository: action/package/ diff --git a/.github/workflows/check_conventional_commits.yml b/.github/workflows/check_conventional_commits.yml new file mode 100644 index 000000000..ff4b49c53 --- /dev/null +++ 
b/.github/workflows/check_conventional_commits.yml @@ -0,0 +1,59 @@ +name: Check Conventional Commits +on: + workflow_call: + inputs: + runner: + description: 'Runner to use' + type: string + default: 'ubuntu-latest' + branch: + description: 'Branch to check out' + type: string + default: ${{ github.ref }} + action_branch: + description: 'The shared action branch to checkout' + type: string + default: main + custom_labels: + description: 'Custom labels to use' + type: string + default: '{}' + pr_number: + description: 'Optional pull request number' + type: string + default: '' + error_on_failure: + description: 'Throws an error if the title does not adhere to Conventional Commit format or a testing phase is ongoing' + type: boolean + default: false + +jobs: + validate-pr-title: + runs-on: ${{ inputs.runner }} + env: + GH_PAT: ${{ secrets.GH_PAT }} + GITHUB_REPOSITORY: ${{ github.repository }} + PR_LABELS: ${{ inputs.custom_labels }} + PR_NUMBER: ${{ inputs.pr_number }} + ERROR_ON_FAILURE: ${{ inputs.error_on_failure }} + TEST_PHASE_CACHE: ${{ github.workspace }}/test-status + PCCC_CONFIG_FILE: ${{ github.workspace }}/pccc.toml + steps: + - name: Checkout Scripts Repo + uses: actions/checkout@v4 + with: + repository: OpenVoiceOS/.github + ref: ${{ inputs.action_branch }} + - name: Install Python Requirements + run: | + python -m pip install --upgrade pip + pip install -r ${{ github.workspace }}/requirements/pip_base.txt + - name: Get Cached Test Status + uses: actions/cache@v4 + with: + path: test-status + key: test-status-${{ runner.os }} + - name: Status Check + run: | + python scripts/check_PRs.py + \ No newline at end of file diff --git a/.github/workflows/check_supported_py_versions.yml b/.github/workflows/check_supported_py_versions.yml new file mode 100644 index 000000000..5f955e8f3 --- /dev/null +++ b/.github/workflows/check_supported_py_versions.yml @@ -0,0 +1,25 @@ +name: Check Supported Python Versions + +on: + workflow_dispatch: + schedule: + - cron: '0 0 * * *' + +jobs: + update-versions:
+ runs-on: ubuntu-latest + permissions: + contents: write + + steps: + - uses: actions/checkout@v4 + - name: Run Python Version Check Script + run: + python scripts/check_supported_py_versions.py + - name: Commit and Push if Changes + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + git add supported_py_versions.json + git commit -m "Update Supported Python Versions" || exit 0 # Exit gracefully if no changes + git push \ No newline at end of file diff --git a/.github/workflows/clear_test_phase.yml b/.github/workflows/clear_test_phase.yml new file mode 100644 index 000000000..b5edb80e0 --- /dev/null +++ b/.github/workflows/clear_test_phase.yml @@ -0,0 +1,33 @@ +on: + workflow_call: + +jobs: + clear_test_flag: + runs-on: ubuntu-latest + # set cached test status to complete + steps: + - name: Set test status + run: | + echo "complete" > test-status + - name: Delete Test Tracking Cache + run: | + gh extension install actions/gh-actions-cache + + echo "Fetching list of cache key" + cacheKeys=$(gh actions-cache list -R ${{ github.repository }} -L 100 | cut -f 1 ) + + ## Setting this to not fail the workflow while deleting cache keys. + set +e + echo "Deleting caches..." 
+ for cacheKey in $cacheKeys + do + gh actions-cache delete $cacheKey -R ${{ github.repository }} --confirm + done + echo "Done" + env: + GH_TOKEN: ${{ secrets.GH_PAT }} + - name: Cache test status + uses: actions/cache@v2 + with: + path: test-status + key: test-status-${{ runner.os }} diff --git a/.github/workflows/license_tests.yml b/.github/workflows/license_tests.yml new file mode 100644 index 000000000..0de81531b --- /dev/null +++ b/.github/workflows/license_tests.yml @@ -0,0 +1,97 @@ +name: License Tests +on: + workflow_call: + inputs: + runner: + description: 'Runner to use' + type: string + default: 'ubuntu-latest' + branch: + description: 'Branch to check out' + type: string + default: ${{ github.ref }} + action_branch: + description: 'The shared action branch to checkout' + type: string + default: main + python_version: + description: 'Python version to use' + type: string + default: '3.8' + system_deps: + description: 'Additional system dependencies (whitespace delimited) to install' + required: false + type: string + pip_packages: + description: 'Additional Python packages (whitespace delimited) to install' + type: string + required: false + install_extras: + description: 'Optional extras to install the python package with' + required: false + type: string + packages-exclude: + description: 'Regex to exclude packages from the license check' + type: string + default: '^(precise-runner|fann2|tqdm|bs4|nvidia|bitstruct).*' + licenses-exclude: + description: 'Regex to exclude licenses from the license check' + type: string + default: '^(Mozilla).*$' +jobs: + license_tests: + timeout-minutes: 15 + runs-on: ${{ inputs.runner }} + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: ${{ inputs.branch }} + path: action/package/ + - name: Checkout Scripts Repo + uses: actions/checkout@v4 + with: + path: action/github/ + ref: ${{ inputs.action_branch }} + repository: OpenVoiceOS/.github + - name: Setup Python + uses: 
actions/setup-python@v5 + with: + python-version: ${{ inputs.python_version }} + - name: Install Custom System Dependencies + if: ${{ inputs.system_deps != null }} + run: | + sudo apt update + sudo apt install -y ${{ inputs.system_deps }} + - name: upgrade pip + run: | + pip install --upgrade pip + - name: Install Custom Python Requirements + if: ${{ inputs.pip_packages != null }} + run: | + pip install ${{ inputs.pip_packages }} + - name: Install repo + run: | + cd ${{ github.workspace }}/action/package + if [ -z ${{ inputs.install_extras }} ]; then + pip install . + else + pip install .[${{ inputs.install_extras }}] + fi + - name: Get explicit and transitive dependencies + run: | + pip freeze > requirements-all.txt + - name: Check licenses + id: license_check_report + uses: pilosus/action-pip-license-checker@v2 + with: + requirements: 'requirements-all.txt' + fail: 'Copyleft,Other,Error' + fails-only: true + pre: true + verbose: 2 + exclude: ${{ inputs.packages-exclude }} + exclude-license: ${{ inputs.licenses-exclude }} + - name: Print report + if: ${{ always() }} + run: echo "${{ steps.license_check_report.outputs.report }}" \ No newline at end of file diff --git a/.github/workflows/notify_pr_matrix.yml b/.github/workflows/notify_pr_matrix.yml new file mode 100644 index 000000000..772cd19a3 --- /dev/null +++ b/.github/workflows/notify_pr_matrix.yml @@ -0,0 +1,30 @@ +name: Notify Matrix Chat + +# only triggers on pull request closed events +on: + workflow_call: + inputs: + pr_id: + type: number + required: true + subject: + type: string + default: '' + +jobs: + notify_pr_matrix: + runs-on: ubuntu-latest + steps: + - name: Get repo + run: | + echo REPO=$(echo ${{ github.repository}} | cut -d '/' -f 2) >> $GITHUB_ENV + - uses: actions/checkout@v4 + - name: Send message to Matrix bots channel + id: matrix-chat-message + uses: fadenb/matrix-chat-message@v0.0.6 + with: + homeserver: 'matrix.org' + token: ${{ secrets.MATRIX_TOKEN }} + channel: 
'!WjxEKjjINpyBRPFgxl:krbel.duckdns.org' + message: | + ${{ inputs.subject }} PR merged! (${{ env.REPO }}) [PR](https://github.com/${{ github.repository }}/pull/${{ inputs.pr_id }}) \ No newline at end of file diff --git a/.github/workflows/notify_testing_matrix.yml b/.github/workflows/notify_testing_matrix.yml new file mode 100644 index 000000000..39a0dce94 --- /dev/null +++ b/.github/workflows/notify_testing_matrix.yml @@ -0,0 +1,29 @@ +name: Notify Matrix Chat (Test State) + +# only triggers on pull request closed events +on: + workflow_call: + inputs: + subject: + type: string + default: 'no subject' + release_type: + type: string + required: true + version: + type: string + description: 'The version of the release' + default: 'version unknown' + +jobs: + notify_pr_matrix: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Send message to Matrix bots channel + uses: fadenb/matrix-chat-message@v0.0.6 + with: + homeserver: 'matrix.org' + token: ${{ secrets.MATRIX_TOKEN }} + channel: '!WjxEKjjINpyBRPFgxl:krbel.duckdns.org' + message: "Testing QA phase of a ${{ inputs.release_type }} release (${{ inputs.version }}) started (${{ github.repository }}). 
Subject to test: ${{ inputs.subject }}" \ No newline at end of file diff --git a/.github/workflows/propose_translation.yml b/.github/workflows/propose_translation.yml new file mode 100644 index 000000000..a7f54f9ee --- /dev/null +++ b/.github/workflows/propose_translation.yml @@ -0,0 +1,71 @@ +name: Propose Translation +on: + workflow_call: + inputs: + branch: + description: 'The branch to checkout and run the action in' + type: string + default: ${{ github.ref_name }} + action_branch: + description: 'The shared action branch to checkout' + type: string + default: main + python_version: + description: 'The python version to use' + type: string + default: "3.8" + language: + description: 'The language to translate to (ISO 3166-1 alpha-2 code; form "xx-xx")' + type: string + default: "" + locale_folder: + description: 'The base folder to look for localization files' + type: string + default: locale + reviewers: + description: 'A comma or newline-separated string of reviewers' + type: string + default: "emphasize" + + +jobs: + Propose_translation: + env: + TARGET_LANG: ${{ inputs.language }} + API_KEY: ${{ secrets.DL_API_KEY }} + LOCALE_FOLDER: ${{ github.workspace }}/action/package/${{ inputs.locale_folder }} + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: ${{ inputs.branch }} + path: action/package/ + - name: Checkout Scripts Repo + uses: actions/checkout@v4 + with: + repository: OpenVoiceOS/.github + ref: ${{ inputs.action_branch }} + path: action/github/ + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{ inputs.python_version }} + - name: Install Translate Tools + run: | + python -m pip install -r action/github/requirements/pip_translation.txt + - name: Run Translate Script + run: | + python ${{ github.workspace }}/action/github/scripts/translate.py + - name: Create Pull Request + uses: peter-evans/create-pull-request@v5 + with: + token: ${{ secrets.GH_PAT }} + path: 
action/package/ + commit-message: autotranslate + author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com> + title: "fix(localisation): Add ${{ inputs.language }} language" + body: Translations for review + labels: translation + branch: staging/translation_${{ inputs.language }} + reviewers: ${{ inputs.reviewers }} diff --git a/.github/workflows/pytest_file_or_dir.yml b/.github/workflows/pytest_file_or_dir.yml new file mode 100644 index 000000000..405e92699 --- /dev/null +++ b/.github/workflows/pytest_file_or_dir.yml @@ -0,0 +1,180 @@ +name: Unit Tests +on: + workflow_call: + inputs: + runner: + type: string + default: "ubuntu-latest" + branch: + description: 'Branch to use' + type: string + default: ${{ github.ref }} + action_branch: + description: 'The shared action branch to checkout' + type: string + default: main + system_deps: + description: 'Additional system dependencies (whitespace delimited) to install' + required: false + type: string + python_matrix: + description: 'Python matrix (string) to use' + type: string + default: '["3.8", "3.9", "3.10", "3.11"]' + pip_packages: + description: 'Additional Python packages (whitespace delimited) to install' + type: string + required: false + pip_install_dirs: + description: 'Additional local Python package directories to install' + type: string + required: false + install_extras: + description: 'comma delimited extras to install the package with (eg: "test,dev")' + type: string + required: false + timeout_minutes: + description: 'Timeout in minutes for the job' + type: number + default: 15 + test_location: + description: 'test file or directory to run relative to the base folder' + type: string + default: test/unittests + is_skill: + description: 'Whether this is an ovos skill or not' + type: boolean + default: false + codecov: + description: 'Whether to record the test code coverage' + type: boolean + default: true + append_coverage: + description: 'Whether to append coverage to codecov, used 
for consecutive runs' + type: boolean + default: false + upload_coverage: + description: 'Whether to upload coverage to codecov' + type: boolean + default: true + +jobs: + unit_tests: + strategy: + matrix: + python-version: ${{ fromJSON(inputs.python_matrix) }} + runs-on: ${{ inputs.runner }} + timeout-minutes: ${{ inputs.timeout_minutes }} + env: + FIRST_PYTHON_VERSION: ${{ fromJSON(inputs.python_matrix)[0] }} + steps: + - name: Checkout Package Repo + uses: actions/checkout@v4 + with: + ref: ${{ inputs.branch }} + path: action/package/ + fetch-depth: 0 + - name: Checkout Scripts Repo + uses: actions/checkout@v4 + with: + repository: OpenVoiceOS/.github + ref: ${{ inputs.action_branch }} + path: action/github/ + - name: Set up python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: set codecov run flag + id: codecov + run: | + if [ "${{ matrix.python-version }}" = "$FIRST_PYTHON_VERSION" ] && [ "${{ inputs.codecov }}" = "true" ]; then + echo "should_run=true" >> "$GITHUB_OUTPUT" + echo "codecov will run" + else + echo "should_run=false" >> "$GITHUB_OUTPUT" + echo "codecov will not run" + fi + - name: Install Common System Dependencies + run: | + sudo apt update + xargs sudo apt install -y < ${{ github.workspace }}/action/github/requirements/sys_deb_common_deps.txt + - name: Install Custom System Dependencies + if: ${{ inputs.system_deps != null }} + run: | + sudo apt install -y ${{ inputs.system_deps }} + - name: upgrade pip + run: | + pip install --upgrade pip + - name: Install Common Python Requirements + run: | + if [ "${{ inputs.is_skill }}" = "true" ]; then + pip install -r ${{ github.workspace }}/action/github/requirements/pip_skill_tests.txt + else + pip install -r ${{ github.workspace }}/action/github/requirements/pip_tests.txt + fi + - name: Install Custom Python Requirements + if: ${{ inputs.pip_packages != null }} + run: | + pip install ${{ inputs.pip_packages }} + - name: 
Install Python Package Directories + if: ${{ inputs.pip_install_dirs != null }} + run: | + cd ${{ github.workspace }}/action/package/ + IFS=$'\n' read -d '' -r -a dirs <<< "${{ inputs.pip_install_dirs }}" + for dir in "${dirs[@]}" + do + pip install -e "$dir" + done + - name: Install Package + run: | + cd ${{ github.workspace }}/action/package + PACKAGE_NAME=$(python setup.py --name) + if [ -n "${{ inputs.install_extras }}" ]; then + pip install -e .[${{ inputs.install_extras }}] + else + pip install -e . + fi + echo "PACKAGE_NAME=$PACKAGE_NAME" >> "$GITHUB_ENV" + - name: download test coverage artifact + if: ${{ steps.codecov.outputs.should_run == 'true' && inputs.append_coverage }} + uses: actions/download-artifact@v4 + with: + name: coverage_${{ github.repository_id }} + path: ${{ github.workspace }}/action/package/ + github-token: ${{ secrets.GH_PAT }} + - name: Test package with ${{ matrix.python-version }} + run: | + cd ${{ github.workspace }}/action/package/ + if [ ${{ steps.codecov.outputs.should_run }} == 'true' ]; then + echo "Running tests with codecov" + if [ "${{ inputs.append_coverage }}" = "true" ]; then + pytest --cov-append --cov=./ --cov-report=xml ${{ inputs.test_location }} + else + pytest --cov=./ --cov-report=xml ${{ inputs.test_location }} + coverage report + fi + else + echo "Running tests without codecov" + pytest ${{ inputs.test_location }} + fi + - name: upload test coverage artifact + if: ${{ steps.codecov.outputs.should_run == 'true' && !inputs.upload_coverage }} + uses: actions/upload-artifact@v4 + with: + name: coverage_${{ github.repository_id }} + overwrite: true + retention-days: 1 + if-no-files-found: ignore + path: ${{ github.workspace }}/action/package/coverage.xml + - name: Upload coverage + if: ${{ steps.codecov.outputs.should_run == 'true' && inputs.upload_coverage }} + uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: coverage.xml + root_dir: ${{ github.workspace }}/action/package + 
directory: ${{ github.workspace }}/action/package + working-directory: ${{ github.workspace }}/action/package + flags: unittests + name: ovos + verbose: true diff --git a/.github/workflows/python_build_tests.yml b/.github/workflows/python_build_tests.yml new file mode 100644 index 000000000..6c26a6f55 --- /dev/null +++ b/.github/workflows/python_build_tests.yml @@ -0,0 +1,91 @@ +name: Python Build Tests +on: + workflow_call: + inputs: + runner: + description: 'Runner to use' + type: string + default: ubuntu-latest + branch: + description: 'Branch to check out' + type: string + default: ${{ github.ref }} + system_deps: + description: 'Additional system dependencies (whitespace delimited) to install' + required: false + type: string + pip_packages: + description: 'Additional Python packages (whitespace delimited) to install' + type: string + required: false + python_matrix: + description: 'Python matrix (string) to use' + type: string + default: '["3.8", "3.9", "3.10", "3.11"]' + test_manifest: + description: 'if to test MANIFEST.in' + required: false + type: boolean + default: false + manifest_ignored: + description: 'Files to ignore in MANIFEST.in' + required: false + type: string + default: "test/**" + test_pipaudit: + description: 'if to test with pip-audit' + type: boolean + default: false + test_relative_paths: + description: 'if to test relative paths' + type: boolean + default: true + pipaudit_ignored: + type: string + default: "GHSA-r9hx-vwmv-q579 PYSEC-2022-43012" +jobs: + py_build_tests: + timeout-minutes: 15 + strategy: + max-parallel: 2 + matrix: + python-version: ${{ fromJson(inputs.python_matrix) }} + runs-on: ${{inputs.runner}} + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: ${{ inputs.branch }} + - name: Set up python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install Custom System Dependencies + if: ${{ inputs.system_deps != null }} + 
run: | + sudo apt update + sudo apt install -y ${{ inputs.system_deps }} + - name: upgrade pip + run: | + pip install --upgrade pip + - name: Install Build Tools + run: | + pip install --upgrade pip + python -m pip install build wheel + - name: Install Custom Python Requirements + if: ${{ inputs.pip_packages != null }} + run: | + pip install ${{ inputs.pip_packages }} + - name: Build Distribution Packages + run: | + python setup.py bdist_wheel sdist + - name: Test Manifest + if: ${{ inputs.test_manifest }} + uses: tj-actions/check-manifest@v1 + with: + args: --ignore ${{ inputs.manifest_ignored }} + - name: PIP Audit + if: ${{ inputs.test_pipaudit }} + uses: pypa/gh-action-pip-audit@v1.0.8 + with: + ignore-vulns: ${{ inputs.pipaudit_ignored }} diff --git a/.github/workflows/release_alpha.yml b/.github/workflows/release_alpha.yml new file mode 100644 index 000000000..0af756ef2 --- /dev/null +++ b/.github/workflows/release_alpha.yml @@ -0,0 +1,122 @@ +# This workflow will generate a distribution and upload it to PyPI + +name: Publish Alpha Build ...aX +on: + workflow_call: + inputs: + action_branch: + description: 'The shared action branch to checkout' + type: string + default: main + version_file: + description: 'The file location to read the version from' + type: string + default: version.py + python_version: + description: 'The python version to use' + type: string + default: "3.8" + changelog_file: + description: 'The file location to write the changelog to' + type: string + default: CHANGELOG.md + # release type other than alpha would need some semver foo + release_type: + description: 'The type of release to make' + type: string + default: alpha + + +jobs: + build_and_publish: + runs-on: ubuntu-latest + env: + VERSION_FILE: ${{ github.workspace }}/action/package/${{ inputs.version_file }} + SOURCE_BRANCH: ${{ inputs.release_type == 'alpha' && 'dev' || 'testing' }} + GIT_CLIFF_CONFIG: ${{ github.workspace }}/action/github/cliff.toml + GIT_CLIFF_WORKDIR: ${{ 
github.workspace }}/action/package/ + GITHUB_TOKEN: ${{ secrets.GH_PAT }} + GITHUB_REPO: ${{ github.repository }} + steps: + - name: Checkout Scripts Repo + uses: actions/checkout@v4 + with: + repository: OpenVoiceOS/.github + ref: ${{ inputs.action_branch }} + path: action/github/ + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{ inputs.python_version }} + - name: Install Python Dependencies + run: | + pip install --upgrade pip + pip install -r ${{ github.workspace }}/action/github/requirements/pip_base.txt + pip install -r ${{ github.workspace }}/action/github/requirements/pip_build_tools.txt + - name: Checkout Repository + uses: actions/checkout@v4 + with: + token: ${{ secrets.GH_PAT }} + ref: ${{ env.SOURCE_BRANCH }} + path: action/package/ + fetch-depth: 0 + # note: "bump-prelease" is a special operation, as it doesn't preserve order + # normally the next prerelease version of 0.3.0 would be 0.3.1-a.1 + # so we bump the patch/minor/major version and then add the prerelease (0.3.0 -> 0.3.0-a.1) + - name: Increment Version + id: version + run: | + cd ${{ github.workspace }}/action/github/scripts/ + PREVIOUS_VERSION=$(python semver_release_version.py --file ${{ env.VERSION_FILE }} --latest) + if [ "${{ inputs.release_type }}" != 'alpha' ]; then + NEXT_VERSION=$( python semver_release_version.py --next --type ${{ inputs.release_type }} --file ${{ env.VERSION_FILE }} --bump-prerelease --save) + else + NEXT_VERSION=$(python semver_release_version.py --next --type alpha --file ${{ env.VERSION_FILE }} --save) + fi + RELEASE_CYCLE_START=$( python semver_release_version.py --version "$VERSION" --latest --type patch) + echo "RELEASE_CYCLE_START=${RELEASE_CYCLE_START}" >> $GITHUB_ENV + echo "PREVIOUS_VERSION=${PREVIOUS_VERSION}" >> $GITHUB_ENV + echo "NEXT_VERSION=${NEXT_VERSION}" >> $GITHUB_ENV + - name: change working directory + run: cd ${{ github.workspace }}/action/package/ + - name: Create Changelog + run: | + cd ${{ 
github.workspace }}/action/package/ + python ${{ github.workspace }}/action/github/scripts/changelog_postprocess.py --since ${{ env.PREVIOUS_VERSION }} + env: + GIT_CLIFF_PREPEND: ${{ github.workspace }}/action/package/${{ inputs.changelog_file }} + GIT_CLIFF_TAG: ${{ env.NEXT_VERSION }} + - name: Commit Prerelease Changes (${{ env.PREVIOUS_VERSION }} -> ${{ env.NEXT_VERSION }}) + uses: stefanzweifel/git-auto-commit-action@v5 + with: + commit_message: Prerelease Changes ${{ env.NEXT_VERSION }} + repository: action/package/ + - name: Create Release Changelog + id: release_changelog + run: | + cd ${{ github.workspace }}/action/package/ + python ${{ github.workspace }}/action/github/scripts/changelog_postprocess.py --since ${{ env.PREVIOUS_VERSION }} + env: + GIT_CLIFF_TAG: ${{ env.NEXT_VERSION }} + - name: Create Pre-release + id: create_release + uses: ncipollo/release-action@v1 + with: + token: ${{ secrets.GH_PAT }} + tag: ${{ env.NEXT_VERSION }} + name: Release ${{ env.NEXT_VERSION }} + body: | + Changes in this Release + ${{ steps.release_changelog.outputs.changelog }} + commit: dev + prerelease: true + - name: Build Distribution Packages + run: | + mkdir -p ${{ github.workspace }}/action/dist + cd ${{ github.workspace }}/action/package/ + python setup.py sdist bdist_wheel --dist-dir ${{ github.workspace }}/action/dist + - name: Publish to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + password: ${{secrets.PYPI_TOKEN}} + packages-dir: action/dist/ diff --git a/.github/workflows/release_semver_publish.yml b/.github/workflows/release_semver_publish.yml new file mode 100644 index 000000000..076951def --- /dev/null +++ b/.github/workflows/release_semver_publish.yml @@ -0,0 +1,121 @@ +name: Publish SemVer Build +on: + workflow_call: + secrets: + PYPI_TOKEN: + required: true + inputs: + branch: + description: 'The branch to checkout' + type: string + default: master + action_branch: + description: 'The shared action branch to checkout' + type: string + 
default: main + python_version: + description: "Python version to use for the build process." + type: string + default: "3.8" + changelog_file: + description: "File to write the changelog to." + type: string + default: CHANGELOG.md + subject: + description: 'Subject title of the push/pull-request event to parse the release type.' + type: string + required: true + +jobs: + build_and_publish: + env: + PCCC_CONFIG_FILE: ${{ github.workspace }}/action/github/pccc.toml + GIT_CLIFF_CONFIG: ${{ github.workspace }}/action/github/cliff.toml + GIT_CLIFF_WORKDIR: ${{ github.workspace }}/action/package/ + GITHUB_TOKEN: ${{ secrets.GH_PAT }} + GITHUB_REPO: ${{ github.repository }} + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: ${{ inputs.branch }} + fetch-depth: 0 + path: action/package/ + - name: Checkout Scripts Repo + uses: actions/checkout@v4 + with: + repository: OpenVoiceOS/.github + ref: ${{ inputs.action_branch }} + path: action/github/ + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{inputs.python_version}} + - name: Install Python Dependencies + run: | + pip install -r ${{ github.workspace }}/action/github/requirements/pip_base.txt + pip install -r ${{ github.workspace }}/action/github/requirements/pip_build_tools.txt + - name: Get Release Version + run: | + RELEASE_VERSION=$(python ${{ github.workspace }}/action/package/setup.py --version) + echo "RELEASE_VERSION=${RELEASE_VERSION}" >> $GITHUB_ENV + - name: Parse Release Type + run: | + RELEASE_TYPE=$(python ${{ github.workspace }}/action/github/scripts/parse_semver_release.py) + echo "RELEASE_TYPE=${RELEASE_TYPE}" >> $GITHUB_ENV + RELEASE_CYCLE_START=$(python ${{ github.workspace }}/action/github/scripts/semver_release_version.py --latest --type "$RELEASE_TYPE" ) + echo "RELEASE_CYCLE_START=${RELEASE_CYCLE_START}" >> $GITHUB_ENV + - name: Create Release Changelog + id: release_changelog + run: | + cd ${{ github.workspace 
}}/action/package/ + python ${{ github.workspace }}/action/github/scripts/changelog_postprocess.py --since ${{ env.RELEASE_CYCLE_START }} + env: + GIT_CLIFF_TAG: ${{ env.RELEASE_VERSION }} + - name: Create Release + id: create_release + uses: ncipollo/release-action@v1 + with: + token: ${{ secrets.GH_PAT }} + tag: ${{ env.RELEASE_VERSION }} + name: Release ${{ env.RELEASE_VERSION }} + body: | + Changes in this Release + ${{ steps.release_changelog.outputs.changelog }} + - name: Build Distribution Packages + run: | + mkdir -p ${{ github.workspace }}/action/dist + cd ${{ github.workspace }}/action/package/ + python setup.py sdist bdist_wheel --dist-dir ${{ github.workspace }}/action/dist + - name: Publish to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + password: ${{secrets.PYPI_TOKEN}} + packages-dir: action/dist/ + # set cached test status to complete + - name: Set test status + run: | + echo "complete" > test-status + - name: Delete Test Tracking Cache + run: | + gh extension install actions/gh-actions-cache + + echo "Fetching list of cache key" + cacheKeys=$(gh actions-cache list -R ${{ github.repository }} -L 100 | cut -f 1 ) + + ## Setting this to not fail the workflow while deleting cache keys. + set +e + echo "Deleting caches..." 
+ for cacheKey in $cacheKeys + do + gh actions-cache delete $cacheKey -R ${{ github.repository }} --confirm + done + echo "Done" + env: + GH_TOKEN: ${{ secrets.GH_PAT }} + - name: Cache test status + uses: actions/cache@v2 + with: + path: test-status + key: test-status-${{ runner.os }} diff --git a/.github/workflows/release_semver_pull_master.yml b/.github/workflows/release_semver_pull_master.yml new file mode 100644 index 000000000..b89a9bc49 --- /dev/null +++ b/.github/workflows/release_semver_pull_master.yml @@ -0,0 +1,102 @@ +name: Declare Stable Release (Proposal) +on: + workflow_call: + inputs: + action_branch: + description: 'The shared action branch to checkout' + type: string + default: main + python_version: + description: 'The python version to use' + type: string + default: "3.8" + changelog_file: + type: string + description: 'The file to write the changelog to' + default: CHANGELOG.md + +jobs: + propose_stable_release: + runs-on: ubuntu-latest + env: + TARGET_BRANCH: 'master' + PCCC_CONFIG_FILE: ${{ github.workspace }}/action/github/pccc.toml + GIT_CLIFF_CONFIG: ${{ github.workspace }}/action/github/cliff.toml + GIT_CLIFF_WORKDIR: ${{ github.workspace }}/action/package/ + GITHUB_TOKEN: ${{ secrets.GH_PAT }} + GITHUB_REPO: ${{ github.repository }} + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + token: ${{ secrets.GH_PAT }} + ref: testing + fetch-depth: 0 + path: action/package/ + - name: Checkout Scripts Repo + uses: actions/checkout@v4 + with: + repository: OpenVoiceOS/.github + ref: ${{ inputs.action_branch }} + path: action/github/ + - name: Install Common System Dependencies + run: | + sudo apt update + xargs sudo apt install -y < ${{ github.workspace }}/action/github/requirements/sys_deb_common_deps.txt + - name: Authenticate GitHub CLI + run: | + unset GITHUB_TOKEN + echo "${{ secrets.GH_PAT }}" | gh auth login --with-token + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: 
${{inputs.python_version}} + - name: Install Python Dependencies + run: | + pip install -r ${{ github.workspace }}/action/github/requirements/pip_base.txt + - name: Get Release Version + run: | + RELEASE_VERSION=$(python ${{ github.workspace }}/action/package/setup.py --version) + echo "RELEASE_VERSION=${RELEASE_VERSION}" >> $GITHUB_ENV + - name: Parse Release Type + run: | + RELEASE_TYPE=$(python ${{ github.workspace }}/action/github/scripts/parse_semver_release.py) + echo "RELEASE_TYPE=${RELEASE_TYPE}" >> $GITHUB_ENV + - name: Check Latest Release + run: | + LATEST_RELEASE=$(python ${{ github.workspace }}/action/github/scripts/semver_release_version.py --latest --type ${{ env.RELEASE_TYPE }}) + echo "LATEST_RELEASE=${LATEST_RELEASE}" >> $GITHUB_ENV + - name: Create Release Changelog (Proposal) + id: release_changelog + run: | + cd ${{ github.workspace }}/action/package/ + python ${{ github.workspace }}/action/github/scripts/changelog_postprocess.py --since ${{ env.LATEST_RELEASE }} + env: + GIT_CLIFF_TAG: ${{ env.RELEASE_VERSION }} + - name: Create Master Branch (if necessary) + run: | + cd ${{ github.workspace }}/action/package/ + git fetch origin dev:dev + TARGET_BRANCH_EXISTS=$(git ls-remote --heads origin ${{ env.TARGET_BRANCH }} | wc -l) + INITIAL_COMMIT=$(git rev-list --max-parents=0 dev) + if [ "$TARGET_BRANCH_EXISTS" -eq "0" ]; then + if [ ${{ env.LATEST_RELEASE }} == "0.0.0" ]; then + git checkout -b ${{ env.TARGET_BRANCH }} $INITIAL_COMMIT + else + git checkout -b ${{ env.TARGET_BRANCH }} tags/${{ env.LATEST_RELEASE }} + fi + git push origin ${{ env.TARGET_BRANCH }} --force + fi + - name: Create Pull Request to ${{ env.TARGET_BRANCH }} + run: | + cd ${{ github.workspace }}/action/package/ + gh pr create \ + --title "ci(release): declare ${{ env.RELEASE_TYPE }} release stable (${{ env.RELEASE_VERSION }})" \ + --body "$(cat << EOF + Included changes: + ${{ steps.release_changelog.outputs.changelog }} + EOF + )" \ + --base ${{ env.TARGET_BRANCH }} \ + 
--head testing \ + --assignee ${{ github.actor }} diff --git a/.github/workflows/release_semver_start.yml b/.github/workflows/release_semver_start.yml new file mode 100644 index 000000000..aee966583 --- /dev/null +++ b/.github/workflows/release_semver_start.yml @@ -0,0 +1,404 @@ +name: Kickoff Testing Release +on: + workflow_call: + inputs: + branch: + type: string + default: dev + action_branch: + description: 'The shared action branch to checkout' + type: string + default: main + python_version: + description: 'The python version to use' + type: string + default: "3.8" + version_file: + description: 'The file location to read the version from' + type: string + default: version.py + release_type: + type: string + description: 'The type of release to make' + subject: + type: string + description: 'Subject title of the push/pull request event to parse the release type.' + required: false + locale_folder: + type: string + description: 'The folder location of the locale files' + required: false + update_intentfile: + type: string + description: 'The file location of the intent test file to update' + required: false + changelog_file: + type: string + description: 'The file location of the changelog' + default: CHANGELOG.md + +jobs: + parse_conventional_commits: + outputs: + release_type: ${{ steps.parse.outputs.release_type }} + runs-on: ubuntu-latest + env: + PCCC_CONFIG_FILE: ${{ github.workspace }}/action/github/pccc.toml + steps: + - name: Checkout Package Repo + uses: actions/checkout@v4 + with: + ref: ${{ inputs.branch }} + path: action/package/ + fetch-depth: 0 + - name: Checkout Scripts Repo + uses: actions/checkout@v4 + with: + repository: OpenVoiceOS/.github + ref: ${{ inputs.action_branch }} + path: action/github/ + - name: Set up python ${{ inputs.python_version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ inputs.python_version }} + - name: Install Common System Dependencies + run: | + sudo apt update + xargs sudo apt install -y < ${{ 
github.workspace }}/action/github/requirements/sys_deb_common_deps.txt + - name: Install Common Python Requirements + run: | + pip install -r ${{ github.workspace }}/action/github/requirements/pip_base.txt + - name: Parse Conventional Commits + id: parse + run: | + export TITLE="${{ inputs.subject }}" + cd ${{ github.workspace }}/action/github + RELEASE_TYPE=$(python scripts/parse_semver_release.py) + echo "release_type=$RELEASE_TYPE" >> $GITHUB_OUTPUT + autotranslate: + needs: parse_conventional_commits + runs-on: ubuntu-latest + outputs: + translated: ${{ steps.changes.outputs.translated }} + permissions: + contents: write + env: + API_KEY: ${{secrets.DL_API_KEY}} + LOCALE_FOLDER: ${{ github.workspace }}/action/package/${{ inputs.locale_folder }} + INTENT_TEST_FILE: ${{ github.workspace }}/action/package/${{ inputs.update_intentfile }} + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: ${{ github.ref }} + path: action/package/ + - name: Checkout Scripts Repo + uses: actions/checkout@v4 + with: + repository: OpenVoiceOS/.github + ref: ${{ inputs.action_branch }} + path: action/github/ + - uses: dorny/paths-filter@v2 + if: ${{ inputs.locale_folder != '' }} + id: filter + with: + working-directory: action/package/ + filters: | + us_specific: + - '${{ inputs.locale_folder }}/en-us/**' + - '${{ inputs.locale_folder }}/../dialog/en-us/**' + - '${{ inputs.locale_folder }}/../vocab/en-us/**' + general: + - '${{ inputs.locale_folder }}/**' + - '${{ inputs.locale_folder }}/../dialog/**' + - '${{ inputs.locale_folder }}/../vocab/**' + - name: Setup Python + if: steps.filter.outputs.general == 'true' + uses: actions/setup-python@v5 + with: + python-version: ${{ inputs.python_version }} + - name: Install Python Requirements + if: steps.filter.outputs.general == 'true' + run: | + python -m pip install -r action/github/requirements/pip_translation.txt + - name: Pull latest changes + if: steps.filter.outputs.general == 'true' + 
run: | + git pull origin ${{ inputs.branch }} + - name: Auto Translate + if: steps.filter.outputs.general == 'true' + run: | + python action/github/scripts/translate.py + # 0 if no changes were made, 1 if changes were made + - name: changes made? + id: changes + run: | + cd ${{ github.workspace }}/action/package/ + git diff --exit-code + echo "translated=$?" >> $GITHUB_OUTPUT + - name: Commit autotranslation to ${{ inputs.branch }} + if: steps.changes.outputs.translated == '1' + uses: stefanzweifel/git-auto-commit-action@v5 + with: + commit_message: "ci(localisation): autotranslate common languages" + repository: action/package/ + branch: ${{ inputs.branch }} + - name: update resource test file + if: steps.changes.outputs.translated == '1' && inputs.update_intentfile != '' + run: | + python action/github/scripts/update_intent_testfile.py + - name: Commit resource test file changes to ${{ inputs.branch }} + if: steps.changes.outputs.translated == '1' && inputs.update_intentfile != '' + uses: stefanzweifel/git-auto-commit-action@v5 + with: + commit_message: "ci(localisation): update resource test file" + repository: action/package/ + branch: ${{ inputs.branch }} + # alpha release (+ testing) + alpha_release: + needs: [parse_conventional_commits, autotranslate] + if: (contains(fromJson('["patch", "minor", "major", "alpha"]'), needs.parse_conventional_commits.outputs.release_type)) || (contains(fromJson('["patch", "minor", "major", "alpha"]'), inputs.release_type)) + uses: openvoiceos/.github/.github/workflows/release_alpha.yml@feat/shared_actions1 + secrets: inherit + with: + action_branch: ${{ inputs.action_branch }} + version_file: ${{ inputs.version_file }} + python_version: ${{ inputs.python_version }} + changelog_file: ${{ inputs.changelog_file }} + # >= patch release + kickoff_testing: + needs: [parse_conventional_commits, alpha_release, autotranslate] + if: (contains(fromJson('["patch", "minor", "major"]'), needs.parse_conventional_commits.outputs.release_type) 
|| (inputs.release_type != 'alpha' && inputs.release_type != '')) + runs-on: ubuntu-latest + env: + TARGET_BRANCH: 'testing' + GITHUB_REPOSITORY: ${{ github.repository }} + GITHUB_TOKEN: ${{ secrets.GH_PAT }} + VERSION_FILE: ${{ github.workspace}}/action/package/${{ inputs.version_file }} + RELEASE_TYPE: ${{ needs.parse_conventional_commits.outputs.release_type }} + GH_PAT: ${{ secrets.GH_PAT }} + steps: + - name: Checkout Package Repo + uses: actions/checkout@v4 + with: + token: ${{ secrets.GH_PAT }} + ref: ${{ inputs.branch }} + path: action/package/ + fetch-depth: 0 + - name: Checkout Scripts Repo + uses: actions/checkout@v4 + with: + repository: OpenVoiceOS/.github + ref: ${{ inputs.action_branch }} + path: action/github/ + - name: Install Common System Dependencies + run: | + sudo apt update + xargs sudo apt install -y < ${{ github.workspace }}/action/github/requirements/sys_deb_common_deps.txt + - name: Authenticate GitHub CLI + run: | + unset GITHUB_TOKEN + echo "${{ secrets.GH_PAT }}" | gh auth login --with-token + - name: Set up python ${{ inputs.python_version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ inputs.python_version }} + - name: Install Common Python Requirements + run: | + pip install -r ${{ github.workspace }}/action/github/requirements/pip_base.txt + - name: Bump to next ${{ env.RELEASE_TYPE }} version + run: | + cd ${{ github.workspace }}/action/package/ + git checkout ${{ inputs.branch }} + VERSION=$(python setup.py --version) + echo "VERSION=${VERSION}" >> $GITHUB_ENV + cd ${{ github.workspace }}/action/github/scripts/ + NEXT_VERSION=$( python semver_release_version.py --next --file ${{ env.VERSION_FILE }} --type ${{ env.RELEASE_TYPE }} --save ) + echo "NEXT_VERSION=${NEXT_VERSION}" >> $GITHUB_ENV + - name: Push Version Change (${{ env.VERSION }} -> ${{ env.NEXT_VERSION }}) + uses: stefanzweifel/git-auto-commit-action@v5 + with: + commit_message: Increment Version to ${{ env.NEXT_VERSION }} + repository: action/package/ 
+ - name: Force Push dev to testing branch + run: | + cd ${{ github.workspace }}/action/package/ + git push origin dev:testing --force + # set cached test status to active + - name: Set test status + run: | + cd ${{ github.workspace }} + echo "testing" > test-status + - name: Delete Test Tracking Cache + run: | + gh extension install actions/gh-actions-cache + + echo "Fetching list of cache key" + cacheKeys=$(gh actions-cache list -R ${{ github.repository }} -L 100 | cut -f 1 ) + + ## Setting this to not fail the workflow while deleting cache keys. + set +e + echo "Deleting caches..." + for cacheKey in $cacheKeys + do + gh actions-cache delete $cacheKey -R ${{ github.repository }} --confirm + done + echo "Done" + env: + GH_TOKEN: ${{ secrets.GH_PAT }} + - name: Cache test status + uses: actions/cache@v2 + with: + path: test-status + key: test-status-${{ runner.os }} + - name: Notify Matrix Room + uses: ./action/github/.github/actions/notify_testing_matrix + with: + matrix_token: ${{ secrets.MATRIX_TOKEN }} + subject: ${{ inputs.subject }} + release_type: ${{ env.RELEASE_TYPE }} + version: ${{ env.NEXT_VERSION }} + repository: ${{ github.repository }} + # testing release + # note: autotranslate as dependency requires the job running without restrictions! 
+ cherry_pick_testing: + needs: [parse_conventional_commits, autotranslate] + if: ${{ needs.parse_conventional_commits.outputs.release_type == 'release' }} + env: + VERSION_FILE: ${{ github.workspace}}/action/package/${{ inputs.version_file }} + GIT_CLIFF_CONFIG: ${{ github.workspace }}/action/github/cliff.toml + GIT_CLIFF_WORKDIR: ${{ github.workspace }}/action/package/ + GITHUB_TOKEN: ${{ secrets.GH_PAT }} + GITHUB_REPO: ${{ github.repository }} + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + token: ${{ secrets.GH_PAT }} + fetch-depth: 0 + ref: dev + path: action/package/ + - name: Checkout Scripts Repo + uses: actions/checkout@v4 + with: + repository: OpenVoiceOS/.github + ref: ${{ inputs.action_branch }} + path: action/github/ + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{ inputs.python_version }} + - name: Install Python Dependencies + run: | + pip install --upgrade pip + pip install -r ${{ github.workspace }}/action/github/requirements/pip_base.txt + pip install -r ${{ github.workspace }}/action/github/requirements/pip_build_tools.txt + - name: Versioning + run: | + cd ${{ github.workspace }}/action/package/ + git checkout testing + TEST_VERSION=$(python setup.py --version) + echo "TEST_VERSION=${TEST_VERSION}" >> $GITHUB_ENV + PREVIOUS_VERSION=$(python ${{ github.workspace }}/action/github/scripts/semver_release_version.py --version "$TEST_VERSION" --latest) + NEXT_VERSION=$(python ${{ github.workspace }}/action/github/scripts/semver_release_version.py --version "$PREVIOUS_VERSION" --next --type alpha) + RELEASE_CYCLE_START=$( python ${{ github.workspace }}/action/github/scripts/semver_release_version.py --version "$TEST_VERSION" --latest --type patch) + echo "RELEASE_CYCLE_START=${RELEASE_CYCLE_START}" >> $GITHUB_ENV + echo "PREVIOUS_VERSION=${PREVIOUS_VERSION}" >> $GITHUB_ENV + echo "NEXT_VERSION=${NEXT_VERSION}" >> $GITHUB_ENV + git checkout dev + - name: Create 
Changelog + run: | + cd ${{ github.workspace }}/action/package/ + python ${{ github.workspace }}/action/github/scripts/changelog_postprocess.py --since ${{ env.PREVIOUS_VERSION }} + env: + GIT_CLIFF_PREPEND: ${{ github.workspace }}/action/package/${{ inputs.changelog_file }} + GIT_CLIFF_TAG: ${{ env.NEXT_VERSION }} + - name: Commit Prerelease Changes (${{ env.PREVIOUS_VERSION }} -> ${{ env.NEXT_VERSION }}) + uses: stefanzweifel/git-auto-commit-action@v5 + with: + commit_message: Prerelease Changes ${{ env.NEXT_VERSION }} + repository: action/package/ + # based on the workflow run, set the number of commits to cherry pick + # note: merge commits are filtered + # note: COMMITS=2 (commit/merge commit + version bump) + - name: get commit shas to cherry pick + id: get_commit_sha + run: | + cd ${{ github.workspace }}/action/package/ + if [ -f .git/shallow ]; then + git fetch --unshallow + fi + git checkout dev + COMMITS=2 + if [ "${{ needs.autotranslate.outputs.translated }}" == "1" ]; then + COMMITS=$((COMMITS + 1)) + fi + echo "Commit History (max. 
10):" + echo "--------------" + git log --pretty=format:"%h %s" -s HEAD --max-count=10 + echo -e "\n--------------" + COMMIT_SHA=$(git log -n "$COMMITS" --pretty=format:"%H" | awk '{a[i++]=$0} END {for (j=i-1; j>=0;) print a[j--] }' | tr '\n' ' ') + echo "Considerring $COMMITS commits to cherry pick: $COMMIT_SHA" + echo "COMMIT_SHA=${COMMIT_SHA}" >> $GITHUB_ENV + - name: Checkout At Tag And Cherry Pick + run: | + cd ${{ github.workspace }}/action/package/ + git config user.name "EggmanBot" + git config user.email "openvoiceos@gmail.com" + git checkout -b temp-branch "tags/$PREVIOUS_VERSION" + IFS=' ' read -ra ADDR <<< "${{ env.COMMIT_SHA }}" + for i in "${ADDR[@]}"; do + PARENTS_COUNT=$(git cat-file -p $i | grep -c parent) + if [ $PARENTS_COUNT -gt 1 ] + then + git cherry-pick -m 1 --allow-empty $i + else + git cherry-pick --allow-empty $i + fi + done + CHERRYPICKED_SHA=$(git rev-parse HEAD) + echo "CHERRYPICKED_SHA=${CHERRYPICKED_SHA}" >> $GITHUB_ENV + - name: Push changes to testing branch + run: | + cd ${{ github.workspace }}/action/package/ + git push --force origin temp-branch:testing + - name: Create Changelog + id: release_changelog + run: | + cd ${{ github.workspace }}/action/package/ + python ${{ github.workspace }}/action/github/scripts/changelog_postprocess.py --since ${{ env.PREVIOUS_VERSION }} + env: + GIT_CLIFF_TAG: ${{ env.NEXT_VERSION }} + - name: (Re)Set and push Release Version + run: | + python ${{ github.workspace }}/action/github/scripts/semver_release_version.py --version ${{ env.TEST_VERSION }} --save ${{ env.VERSION_FILE }} + cd ${{ github.workspace }}/action/package/ + git diff --exit-code || (git commit -am "Increment Version to ${{ env.TEST_VERSION }}" && git push origin temp-branch:testing) + # not globally bumped, only for the release + - name: Bump to next alpha version (for the release) + run: | + python ${{ github.workspace }}/action/github/scripts/semver_release_version.py --version ${{ env.NEXT_VERSION }} --save ${{ 
env.VERSION_FILE }} + - name: Create Pre-release + id: create_release + uses: ncipollo/release-action@v1 + with: + token: ${{ secrets.GH_PAT }} + tag: ${{ env.NEXT_VERSION }} + name: Release ${{ env.NEXT_VERSION }} + body: | + Changes in this Release + ${{ steps.release_changelog.outputs.changelog }} + commit: ${{ env.CHERRYPICKED_SHA}} + prerelease: true + - name: Build Distribution Packages + run: | + mkdir -p ${{ github.workspace }}/action/dist + cd ${{ github.workspace }}/action/package/ + python setup.py sdist bdist_wheel --dist-dir ${{ github.workspace }}/action/dist + - name: Publish to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + password: ${{secrets.PYPI_TOKEN}} + packages-dir: action/dist/ diff --git a/.github/workflows/skill_test_installation.yml b/.github/workflows/skill_test_installation.yml new file mode 100644 index 000000000..8153f1a42 --- /dev/null +++ b/.github/workflows/skill_test_installation.yml @@ -0,0 +1,87 @@ +name: Skill Installation Tests +on: + workflow_call: + inputs: + runner: + description: 'Runner to use' + type: string + default: ubuntu-latest + branch: + description: 'Branch to use' + type: string + default: ${{ github.ref }} + action_branch: + description: 'The shared action branch to checkout' + type: string + default: main + python_matrix: + description: 'Python versions to test against' + type: string + default: '["3.8", "3.9", "3.10", "3.11"]' + system_deps: + description: 'Additional system dependencies (whitespace delimited) to install' + required: false + type: string + pip_packages: + description: 'Additional Python packages (whitespace delimited) to install' + required: false + type: string + skill_id: + description: 'Skill ID to test' + required: true + type: string + skill_location: + description: 'Relative path to skill' + type: string + default: '' + +jobs: + test_skill_install: + runs-on: ${{inputs.runner}} + timeout-minutes: 5 + strategy: + matrix: + python-version: ${{ fromJson(inputs.python_matrix) 
}} + steps: + - name: Checkout Skill Repo + uses: actions/checkout@v4 + with: + ref: ${{inputs.branch}} + path: action/skill/ + - name: Checkout Scripts Repo + uses: actions/checkout@v4 + with: + repository: OpenVoiceOS/.github + ref: ${{inputs.action_branch}} + path: action/github/ + - name: Set up python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install Common System Dependencies + run: | + sudo apt update + xargs sudo apt install -y < ${{ github.workspace }}/action/github/requirements/sys_deb_common_deps.txt + - name: Install Custom System Dependencies + if: ${{ inputs.system_deps != null }} + run: | + sudo apt install -y ${{ inputs.system_deps }} + - name: upgrade pip + run: | + pip install --upgrade pip + - name: Install Common Python Requirements + run: | + pip install -r ${{ github.workspace }}/action/github/requirements/pip_skill_tests.txt + - name: Install Custom Python Requirements + if: ${{ inputs.pip_packages != null }} + run: | + pip install ${{ inputs.pip_packages }} + - name: Install Python Skill + run: | + cd ${{ github.workspace }}/action/skill + pip install . 
+ - name: Test Skill Installation + run: | + export TEST_SKILL_PATH="${{ github.workspace }}/action/skill/${{inputs.skill_location}}" + export TEST_SKILL_ENTRYPOINT_NAME="${{ inputs.skill_id }}" + pytest ${{ github.workspace }}/action/github/test/test_skill_install.py diff --git a/.github/workflows/skill_test_resources.yml b/.github/workflows/skill_test_resources.yml new file mode 100644 index 000000000..e74a4639b --- /dev/null +++ b/.github/workflows/skill_test_resources.yml @@ -0,0 +1,96 @@ +name: Skill Ressource Tests +on: + workflow_call: + inputs: + runner: + description: 'The runner to use' + type: string + default: "ubuntu-latest" + branch: + description: 'The branch to use' + type: string + default: ${{ github.ref }} + action_branch: + description: 'The shared action branch to checkout' + type: string + default: main + timeout: + type: number + default: 15 + system_deps: + description: 'Additional system dependencies (whitespace delimited) to install' + required: false + type: string + python_matrix: + description: 'Python matrix (string) to use' + type: string + default: '["3.8", "3.9", "3.10", "3.11"]' + pip_packages: + description: 'Additional Python packages (whitespace delimited) to install' + required: false + type: string + intent_testfile: + description: 'Intent file to test' + required: true + type: string + +jobs: + test_resources: + runs-on: ${{ inputs.runner }} + timeout-minutes: ${{ inputs.timeout }} + env: + INTENT_TEST_FILE: ${{ github.workspace }}/action/skill/${{ inputs.intent_testfile }} + strategy: + matrix: + python-version: ${{ fromJSON(inputs.python_matrix) }} + steps: + - name: Checkout Skill Repo + uses: actions/checkout@v4 + with: + ref: ${{inputs.branch}} + path: action/skill/ + - name: Checkout Scripts Repo + uses: actions/checkout@v4 + with: + repository: OpenVoiceOS/.github + ref: ${{inputs.action_branch}} + path: action/github/ + - name: Set up python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + 
python-version: ${{ matrix.python-version }} + - name: Install Common System Dependencies + run: | + sudo apt update + xargs sudo apt install -y < ${{ github.workspace }}/action/github/requirements/sys_deb_common_deps.txt + - name: Install Custom System Dependencies + if: ${{ inputs.system_deps != null }} + run: | + sudo apt install -y ${{ inputs.system_deps }} + - name: upgrade pip + run: | + pip install --upgrade pip + - name: Install Common Python Requirements + run: | + pip install -r ${{ github.workspace }}/action/github/requirements/pip_skill_tests.txt + - name: Install Custom Python Requirements + if: ${{ inputs.pip_packages != null }} + run: | + pip install ${{ inputs.pip_packages }} + - name: Install Python Skill + run: | + cd ${{ github.workspace }}/action/skill + pip install . + PKG_NAME=$(python setup.py --name) + BASE_FOLDER=$(echo $PKG_NAME | tr '-' '_') + PKG_FOLDER=$(pip show -f $PKG_NAME | grep Location | cut -d ' ' -f 2) + echo "TEST_SKILL_PKG_FOLDER=$PKG_FOLDER/$BASE_FOLDER" >> $GITHUB_ENV + - name: Update Resource Testfile + run: | + python action/github/scripts/update_intent_testfile.py + - name: Test Skill Resources + run: | + export INTENT_ENGINE="padatious" + echo "Intent Test File: ${{ env.INTENT_TEST_FILE }}" + echo "Skill Folder: ${{ env.TEST_SKILL_PKG_FOLDER }}" + pytest action/github/test/test_skill_resourcesV2.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..92961755c --- /dev/null +++ b/.gitignore @@ -0,0 +1,166 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +.idea/ + +# VS Code +# This ignores the .vscode/ to be ignored in the case where it's in the root of your repository +# which is usually not the case when it's inside .config/ which should be added to your .gitignore +# as well +.vscode/ \ No newline at end of file diff --git a/README.md b/README.md index d89e6d19c..c9417f6b4 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,379 @@ +## Conventional Commits +We _slowly_ adopt [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/) to our repositories. +Goal is to have a consistent commit message format that can be used to + - _streamline cd/ci_ or simply + - _help understand commits better_ for every party involved. + +The usage is not mandatory atm, but we encourage to use it. (maintainers will adjust the commit messages if necessary) + +------------ +_Types_: + +Using below prefixes (eg. `fix: ...`) will automate the versioning and labelling of the pull requests. + - `fix`: patches a bug in your codebase. This correlates with _**PATCH**_ in Semantic Versioning. 
+ - `feat`: introduces a new feature to the codebase. This correlates with _**MINOR**_ in Semantic Versioning. + - BREAKING CHANGE: A commit that has a _footer_ `BREAKING CHANGE:`, or _appends a `!`_ after the type/scope, introduces a breaking API change. This correlates with _**MAJOR**_ in Semantic Versioning. A BREAKING CHANGE can be part of commits of any type. + + Example (usage of _!_ and _footer_): + ``` + chore!: drop support for Node 6 + + BREAKING CHANGE: use JavaScript features not available in Node 6. + ``` + Other _types_ that create an alpha release - if not breaking: + - `build`: Changes that affect the build system or external dependencies. + - `chore`: Changes which don’t change source code or tests e.g. changes to the build process, auxiliary tools, libraries. + - `perf`: A code change that improves performance. + - `refactor`: A code change that neither fixes a bug nor adds a feature. + - `revert`: Revert something. + + _Types_ that don't get a release: + - `ci`: Changes to CI configuration files and scripts. + - `style`: Changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc). + - `test`: Adding missing tests or correcting existing tests. + - `docs`: Documentation only changes. + +--------------- + _Scopes_: + (context of the change; eg. `fix(parser): ...`) + + - `config`: Changes the configuration + - `parser`: Changes to the intent parser code, + - ... 
+ - `cli`: Changes/additions to the command line interfaces, + - `gui`: Changes/additions to the graphical user interface, + - `audio`: Changes to the audio handling, + - `tts`: Changes to the text-to-speech handling, + - `sst`: Changes to the speech-to-text handling, + - `nlp`: Changes to the natural language processing, + - `plugins`: Changes to the plugin system, + - `localisation`: Changes to the localisation files, + - `resources`: Changes to the resource files, + - `release`: Everything related to the release process + + This is an excerpt [list of predefined scopes](https://github.com/OpenVoiceOS/.github/blob/feat/shared_actions1/pccc.toml). This list is not exclusive, but the main system components. + If you have to be more specific, feel free to use submodule names or more detailed scopes. + +-------------- +## Premises + - The `main` branch is the stable branch. + - The `dev` branch is the development branch. + - The `testing` branch is a persistent branch for testing purposes. +--------- + - Pushed or merged commits without a proper title/commit message (Conventional Commit spec) will get no release. + - Translations are supposed to be prefixed as `fix` or `feat` to get a stable release. + - General rule: PRs/commits are _dev only_, other branches will be protected. + - PRs/commits that directly address issues with a **release** need a prefix/scope `release():`. _Those_ will be part of the patch/minor/major release. + - remember: `ci`/`style`/`test`/`docs` commits don't get a release. + - The version of the `testing` branch is to be held steady. + - A fix for later releases has to be committed as usual with `fix: ...`. Those PRs get a warning label "test ongoing". Usually it's best to include them into the ongoing release/test. + - All pending PRs get a daily check (00:00 UTC) and labeling is adjusted accordingly. 
+ - The PR title of release PRs that conclude the testing phase (_Proposals_) mustn't be edited + - If those proposals need additional changes, the PR is to be closed until the changes are committed (to dev). + - Release-tags: The tag resembles the semantic versioning (eg. `1.2.3`; no prefix!, alphas will be formatted appropriately) + + TODO (per repo): + - `setup.py`: setuptools can't cope with semver compliance: `x.x.x-alpha...` stays `x.x.xax` for now + - add `main` branch protection rules + +------------ +## Workflows/Actions +**You can also find the implementation of the workflows in the [`skill-template-repo`](https://github.com/OpenVoiceOS/skill-template-repo)** + +## Release Handling (alpha/patch/minor/major versions) +_Alpha releases are directly published without going through a test phase_ + +Strategy: 3-staged + - _Manually_ propose a testing start or _automatically_ with setting "Conventional Commits" + - _Manually_ conclude testing phase, propose a stable release (PR) + - _Automatically_ publishing a stable release (on merge) + +**Start release mechanism** + +```yaml +name: Start release mechanism +on: + workflow_dispatch: + inputs: + release_type: + type: choice + options: + - "alpha" + - "patch" + - "minor" + - "major" + # Make SURE that squashed PRs do have the PRs title as commit message !!!!!!!! 
+ push: + branches: + - dev + paths-ignore: + - 'ovos_testpkg/version.py' + - 'test/**' + - 'examples/**' + - '.github/**' + - '.gitignore' + - 'CHANGELOG.md' + - 'MANIFEST.in' + - 'scripts/**' + +jobs: + start_semver_release_mechanism: + if: github.actor != 'EggmanBot' + uses: openvoiceos/.github/.github/workflows/release_semver_start.yml@feat/shared_actions1 + with: + branch: dev # Branch to use, default: branch that triggered the action + action_branch: feat/shared_actions1 # Shared action branch to use, default: main + python_version: "3.10" # the python version to use + version_file: "ovos_testpkg/version.py" # the file containing the version number + locale_folder: ovos_testpkg/locale # the location of the base localisation folder + update_intentfile: test/unittests/test_intent.yaml # the intent file resources gets tested against + release_type: ${{ inputs.release_type || null }} # if manually triggered, set a release type + subject: ${{ github.event.head_commit.message || null }} # on push, the commit message is used as release subject +``` +**Conclude testing phase** + +_After the testing phase, a PR is opened to propose the stable release_ +```yaml +name: Conclude testing phase +on: + workflow_dispatch: + +jobs: + pull_to_master: + uses: openvoiceos/.github/.github/workflows/release_semver_pull_master.yml@feat/shared_actions1 + secrets: inherit + with: + action_branch: shared_actions1 # Shared action branch to use, default: main + python_version: "3.10" # the python version to use +``` +**Publishing stable release** + +```yaml +name: Publish Stable Release + +on: + pull_request: + types: [ closed ] + branches: + - master + +jobs: + publish_stable_release: + if: > + github.event.pull_request.merged == true && + github.actor != 'EggmanBot' && + (contains(github.event.pull_request.title, 'patch release stable') || + contains(github.event.pull_request.title, 'minor release stable') || + contains(github.event.pull_request.title, 'major release stable')) + uses: 
openvoiceos/.github/.github/workflows/release_semver_publish.yml@feat/shared_actions1 + secrets: inherit + with: + action_branch: feat/shared_actions1 + python_version: "3.10" + subject: ${{ github.event.pull_request.title }} +``` +----------------- + +## Propose translatios +Introduce a new language localisation by proposing a translation via pull request. (creating new branch staging/translation_xx-xx) +```yaml +name: Propose Translations +on: + workflow_dispatch: + inputs: # multiple ways to set this up + translation: # predefined list of languages* + type: choice + options: + - "de-de" + ... + - "zh-cn" + # or + translations: # wait for dispatcher input for a langcode (xx-xx) + type: string + required: true + +jobs: + propose_translation: + uses: openvoiceos/.github/.github/workflows/propose_translation.yml@main + secrets: inherit + with: + branch: dev # Branch to use, default: branch that triggered the action + python_version: "3.8" + language: ${{ inputs.translation }} + locale_folder: ovos_core/locale/ # the location of the base localisation folder, default: locale + reviewers: "jarbasai,emphasize" # comma separated list of reviewers, default: emphasize +``` +* [available languages with deepl](https://support.deepl.com/hc/en-us/articles/360019925219-Languages-included-in-DeepL-Pro) + +## License testing +Tests validity of licenses of all packages (explicit and transitive). 
+(Note:) +```yaml +name: License testing +on: + + +jobs: + license_tests: + if: github.actor != 'EggmanBot' + uses: openvoiceos/.github/.github/workflows/license_tests.yml@main + with: + runner: ubuntu-latest # Runner to use, default: ubuntu-latest + branch: dev # Branch to use, default: branch that triggered the action + system_deps: "somepkg" # System dependencies (whitespace delimited) to install + pip_packages: "random-pkg" # Python packages (whitespace delimited) to install + python_version: "3.8" # Python version (quoted) to use, default: 3.8 + install_extras: test # Optional extras to install the python package with + packages-exclude: '^(fann2|tqdm|bs4).*' # Custom regex to exclude packages from the license check + # default: '^(precise-runner|fann2|tqdm|bs4|nvidia|bitstruct).*' + licenses-exclude: ^(BSD-3).*$' # Custom regex to exclude licenses from the license check + # default: '^(Mozilla|NeonAI License v1.0).*$' +``` +## Build testing +Tests the build of the python package. +```yaml +name: Build testing +on: + + +jobs: + build_tests: + if: github.actor != 'EggmanBot' + uses: openvoiceos/.github/.github/workflows/python_build_tests.yml@main + with: + runner: ubuntu-latest # Runner to use, default: ubuntu-latest + branch: dev # Branch to use, default: branch that triggered the action + system_deps: "libfann-dev libfann2" # System dependencies (whitespace delimited) to install + pip_packages: "pytest pytest-cov" # Additional python packages (whitespace delimited) to install + # commonly installed: `build wheel` + python_matrix: '["3.8", "3.9", "3.10"]' # Python version matrix to use, default: '["3.8", "3.9", "3.10", "3.11"]' + test_manifest: true # if to test with MANIFEST.in, default: false + manifest_ignored: "test/** qt5/**" # Files to ignore in MANIFEST.in, default: "test/**" + test_relative_paths: false # if to test with relative paths, default: true + test_pipaudit: true # if to test with pip-audit, default: false + pipaudit_ignored: "" # 
Vulnerabilities to ignore in pip-audit, + # default: "GHSA-r9hx-vwmv-q579 PYSEC-2022-43012" +``` +## Unit Tests (file or directory) +```yaml +name: Unit Tests +on: + + +jobs: + unit_tests: + if: github.actor != 'EggmanBot' + uses: openvoiceos/.github/.github/workflows/pytest_file_or_dir.yml@main + with: + runner: ubuntu-latest + branch: dev # Branch to use, default: branch that triggered the action + action_branch: custom/branch # Shared action branch to use, default: main + timeout_minutes: 15 # Timeout in minutes for the job, default: 15 + system_deps: "libfann-dev libfann2" # (*) Additional system dependencies to install before running the license check + python_matrix: '["3.8", "3.9", "3.10"]' # Python version matrix to use, default: '["3.8", "3.9", "3.10", "3.11"]' + pip_packages: "pytest pytest-cov" # (**) Additional python packages (whitespace delimited) to install + pip_install_dirs: | # Additional directories to install python packages from + relpath/to/package1 + relpath/to/package2 + install_extras: lgpl,mycroft # Comma-separated extras to install the python package with + test_location: test/unittests # Test file (or directory) to run, default: test/unittests + is_skill: true # Whether this is an ovos skill, default: false + codecov: true # Whether to record the test code coverage, default: true + # below (append/upload_coverage) can be omitted if codecov is false + + upload_coverage: true # Whether to upload the coverage to codecov server, default: false + # should upload only if there are no following jobs that need coverage + + # showcase with multiple jobs that should add to the coverage test + next_test_that_needs_coverage: + if: github.actor != 'EggmanBot' + needs: unit_tests + uses: openvoiceos/.github/.github/workflows/pytest_file_or_dir.yml@main + with: + ... + append_coverage: true # Whether to append coverage to the previous job, default: false + # the artifact will be downloaded, appended and uploaded again + ... 
+ and_another_test_that_needs_coverage: + needs: [ unit_tests, next_test_that_needs_coverage ] + if: github.actor != 'EggmanBot' + uses: openvoiceos/.github/.github/workflows/pytest_file_or_dir.yml@main + with: + ... + append_coverage: true + upload_coverage: true # Whether to upload the coverage to codecov server + ... +``` +(*) [Common system dependencies](https://github.com/OpenVoiceOS/.github/requirements/sys_deb_common_deps.txt) +(**) Common python dependencies: [skill](https://github.com/OpenVoiceOS/.github/requirements/pip_skill_tests.txt) / [other](https://github.com/OpenVoiceOS/.github/requirements/pip_tests.txt) +## Skills +### Skill Installation Tests +```yaml +name: Skill Installation Tests +on: + + +jobs: + skill_installation_tests: + if: github.actor != 'EggmanBot' + uses: openvoiceos/.github/.github/workflows/skill_test_installation.yml@main + with: + runner: ubuntu-latest + branch: dev # Branch to use, default: branch that triggered the action + action_branch: custom/branch # Shared action branch to use, default: main + system_deps: "libfann-dev libfann2" # (*) Additional system dependencies (whitespace delimited) to install + python_matrix: '["3.8", "3.9", "3.10"]' # Python version matrix to use, default: '["3.8", "3.9", "3.10", "3.11"]' + pip_packages: "pytest pytest-cov" # (**) Additional python packages (whitespace delimited) to install' + skill_id: "ovos-skill-x.openvoiceos" # Skill id of the testskill, required + skill_location: "skill" # Skill location relative to the root (can usually be omitted, used if the skill is not located in the base folder) +``` +(*) [Common system dependencies](https://github.com/OpenVoiceOS/.github/requirements/sys_deb_common_deps.txt) +(**) [Common python dependencies](https://github.com/OpenVoiceOS/.github/requirements/pip_skill_tests.txt) +### Skill Resource Tests +Tests the resources of a skill (e.g dialogs, vocabs, regex or intent resources) for completeness and workability. 
+```yaml +name: Skill Ressource Tests +on: + + +jobs: + skill_resource_tests: + if: github.actor != 'EggmanBot' + uses: openvoiceos/.github/.github/workflows/skill_test_resources.yml@main + with: + runner: ubuntu-latest # Runner to use, default: ubuntu-latest + timeout: 15 # Timeout for the test, default: 15 + branch: dev # Branch to use, default: branch that triggered the action + action_branch: custom/branch # Shared action branch to use, default: main + system_deps: "libfann-dev libfann2" # (*) Additional system dependencies (whitespace delimited) to install + python_matrix: '["3.8", "3.9", "3.10"]' # Python version matrix to use, default: '["3.8", "3.9", "3.10", "3.11"]' + pip_packages: "pytest pytest-cov" # (**) Additional python packages (whitespace delimited) to install + intent_testfile: test/test_intents.yaml # Intent test file to test against, required + test_padatious: true # if to test against padatious, default: false + test_padacioso: true # if to test against padacioso, default: true +``` +(*) [Common system dependencies](https://github.com/OpenVoiceOS/.github/requirements/sys_deb_common_deps.txt) +(**) [Common python dependencies](https://github.com/OpenVoiceOS/.github/requirements/pip_skill_tests.txt) +## Notifications +### Notify Matrix on Pull Request +```yaml +name: Notify Matrix Chat + +on: + pull_request: + types: [ closed ] + +jobs: + notify_pr_matrix: + if: github.event.pull_request.merged == true + secrets: inherit + uses: openvoiceos/.github/.github/workflows/notify_pr_matrix.yml@main + with: + pr_id: ${{ github.event.number }} + subject: ${{ github.event.pull_request.title }} +``` +------------- + ![OpenVoiceOS](https://openvoiceos.com/wp-content/uploads/2020/10/loading400.png) # OpenVoiceOS A community powered Linux distribution, purpose-built with buildroot to showcase the power of Open Source Voice AI for a range of devices. 
@@ -13,4 +389,4 @@ At this moment development is in very early stages and focussed on the Raspberry * [Forum thread @ Mycroft A.I.](https://community.mycroft.ai/t/openvoiceos-a-bare-minimal-production-type-of-os-based-on-buildroot/4708) * [Matrix Chat rooms](https://matrix.to/#/!XFpdtmgyCoPDxOMPpH:matrix.org?via=matrix.org) -Visit [openvoiceos.org](https://openvoiceos.org) to learn more! +Visit [openvoiceos.org](https://openvoiceos.org) to learn more! \ No newline at end of file diff --git a/cliff.toml b/cliff.toml new file mode 100644 index 000000000..1db620d6d --- /dev/null +++ b/cliff.toml @@ -0,0 +1,92 @@ +# git-cliff ~ default configuration file +# https://git-cliff.org/docs/configuration +# +# Lines starting with "#" are comments. +# Configuration options are organized into tables and keys. +# See documentation for more information on available options. + +[changelog] +# changelog header +header = """ +# Changelog\n +All notable changes to this project will be documented in this file.\n +""" +# template for the changelog body +# https://keats.github.io/tera/docs/#introduction +body = """ +{% if version %}\ + ## [{{ version | trim_start_matches(pat="v") }}](https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }}/tree/{{ version }}) - {{ timestamp | date(format="%Y-%m-%d") }} + ### [Latest Changes](https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }}/compare/{{ previous.version }}...{{ version }}) + ### [Release Cycle Changelog](https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }}/compare/...{{ version }}) +{% else %}\ + ## [unreleased] +{% endif %}\ +{% for group, commits in commits | group_by(attribute="group") %} + ### {{ group | striptags | trim | upper_first }} + {% for commit in commits %} + - {% if commit.scope %}*({{ commit.scope }})* {% endif %}\ + {% if commit.breaking %}[**breaking**] {% endif %}\ + {{ commit.message | upper_first }}\ + {% endfor %} +{% endfor %}\n +""" +# template for the changelog 
footer +footer = """ + +""" +# remove the leading and trailing s +trim = true +# postprocessors +postprocessors = [ + { pattern = '', replace_command = 'sed "s##$GITHUB_REPO#"' }, + { pattern = '', replace_command = 'if [ -z "$RELEASE_CYCLE_START" ]; then sed "//d"; else sed "s##$RELEASE_CYCLE_START#"; fi' }, +] + +[git] +# parse the commits based on https://www.conventionalcommits.org +conventional_commits = true +# filter out the commits that are not conventional +filter_unconventional = true +# process each line of a commit as an individual commit +split_commits = false +# regex for preprocessing the commit messages +commit_preprocessors = [ + # Replace issue numbers + { pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](https://github.com//pull/${2}))"}, + # Check spelling of the commit with https://github.com/crate-ci/typos + # If the spelling is incorrect, it will be automatically fixed. + #{ pattern = '.*', replace_command = 'typos --write-changes -' }, +] +# regex for parsing and grouping commits +commit_parsers = [ + { message = "^feat", group = "🚀 Features" }, + { message = "^fix|^release", group = "🐛 Bug Fixes" }, + { message = "^doc", group = "📚 Documentation" }, + { message = "^perf", group = "⚡ Performance" }, + { message = "^refactor", group = "🚜 Refactor" }, + { message = "^style", group = "🎨 Styling" }, + { message = "^test", group = "🧪 Testing" }, + { message = "^chore\\(release\\): prepare for", skip = true }, + { message = "^chore\\(deps.*\\)", skip = true }, + { message = "^chore\\(pr\\)", skip = true }, + { message = "^chore\\(pull\\)", skip = true }, + { message = "^chore|^ci", group = "⚙️ Miscellaneous Tasks" }, + { body = ".*security", group = "🛡️ Security" }, + { message = "^revert", group = "◀️ Revert" }, +] +# protect breaking changes from being skipped due to matching a skipping commit_parser +protect_breaking_commits = false +# filter out the commits that are not matched by commit parsers +filter_commits = false +# regex for 
matching git tags +# tag_pattern = "v[0-9].*" +# regex for skipping tags +# skip_tags = "" +# regex for ignoring tags +# ignore_tags = "" +# sort the tags topologically +topo_order = false +# sort the commits inside sections by oldest/newest order +sort_commits = "oldest" +# limit the number of commits included in the changelog. +# limit_commits = 42 diff --git a/pccc.toml b/pccc.toml new file mode 100644 index 000000000..ac8a405ba --- /dev/null +++ b/pccc.toml @@ -0,0 +1,58 @@ +# configuration keys for the Python Conventional Commit Checker +[pccc] + +header_length = 50 +body_length = 72 +wrap = true +force_wrap = true +spell_check = false +repair = false +ignore_generated_commits = true + +generated_commits = [ + '''^\(tag:\s+v\d+\.\d+\.\d\)\s+\d+\.\d+\.\d+$''', + '''^Merge branch 'master'.*$''', +] + +# chore -> alpha +# fix -> patch +# feat -> minor +# in combination with "!" ("feat!:") or BREAKING CHANGE (in body/message) -> major + +types = [ + "ci", + "docs", + "style", + "test", + "release", + "chore", + "build", + "feat", + "fix", + "perf", + "refactor", + "revert" +] + +scopes = [ + "config", + "parser", + "tooling", + "cli", + "gui", + "core", + "audio", + "tts", + "stt", + "nlp", + "plugins", + "localisation", + "resources", + "release", + "testing" +] + +footers = [ + "github-closes", + "signed-off-by", +] \ No newline at end of file diff --git a/pr_labels.json b/pr_labels.json new file mode 100644 index 000000000..4f8924af3 --- /dev/null +++ b/pr_labels.json @@ -0,0 +1,29 @@ +{ + "feat": "feature", + "fix": "fix", + "docs": "documentation", + "test": "unittests", + "ci": "CI/CD", + "refactor": "refactor", + "perf": "performance", + "chore": "housekeeping", + "revert": "revert", + "wip": "WIP", + "style": "codestyle", + "config": "config", + "parser": "parser", + "tooling": "tooling", + "cli": "CLI", + "gui": "GUI", + "core": "core", + "audio": "audio", + "tts": "TTS", + "stt": "STT", + "nlp": "NLP", + "plugins": "plugins", + "localisation": 
"localisation", + "resources": "resources", + "release": "release", + "breaking": "breaking change", + "need_cc": "CC missing" +} \ No newline at end of file diff --git a/requirements/pip_base.txt b/requirements/pip_base.txt new file mode 100644 index 000000000..5fee76d95 --- /dev/null +++ b/requirements/pip_base.txt @@ -0,0 +1,4 @@ +PyGithub +semver +pccc +git-cliff diff --git a/requirements/pip_build_tools.txt b/requirements/pip_build_tools.txt new file mode 100644 index 000000000..97123c753 --- /dev/null +++ b/requirements/pip_build_tools.txt @@ -0,0 +1,2 @@ +build +wheel diff --git a/requirements/pip_skill_tests.txt b/requirements/pip_skill_tests.txt new file mode 100644 index 000000000..cd1f630a8 --- /dev/null +++ b/requirements/pip_skill_tests.txt @@ -0,0 +1,13 @@ +coveralls +pytest +pytest-cov +mock +wheel +cython +ovos-core>=0.0.8a66 +ovos-workshop>=0.0.16a14 + +ovos-phal-plugin-connectivity-events~=0.0.1 +padatious>=0.4.8, < 0.5.0 +fann2>=1.0.7, < 1.1.0 +padaos>=0.1, < 0.2 diff --git a/requirements/pip_tests.txt b/requirements/pip_tests.txt new file mode 100644 index 000000000..660c5e277 --- /dev/null +++ b/requirements/pip_tests.txt @@ -0,0 +1,6 @@ +coveralls +pytest +pytest-cov +mock +wheel +cython diff --git a/requirements/pip_translation.txt b/requirements/pip_translation.txt new file mode 100644 index 000000000..21196deef --- /dev/null +++ b/requirements/pip_translation.txt @@ -0,0 +1,2 @@ +ovos-translate-plugin-deepl +ovos-utils \ No newline at end of file diff --git a/requirements/sys_deb_common_deps.txt b/requirements/sys_deb_common_deps.txt new file mode 100644 index 000000000..5ee2b78c8 --- /dev/null +++ b/requirements/sys_deb_common_deps.txt @@ -0,0 +1,11 @@ +gcc +python3-dev +swig +libssl-dev +libfann-dev +portaudio19-dev +libpulse-dev +libicu-dev +pkg-config +libenchant-2-2 +gh diff --git a/requirements/version.py b/requirements/version.py new file mode 100644 index 000000000..1b5a3e6c8 --- /dev/null +++ b/requirements/version.py @@ -0,0 +1,8 
@@ +# version file used in all ovos python packages +# this is the first version in the release cycle +# START_VERSION_BLOCK +VERSION_MAJOR = 0 +VERSION_MINOR = 1 +VERSION_BUILD = 0 +VERSION_ALPHA = 0 +# END_VERSION_BLOCK \ No newline at end of file diff --git a/scripts/changelog_postprocess.py b/scripts/changelog_postprocess.py new file mode 100644 index 000000000..ebdb4e877 --- /dev/null +++ b/scripts/changelog_postprocess.py @@ -0,0 +1,157 @@ +from os import environ, getcwd, urandom +from os.path import join, isfile +import sys +import subprocess +import json +import re +import argparse +import base64 + + +parser = argparse.ArgumentParser() +parser.add_argument("--context", "-c", help="Path to the changelog context file") + +release_group = parser.add_mutually_exclusive_group() +release_group.add_argument("--items", "-i", choices=["unreleased", "latest", "current"], help="Items to include in the changelog", default="full") +release_group.add_argument("--since", "-s", help="Include items since a specific version") + +args = parser.parse_args() + +if args.since == "0.0.0": + args.since = None + +PULL_LINK_PATTERN = r' \(\[#\d+\]\(https:\/\/github\.com\/.+?\/pull\/\d+\)\)' +CLIFF_WORKDIR = environ.get("GIT_CLIFF_WORKDIR", getcwd()) +CLIFF_IGNORE_FILE = join(CLIFF_WORKDIR, ".cliffignore") +GIT_CLIFF_OUTPUT = environ.get("GIT_CLIFF_OUTPUT") +if GIT_CLIFF_OUTPUT: + del environ["GIT_CLIFF_OUTPUT"] +GIT_CLIFF_PREPEND = environ.get("GIT_CLIFF_PREPEND") +if GIT_CLIFF_PREPEND: + del environ["GIT_CLIFF_PREPEND"] + +GITHUB_ACTION_OUTPUT = environ.get("GITHUB_OUTPUT") +if GIT_CLIFF_OUTPUT or GIT_CLIFF_PREPEND: + GITHUB_ACTION_OUTPUT = None + + +def escape_control_characters(s): + return re.sub(r'[\x00-\x1f\x7f-\x9f]', lambda c: "\\u{0:04x}".format(ord(c.group())), s) + + +def strip_pull_request_links(text): + return re.sub(PULL_LINK_PATTERN, '', text).strip() + + +def in_git_repo(): + try: + subprocess.check_output(['git', '-C', CLIFF_WORKDIR, 'rev-parse']) + return True + 
except subprocess.CalledProcessError: + return False + + +def is_tag(tag): + try: + subprocess.check_output(['git', '-C', CLIFF_WORKDIR, 'rev-parse', tag]) + return True + except subprocess.CalledProcessError: + return False + + +def valid_json(s): + try: + json.loads(escape_control_characters(s)) + return True + except json.JSONDecodeError: + return False + + +def run_cliff(get_context = False): + command = ["git", "cliff"] + mute = False + + if args.items == "unreleased": + command.append("--unreleased") + elif args.items == "latest": + command.append("--latest") + elif args.items == "current": + command.append("--current") + elif args.since: + last_commit = subprocess.check_output(["git", "-C", CLIFF_WORKDIR, "log", "-1", "--pretty=format:%h"]).decode().strip() + command.append(f"{args.since}..{last_commit}") + + if get_context: + command.append("--context") + mute = True + elif GIT_CLIFF_OUTPUT: + command.append("--output") + command.append(GIT_CLIFF_OUTPUT) + elif GIT_CLIFF_PREPEND: + command.append("--prepend") + command.append(GIT_CLIFF_PREPEND) + + process = subprocess.Popen(command, env=environ, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + # interact with the subprocess's standard output and error streams + stdout, stderr = process.communicate() + + if not mute: + if stderr.strip(): + output = stderr.decode() + else: + output = stdout.decode() + + if GITHUB_ACTION_OUTPUT: + delimiter = base64.b64encode(urandom(15)).decode('utf-8') + with open(GITHUB_ACTION_OUTPUT, 'a') as f: + f.write(f'changelog<<{delimiter}\n') + f.write(f'{output}\n') + f.write(f'{delimiter}\n') + else: + print(output) + + return stdout.decode().strip() + +if not args.context or not isfile(args.context): + CONTEXT = run_cliff(get_context=True) +else: + with open(args.context, 'r') as f: + CONTEXT = f.read() + +if not valid_json(CONTEXT): + raise Exception("You need to provide a valid changelog context (json)") +if not in_git_repo(): + raise Exception("You have to run this script 
in a git repository or provide a proper `GIT_CLIFF_WORKDIR` environment variable.") +elif args.since and not is_tag(args.since): + raise Exception(f"The tag provided {args.since} doesn't exist.") +else: + # empty the file + with open(CLIFF_IGNORE_FILE, 'w') as f: + f.write("") + +escaped_json_string = escape_control_characters(CONTEXT) +changelog_context = json.loads(escaped_json_string) + +for entry in changelog_context: + last_commit = None + for commit in entry.get('commits', []): + + message = commit['message'] + if not (last_commit and re.search(PULL_LINK_PATTERN, message)): + last_commit = commit + continue + + stripped_message = strip_pull_request_links(message) + if stripped_message == last_commit['message'] and \ + commit.get('scope') == last_commit.get('scope'): + # add to ignored commits (as the merge commit will be part of the changelog) + with open(CLIFF_IGNORE_FILE, 'a') as f: + f.write(f"{last_commit['id']}\n") + last_commit = commit + + +run_cliff() + +# delete the ignore file +subprocess.run(["rm", "-f", CLIFF_IGNORE_FILE]) diff --git a/scripts/check_PRs.py b/scripts/check_PRs.py new file mode 100644 index 000000000..74a6ac9c1 --- /dev/null +++ b/scripts/check_PRs.py @@ -0,0 +1,144 @@ +import os +from os.path import join, dirname, isfile +import json +import re +from typing import List, Optional + +from github import Github +from github.Repository import Repository +import pccc + + +CONFIG_FILE = os.environ.get("PCCC_CONFIG_FILE") +if CONFIG_FILE and not isfile(CONFIG_FILE): + print(f"Config file {CONFIG_FILE} not found.") + exit(1) + +TOKEN = os.getenv('GH_PAT') or os.getenv('GITHUB_TOKEN') +REPOSITORY = os.getenv('GITHUB_REPOSITORY') +PR_LABELS: dict = json.loads(os.getenv('PR_LABELS', '{}')) +SINGLE_PR = os.getenv('PR_NUMBER') +ERROR_ON_FAILURE = os.getenv('ERROR_ON_FAILURE', 'false').lower() == 'true' +if not PR_LABELS: + PR_LABELS = json.loads(open(join(dirname(dirname(__file__)), "pr_labels.json")).read()) + +test_phase_cache = 
os.getenv('TEST_PHASE_CACHE', '') +print(f"TEST_PHASE_CACHE: {test_phase_cache}") +if not isfile(test_phase_cache): + ongoing_test = False + if test_phase_cache: + print("The file specified in TEST_PHASE_FILE does not exist.") +else: + with open(test_phase_cache, 'r') as f: + content = f.read().strip("\n").strip() + print(f"file content: {content}, eq: {content == 'testing'}") + ongoing_test = content == "testing" + + +def get_scope(title): + match = re.match(r"^[a-z]+\s*\((.+)\):", title) + if match: + return match.group(1) + return None + + +def strip_scope(title): + return re.sub(r"^([a-z]+)\s*\(([^)]+)\):", r"\1:", title) + + +def cc_type(desc: str) -> str: + ccr = parse_cc(strip_scope(desc)) + if ccr: + return ccr.header.get("type") + + return "unknown" + + +def cc_breaking(desc: str) -> bool: + ccr = parse_cc(strip_scope(desc)) + if ccr: + return ccr.breaking.get("flag") or ccr.breaking.get("token") + + return False + + +def cc_scope(desc: str) -> str: + ccr = parse_cc(desc) + if ccr: + return ccr.header.get("scope") + + return get_scope(desc) or "unknown" + + +def parse_cc(desc: str) -> Optional[pccc.ConventionalCommitRunner]: + ccr = pccc.ConventionalCommitRunner() + ccr.options.load((f"-o{CONFIG_FILE}",) if CONFIG_FILE else None) + ccr.raw = desc + ccr.clean() + try: + ccr.parse() + return ccr + # no spec compliant format + except Exception: + return None + + +def check_cc_labels(desc: str) -> List[str]: + + labels = set() + _type = cc_type(desc) + _scope = cc_scope(desc) + test_relevant_cc = ["feat", "fix", "refactor"] + if _type == "unknown": + return [PR_LABELS.get("need_cc", "CC missing")] + if cc_breaking(desc): + labels.add(PR_LABELS.get("breaking", "breaking change")) + if _type == "release": + labels.add("fix") + elif _type in PR_LABELS: + labels.add(PR_LABELS.get(_type)) + if _scope in PR_LABELS: + labels.add(PR_LABELS.get(_scope)) + elif _scope != "unknown": + labels.add(_scope) + if ongoing_test and (any(t in test_relevant_cc for t in [_type, 
_scope]) or cc_breaking(desc)): + labels.add("ongoing test") + + return list(labels) + + +def ensure_label_exists(repo: Repository, labels: List[str], color: str = 'ffffff'): + for label_name in labels: + if not any(label.name == label_name for label in repo.get_labels()): + repo.create_label(label_name, color) + + # switch the strings to label objects + for label in repo.get_labels(): + if label.name in labels: + labels[labels.index(label.name)] = label + + +git = Github(TOKEN).get_repo(REPOSITORY) +open_pulls = git.get_pulls(state='open') +cc_missing = False + + +for pr in open_pulls: + if SINGLE_PR and pr.number != int(SINGLE_PR): + continue + pr_description = f"{pr.title}\n{pr.body}" + labels = check_cc_labels(pr_description) + ensure_label_exists(git, labels) + pr.set_labels(*labels) + + # clear the test flag if the PR adresses a release or doesn't get a release at all. + if SINGLE_PR: + if cc_type(pr_description) in ["release", "ci", "style", "test", "docs"] or \ + cc_scope(pr_description) == "release": + ongoing_test = False + if cc_type(pr_description) == "unknown": + cc_missing = True + +# nuke status check (if requested) +if (cc_missing or ongoing_test) and ERROR_ON_FAILURE: + raise Exception(f"CC missing: {cc_missing}, ongoing test phase: {ongoing_test}") diff --git a/scripts/check_supported_py_versions.py b/scripts/check_supported_py_versions.py new file mode 100644 index 000000000..769d6b8f8 --- /dev/null +++ b/scripts/check_supported_py_versions.py @@ -0,0 +1,33 @@ +import json +from datetime import datetime + +# Manually define the non-EOL Python versions and their EOL dates +# Update this as new releases come out or old versions reach EOL +python_versions = { + "3.8": "2024-10-01", + "3.9": "2025-10-01", + "3.10": "2026-10-01", + "3.11": "2027-10-01", + #"3.12": "2028-10-01" +} + +# Get the current date +current_date = datetime.now().date() + +# Filter out versions that are past their EOL +non_eol_versions = {version: eol_date for version, eol_date 
in python_versions.items() + if datetime.strptime(eol_date, "%Y-%m-%d").date() > current_date} + +# Prepare the data for the JSON file +data = { + "schemaVersion": 1, + "label": "python", + "message": ", ".join(non_eol_versions.keys()), + "color": "blue" +} + +# Write the data to a JSON file +with open("supported_py_versions.json", "w") as file: + json.dump(data, file) + +print("Updated python-versions.json with non-EOL Python versions.") \ No newline at end of file diff --git a/scripts/parse_semver_release.py b/scripts/parse_semver_release.py new file mode 100644 index 000000000..d38e572cd --- /dev/null +++ b/scripts/parse_semver_release.py @@ -0,0 +1,111 @@ +import re +from os import environ, devnull +from os.path import isfile +import sys + +import pccc +import semver + +""" +translates a conventional commit title/message into a semver version +""" + + +CONFIG_FILE = environ.get("PCCC_CONFIG_FILE") +if CONFIG_FILE and not isfile(CONFIG_FILE): + print(f"Config file {CONFIG_FILE} not found.") + exit(1) + + +def get_scope(title): + match = re.match(r"^[a-z]+\s*\((.+)\):", title) + if match: + return match.group(1) + return None + + +def strip_scope(title): + if get_scope(title) != "release": + return re.sub(r"^([a-z]+)\s*\(([^)]+)\):", r"\1:", title) + return title + + +def get_version(): + # note: this is a PEP 440 compliant version, so alpha versions come in "1.0.0a1" + version = environ.get("VERSION", "") or \ + environ.get("RELEASE_VERSION", "") or \ + environ.get("PREVIOUS_VERSION", "") + match = re.match(r"(\d+\.\d+\.\d+)([aA-zZ].*)", version) + if match: + return f"{match.group(1)}-{match.group(2)}" + else: + return version + + +def semver_from_cc(): + ccr = pccc.ConventionalCommitRunner() + if CONFIG_FILE is None: + # Redirect stdout to null + original_stdout = sys.stdout + sys.stdout = open(devnull, 'w') + + ccr.options.load((f"-o{CONFIG_FILE}",) if CONFIG_FILE else None) + + if CONFIG_FILE is None: + # Restore original stdout + sys.stdout = 
original_stdout + + ccr.raw = f"{TITLE}\n{BODY}" + ccr.clean() + try: + ccr.parse() + # no spec compliant format + except Exception: + print("No semver release.") + exit(0) + + if ccr.breaking.get("flag") or ccr.breaking.get("token"): + return "major" + # commits that gets added to a release (special case) + elif ccr.header.get("type") == "release" or \ + ccr.header.get("scope") == "release": + return "release" + elif ccr.header.get("type") == "feat": + return "minor" + elif ccr.header.get("type") in ["fix", "refactor"]: + return "patch" + elif ccr.header.get("type") not in ["ci", "docs", "style", "test"]: + return "alpha" + else: + print("No semver release.") + exit(0) + +def semver_from_version(): + try: + version = semver.VersionInfo.parse(VERSION) + except ValueError: + print("No semver release.") + exit(0) + + if version.prerelease: + return "alpha" + elif version.patch != 0: + return "patch" + elif version.minor != 0: + return "minor" + elif version.major != 0: + return "major" + +TITLE = strip_scope(environ.get("TITLE", "")) +BODY = environ.get("BODY") +VERSION = get_version() + +if VERSION: + release = semver_from_version() +elif TITLE: + release = semver_from_cc() +else: + print("No semver release.") + exit(0) + +print(release) \ No newline at end of file diff --git a/scripts/prepare_skillstore.py b/scripts/prepare_skillstore.py new file mode 100644 index 000000000..f289e4f2a --- /dev/null +++ b/scripts/prepare_skillstore.py @@ -0,0 +1,83 @@ +import os +import json +from ovos_utils.bracket_expansion import expand_parentheses, expand_options +from ovos_skills_manager import SkillEntry + + +branch = os.getenv("BRANCH") +if not branch: + raise ValueError("environment variable `BRANCH` not set") +repo = os.getenv("REPO") +if not repo: + raise ValueError("environment variable `REPO` not set") +author = "OpenVoiceOS" + +url = f"https://github.com/{author}/{repo}@{branch}" + +skill = SkillEntry.from_github_url(url) +tmp_skills = "/tmp/osm_installed_skills" 
+skill_folder = f"{tmp_skills}/{skill.uuid}" + +BASE_FOLDER = os.getenv("BASE_FOLDER") +if not BASE_FOLDER: + raise ValueError("environment variable `LOCALE_FOLDER` not set") +elif not os.path.exists(BASE_FOLDER): + raise ValueError(f"environment variable `LOCALE_FOLDER` is not a folder: {BASE_FOLDER}") + + +desktop_dir = os.path.join(BASE_FOLDER, "res", "desktop") +android_ui = os.path.join(BASE_FOLDER, "ui", "+android") +os.makedirs(desktop_dir, exist_ok=True) + +readme = os.path.join(BASE_FOLDER, "readme.md") +jsonf = os.path.join(desktop_dir, "skill.json") +desktopf = os.path.join(desktop_dir, f"{repo}.desktop") +skill_code = os.path.join(BASE_FOLDER, "__init__.py") + +res_folder = os.path.join(BASE_FOLDER, "locale", "en-us") + + +def read_samples(path): + samples = [] + with open(path) as fi: + for _ in fi.read().split("\n"): + if _ and not _.strip().startswith("#"): + samples += expand_options(_) + return samples + +samples = [] +for root, folders, files in os.walk(res_folder): + for f in files: + if f.endswith(".intent"): + samples += read_samples(os.path.join(root, f)) +skill._data["examples"] = list(set(samples)) + +has_android = os.path.exists(android_ui) +with open(skill_code) as f: + has_homescreen = f"{repo}.{author}.home" in f.read() + +if not os.path.exists(readme): + with open(readme, "w") as f: + f.write(skill.generate_readme()) + +if has_homescreen and not os.path.exists(desktopf): + with open(desktopf, "w") as f: + f.write(skill.desktop_file) + +if not os.path.exists(jsonf): + data = skill.json + with open(jsonf, "w") as f: + if not has_android or not has_homescreen: + data.pop("android") + if not has_homescreen: + data.pop("desktop") + data["desktopFile"] = False +else: + with open(jsonf) as f: + data = json.load(f) + +# set dev branch +data["branch"] = "dev" + +with open(jsonf, "w") as f: + json.dump(data, f, indent=4) diff --git a/scripts/release_skillstore.py b/scripts/release_skillstore.py new file mode 100644 index 000000000..c0a7de151 --- 
import json
import os

# Read at import time; validated in main() so the module stays importable
# (the original raised at import, which also broke any attempt to test it).
BASE_FOLDER = os.getenv("BASE_FOLDER")
VERSION_FILE = os.getenv("VERSION_FILE")


def get_version(version_file=None):
    """Parse the OVOS version block from a version file.

    The file is expected to contain lines of the form
    ``VERSION_MAJOR = 1`` (MAJOR/MINOR/BUILD/ALPHA) terminated by an
    optional ``# END_VERSION_BLOCK`` marker.

    @param version_file: path to the version file; defaults to the
        ``VERSION_FILE`` environment variable
    @returns: version string, e.g. "0.2.1" or "0.2.1a4" when ALPHA > 0
    """
    version_file = version_file or VERSION_FILE
    major, minor, build, alpha = (None, None, None, None)
    with open(version_file) as f:
        for line in f:
            if 'VERSION_MAJOR' in line:
                major = line.split('=')[1].strip()
            elif 'VERSION_MINOR' in line:
                minor = line.split('=')[1].strip()
            elif 'VERSION_BUILD' in line:
                build = line.split('=')[1].strip()
            elif 'VERSION_ALPHA' in line:
                alpha = line.split('=')[1].strip()

            # stop once every component was seen or the block ended
            if ((major and minor and build and alpha) or
                    '# END_VERSION_BLOCK' in line):
                break
    version = f"{major}.{minor}.{build}"
    if alpha and int(alpha) > 0:
        version += f"a{alpha}"
    return version


def main():
    """Pin the skill.json "branch" field to the release tag (v<version>)."""
    if not BASE_FOLDER:
        # fixed: message previously referenced `LOCALE_FOLDER` although the
        # variable actually read is `BASE_FOLDER`
        raise ValueError("environment variable `BASE_FOLDER` not set")
    if not os.path.exists(BASE_FOLDER):
        raise ValueError(f"environment variable `BASE_FOLDER` is not a folder: {BASE_FOLDER}")
    # fixed: os.path.isfile(None) raised TypeError when VERSION_FILE was unset
    if not VERSION_FILE or not os.path.isfile(VERSION_FILE):
        raise ValueError(f"environment variable `VERSION_FILE` is not a file: {VERSION_FILE}")

    jsonf = os.path.join(BASE_FOLDER, "res", "desktop", "skill.json")

    with open(jsonf) as f:
        data = json.load(f)

    data["branch"] = "v" + get_version()

    with open(jsonf, "w") as f:
        json.dump(data, f, indent=4)


if __name__ == "__main__":
    main()
type. (patch, minor, major, prerelease) +If no release type is specified, get the latest/.. release version. + +You can make this relative to a specific version by providing a version or file to read the version from. +eg. `... --version 0.2.1 --latest --type alpha` will get the latest alpha release below version 0.2.1 +or `... --file path/to/version.py --next` will get the next release version from the version file. + +Options + cycle # restrict the release type to the current cycle + # (i.e. if the current version is 2.1.2 and `--type patch`, it will only consider 2.1.x releases) + +Args + --type: str # the release type to get the version for + --file: str # the file to read the version from + --version: str # the version to get the release for + + --save: str # writes an ovos version file to the specified path (if the version is above the latest) + if no path is provided and read from a file, the file will be overwritten + --fsave: str # force the ovos version file to be written even if the version is below the latest + + --alpha_marker: str # marker for alpha releases, default is 'a' + +Flags + --last # get the last release of that release type in the current cycle + --next # get the next release of the upcoming release type + --first # get the first release of that release type in the current cycle + --latest # get the latest version released +""" + + +def add_common_arguments(parser): + parser.add_argument("--alpha_marker", default="a") + parser.add_argument("--type", choices=["patch", "minor", "major", "alpha", "prerelease"]) + parser.add_argument("--save", nargs='?', const=True, default=False) + parser.add_argument("--fsave", nargs='?', const=True, default=False) + + release_group = parser.add_mutually_exclusive_group() + release_group.add_argument("--last", action='store_true') + release_group.add_argument("--next", action='store_true') + release_group.add_argument("--latest", action='store_true') + release_group.add_argument("--first", 
action='store_true') + + input_group = parser.add_mutually_exclusive_group() + #input_group.add_argument("--repo") + input_group.add_argument("--file") + input_group.add_argument("--version") + + +parser = argparse.ArgumentParser() +add_common_arguments(parser) + +subparsers = parser.add_subparsers(dest="command") +cycle_parser = subparsers.add_parser("cycle") +add_common_arguments(cycle_parser) + +args = parser.parse_args() + +RELEASE_TYPE = args.type +ALPHA_MARKER = args.alpha_marker +REPOSITORY = getenv("GITHUB_REPOSITORY") +RESTRICT_TO_CYCLE = args.command == "cycle" + +if REPOSITORY is None and not (args.file or args.version): + parser.error("either set up an environmental variable `GITHUB_REPOSITORY` or pass --version or --file as arguments") + +if RELEASE_TYPE == "alpha": + RELEASE_TYPE = "prerelease" + + +class OVOSReleases(semver.Version): + __history = [] + __github_token = None + __repo = None + __release = None + __prefix = "" + + def __init__(self, major: SupportsInt = 0, + minor: SupportsInt = 0, + patch: SupportsInt = 0, + prerelease: Optional[Union[str, int]] = None, + build: Optional[Union[str, int]] = None, + release: Optional[GitRelease] = None): + self.__release = release + if isinstance(release, GitRelease): + self.__prefix = re.match(r"^([a-zA-Z-\/\\]+)?", release.tag_name).group(1) or "" + ver = self.parse(release.tag_name) + major = ver.major + minor = ver.minor + patch = ver.patch + prerelease = ver.prerelease + build = ver.build + + super().__init__(major, minor, patch, prerelease, build) + + def __str__(self) -> str: + return f"{self.__prefix}{super().__str__()}" + + def next(self, rtype: Optional[str], alpha_marker: str = ALPHA_MARKER)\ + -> "OVOSReleases": + rtype = rtype or "prerelease" + next_v = self.next_version(rtype, alpha_marker) + return OVOSReleases(next_v.major, next_v.minor, next_v.patch, next_v.prerelease, next_v.build) + + def latest(self, rtype: Optional[str] = None) -> "OVOSReleases": + if rtype and not self.history: + 
raise ValueError("No release history") + + release_versions = self.filter_versions(rtype, RESTRICT_TO_CYCLE) + + latest_version = OVOSReleases(0, 0, 0) + if rtype is None and (not self.history or not release_versions): + latest_version = self + elif release_versions: + latest_version = max(release_versions) + + return latest_version + + def last(self, rtype: Optional[str] = None) -> "OVOSReleases": + if not self.history: + raise ValueError("No release history") + + release_versions = self.filter_versions(rtype, RESTRICT_TO_CYCLE) + + last_version = OVOSReleases(0, 0, 0) + if release_versions: + last_version = release_versions[min(1, len(release_versions) - 1)] + + return last_version + + def first(self, rtype: Optional[str] = None) -> "OVOSReleases": + if not self.history: + raise ValueError("No release history") + + release_versions = self.filter_versions(rtype, RESTRICT_TO_CYCLE) + + first_version = OVOSReleases(0, 0, 0) + if release_versions: + first_version = min(release_versions) + + return first_version + + @property + def history(self) -> List["OVOSReleases"]: + """ + Returns the release history of the github repository + """ + return self.__history + + @property + def released(self) -> bool: + """ + Returns whether the release is already released + """ + return self.__release is not None + + @property + def tag(self) -> str: + """ + Returns the tag of the release + """ + if not self.released: + return None + return self.__release.tag_name + + @property + def etag(self) -> str: + """ + Returns the etag (MD5 hash of the content) of the release + """ + if not self.released: + return None + return self.__release.etag + + @property + def release_url(self) -> str: + """ + Returns the url of the release + """ + if not self.released: + return None + return self.__release.url + + @property + def tarball_url(self) -> str: + """ + Returns the tarball url of the release + """ + if not self.released: + return None + return self.__release.tarball_url + + @property + def 
title(self) -> str: + """ + Returns the release title + """ + if not self.released: + return None + return self.__release.title + + @property + def body(self) -> str: + """ + Returns the release description + """ + if not self.released: + return None + return self.__release.body + + @classmethod + def from_file(cls, path: str) -> Optional["OVOSReleases"]: + with open(path, "r") as f: + data = f.read() + data = re.search(r"# START_VERSION_BLOCK(.*?)# END_VERSION_BLOCK", data, re.DOTALL) + if data: + data = data.group(1) + data = re.findall(r"VERSION_(\w+) = (\d+)", data) + data = {k: int(v) for k, v in data} + if data["ALPHA"]: + data["ALPHA"] = f"{ALPHA_MARKER}.{data['ALPHA']}" + else: + data["ALPHA"] = None + return cls(data["MAJOR"], data["MINOR"], data["BUILD"], data["ALPHA"]) + + @classmethod + def from_repo(cls, repo: Optional[str], token: Optional[str] = None) -> "OVOSReleases": + cls.__github_token = token + cls.__repo = repo + + releases = [] + if repo: + git = Github(token).get_repo(repo) + releases = git.get_releases() + if not releases: + return OVOSReleases(0, 0, 0) + + cls.__history = [OVOSReleases(release=release) for release in releases + if OVOSReleases.parse(release.tag_name) is not None] + return cls.__history[0] + + @classmethod + def from_list(cls, history: List[str]) -> "OVOSReleases": + cls.__history = [cls.parse(tag) for tag in history] + return cls.__history[0] + + def to_file(self, path: str) -> None: + """ + Saves the version to the specified file using + the ovos version format specification + """ + with open(path, "w") as f: + f.write(f"""# START_VERSION_BLOCK +VERSION_MAJOR = {self.major} +VERSION_MINOR = {self.minor} +VERSION_BUILD = {self.patch} +VERSION_ALPHA = {self.prerelease.replace(ALPHA_MARKER, '').replace('.', '') if self.prerelease else 0} +# END_VERSION_BLOCK +""") + + def to_pypi_format(self) -> str: + return f"{self.__prefix}{self.major}.{self.minor}.{self.patch}{self.prerelease.replace('.', '') if self.prerelease else ''}" 
+ + @staticmethod + def parse(tag: str) -> semver.Version: + # remove prefix from tag + tag = re.sub(r"^([a-zA-Z-\/\\]+)?", "", tag) + + # hack for alpha releases + if re.match(rf"[0-9]+\.[0-9]+\.[0-9]+{ALPHA_MARKER}[0-9]+", tag): + tag = re.sub(rf"([0-9]+){ALPHA_MARKER}([0-9]+)", rf"\1-{ALPHA_MARKER}.\2", tag) + + if not semver.Version.is_valid(tag): + return None + + ver = semver.Version.parse(tag) + return OVOSReleases(ver.major, ver.minor, ver.patch, ver.prerelease, ver.build) + + def get(self, version: Optional[str], file: Optional[str]) -> "OVOSReleases": + if version: + version = OVOSReleases.parse(version) + for v in self.history: + if v.compare(version) == 0: + return v + return version + elif file: + version = OVOSReleases.from_file(file) + for v in self.history: + if v.compare(version) == 0: + return v + return version + + def filter_versions(self, release_type: Optional[str] = None, cycle_only: bool = False) -> List[semver.Version]: + """ + Returns the release versions of the specified release type + + If cycle only restrict versions to the given cycle + (i.e. 
if self is 2.1.2, it will return all 2.1.x releases if release_type is patch, + normally: 2.1.2 (latest), 2.1.1 (last/first), depending on the release history) + """ + + if release_type: + release_type = release_type.lower() + filtered_versions = [] + if release_type == 'major': + filtered_versions = [v for v in self.history + if v <= self + and v.major != 0 + and v.minor == 0 + and v.patch == 0 + and v.prerelease is None] + if cycle_only: + filtered_versions = filter(lambda v: v.major == self.major, filtered_versions) + elif release_type == 'minor': + filtered_versions = [v for v in self.history + if v <= self + and v.minor != 0 + and v.patch == 0 + and v.prerelease is None] + if cycle_only: + filtered_versions = filter(lambda v: v.major == self.major, filtered_versions) + elif release_type == 'patch': + filtered_versions = [v for v in self.history + if v <= self and v.patch != 0 and v.prerelease is None] + if cycle_only: + filtered_versions = filter(lambda v: v.major == self.major and v.minor == self.minor, filtered_versions) + elif release_type in ["prerelease", "alpha"]: + filtered_versions = [v for v in self.history + if v <= self and v.prerelease is not None] + if cycle_only: + filtered_versions = filter(lambda v: v.major == self.major and v.minor == self.minor and v.patch == self.patch, filtered_versions) + elif release_type is None: + filtered_versions = [v for v in self.history if v <= self] + if cycle_only: + filtered_versions = filter(lambda v: v.major == self.major, filtered_versions) + else: + raise ValueError('Invalid release type') + + return sorted(filtered_versions, reverse=True) + + +# instanciate the class history +releases = OVOSReleases.from_repo(REPOSITORY, getenv("GH_PAT") or getenv("GITHUB_TOKEN")) +# if version or file is provided, get the version from the repository history or use the provided version +if args.version or args.file: + base = releases.get(args.version, args.file) +else: + base = releases.latest() + +# output handling +if 
(args.save is True or args.fsave is True) and not args.file: + arg = "--save" + if args.fsave: + arg = "--fsave" + raise ValueError(f"No file specified to save the version to (ie {arg} /path/to/version.py)") + +if args.first: + version = base.first(RELEASE_TYPE) +elif args.last: + version = base.last(RELEASE_TYPE) +elif args.next: + version = base.next(RELEASE_TYPE) +elif args.latest: + version = base.latest(RELEASE_TYPE) +else: + version = base + +if (args.save or args.fsave) and version is not None: + file = args.file or args.save or args.fsave + if (version > base or args.fsave) or \ + all( arg is False for arg in [args.first, args.last, args.next] ): + version.to_file(file) + +if version is not None: + print(version.to_pypi_format()) diff --git a/scripts/translate.py b/scripts/translate.py new file mode 100644 index 000000000..7727cd7be --- /dev/null +++ b/scripts/translate.py @@ -0,0 +1,172 @@ +from os.path import dirname, join, isdir, exists +from pathlib import Path +import shutil +import os +import re +from ovos_utils.bracket_expansion import expand_options +from ovos_translate_plugin_deepl import DeepLTranslator + + +BASE_LANGS = {"de-de", "es-es", "fr-fr", "it-it", "nl-nl", "pt-pt"} +API_KEY = os.getenv("API_KEY") +if not API_KEY: + raise ValueError("environment variable `API_KEY` not set") +BASE_FOLDER = os.getenv("LOCALE_FOLDER") +if not BASE_FOLDER: + raise ValueError("environment variable `LOCALE_FOLDER` not set") +elif not isdir(BASE_FOLDER): + raise ValueError(f"environment variable `LOCALE_FOLDER` is not a folder: {BASE_FOLDER}") + +# old skill structure +old_voc_folder = join(dirname(BASE_FOLDER), "vocab") +old_dialog_folder = join(dirname(BASE_FOLDER), "dialog") +old_res_folder = [old_voc_folder, old_dialog_folder] + + +def get_target_languages(): + langs = set() + folder_to_check = [BASE_FOLDER] + old_res_folder + for folder in folder_to_check: + if not exists(folder): + continue + for subfolder in os.listdir(folder): + # only lancodes in the 
def get_target_languages():
    """Collect target language codes for translation.

    Scans the locale folder (and legacy vocab/dialog folders) for
    subfolders named like "xx-xx", merges in BASE_LANGS, and excludes
    the source language (en-us).
    """
    langs = set()
    for folder in [BASE_FOLDER] + old_res_folder:
        if not exists(folder):
            continue
        for subfolder in os.listdir(folder):
            # only language codes in the form of xx-xx
            if len(subfolder) == 5 and subfolder[2] == "-":
                langs.add(subfolder)
    langs = langs.union(BASE_LANGS)
    # never translate into the source language
    langs.discard("en-us")
    return langs


# TARGET_LANG restricts the run to a single language; otherwise translate all
single_lang = os.getenv("TARGET_LANG")
target_langs = (single_lang,) if single_lang \
    else get_target_languages()

src_lang = "en-us"
src_files = {}
# note: regex/named-value resources are copied verbatim,
# they can't be auto-translated reliably
ext = [".voc", ".dialog", ".intent", ".entity", ".rx", ".value", ".word"]
untranslated = [".rx", ".value", ".entity"]

tx = DeepLTranslator({"api_key": API_KEY})


def file_location(f, base):
    """Return the full path of the file named `f` under `base`, or None.

    (fixed: the return annotation previously claimed ``-> bool`` although
    the function returns a path string or None)
    """
    for root, dirs, files in os.walk(base):
        if f in files:
            return join(root, f)
    return None


def translate(lines: list, target_lang: str) -> list:
    """Translate resource lines, protecting {placeholder} variables.

    Placeholders are swapped for @N markers before translation and
    restored afterwards; a line whose markers got mangled by the
    translation service is dropped entirely.

    @param lines: source-language lines to translate
    @param target_lang: target language code
    @returns: list of translated lines (possibly shorter than input)
    """
    translations = []
    for line in lines:
        drop = False
        replacements = dict()
        # TODO: still not failsafe as the translator of different langs might
        # interpret "@1" as something to translate (seen this with french),
        # hence the drop below
        for num, var in enumerate(re.findall(r"(?:{{|{)[ a-zA-Z0-9_]*(?:}}|})", line)):
            line = line.replace(var, f'@{num}', 1)
            replacements[f'@{num}'] = var
        try:
            translated = tx.translate(line, target=target_lang, source=src_lang)
        except Exception:
            # best effort: skip lines the service refuses to translate
            continue
        for marker, var in replacements.items():
            # safeguard against translations that lost a placeholder marker
            if marker not in translated:
                drop = True
                break
            translated = translated.replace(marker, var)
        if not drop:
            translations.append(translated)

    return translations


def entities(file: str) -> set:
    """Return the set of {placeholder} variables used in `file`.

    Returns an empty set if the file does not exist.
    """
    found = set()
    if not exists(file):
        return found

    for line in get_lines(file):
        for var in re.findall(r"(?:{{|{)[ a-zA-Z0-9_]*(?:}}|})", line):
            found.add(var)
    return found


def get_lines(file: str):
    """Read a resource file and expand (a|b) bracket options per line.

    '#' comment lines are skipped, except in .entity files where '#'
    is commonly used as a digit placeholder.
    """
    with open(file, "r") as f:
        if file.endswith(".entity"):
            lines = [exp for l in f.read().split("\n") for exp
                     in expand_options(l) if l]
        else:
            lines = [exp for l in f.read().split("\n") for exp
                     in expand_options(l) if l and not l.startswith("#")]
    return lines
expand_options(l) if l] + else: + lines = [exp for l in f.read().split("\n") for exp + in expand_options(l) if l and not l.startswith("#")] + return lines + + +def migrate_locale(folder): + for lang in os.listdir(folder): + path = join(folder, lang) + for root, dirs, files in os.walk(path): + for file in files: + if file_location(file, join(BASE_FOLDER, lang)) is None: + rel_path = root.replace(folder, "").lstrip("/") + new_path = join(BASE_FOLDER, rel_path) + os.makedirs(new_path, exist_ok=True) + shutil.move(join(root, file), + join(new_path, file)) + shutil.rmtree(path) + shutil.rmtree(folder) + + +for folder in old_res_folder: + if not isdir(folder): + continue + migrate_locale(folder) + +src_folder = join(BASE_FOLDER, src_lang) +for root, dirs, files in os.walk(src_folder): + if src_lang not in root: + continue + for f in files: + if any(f.endswith(e) for e in ext): + file_path = join(root, f) + rel_path = file_path.replace(src_folder, "").lstrip("/") + src_files[rel_path] = file_path + +for lang in target_langs: + lang = lang.lower() + # service cant translate + if not tx.get_langcode(lang): + continue + for rel_path, src in src_files.items(): + filename = Path(rel_path).name + dst = file_location(filename, join(BASE_FOLDER, lang)) or \ + join(BASE_FOLDER, lang, rel_path) + if entities(src) != entities(dst): + if exists(dst): + os.remove(dst) + elif not exists(dst): + pass + else: + continue + os.makedirs(dirname(dst), exist_ok=True) + + lines = get_lines(src) + if any(filename.endswith(e) for e in untranslated): + tx_lines = lines + is_translated = False + else: + tx_lines = translate(lines, lang) + is_translated = True + if tx_lines: + tx_lines = list(set(tx_lines)) + with open(dst, "w") as f: + if is_translated: + f.write(f"# auto translated from {src_lang} to {lang}\n") + for translated in set(tx_lines): + f.write(translated + "\n") diff --git a/scripts/update_intent_testfile.py b/scripts/update_intent_testfile.py new file mode 100644 index 
000000000..920c74ee7 --- /dev/null +++ b/scripts/update_intent_testfile.py @@ -0,0 +1,178 @@ +from os import getenv +from os.path import isdir, isfile +from dataclasses import dataclass +from pathlib import Path +from functools import reduce +import operator +import random +import re +from typing import List +import yaml + +from ovos_utils.log import LOG +from ovos_utils.messagebus import FakeBus +from ovos_utils import flatten_list +from ovos_workshop.skill_launcher import SkillLoader +from ovos_workshop.skills.base import BaseSkill +from ovos_workshop.intents import IntentBuilder + + +@dataclass +class Intent: + service: str + name: str + filestems: set + suffix: str = "" + + def __post_init__(self): + if self.service == "padatious": + self.suffix = ".intent" + elif self.service == "adapt": + self.suffix = ".voc" + + +def get_skill_object() -> BaseSkill: + """ + Get an initialized skill object by entrypoint with the requested skill_id. + @param skill_entrypoint: Skill plugin entrypoint or directory path + @param bus: FakeBus instance to bind to skill for testing + @param skill_id: skill_id to initialize skill with + @param config_patch: Configuration update to apply + @returns: Initialized skill object + """ + + bus = FakeBus() + bus.run_forever() + + skill_folder = getenv("TEST_SKILL_PKG_FOLDER") + if not skill_folder or not isdir(skill_folder): + raise ValueError("TEST_SKILL_PKG_FOLDER is not set or invalid") + + LOG.info(f"Loading local skill: {skill_folder}") + loader = SkillLoader(bus, skill_folder, "unknown") + if loader.load(): + return loader.instance + + return None + + +def get_intents_from_skillcode(skill) -> List[Intent]: + intents = [] + for method_name in dir(skill): + method = getattr(skill, method_name) + if callable(method) and hasattr(method, 'intents'): + for intent in method.intents: + if isinstance(intent, str): + # If the intent is a string, it's the intent name + stem = Path(intent).stem + # string contains the suffix ".intent" + 
intents.append(Intent("padatious", intent, [stem])) + elif isinstance(intent, IntentBuilder): + vocs = list() + if intent.at_least_one: + vocs.append(intent.at_least_one[0]) + if intent.requires: + vocs.append((intent.requires[0][0],)) + if intent.optional: + vocs.append((intent.optional[0][0],)) + + intents.append(Intent("adapt", intent.name, vocs)) + return intents + + +def count_permutations(options): + permutations = [] + for sublist in options: + choices = set(flatten_list(sublist)) + permutations.append(len(choices)) + return reduce(operator.mul, permutations, 1) + + +def generate_sentences(options: List[List[List[str]]], + max: int, + max_random: int = 0) -> List[str]: + sentences = [] + while len(sentences) < min(max, count_permutations(options)): + # we can add an ai sentence generator in the future + sentence = [] + for sublist in options: + choice = random.choice(sublist) + sentence.append(random.choice(choice)) + _sentence = " ".join(sentence) + if _sentence not in sentences: + sentences.append(" ".join(sentence)) + return sentences + + +def update_resources(skill: BaseSkill): + + if skill is None: + raise ValueError("Skill not found") + + intents = set() + supported_languages =skill.resources.get_inventory().get("languages") + + # we want the intents used in the code + # (opposed to being present as resource and not being used in the skill) + skill_intents = get_intents_from_skillcode(skill) + + # Load the test intent file + yaml_location = getenv("INTENT_TEST_FILE") + if not yaml_location or not isfile(yaml_location): + raise ValueError("INTENT_TEST_FILE is not set or invalid") + + with open(yaml_location) as f: + test_yaml = yaml.safe_load(f) + + for intent in skill_intents: + intents.add(intent.name) + + # update yaml file based on the present intents + for lang in supported_languages: + test_yaml.setdefault(lang, dict()) + resources = skill.load_lang(lang=lang) + for intent in skill_intents: + test_yaml[lang].setdefault(intent.name, list()) + 
present_intents = test_yaml[lang][intent.name] + valid_intents = [] + # prepare adapt intents + if intent.service == "adapt": + options = [ + [ flatten_list(resources.load_vocabulary_file(voc)) + for voc in vocs ] for vocs in intent.filestems + ] + # filter out intents that don't match the options + for line in present_intents: + if all(any(any(word in line for word in choice) for choice in option) for option in options): + valid_intents.append(line) + # add possible combinations of options + for option in options: + if not any(any(any(word in intent for word in choice) for choice in option) for intent in valid_intents): + valid_intents.extend(generate_sentences(options, 4)) + # prepare padatious intents + elif intent.service == "padatious": + options = resources.load_intent_file(intent.name) + # substitute entities + for i, option in enumerate(options): + options[i] = re.sub(r'\{.*?\}', "test", option) + # filter out intents that don't match the options + for line in present_intents: + if line in options: + valid_intents.append(line) + random.shuffle(options) + if len(valid_intents) < 5: + for option in options: + if option not in valid_intents: + valid_intents.append(option) + + test_yaml[lang][intent.name] = valid_intents + + LOG.info(f"Test yaml: {test_yaml}") + with open(yaml_location, "w", encoding='utf8') as f: + yaml.dump(test_yaml, f, allow_unicode=True) + + # shutdown skill + skill.shutdown() + + +update_resources(get_skill_object()) diff --git a/test/test_skill_install.py b/test/test_skill_install.py new file mode 100644 index 000000000..63965ba52 --- /dev/null +++ b/test/test_skill_install.py @@ -0,0 +1,80 @@ +import unittest +from typing import Optional +from os import environ +from os.path import isdir +from shutil import rmtree +from pathlib import Path + +from ovos_utils.log import LOG +from ovos_utils.messagebus import FakeBus +from ovos_config.config import update_mycroft_config, Configuration +from ovos_workshop.skills.base import BaseSkill +from 
ovos_workshop.skill_launcher import PluginSkillLoader, SkillLoader +from ovos_plugin_manager.skills import find_skill_plugins + + +def get_skill_object(bus: FakeBus, path: str = "", + skill_id: str = "", config_patch: Optional[dict] = None) -> BaseSkill: + """ + Get an initialized skill object laoded from path or using the plugin manager. + @param bus: FakeBus instance to bind to skill for testing + @param path: directory path the skill should be loaded from + @param skill_id: skill_id to initialize skill with + @param config_patch: Configuration update to apply + @returns: Initialized skill object + """ + if config_patch: + user_config = update_mycroft_config(config_patch) + if user_config not in Configuration.xdg_configs: + Configuration.xdg_configs.append(user_config) + if path: + if not isdir(path): + raise FileNotFoundError(path) + LOG.info(f"Loading local skill from: {path}") + loader = SkillLoader(bus, path, skill_id) + if loader.load(): + return loader.instance + plugins = find_skill_plugins() + if skill_id not in plugins: + raise ValueError(f"Requested skill not found: {skill_id}; available skills: {list(plugins.keys())}") + else: + LOG.info(f"Loading skill from plugin: {skill_id}") + plugin = plugins[skill_id] + skill = plugin(bus=bus, skill_id=skill_id) + return skill + + +class TestSkillLoading(unittest.TestCase): + @classmethod + def setUpClass(self): + self.skill_id = environ.get("TEST_SKILL_ENTRYPOINT_NAME") + self.path = str(Path(environ.get("TEST_SKILL_PATH"))) + + def test_from_plugin(self): + bus = FakeBus() + skill = get_skill_object(bus, skill_id=self.skill_id) + self.assertEqual(skill.bus, bus) + self.assertEqual(skill.root_dir, self.path) + + def test_from_loader(self): + bus = FakeBus() + skill = get_skill_object(bus, path=self.path) + self.assertEqual(skill.bus, bus) + self.assertEqual(skill.root_dir, self.path) + + def test_from_plugin_loader(self): + bus = FakeBus() + loader = PluginSkillLoader(bus, self.skill_id) + for skill_id, plug in 
find_skill_plugins().items(): + if skill_id == self.skill_id: + loader.load(plug) + break + else: + raise RuntimeError("plugin not found") + + self.assertEqual(loader.skill_id, self.skill_id) + self.assertEqual(loader.instance.bus, bus) + self.assertEqual(loader.instance.skill_id, self.skill_id) + +if __name__ == "__main__": + unittest.main() \ No newline at end of file diff --git a/test/test_skill_intents.py b/test/test_skill_intents.py new file mode 100644 index 000000000..d0c4718c3 --- /dev/null +++ b/test/test_skill_intents.py @@ -0,0 +1,266 @@ +import logging +from os import getenv +from os.path import isdir +from typing import Optional + +import importlib +import unittest +import yaml +from mock import Mock, patch + +from ovos_core.intent_services import PadatiousService, PadatiousMatcher +from ovos_bus_client import Message +from ovos_bus_client.session import Session, SessionManager +from ovos_config.config import update_mycroft_config, Configuration +from ovos_utils.messagebus import FakeBus +from ovos_utils.log import LOG +from ovos_plugin_manager.skills import find_skill_plugins +from ovos_workshop.skill_launcher import SkillLoader +from ovos_workshop.skills.base import BaseSkill + + +PIPELINE = ["adapt_high", "adapt_medium", "adapt_low"] +use_padacioso = getenv("INTENT_ENGINE") == "padacioso" +if use_padacioso: + PIPELINE.extend(["padacioso_high", + "padacioso_medium", + "padacioso_low"]) +else: + PIPELINE.extend(["padatious_high", + "padatious_medium", + "padatious_low"]) +LOG.level = logging.DEBUG + + +class MockPadatiousMatcher(PadatiousMatcher): + include_med = True + include_low = False + + def __init__(self, *args, **kwargs): + PadatiousMatcher.__init__(self, *args, **kwargs) + LOG.debug("Creating test Padatious Matcher") + + def match_medium(self, utterances, lang=None, __=None): + if not self.include_med: + LOG.info(f"Skipping medium confidence check for {utterances}") + return None + PadatiousMatcher.match_medium(self, utterances, lang=lang) + 
+ def match_low(self, utterances, lang=None, __=None): + if not self.include_low: + LOG.info(f"Skipping low confidence check for {utterances}") + return None + PadatiousMatcher.match_low(self, utterances, lang=lang) + + +def get_skill_object(skill_entrypoint: str, bus: FakeBus, + skill_id: str, config_patch: Optional[dict] = None) -> BaseSkill: + """ + Get an initialized skill object by entrypoint with the requested skill_id. + @param skill_entrypoint: Skill plugin entrypoint or directory path + @param bus: FakeBus instance to bind to skill for testing + @param skill_id: skill_id to initialize skill with + @param config_patch: Configuration update to apply + @returns: Initialized skill object + """ + if config_patch: + user_config = update_mycroft_config(config_patch) + if user_config not in Configuration.xdg_configs: + Configuration.xdg_configs.append(user_config) + if isdir(skill_entrypoint): + LOG.info(f"Loading local skill: {skill_entrypoint}") + loader = SkillLoader(bus, skill_entrypoint, skill_id) + if loader.load(): + return loader.instance + plugins = find_skill_plugins() + if skill_entrypoint not in plugins: + raise ValueError(f"Requested skill not found: {skill_entrypoint}; available skills: {list(plugins.keys())}") + plugin = plugins[skill_entrypoint] + skill = plugin(bus=bus, skill_id=skill_id) + return skill + + +class TestSkillIntentMatching(unittest.TestCase): + test_intents = getenv("INTENT_TEST_FILE") + with open(test_intents) as f: + valid_intents = yaml.safe_load(f) + negative_intents = valid_intents.pop('unmatched intents', dict()) + common_query = valid_intents.pop("common query", dict()) + skill_entrypoint = getenv("TEST_SKILL_ENTRYPOINT_NAME") + + # Ensure all tested languages are loaded + import ovos_config + update_mycroft_config({"secondary_langs": list(valid_intents.keys())}) + importlib.reload(ovos_config.config) + + # make the default session use the test pipeline + session = Session("default", pipeline=PIPELINE) + 
SessionManager.default_session = session + SessionManager.sessions = {"default": session} + + # Start the IntentService + bus = FakeBus() + from ovos_core.intent_services import IntentService + intent_service = IntentService(bus) + + # Create the skill to test + test_skill_id = 'test_skill.test' + skill = get_skill_object(skill_entrypoint=skill_entrypoint, + bus=bus, + skill_id=test_skill_id) + assert skill.config_core["secondary_langs"] == list(valid_intents.keys()) + + last_message = None + + @classmethod + def setUpClass(cls) -> None: + def _on_message(msg): + cls.last_message = msg + + cls.bus.on("message", _on_message) + + def test_00_init(self): + for lang in self.valid_intents: + if hasattr(self.skill, "_native_langs"): + # ovos-workshop < 0.0.15 + self.assertIn(lang, self.skill._native_langs, lang) + else: + self.assertIn(lang, self.skill.native_langs, lang) + self.assertIn(lang, + self.intent_service.padatious_service.containers) + # intents = [intent[1]['name'] for intent in + # self.skill.intent_service.registered_intents if + # intent[1]['lang'] == lang] + # LOG.info(f"{lang} intents: {intents}") + # self.assertIsNotNone(intents, f"No intents registered for {lang}") + # for intent in self.valid_intents[lang]: + # # Validate IntentServiceInterface registration + # self.assertIn(f"{self.test_skill_id}:{intent}", intents, + # f"Intent not defined for {lang}") + + def test_intents(self): + for lang in self.valid_intents: + self.assertIsInstance(lang.split('-')[0], str) + self.assertIsInstance(lang.split('-')[1], str) + for intent, examples in self.valid_intents[lang].items(): + intent_event = f'{self.test_skill_id}:{intent}' + self.skill.events.remove(intent_event) + intent_handler = Mock() + self.skill.events.add(intent_event, intent_handler) + for utt in examples: + if isinstance(utt, dict): + data = list(utt.values())[0] + utt = list(utt.keys())[0] + else: + data = list() + message = Message('test_utterance', + {"utterances": [utt], "lang": lang}) + 
self.intent_service.handle_utterance(message) + try: + intent_handler.assert_called_once() + except AssertionError as e: + LOG.error(f"sent:{message.serialize()}") + LOG.error(f"received:{self.last_message}") + raise AssertionError(utt) from e + intent_message = intent_handler.call_args[0][0] + self.assertIsInstance(intent_message, Message, utt) + self.assertEqual(intent_message.msg_type, intent_event, utt) + for datum in data: + if isinstance(datum, dict): + name = list(datum.keys())[0] + value = list(datum.values())[0] + else: + name = datum + value = None + if name in intent_message.data: + # This is an entity + voc_id = name + else: + # We mocked the handler, data is munged + voc_id = f'{self.test_skill_id.replace(".", "_")}' \ + f'{name}' + self.assertIsInstance(intent_message.data.get(voc_id), + str, intent_message.data) + if value: + self.assertEqual(intent_message.data.get(voc_id), + value, utt) + intent_handler.reset_mock() + + @patch("ovos_core.intent_services.padacioso_service.PadaciosoService", + new=MockPadatiousMatcher) + def test_negative_intents(self): + test_config = self.negative_intents.pop('config', None) + if test_config: + MockPadatiousMatcher.include_med = test_config.get('include_med', + True) + MockPadatiousMatcher.include_low = test_config.get('include_low', + False) + + intent_failure = Mock() + self.intent_service.send_complete_intent_failure = intent_failure + + # # Skip any fallback/converse handling + # self.intent_service.fallback = Mock() + # self.intent_service.converse = Mock() + # if not self.common_query: + # # Skip common_qa unless explicitly testing a Common QA skill + # self.intent_service.common_qa = Mock() + + for lang in self.negative_intents.keys(): + for utt in self.negative_intents[lang]: + message = Message('test_utterance', + {"utterances": [utt], "lang": lang}) + self.intent_service.handle_utterance(message) + try: + intent_failure.assert_called_once_with(message) + intent_failure.reset_mock() + except AssertionError 
as e: + LOG.error(self.last_message) + raise AssertionError(utt) from e + + def test_common_query(self): + qa_callback = Mock() + qa_response = Mock() + self.skill.events.add('question:action', qa_callback) + self.skill.events.add('question:query.response', qa_response) + for lang in self.common_query.keys(): + for utt in self.common_query[lang]: + if isinstance(utt, dict): + data = list(utt.values())[0] + utt = list(utt.keys())[0] + else: + data = dict() + message = Message('test_utterance', + {"utterances": [utt], "lang": lang}) + self.intent_service.handle_utterance(message) + response = qa_response.call_args[0][0] + callback = qa_callback.call_args[0][0] + self.assertIsInstance(response, Message) + self.assertTrue(response.data["phrase"] in utt) + self.assertEqual(response.data["skill_id"], self.skill.skill_id) + self.assertIn("callback_data", response.data.keys()) + self.assertIsInstance(response.data["conf"], float) + self.assertIsInstance(response.data["answer"], str) + + self.assertIsInstance(callback, Message) + self.assertEqual(callback.data['skill_id'], self.skill.skill_id) + self.assertEqual(callback.data['phrase'], + response.data['phrase']) + if not data: + continue + if isinstance(data.get('callback'), dict): + self.assertEqual(callback.data['callback_data'], + data['callback']) + elif isinstance(data.get('callback'), list): + self.assertEqual(set(callback.data['callback_data'].keys()), + set(data.get('callback'))) + if data.get('min_confidence'): + self.assertGreaterEqual(response.data['conf'], + data['min_confidence']) + if data.get('max_confidence'): + self.assertLessEqual(response.data['conf'], + data['max_confidence']) + + +if __name__ == "__main__": + unittest.main() \ No newline at end of file diff --git a/test/test_skill_resources.py b/test/test_skill_resources.py new file mode 100644 index 000000000..f35675f3e --- /dev/null +++ b/test/test_skill_resources.py @@ -0,0 +1,200 @@ +# NEON AI (TM) SOFTWARE, Software Development Kit & Application 
Framework +# All trademark and other rights reserved by their respective owners +# Copyright 2008-2022 Neongecko.com Inc. +# Contributors: Daniel McKnight, Guy Daniels, Elon Gasper, Richard Leeds, +# Regina Bloomstine, Casimiro Ferreira, Andrii Pernatii, Kirill Hrymailo +# BSD-3 License +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# 3. Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from this +# software without specific prior written permission. +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, +# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +import unittest +import json +import yaml +from os import getenv +from os.path import expanduser, isfile, isdir +from typing import Optional + +from ovos_utils.messagebus import FakeBus +from ovos_utils.log import LOG +from ovos_workshop.skills.base import BaseSkill +from ovos_workshop.skill_launcher import SkillLoader +from ovos_config.config import update_mycroft_config, Configuration +from ovos_plugin_manager.skills import find_skill_plugins + + +def get_skill_object(skill_entrypoint: str, bus: FakeBus, + skill_id: str, config_patch: Optional[dict] = None) -> BaseSkill: + """ + Get an initialized skill object by entrypoint with the requested skill_id. + @param skill_entrypoint: Skill plugin entrypoint or directory path + @param bus: FakeBus instance to bind to skill for testing + @param skill_id: skill_id to initialize skill with + @param config_patch: Configuration update to apply + @returns: Initialized skill object + """ + if config_patch: + user_config = update_mycroft_config(config_patch) + if user_config not in Configuration.xdg_configs: + Configuration.xdg_configs.append(user_config) + if isdir(skill_entrypoint): + LOG.info(f"Loading local skill: {skill_entrypoint}") + loader = SkillLoader(bus, skill_entrypoint, skill_id) + if loader.load(): + return loader.instance + plugins = find_skill_plugins() + if skill_entrypoint not in plugins: + raise ValueError(f"Requested skill not found: {skill_entrypoint}; available skills: {list(plugins.keys())}") + plugin = plugins[skill_entrypoint] + skill = plugin(bus=bus, skill_id=skill_id) + return skill + + +def load_resource_tests(test_file: str) -> dict: + """ + Load resource tests from a file + @param test_file: Test file to load + @returns: Loaded test spec + """ + test_file = expanduser(test_file) + if not isfile(test_file): + raise FileNotFoundError(test_file) + with open(test_file) as f: + resources = yaml.safe_load(f) + return resources + + +class TestSkillResources(unittest.TestCase): + # Static parameters 
+ messages = list() + bus = FakeBus() + bus.run_forever() + test_skill_id = 'test_skill.test' + + # Define skill and resource spec to use in tests + resources = load_resource_tests(getenv("RESOURCE_TEST_FILE")) + skill_entrypoint_name = getenv("TEST_SKILL_ENTRYPOINT_NAME") + + # Specify valid languages to test + supported_languages = resources['languages'] + + # Specify skill intents as sets + adapt_intents = set(resources['intents']['adapt']) + padatious_intents = set(resources['intents']['padatious']) + + # regex entities, not necessarily filenames + regex = set(resources['regex']) + # vocab is lowercase .voc file basenames + vocab = set(resources['vocab']) + # dialog is .dialog file basenames (case-sensitive) + dialog = set(resources['dialog']) + + core_config_patch = {"secondary_langs": supported_languages} + + @classmethod + def setUpClass(cls) -> None: + cls.bus.on("message", cls._on_message) + + cls.skill = get_skill_object(skill_entrypoint=cls.skill_entrypoint_name, + bus=cls.bus, + skill_id=cls.test_skill_id, + config_patch=cls.core_config_patch) + + cls.adapt_intents = {f'{cls.test_skill_id}:{intent}' + for intent in cls.adapt_intents} + cls.padatious_intents = {f'{cls.test_skill_id}:{intent}' + for intent in cls.padatious_intents} + + @classmethod + def _on_message(cls, message): + cls.messages.append(json.loads(message)) + + def test_skill_setup(self): + self.assertEqual(self.skill.skill_id, self.test_skill_id) + if hasattr(self.skill, "_core_lang"): + # ovos-workshop < 0.0.15 + self.assertEqual(set([self.skill._core_lang] + + self.skill._secondary_langs), + set(self.supported_languages), + f"expected={self.supported_languages}") + else: + self.assertEqual(set([self.skill.core_lang] + + self.skill.secondary_langs), + set(self.supported_languages), + f"expected={self.supported_languages}") + + def test_intent_registration(self): + registered_adapt = list() + registered_padatious = dict() + registered_vocab = dict() + registered_regex = dict() + for msg 
in self.messages: + if msg["type"] == "register_intent": + registered_adapt.append(msg["data"]["name"]) + elif msg["type"] == "padatious:register_intent": + lang = msg["data"]["lang"] + registered_padatious.setdefault(lang, list()) + registered_padatious[lang].append(msg["data"]["name"]) + elif msg["type"] == "register_vocab": + lang = msg["data"]["lang"] + if msg['data'].get('regex'): + registered_regex.setdefault(lang, dict()) + regex = msg["data"]["regex"].split( + '<', 1)[1].split('>', 1)[0].replace( + self.test_skill_id.replace('.', '_'), '') + registered_regex[lang].setdefault(regex, list()) + registered_regex[lang][regex].append(msg["data"]["regex"]) + else: + registered_vocab.setdefault(lang, dict()) + voc_filename = msg["data"]["entity_type"].replace( + self.test_skill_id.replace('.', '_'), '').lower() + registered_vocab[lang].setdefault(voc_filename, list()) + registered_vocab[lang][voc_filename].append( + msg["data"]["entity_value"]) + self.assertEqual(set(registered_adapt), self.adapt_intents, + registered_adapt) + for lang in self.supported_languages: + if self.padatious_intents: + self.assertEqual(set(registered_padatious[lang]), + self.padatious_intents, + registered_padatious[lang]) + if self.vocab: + self.assertEqual(set(registered_vocab[lang].keys()), + self.vocab, registered_vocab) + if self.regex: + self.assertEqual(set(registered_regex[lang].keys()), + self.regex, registered_regex) + for voc in self.vocab: + # Ensure every vocab file has at least one entry + self.assertGreater(len(registered_vocab[lang][voc]), 0) + for rx in self.regex: + # Ensure every rx file has exactly one entry + self.assertTrue(all((rx in line for line in + registered_regex[lang][rx])), self.regex) + + def test_dialog_files(self): + for lang in self.supported_languages: + dialogs = self.skill._lang_resources[lang].dialog_renderer.templates + for dialog in self.dialog: + self.assertIn(dialog, dialogs.keys(), + f"lang={lang}") + + # TODO: Consider adding tests for resource 
file existence \ No newline at end of file diff --git a/test/test_skill_resourcesV2.py b/test/test_skill_resourcesV2.py new file mode 100644 index 000000000..04db2b5c8 --- /dev/null +++ b/test/test_skill_resourcesV2.py @@ -0,0 +1,347 @@ +import logging +from os import getenv +from os.path import isdir +from typing import Optional, List + +import importlib +import unittest +import yaml +from mock import Mock, patch + +from ovos_core.intent_services import PadatiousMatcher, IntentService +from ovos_bus_client import Message +from ovos_bus_client.session import Session, SessionManager +from ovos_config.config import update_mycroft_config, Configuration +from ovos_utils.messagebus import FakeBus +from ovos_utils.log import LOG +from ovos_plugin_manager.skills import find_skill_plugins +from ovos_workshop.skill_launcher import SkillLoader +from ovos_workshop.skills.base import BaseSkill +from ovos_workshop.resource_files import SkillResources + + +PIPELINE = ["adapt_high", "adapt_medium", "adapt_low"] +use_padacioso = getenv("INTENT_ENGINE") == "padacioso" +if use_padacioso: + PIPELINE.extend(["padacioso_high", + "padacioso_medium", + "padacioso_low"]) +else: + PIPELINE.extend(["padatious_high", + "padatious_medium", + "padatious_low"]) +LOG.level = logging.DEBUG + +CAPTURE_INTENT_MESSAGES = [ + "register_vocab", + "register_intent", +] + + +class MockPadatiousMatcher(PadatiousMatcher): + include_med = True + include_low = False + + def __init__(self, *args, **kwargs): + PadatiousMatcher.__init__(self, *args, **kwargs) + LOG.debug("Creating test Padatious Matcher") + + def match_medium(self, utterances, lang=None, __=None): + if not self.include_med: + LOG.info(f"Skipping medium confidence check for {utterances}") + return None + PadatiousMatcher.match_medium(self, utterances, lang=lang) + + def match_low(self, utterances, lang=None, __=None): + if not self.include_low: + LOG.info(f"Skipping low confidence check for {utterances}") + return None + 
PadatiousMatcher.match_low(self, utterances, lang=lang) + + +def get_skill_object(skill_entrypoint: str, bus: FakeBus, + skill_id: str, config_patch: Optional[dict] = None) -> BaseSkill: + """ + Get an initialized skill object by entrypoint with the requested skill_id. + @param skill_entrypoint: Skill plugin entrypoint or directory path + @param bus: FakeBus instance to bind to skill for testing + @param skill_id: skill_id to initialize skill with + @param config_patch: Configuration update to apply + @returns: Initialized skill object + """ + if config_patch: + user_config = update_mycroft_config(config_patch) + if user_config not in Configuration.xdg_configs: + Configuration.xdg_configs.append(user_config) + + if isdir(skill_entrypoint): + LOG.info(f"Loading local skill: {skill_entrypoint}") + loader = SkillLoader(bus, skill_entrypoint, skill_id) + if loader.load(): + return loader.instance + plugins = find_skill_plugins() + if skill_entrypoint not in plugins: + raise ValueError(f"Requested skill not found: {skill_entrypoint}; available skills: {list(plugins.keys())}") + plugin = plugins[skill_entrypoint] + skill = plugin(bus=bus, skill_id=skill_id) + return skill + + +class TestSkillIntents(unittest.TestCase): + messages: List[Message]= list() + last_message: Optional[Message] = None + valid_intents = dict() + intents = set() + vocab = set() + regex = set() + + valid_intents = dict() + negative_intents = dict() + common_query = dict() + + # make the default session use the test pipeline + session = Session("default", pipeline=PIPELINE) + SessionManager.default_session = session + SessionManager.sessions = {"default": session} + + test_skill_id = 'test_skill.test' + skill = None + + + @classmethod + def setUpClass(cls) -> None: + + def _on_message(msg): + cls.last_message = msg + cls.messages.append(msg) + + skill_folder = getenv("TEST_SKILL_PKG_FOLDER") + # Ensure all tested languages are loaded + import ovos_config + cls.supported_languages = 
SkillResources.get_available_languages(skill_folder) + update_mycroft_config({"secondary_langs": cls.supported_languages}) + importlib.reload(ovos_config.config) + + # Start the IntentService + cls.bus = FakeBus() + cls.bus.run_forever() + cls.intent_service = IntentService(cls.bus) + + for msg_type in CAPTURE_INTENT_MESSAGES: + cls.bus.on(msg_type, _on_message) + + cls.skill = get_skill_object(skill_entrypoint=skill_folder, + bus=cls.bus, + skill_id=cls.test_skill_id) + + skill_resources = cls.skill.resources.get_inventory() + + # Load the test intent file + yaml_location = getenv("INTENT_TEST_FILE") + with open(yaml_location) as f: + valid_intents = yaml.safe_load(f) + + cls.negative_intents = valid_intents.pop('unmatched intents', dict()) + cls.common_query = valid_intents.pop("common query", dict()) + cls.regex = set(skill_resources['regex']) + + cls.valid_intents = valid_intents + cls.intents = set(valid_intents["en-us"].keys()) + + @classmethod + def tearDownClass(cls) -> None: + cls.skill.shutdown() + + def test_00_init(self): + for lang in self.valid_intents: + if hasattr(self.skill, "_native_langs"): + # ovos-workshop < 0.0.15 + self.assertIn(lang, self.skill._native_langs, lang) + else: + self.assertIn(lang, self.skill.native_langs, lang) + if use_padacioso: + intent_containers = self.intent_service.padacioso_service.containers + else: + intent_containers = self.intent_service.padatious_service.containers + self.assertIn(lang, intent_containers) + + def test_ressources(self): + """ + test if all resources are present with all languages + """ + inventory = self.skill.resources.get_inventory() + self.assertEqual(inventory["languages"], self.supported_languages) + for lang in self.supported_languages: + lang_inventory = self.skill.load_lang(lang=lang).get_inventory() + self.assertEqual(inventory, lang_inventory) + + def test_intent_registration(self): + """ + Test if all intents are registered + """ + registered_intents = 
set(self.skill.intent_service.intent_names) + registered_vocab = dict() + registered_regex = dict() + for msg in self.messages: + if msg.msg_type == "register_vocab": + if msg.data.get('regex'): + regex = msg.data["regex"].split( + '<', 1)[1].split('>', 1)[0].replace( + self.test_skill_id.replace('.', '_'), '') + registered_regex.setdefault(regex, list()) + registered_regex[regex].append(msg.data["regex"]) + else: + voc_filename = msg.data.get("entity_type", "").replace( + self.test_skill_id.replace('.', '_'), '').lower() + registered_vocab.setdefault(voc_filename, list()) + registered_vocab[voc_filename].append( + msg.data.get("entity_value", "")) + self.assertEqual(registered_intents, self.intents, + registered_intents) + if self.vocab: + self.assertEqual(set(registered_vocab.keys()), + self.vocab) + if self.regex: + self.assertEqual(set(registered_regex.keys()), + self.regex, registered_regex) + for voc in self.vocab: + # Ensure every vocab file has at least one entry + self.assertGreater(len(registered_vocab[voc]), 0) + for rx in self.regex: + # Ensure every rx file has exactly one entry + self.assertTrue(all((rx in line for line in + registered_regex[rx])), self.regex) + # TODO + + def test_intents(self): + """ + Test if all intents are correctly recognized by the intent parser + """ + for lang in self.valid_intents: + self.assertIsInstance(lang.split('-')[0], str) + self.assertIsInstance(lang.split('-')[1], str) + for intent, examples in self.valid_intents[lang].items(): + intent_event = f'{self.test_skill_id}:{intent}' + self.skill.events.remove(intent_event) + intent_handler = Mock() + self.skill.events.add(intent_event, intent_handler) + for utt in examples: + LOG.info(f"Testing utterance '{utt}'") + if isinstance(utt, dict): + data = list(utt.values())[0] + utt = list(utt.keys())[0] + else: + data = list() + message = Message('test_utterance', + {"utterances": [utt], "lang": lang}) + self.intent_service.handle_utterance(message) + try: + 
intent_handler.assert_called_once() + except AssertionError as e: + LOG.error(f"sent:{message.serialize()}") + LOG.error(f"received:{self.last_message.serialize()}") + raise AssertionError(utt) from e + intent_message = intent_handler.call_args[0][0] + self.assertIsInstance(intent_message, Message, utt) + self.assertEqual(intent_message.msg_type, intent_event, utt) + for datum in data: + if isinstance(datum, dict): + name = list(datum.keys())[0] + value = list(datum.values())[0] + else: + name = datum + value = None + if name in intent_message.data: + # This is an entity + voc_id = name + else: + # We mocked the handler, data is munged + voc_id = f'{self.test_skill_id.replace(".", "_")}' \ + f'{name}' + self.assertIsInstance(intent_message.data.get(voc_id), + str, intent_message.data) + if value: + self.assertEqual(intent_message.data.get(voc_id), + value, utt) + intent_handler.reset_mock() + + @patch("ovos_core.intent_services.padacioso_service.PadaciosoService", + new=MockPadatiousMatcher) + def test_negative_intents(self): + test_config = self.negative_intents.pop('config', None) + if test_config: + MockPadatiousMatcher.include_med = test_config.get('include_med', + True) + MockPadatiousMatcher.include_low = test_config.get('include_low', + False) + + intent_failure = Mock() + original_failure = self.intent_service.send_complete_intent_failure + self.intent_service.send_complete_intent_failure = intent_failure + + # # Skip any fallback/converse handling + # self.intent_service.fallback = Mock() + # self.intent_service.converse = Mock() + # if not self.common_query: + # # Skip common_qa unless explicitly testing a Common QA skill + # self.intent_service.common_qa = Mock() + + for lang in self.negative_intents.keys(): + for utt in self.negative_intents[lang]: + message = Message('test_utterance', + {"utterances": [utt], "lang": lang}) + self.intent_service.handle_utterance(message) + try: + intent_failure.assert_called_once_with(message) + 
intent_failure.reset_mock() + except AssertionError as e: + LOG.error(self.last_message) + raise AssertionError(utt) from e + + self.intent_service.send_complete_intent_failure = original_failure + + def test_common_query(self): + qa_callback = Mock() + qa_response = Mock() + self.skill.events.add('question:action', qa_callback) + self.skill.events.add('question:query.response', qa_response) + for lang in self.common_query.keys(): + for utt in self.common_query[lang]: + if isinstance(utt, dict): + data = list(utt.values())[0] + utt = list(utt.keys())[0] + else: + data = dict() + message = Message('test_utterance', + {"utterances": [utt], "lang": lang}) + self.intent_service.handle_utterance(message) + response = qa_response.call_args[0][0] + callback = qa_callback.call_args[0][0] + self.assertIsInstance(response, Message) + self.assertTrue(response.data["phrase"] in utt) + self.assertEqual(response.data["skill_id"], self.skill.skill_id) + self.assertIn("callback_data", response.data.keys()) + self.assertIsInstance(response.data["conf"], float) + self.assertIsInstance(response.data["answer"], str) + + self.assertIsInstance(callback, Message) + self.assertEqual(callback.data['skill_id'], self.skill.skill_id) + self.assertEqual(callback.data['phrase'], + response.data['phrase']) + if not data: + continue + if isinstance(data.get('callback'), dict): + self.assertEqual(callback.data['callback_data'], + data['callback']) + elif isinstance(data.get('callback'), list): + self.assertEqual(set(callback.data['callback_data'].keys()), + set(data.get('callback'))) + if data.get('min_confidence'): + self.assertGreaterEqual(response.data['conf'], + data['min_confidence']) + if data.get('max_confidence'): + self.assertLessEqual(response.data['conf'], + data['max_confidence']) + +if __name__ == "__main__": + unittest.main() \ No newline at end of file