diff --git a/.github/workflows/e2e-upgrade-tests-matrix-full.yml b/.github/workflows/e2e-upgrade-tests-matrix-full.yml index f1e26e77..26aacdb0 100644 --- a/.github/workflows/e2e-upgrade-tests-matrix-full.yml +++ b/.github/workflows/e2e-upgrade-tests-matrix-full.yml @@ -7,11 +7,11 @@ on: inputs: pmm_ui_tests_branch: description: 'pmm-ui-tests repository branch' - default: 'main' + default: 'v3' required: true pmm_qa_branch: description: 'pmm-qa repository branch(for setup)' - default: 'main' + default: 'v3' required: true repository: description: 'Upgrade to:' @@ -24,7 +24,7 @@ on: - dev-latest versions_range: description: 'Last versions range:' - default: 5 + default: '5' required: true jobs: @@ -46,13 +46,13 @@ jobs: old_version: ${{ fromJSON(needs.get_versions.outputs.version_matrix) }} upgrade_type: ["UI way", "Docker way", "Podman way"] with: - pmm_ui_tests_branch: ${{ inputs.pmm_ui_tests_branch || 'main' }} + pmm_ui_tests_branch: ${{ inputs.pmm_ui_tests_branch || 'v3' }} pre_upgrade_tests: '@config-pre-upgrade' post_upgrade_tests: '@config-post-upgrade' pmm_server_start_version: ${{ matrix.old_version }} pmm_client_start_version: ${{ matrix.old_version }} upgrade_type: ${{ matrix.upgrade_type }} - pmm_qa_branch: ${{ inputs.pmm_qa_branch || 'main' }} + pmm_qa_branch: ${{ inputs.pmm_qa_branch || 'v3' }} services_list: '' repository: ${{ inputs.repository || 'dev-latest'}} version_string_from: ${{needs.get_versions.outputs.start_version}} @@ -69,13 +69,13 @@ jobs: old_version: ${{ fromJSON(needs.get_versions.outputs.version_matrix) }} upgrade_type: ["UI way", "Docker way", "Podman way"] with: - pmm_ui_tests_branch: ${{ inputs.pmm_ui_tests_branch || 'main' }} + pmm_ui_tests_branch: ${{ inputs.pmm_ui_tests_branch || 'v3' }} pre_upgrade_tests: '@rbac-pre-upgrade' post_upgrade_tests: '@rbac-post-upgrade' pmm_server_start_version: ${{ matrix.old_version }} pmm_client_start_version: ${{ matrix.old_version }} upgrade_type: ${{ matrix.upgrade_type }} - pmm_qa_branch: 
${{ inputs.pmm_qa_branch || 'main' }} + pmm_qa_branch: ${{ inputs.pmm_qa_branch || 'v3' }} services_list: '--addclient=ps,1 --addclient=pdpgsql,1' repository: ${{ inputs.repository || 'dev-latest'}} version_string_from: ${{needs.get_versions.outputs.start_version}} diff --git a/.github/workflows/e2e-upgrade-tests-matrix.yml b/.github/workflows/e2e-upgrade-tests-matrix.yml index 584b1b8b..035ed4e7 100644 --- a/.github/workflows/e2e-upgrade-tests-matrix.yml +++ b/.github/workflows/e2e-upgrade-tests-matrix.yml @@ -7,11 +7,11 @@ on: inputs: pmm_ui_tests_branch: description: 'pmm-ui-tests repository branch' - default: 'main' + default: 'v3' required: true pmm_qa_branch: description: 'pmm-qa repository branch(for setup)' - default: 'main' + default: 'v3' required: true upgrade_type: description: 'Upgrade way:' @@ -50,13 +50,13 @@ jobs: matrix: old_version: ${{ fromJSON(needs.get_versions.outputs.version_matrix) }} with: - pmm_ui_tests_branch: ${{ inputs.pmm_ui_tests_branch || 'main' }} + pmm_ui_tests_branch: ${{ inputs.pmm_ui_tests_branch || 'v3' }} pre_upgrade_tests: '@config-pre-upgrade' post_upgrade_tests: '@config-post-upgrade' pmm_server_start_version: ${{ matrix.old_version }} pmm_client_start_version: ${{ matrix.old_version }} upgrade_type: ${{ inputs.upgrade_type || 'Docker way' }} - pmm_qa_branch: ${{ inputs.pmm_qa_branch || 'main' }} + pmm_qa_branch: ${{ inputs.pmm_qa_branch || 'v3' }} services_list: '' repository: ${{ inputs.repository || 'dev-latest'}} version_string_from: ${{needs.get_versions.outputs.start_version}} @@ -72,13 +72,13 @@ jobs: matrix: old_version: ${{ fromJSON(needs.get_versions.outputs.version_matrix) }} with: - pmm_ui_tests_branch: ${{ inputs.pmm_ui_tests_branch || 'main' }} + pmm_ui_tests_branch: ${{ inputs.pmm_ui_tests_branch || 'v3' }} pre_upgrade_tests: '@rbac-pre-upgrade' post_upgrade_tests: '@rbac-post-upgrade' pmm_server_start_version: ${{ matrix.old_version }} pmm_client_start_version: ${{ matrix.old_version }} upgrade_type: ${{ 
inputs.upgrade_type || 'Docker way' }} - pmm_qa_branch: ${{ inputs.pmm_qa_branch || 'main' }} + pmm_qa_branch: ${{ inputs.pmm_qa_branch || 'v3' }} services_list: '--addclient=ps,1 --addclient=pdpgsql,1' repository: ${{ inputs.repository || 'dev-latest'}} version_string_from: ${{needs.get_versions.outputs.start_version}} diff --git a/.github/workflows/e2e-upgrade-tests.yml b/.github/workflows/e2e-upgrade-tests.yml index 04c82f04..e47e5336 100644 --- a/.github/workflows/e2e-upgrade-tests.yml +++ b/.github/workflows/e2e-upgrade-tests.yml @@ -5,11 +5,11 @@ on: inputs: pmm_ui_tests_branch: description: 'pmm-ui-tests repository branch' - default: 'main' + default: 'v3' required: true pmm_qa_branch: description: 'pmm-qa repository branch(for setup)' - default: 'main' + default: 'v3' required: true pmm_server_start_version: description: 'PMM Server version to upgrade (latest|dev-latest|x.xx.x|x.xx.x-rc)' @@ -42,12 +42,12 @@ on: inputs: pmm_ui_tests_branch: description: 'pmm-ui-tests repository branch' - default: 'main' + default: 'v3' type: string required: true pmm_qa_branch: description: 'pmm-qa repository branch(for setup)' - default: 'main' + default: 'v3' type: string required: true pmm_server_start_version: diff --git a/.github/workflows/integration-cli-tests.yml b/.github/workflows/integration-cli-tests.yml index ad7dc126..a610aee5 100644 --- a/.github/workflows/integration-cli-tests.yml +++ b/.github/workflows/integration-cli-tests.yml @@ -387,3 +387,18 @@ jobs: cli_test: 'proxySql' services_list: '--database pxc=8.0' test_name: 'PXC 8.0' + + external-tests: + name: 'CLI / Integration' + uses: ./.github/workflows/runner-integration-cli-tests.yml + secrets: inherit + with: + sha: ${{ inputs.sha || github.event.pull_request.head.sha || 'null' }} + pmm_ui_tests_branch: ${{ inputs.pmm_ui_tests_branch || 'v3' }} + qa_integration_branch: ${{ inputs.qa_integration_branch || 'v3' }} + pmm_server_image: ${{ inputs.pmm_server_image || 'perconalab/pmm-server:3-dev-latest' }} 
+ pmm_client_image: ${{ inputs.pmm_client_image || 'perconalab/pmm-client:3-dev-latest' }} + pmm_client_version: ${{ inputs.pmm_client_version || '3-dev-latest' }} + cli_test: 'external' + services_list: '--database=external' + test_name: 'External' diff --git a/.github/workflows/package-test-matrix-full.yml b/.github/workflows/package-test-matrix-full.yml index 38ec6018..4a570ed6 100644 --- a/.github/workflows/package-test-matrix-full.yml +++ b/.github/workflows/package-test-matrix-full.yml @@ -57,7 +57,7 @@ jobs: uses: ./.github/workflows/package-test-single.yml secrets: inherit with: - package_testing_branch: ${{ inputs.package_testing_branch || 'master' }} + package_testing_branch: ${{ inputs.package_testing_branch || 'v3' }} package: ${{ inputs.package || 'pmm3-client' }} repository: ${{ inputs.repository || 'dev-latest' }} metrics_mode: ${{ inputs.metrics_mode || 'auto' }} @@ -68,7 +68,7 @@ jobs: uses: ./.github/workflows/package-test-single.yml secrets: inherit with: - package_testing_branch: ${{ inputs.package_testing_branch || 'master' }} + package_testing_branch: ${{ inputs.package_testing_branch || 'v3' }} package: ${{ inputs.package || 'pmm3-client' }} repository: ${{ inputs.repository || 'dev-latest' }} metrics_mode: ${{ inputs.metrics_mode || 'auto' }} @@ -79,7 +79,7 @@ jobs: uses: ./.github/workflows/package-test-single.yml secrets: inherit with: - package_testing_branch: ${{ inputs.package_testing_branch || 'master' }} + package_testing_branch: ${{ inputs.package_testing_branch || 'v3' }} package: ${{ inputs.package || 'pmm3-client' }} repository: ${{ inputs.repository || 'dev-latest' }} metrics_mode: ${{ inputs.metrics_mode || 'auto' }} @@ -90,7 +90,7 @@ jobs: uses: ./.github/workflows/package-test-single.yml secrets: inherit with: - package_testing_branch: ${{ inputs.package_testing_branch || 'master' }} + package_testing_branch: ${{ inputs.package_testing_branch || 'v3' }} package: ${{ inputs.package || 'pmm3-client' }} repository: ${{ 
inputs.repository || 'dev-latest' }} metrics_mode: ${{ inputs.metrics_mode || 'auto' }} @@ -101,7 +101,7 @@ jobs: uses: ./.github/workflows/package-test-single.yml secrets: inherit with: - package_testing_branch: ${{ inputs.package_testing_branch || 'master' }} + package_testing_branch: ${{ inputs.package_testing_branch || 'v3' }} package: ${{ inputs.package || 'pmm3-client' }} repository: ${{ inputs.repository || 'dev-latest' }} metrics_mode: ${{ inputs.metrics_mode || 'auto' }} @@ -112,7 +112,7 @@ jobs: uses: ./.github/workflows/package-test-single.yml secrets: inherit with: - package_testing_branch: ${{ inputs.package_testing_branch || 'master' }} + package_testing_branch: ${{ inputs.package_testing_branch || 'v3' }} package: ${{ inputs.package || 'pmm3-client' }} repository: ${{ inputs.repository || 'dev-latest' }} metrics_mode: ${{ inputs.metrics_mode || 'auto' }} @@ -123,7 +123,7 @@ jobs: uses: ./.github/workflows/package-test-single.yml secrets: inherit with: - package_testing_branch: ${{ inputs.package_testing_branch || 'master' }} + package_testing_branch: ${{ inputs.package_testing_branch || 'v3' }} package: ${{ inputs.package || 'pmm3-client' }} repository: ${{ inputs.repository || 'dev-latest' }} metrics_mode: ${{ inputs.metrics_mode || 'auto' }} @@ -134,7 +134,7 @@ jobs: uses: ./.github/workflows/package-test-single.yml secrets: inherit with: - package_testing_branch: ${{ inputs.package_testing_branch || 'master' }} + package_testing_branch: ${{ inputs.package_testing_branch || 'v3' }} package: ${{ inputs.package || 'pmm3-client' }} repository: ${{ inputs.repository || 'dev-latest' }} metrics_mode: ${{ inputs.metrics_mode || 'auto' }} @@ -145,7 +145,7 @@ jobs: uses: ./.github/workflows/package-test-single.yml secrets: inherit with: - package_testing_branch: ${{ inputs.package_testing_branch || 'master' }} + package_testing_branch: ${{ inputs.package_testing_branch || 'v3' }} package: ${{ inputs.package || 'pmm3-client' }} repository: ${{ 
inputs.repository || 'dev-latest' }} metrics_mode: ${{ inputs.metrics_mode || 'auto' }} @@ -156,7 +156,7 @@ jobs: uses: ./.github/workflows/package-test-single.yml secrets: inherit with: - package_testing_branch: ${{ inputs.package_testing_branch || 'master' }} + package_testing_branch: ${{ inputs.package_testing_branch || 'v3' }} package: ${{ inputs.package || 'pmm3-client' }} repository: ${{ inputs.repository || 'dev-latest' }} metrics_mode: ${{ inputs.metrics_mode || 'auto' }} @@ -167,7 +167,7 @@ jobs: uses: ./.github/workflows/package-test-single.yml secrets: inherit with: - package_testing_branch: ${{ inputs.package_testing_branch || 'master' }} + package_testing_branch: ${{ inputs.package_testing_branch || 'v3' }} package: ${{ inputs.package || 'pmm3-client' }} repository: ${{ inputs.repository || 'dev-latest' }} metrics_mode: ${{ inputs.metrics_mode || 'auto' }} diff --git a/.github/workflows/package-test-matrix.yml b/.github/workflows/package-test-matrix.yml index 87dda1bd..07bec4f2 100644 --- a/.github/workflows/package-test-matrix.yml +++ b/.github/workflows/package-test-matrix.yml @@ -54,7 +54,7 @@ jobs: uses: ./.github/workflows/package-test-single.yml secrets: inherit with: - package_testing_branch: ${{ inputs.package_testing_branch || 'master' }} + package_testing_branch: ${{ inputs.package_testing_branch || 'v3' }} package: ${{ inputs.package || 'pmm3-client' }} repository: ${{ inputs.repository || 'dev-latest' }} metrics_mode: ${{ inputs.metrics_mode || 'auto' }} @@ -67,7 +67,7 @@ jobs: uses: ./.github/workflows/package-test-single.yml secrets: inherit with: - package_testing_branch: ${{ inputs.package_testing_branch || 'master' }} + package_testing_branch: ${{ inputs.package_testing_branch || 'v3' }} package: ${{ inputs.package || 'pmm3-client' }} repository: ${{ inputs.repository || 'dev-latest' }} metrics_mode: ${{ inputs.metrics_mode || 'auto' }} @@ -80,7 +80,7 @@ jobs: uses: ./.github/workflows/package-test-single.yml secrets: inherit with: - 
package_testing_branch: ${{ inputs.package_testing_branch || 'master' }} + package_testing_branch: ${{ inputs.package_testing_branch || 'v3' }} package: ${{ inputs.package || 'pmm3-client' }} repository: ${{ inputs.repository || 'dev-latest' }} metrics_mode: ${{ inputs.metrics_mode || 'auto' }} diff --git a/.github/workflows/runner-e2e-upgrade-tests.yml b/.github/workflows/runner-e2e-upgrade-tests.yml index 55ef219e..a776582a 100644 --- a/.github/workflows/runner-e2e-upgrade-tests.yml +++ b/.github/workflows/runner-e2e-upgrade-tests.yml @@ -6,7 +6,7 @@ on: inputs: pmm_ui_tests_branch: description: 'pmm-ui-tests repository branch' - default: 'main' + default: 'v3' type: string required: true pre_upgrade_tests: @@ -34,7 +34,7 @@ on: type: string pmm_qa_branch: description: 'pmm-qa repository branch(for setup)' - default: 'main' + default: 'v3' type: string required: true services_list: diff --git a/.github/workflows/runner-package-test.yml b/.github/workflows/runner-package-test.yml index b56d8d18..1ca2f747 100644 --- a/.github/workflows/runner-package-test.yml +++ b/.github/workflows/runner-package-test.yml @@ -123,7 +123,7 @@ jobs: timeout-minutes: 60 env: SHA: ${{ inputs.sha || 'null' }} - PACKAGE_TESTING_BRANCH: ${{ inputs.package_testing_branch || 'master' }} + PACKAGE_TESTING_BRANCH: ${{ inputs.package_testing_branch || 'v3' }} PMM_SERVER_IMAGE: ${{ inputs.pmm_server_image }} TARBALL: ${{ inputs.pmm_client_tarball || 'null' }} EXPECTED_VERSION: ${{ inputs.expected_version }} diff --git a/.gitignore b/.gitignore index 6f067d2f..6a32424a 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,7 @@ pmm-tests/vendor/* ps_socket* .DS_Store + +pmm-ui-tests/* +qa-integration/* +package-testing/* \ No newline at end of file diff --git a/README.md b/README.md index 35323b63..6943b2d0 100644 --- a/README.md +++ b/README.md @@ -1,45 +1,96 @@ # PMM-QA Automated tests for Percona Monitoring and Management -GUI tests are created for testing frontend of PMM. 
They include tests for Query Analytics and for Grafana dashboards -## Using Selenoid for running tests in Local -1. Install Node.js and atleast npm 8.x on your system -2. Selenoid and Selenoid UI use port 4444 and 8080 respectively, -make sure they are not being used, otherwise update docker-compose.yml file -3. run npm install in project root. -4. run prepare_ui_test.sh script in the root directory. -`bash -x ./prepare_ui_test.sh` -5. This should start running UI tests in 4 parallel browser sessions inside chrome containers with help of selenoid -6. Check live execution by launching http://localhost:8080 in your browser. +> **⚠️ IMPORTANT**: The `pmm-tests/` directory containing BATS tests is **deprecated**. See [documentation](docs/README.md#important-notice-legacy-tests-deprecation) and [pmm-tests/DEPRECATED.md](pmm-tests/DEPRECATED.md) for migration guidance. -## If you'd like to have more control over the UI test framework parameters, please check out next sections +## Test Architecture Overview -### Installation (UI tests version 2.0) -1. Install Node.js and atleast npm 8.x on your system -2. Checkout `main` branch for pmm-qa Repo -3. To run tests on your local systems, delete `codecept.json` and rename `local.codecept.json` to `codecept.json` -4. Make sure to update URL of the application in the `webdriver` helper in the configuration file (codecept.json) -5. Install latest version of JDK on your system +This project employs a comprehensive testing strategy, utilizing various frameworks and methodologies to ensure the quality and stability of Percona Monitoring and Management (PMM). The tests are broadly categorized by their focus and the tools they use: -> Follow any one of these: +- **End-to-End (E2E) UI Tests**: These tests validate the PMM user interface and user workflows. They are primarily written using Playwright and CodeceptJS. 
+- **CLI/Integration Tests**: These tests focus on the functionality of the `pmm-admin` command-line interface and the integration between PMM components and monitored services. They are typically written using Playwright for CLI interactions and Python for service setup. +- **Package Tests**: These tests verify the installation and functionality of PMM client packages across various operating systems. They leverage Vagrant for virtualized environments and Ansible for automation. +- **Infrastructure Tests**: These tests validate PMM deployments in different environments, including Kubernetes/Helm and using the Easy Install script. They utilize Bats for testing Helm deployments. -6. Install Selenium Standalone server via npm globally using `npm install selenium-standalone -g` -7. Run the following `selenium-standalone start` -> OR -6. Install Selenium Standalone server locally via npm `npm install selenium-standalone --save-dev` -7. Run the following `./node_modules/.bin/selenium-standalone install && ./node_modules/.bin/selenium-standalone start` +**Note**: The legacy BATS tests in `pmm-tests/` are deprecated. Current testing uses TypeScript/Playwright frameworks described in the [documentation](docs/). -8. Inside the root folder for `pmm-qa` run `npm install` this will install all required packages +Each test type has its own dedicated documentation, detailing how to run and write tests, along with their specific directory structures and conventions. + + + +### Repository Directory Structures + +Understanding the layout of the key repositories involved in PMM QA is essential for navigating the codebase and contributing to tests. + +#### `pmm-qa` (This Repository) -### How to use -Run all Tests: ``` -./node_modules/.bin/codeceptjs run --steps +. 
+├── .github/ # GitHub Actions workflows +├── docs/ # Project documentation +├── k8s/ # Kubernetes/Helm test scripts (Bats) +├── pmm-integration/ # PMM integration setup scripts (TypeScript) +├── pmm-tests/ # ⚠️ DEPRECATED PMM test scripts (BATS/Bash) +├── tests/ # General test utilities +├── .gitignore +├── docker-compose.yml +├── LICENSE +├── package-lock.json +├── README.md # This file +└── TEST_EXECUTION_GUIDE.md ``` -Run individual Tests: + +#### `pmm-ui-tests` + +This repository contains the UI End-to-End tests for PMM. + ``` -./node_modules/.bin/codeceptjs run --steps tests/verifyMysqlDashboards_test.js +pmm-ui-tests/ +├── playwright-tests/ # ⚠️ DEPRECATED +├── cli/ # Playwright tests for CLI interactions +│ ├── tests/ # CLI test files (.spec.ts) +│ └── ... +├── tests/ # CodeceptJS tests and related code +├── helpers/ # CodeceptJS custom helpers +├── config/ # CodeceptJS configuration files +├── pr.codecept.js # Main CodeceptJS configuration +├── docker-compose.yml # Docker Compose for PMM server setup +└── ... ``` -We have implemented the tests to run in parallel chunks of 3, which will basically launch 3 browsers and execute different tests, -to make any change to that, modify the configuration file `codecept.json` +#### `qa-integration` + +This repository provides Python-based scripts for setting up and managing PMM test environments and services. + +``` +qa-integration/ +├── pmm_psmdb-pbm_setup/ # PSMDB replica setup from PSMDB QA team +├── pmm_psmdb_diffauth_setup/ # PSMDB replica setup from PSMDB QA team +├── pmm_qa/ # Core Python setup scripts +│ ├── pmm-framework.py # Main script for setting up services +│ ├── helpers/ # Helper modules for pmm-framework.py +│ ├── mysql/ +│ ├── mongoDb/ +│ ├── postgres/ +│ └── ... +├── requirements.txt # Python dependencies +└── ... +``` + +#### `package-testing` + +This repository contains Ansible playbooks for testing PMM client package installations across various operating systems. 
+ +``` +package-testing/ +├── playbooks/ # Ansible playbooks for different test scenarios +│ ├── pmm3-client_integration.yml +│ └── ... +├── tasks/ # Reusable Ansible tasks (e.g., verify_pmm3_metric.yml) +├── scripts/ # Reusable scripts (e.g., pmm3_client_install_tarball.sh) +├── inventory.ini # Ansible inventory file +├── Vagrantfile # Vagrant configuration for test VMs +└── ... +``` + + diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 00000000..1de7be6c --- /dev/null +++ b/docs/README.md @@ -0,0 +1,237 @@ +# PMM-QA Testing Documentation + +Welcome to the PMM-QA comprehensive testing documentation. This directory contains detailed guides for running various types of tests in the PMM (Percona Monitoring and Management) QA repository. + +## ⚠️ **Important Notice: Legacy Tests Deprecation** + +> **DEPRECATION NOTICE**: The `pmm-tests/` directory containing BATS (Bash Automated Testing System) tests is **deprecated** and should not be used for new test development. +> +> **Use instead**: +> - **CLI Testing**: TypeScript/Playwright tests in `cli-tests/` (see [Integration & CLI Tests](integration-cli-tests.md)) +> - **UI Testing**: CodeceptJS tests (see [End-to-End Tests](e2e-tests.md)) +> - **Infrastructure Setup**: Python framework in `qa-integration/pmm_qa/` and other Percona QA team setups (see [Adding New Environments](adding-new-environments.md)) +> +> **Status**: +> - ✅ **pmm-ui-tests/cli-tests** - Current CLI testing framework +> - ✅ **pmm-ui-tests/** - Current UI testing framework (CodeceptJS) +> - ✅ **qa-integration/pmm_qa/** - Current infrastructure framework +> - ✅ **qa-integration/pmm_psmdb_diffauth_setup and etc..** - Other Percona QA team setups + +## 📚 **Documentation Overview** + +This documentation is organized by test type to provide focused guidance for different testing scenarios: + +### **Core Testing Guides** + +| Document | Description | Use Case | +|----------|-------------|----------| +| [Integration & CLI 
Tests](integration-cli-tests.md) | PMM CLI functionality testing | Daily development validation | +| [End-to-End Tests](e2e-tests.md) | UI testing with CodeceptJS | Feature validation | +| [Upgrade Tests](upgrade-tests.md) | PMM upgrade scenarios | Upgrade validation | +| [Package Tests](package-tests.md) | Package installation testing | PMM Client Distribution validation | +| [Infrastructure Tests](infrastructure-tests.md) | Kubernetes and platform testing | Infrastructure validation | +| [Feature Build Tests](feature-build-tests.md) | Docker images with new features testing | Feature validation | + +### **Reference Guides** + +| Document | Description | +|----------|-------------| +| [Adding New Environments](adding-new-environments.md) | Guide for extending the PMM framework | +| [Test Parameters Reference](test-parameters.md) | Complete parameter documentation | +| [Troubleshooting Guide](troubleshooting.md) | Common issues and solutions | + +--- + +## 🚀 **Quick Start Guide** + +### Prerequisites +- Access to the `percona/pmm-qa`, `percona/pmm-ui-tests`, `PerconaLab/qa-integration` and `Percona-QA/package-testing` repositories +- Permissions to trigger GitHub Actions workflows +- Understanding of PMM architecture and components + +### Most Common Testing Scenarios + +#### 🔄 **Daily Development Testing** +```yaml +Workflow: PMM Integration Tests +Purpose: Validate CLI functionality +Duration: ~10 minutes (all jobs) +Frequency: Daily/Per FB creation/On demand +``` +**[→ Go to Integration & CLI Tests Guide](integration-cli-tests.md)** + +#### 🎭 **Feature Validation** +```yaml +Workflow: E2E tests Matrix (CodeceptJS) +Purpose: Validate UI E2E functionality +Duration: ~40 minutes +Frequency: Daily/On demand +``` +**[→ Go to End-to-End Tests Guide](e2e-tests.md)** + +```yaml +Workflow: _FB e2e tests +Purpose: Validate core E2E functionality +Duration: ~40 minutes +Frequency: Per FB creation/On demand +``` +**[→ Go to End-to-End Tests Guide](e2e-tests.md)** + +#### ⬆️ 
**Release Validation** +```yaml +Workflow: PMM Upgrade Tests +Purpose: Validate upgrade scenarios +Duration: ~1 hour +Frequency: Daily/Pre-release +``` +**[→ Go to Upgrade Tests Guide](upgrade-tests.md)** + +#### 📦 **Distribution Validation** +```yaml +Workflow: Package Test Matrix +Purpose: Validate package installation on different OS +Duration: ~50 minutes +Frequency: Daily/Pre-release +``` +**[→ Go to Package Tests Guide](package-tests.md)** + +--- + +## 🏗️ **Test Infrastructure Overview** + +### **Supported Platforms** +- **Operating Systems**: Ubuntu (Noble, Jammy), Oracle Linux (8, 9), Oracle Linux 9 +- **Container Runtimes**: Docker, Podman +- **Orchestration**: Kubernetes (via Helm), Docker Compose +- **Cloud**: GitHub Actions runners + +### **Database Coverage** +- **MySQL Family**: Percona Server (5.7, 8.0, 8.4), MySQL (8.0) +- **PostgreSQL Family**: Percona Distribution for PostgreSQL ( 15-17) +- **MongoDB Family**: Percona Server for MongoDB (6.0, 7.0, 8.0) +- **Proxy/Load Balancers**: ProxySQL, HAProxy + +### **Testing Frameworks** +- **CLI Testing**: Playwright (TypeScript) - Current framework +- **UI Testing**: CodeceptJS - Current framework +- **Infrastructure Setup**: BATS (Bash) - Current framework +- **Package Testing**: Ansible playbooks - Current framework + +--- + +## 📊 **Workflow Architecture** + +### **Workflow Categories** + +```mermaid +graph TB + A[PMM-QA Workflows] --> B[Integration Tests] + A --> C[E2E Tests] + A --> D[Jenkins Upgrade Tests] + A --> E[Package Tests] + A --> F[Infrastructure Tests] + A --> G[Feature Build Tests] +``` + +### **Reusable Workflow Pattern** + +Most workflows follow a reusable pattern: +1. **Main Workflow** - Defines parameters and orchestrates jobs +2. **Runner Workflow** - Reusable component that executes tests +3. 
**Matrix Strategy** - Tests across multiple versions/platforms + +--- + +## ⚡ **Emergency Testing** + +### **Critical Path Testing** +```yaml +# Core functionality +Workflows: PMM Integration Tests +Duration: ~10 minutes + +# UI critical path +Workflows: _FB e2e tests +Duration: ~40 minutes +``` + +--- + +## 🛠️ **Development Workflow Integration** + +### **Pre-Commit Testing** +1. Run local CLI tests for changed components +2. Validate specific database integration if DB-related changes +3. Test UI components if frontend changes + +### **Pull Request Testing** +1. Full integration test suite +2. Relevant E2E test categories +3. Package tests if packaging changes + +### **Release Testing** + +Refer to Release Sign Off document in Notion + +--- + +## 📋 **Test Execution Checklist** + +### **Before Running Tests** +- [ ] Verify repository access and permissions +- [ ] Check if required versions/images are available +- [ ] Review resource availability (avoid concurrent large tests) +- [ ] Confirm external service availability (if applicable) + +### **During Test Execution** +- [ ] Monitor test progress for early failure detection +- [ ] Check logs for setup issues +- [ ] Track test duration vs. 
expectations + +### **After Test Completion** +- [ ] Review all test results and reports +- [ ] Download and analyze failure artifacts +- [ ] Document any new issues discovered +- [ ] Update test configurations if needed +- [ ] Share results/findings with QA team members + +--- + +## 🔗 **Additional Resources** + +### **Related Repositories** +- [pmm-ui-tests](https://github.com/percona/pmm-ui-tests) - UI test suite +- [qa-integration](https://github.com/Percona-Lab/qa-integration/tree/v3) - PMM Framework +- [package-testing](https://github.com/Percona-QA/package-testing/tree/v3) - PMM Framework +- [qa-integration](https://github.com/Percona-Lab/qa-integration) - Integration setup +- [pmm-server](https://github.com/percona/pmm) - PMM Server and PMM Clientcodebase + +### **External Documentation** +- [PMM Documentation](https://docs.percona.com/percona-monitoring-and-management/) +- [BATS Documentation](https://bats-core.readthedocs.io/en/stable/) +- [CodeceptJS Documentation](https://codecept.io/helpers/Playwright/) +- [Playwright Documentation](https://playwright.dev/) +- [GitHub Actions Documentation](https://docs.github.com/en/actions) + +### **Support Channels** +- **Issues**: [PMM-QA GitHub Issues](https://github.com/percona/pmm-qa/issues) +- **Discussions**: PMM team internal channels +- **Documentation**: This documentation set + +--- + +## 🏷️ **Version Information** + +| Component | Version | Notes | +|-----------|---------|-------| +| PMM Server | 3-dev-latest | Default development version | +| PMM Client | 3-dev-latest | Default development version | +| Testing Framework | v3 | Current major version | +| Documentation | v1.0 | This documentation version | + +--- + +**Last Updated**: July 2025 +**Maintained By**: PMM QA Team +**Repository**: [percona/pmm-qa](https://github.com/percona/pmm-qa) + \ No newline at end of file diff --git a/docs/adding-new-environments.md b/docs/adding-new-environments.md new file mode 100644 index 00000000..52a57019 --- 
/dev/null +++ b/docs/adding-new-environments.md @@ -0,0 +1,350 @@ +# Adding New Environments to PMM Framework + +This guide explains how to add new database types and environments to the PMM qa-integration framework. + +## Overview + +The PMM framework uses a Python-based system (`pmm-framework.py`) with Ansible playbooks and Docker to set up various database and service environments for testing. Adding a new environment involves several coordinated changes. + +## Architecture + +The framework consists of: + +- **`pmm-framework.py`** - Main Python script that orchestrates setup +- **`database_options.py`** - Configuration definitions for all database types +- **Ansible playbooks** (`.yml` files) - Infrastructure automation scripts +- **Helper scripts** - Supporting bash/shell scripts + +### Leveraging Other Percona QA Team Setups + +In addition to the core PMM framework, we also leverage setups from other Percona QA teams that are available in the `qa-integration` repository. These setups provide specialized configurations and testing environments: + +- **`qa-integration/pmm_psmdb_diffauth_setup/`** - MongoDB authentication setups from the PSMDB QA team + +These external setups can be integrated into the PMM framework by referencing their setups and configurations in your custom setup functions. + +## Step-by-Step Guide + +### 1. 
Define Database Configuration + +Edit `qa-integration/pmm_qa/scripts/database_options.py`: + +```python +# Add your new database type to the database_options dictionary +"YOUR_DB_TYPE": { + "versions": ["1.0", "2.0", "latest"], # Available versions + "configurations": { + "CLIENT_VERSION": "3-dev-latest", # Default PMM client version + "CUSTOM_OPTION": "default_value", # Your custom configuration options + # Add more configuration options as needed + } +}, +``` + +**Example from our external TLS implementation:** +```python +"EXTERNAL_TLS": { + "versions": ["0.15.1", "0.16.0"], + "configurations": { + "CLIENT_VERSION": "3-dev-latest", + "SKIP_TLS_VERIFY": "true" + } +}, +``` + +### 2. Create Setup Function + +Add a new setup function in `qa-integration/pmm_qa/pmm-framework.py`: + +```python +def setup_your_db_type(db_type, db_version=None, db_config=None, args=None): + # Check if PMM server is running + container_name = get_running_container_name() + if container_name is None and args.pmm_server_ip is None: + print(f"Check if PMM Server is Up and Running..Exiting") + exit() + + # Gather version details + your_version = os.getenv('YOUR_VERSION') or db_version or database_configs[db_type]["versions"][-1] + + # Define environment variables for playbook + env_vars = { + 'PMM_SERVER_IP': args.pmm_server_ip or container_name or '127.0.0.1', + 'YOUR_VERSION': your_version, + 'YOUR_CONTAINER': 'your_container_name', + 'CLIENT_VERSION': get_value('CLIENT_VERSION', db_type, args, db_config), + 'ADMIN_PASSWORD': os.getenv('ADMIN_PASSWORD') or args.pmm_server_password or 'admin', + 'PMM_QA_GIT_BRANCH': os.getenv('PMM_QA_GIT_BRANCH') or 'v3', + # Add your custom environment variables + 'CUSTOM_OPTION': get_value('CUSTOM_OPTION', db_type, args, db_config), + } + + # Ansible playbook filename + playbook_filename = 'your_setup.yml' + + # Call the function to run the Ansible playbook + run_ansible_playbook(playbook_filename, env_vars, args) +``` + +### 3. 
Register in Setup Database Function + +Add your database type to the routing logic in the `setup_database()` function: + +```python +def setup_database(db_type, db_version=None, db_config=None, args=None): + # ... existing code ... + + elif db_type == 'YOUR_DB_TYPE': + setup_your_db_type(db_type, db_version, db_config, args) + + # ... rest of existing code ... +``` + +### 4. Create Ansible Playbook + +Create `qa-integration/pmm_qa/your_setup.yml`: + +```yaml +--- +# Description of what this playbook does + +- hosts: all + vars: + # Define variables using the lookup pattern + your_version: "{{ lookup('vars', 'extra_your_version', default=lookup('env','YOUR_VERSION') | default('1.0', true) ) }}" + your_container: "{{ lookup('vars', 'extra_your_container', default=lookup('env','YOUR_CONTAINER') | default('your_default_container', true) ) }}" + pmm_server_ip: "{{ lookup('vars', 'extra_pmm_server_ip', default=lookup('env','PMM_SERVER_IP') | default('127.0.0.1', true) ) }}" + client_version: "{{ lookup('vars', 'extra_client_version', default=lookup('env','CLIENT_VERSION') | default('3-dev-latest', true) ) }}" + admin_password: "{{ lookup('vars', 'extra_admin_password', default=lookup('env','ADMIN_PASSWORD') | default('admin', true) ) }}" + pmm_qa_branch: "{{ lookup('vars', 'extra_pmm_qa_branch', default=lookup('env','PMM_QA_GIT_BRANCH') | default('v3', true) ) }}" + + tasks: + - name: Create pmm-qa network if not exist + shell: docker network create pmm-qa + ignore_errors: true + + - name: Cleanup existing containers + shell: > + docker ps -a --filter "name={{ your_container }}" | grep -q . 
&& docker stop {{ your_container }} && docker rm -fv {{ your_container }} + ignore_errors: true + tags: + - cleanup + + # Add your setup tasks here + - name: Setup your environment + shell: echo "Setting up your environment" + + # Standard PMM client setup pattern + - name: Prepare Container + shell: > + docker run -d --name={{ your_container }} + --network pmm-qa + phusion/baseimage:jammy-1.0.1 + + - name: Install basic packages + shell: "{{ item }}" + with_items: + - docker exec {{ your_container }} apt-get update + - docker exec {{ your_container }} apt-get -y install wget curl git gnupg2 lsb-release + + - name: Setup PMM client + shell: "{{ item }}" + with_items: + - docker cp ./pmm3-client-setup.sh {{ your_container }}:/ + - docker exec {{ your_container }} bash -x ./pmm3-client-setup.sh --pmm_server_ip {{ pmm_server_ip }} --client_version {{ client_version }} --admin_password {{ admin_password }} --use_metrics_mode no + + # Add your services to PMM monitoring + - name: Set Random Number Fact + set_fact: + random_number: "{{ (10000 | random) | int }}" + + - name: Add service to PMM monitoring + shell: > + docker exec {{ your_container }} bash -c 'source ~/.bash_profile || true; + pmm-admin add external --listen-port=YOUR_PORT --group="your_group" + --service-name=your_service_{{ random_number }}' + + - name: Display service information + shell: > + docker exec {{ your_container }} bash -c 'source ~/.bash_profile || true; + pmm-admin list' + register: pmm_services + + - name: Show PMM services + debug: + msg: "{{ pmm_services.stdout }}" +``` + +### 5. Test Your Implementation + +```bash +# Test syntax +python3 -m py_compile qa-integration/pmm_qa/pmm-framework.py + +# Test Ansible syntax +ansible-playbook --syntax-check qa-integration/pmm_qa/your_setup.yml + +# Test the functionality +python3 pmm-framework.py --database your_db_type +``` + +## Best Practices + +### 1. 
Naming Conventions + +- **Database types**: Use UPPERCASE with underscores (e.g., `EXTERNAL_TLS`, `MY_DATABASE`) +- **Container names**: Use lowercase with underscores (e.g., `my_database_container`) +- **Playbook files**: Use lowercase with underscores (e.g., `my_database_setup.yml`) + +### 2. Environment Variables + +- Use consistent naming patterns +- Provide sensible defaults +- Support both environment variables and command-line arguments + +### 3. Error Handling + +```python +# Always check if PMM server is running +container_name = get_running_container_name() +if container_name is None and args.pmm_server_ip is None: + print(f"Check if PMM Server is Up and Running..Exiting") + exit() +``` + +### 4. Cleanup Support + +Always include cleanup tasks in your Ansible playbook: + +```yaml +- name: Cleanup existing containers + shell: > + docker ps -a --filter "name={{ your_container }}" | grep -q . && docker stop {{ your_container }} && docker rm -fv {{ your_container }} + ignore_errors: true + tags: + - cleanup +``` + +### 5. Documentation + +Create a README file for your new environment: + +```markdown +# Your Database Type Setup + +## Usage +python3 pmm-framework.py --database your_db_type + +## What it Creates +- Description of containers and services + +## Configuration Options +- List of available options + +## Testing +- How to verify the setup works +``` + +## Example: External Setup with TLS Enhancement + +Here's how we enhanced the existing External setup to support TLS testing: + +1. **Database Configuration Enhancement** (`database_options.py`): +```python +"EXTERNAL": { + "REDIS": { + "versions": ["1.14.0", "1.58.0"], + }, + "NODEPROCESS": { + "versions": ["0.7.5", "0.7.10"], + }, + "configurations": {"CLIENT_VERSION": "3-dev-latest", "USE_TLS": "false"} +}, +``` + +2. 
**Setup Function Enhancement** (`pmm-framework.py`): +```python +def setup_external(db_type, db_version=None, db_config=None, args=None): + # Added TLS support to existing function + env_vars = { + # ... existing vars ... + 'USE_TLS': get_value('USE_TLS', db_type, args, db_config), + } +``` + +3. **Ansible Playbook Enhancement** (`external_setup.yml`): + - Keeps existing Redis and Node Process functionality + - Conditionally creates TLS test server when USE_TLS=true + - Uses `--tls-skip-verify` flag when TLS mode is enabled + - Maintains backward compatibility + +4. **Usage**: +```bash +# Default behavior (unchanged) +python3 pmm-framework.py --database external + +# Enhanced with TLS testing +python3 pmm-framework.py --database external,USE_TLS=true +``` + +This approach demonstrates how to enhance existing environments rather than creating entirely new ones. + +## Common Patterns + +### Database with Version Support + +```python +# In setup function +db_version = os.getenv('DB_VERSION') or db_version or database_configs[db_type]["versions"][-1] +``` + +### Multiple Container Setup + +```yaml +# In Ansible playbook +- name: Start database container + shell: docker run -d --name database_container ... + +- name: Start exporter container + shell: docker run -d --name exporter_container ... +``` + +### Custom Configuration Options + +```python +# In setup function +custom_option = get_value('CUSTOM_OPTION', db_type, args, db_config) + +# In environment variables +'CUSTOM_OPTION': custom_option, +``` + +## Troubleshooting + +### Common Issues + +1. **Python Syntax Errors**: Use `python3 -m py_compile` to check +2. **Ansible Syntax Errors**: Use `ansible-playbook --syntax-check` +3. **Missing Dependencies**: Ensure all required packages are installed in containers +4. **Network Issues**: Always use the `pmm-qa` Docker network +5. **PMM Client Issues**: Verify PMM server is running and accessible + +### Testing Steps + +1. Syntax validation +2. 
Framework recognition (`python3 pmm-framework.py --help`) +3. Dry run with verbose output +4. Full integration test +5. Cleanup verification + +## Contributing + +When contributing new environments: + +1. Follow the established patterns +2. Add documentation +3. Ensure cleanup works properly +4. Test with different PMM server configurations +5. Execute e2e-codeceptjs-matrix workflow with contributor branch to ensure all setups are working correctly and nothing is broken/impacted + +This approach ensures consistency and maintainability across all PMM framework environments. \ No newline at end of file diff --git a/docs/e2e-codeceptjs-tests.md b/docs/e2e-codeceptjs-tests.md new file mode 100644 index 00000000..e69de29b diff --git a/docs/e2e-tests.md b/docs/e2e-tests.md new file mode 100644 index 00000000..f392798f --- /dev/null +++ b/docs/e2e-tests.md @@ -0,0 +1,230 @@ +# E2E CodeceptJS Tests + +This guide provides instructions for running the PMM E2E tests that use the CodeceptJS framework. These tests cover a wide range of scenarios, including SSL, experimental features, and more. + +## 💡 **What are E2E CodeceptJS Tests?** + +These tests are designed to validate specific and advanced PMM functionalities. They ensure that: + +- **SSL connections are secure**: Verifying that PMM can connect to databases over SSL. +- **Experimental features are stable**: Testing features that are not yet released to the general public. +- **Core functionality is robust**: Covering scenarios like disconnecting and reconnecting services. +- **Etc..** + +## 🤖 **How to Run E2E CodeceptJS Tests Locally** + +The following steps will guide you through setting up the environment and running the CodeceptJS tests locally, based on the `e2e-codeceptjs-matrix.yml` CI workflow. + +### **Prerequisites** + +- **Git**: To clone the required repositories. +- **Docker** and **Docker Compose**: To run the PMM server and other services. +- **Node.js (v20+)** and **npm**: For running the test frameworks. 
+- **Python 3** and **pip**: For running setup scripts. +- **System Dependencies**: `ansible`, `clickhouse-client`, `dbdeployer`, and others. + +### **Step 1: Clone Repositories** + +First, clone the `pmm-ui-tests` and `qa-integration` repositories. These contain the test code and setup scripts. + +```bash +git clone --branch v3 https://github.com/percona/pmm-ui-tests.git +git clone --branch v3 https://github.com/Percona-Lab/qa-integration.git +``` + +### **Step 2: Set Up PMM Server** + +Next, set up and start the PMM server using Docker Compose. + +```bash +cd pmm-ui-tests + +# Create a docker network for PMM +docker network create pmm-qa || true + +# Start PMM Server +PMM_SERVER_IMAGE=perconalab/pmm-server:3-dev-latest docker compose -f docker-compose.yml up -d + +# Wait for the server to be ready and change the admin password +sleep 60 +docker exec pmm-server change-admin-password admin-password +docker network connect pmm-qa pmm-server || true + +cd .. +``` + +### **Step 3: Set Up Required Services** + +Now, set up the PMM client and the database services you want to monitor. + +```bash +cd qa-integration/pmm_qa + +# Set up the test environment and services (e.g., a single Percona Server instance) +python3 -m venv virtenv +source virtenv/bin/activate +pip install --upgrade pip +pip install setuptools +pip install -r requirements.txt +python3 pmm-framework.py --pmm-server-password=admin-password --database ps + +cd ../.. +``` +**Note:** You can customize the services by changing the arguments passed to `pmm-framework.py`. For example, to set up multiple databases for inventory tests, use `--database ps --database psmdb --database pdpgsql`. + +### **Step 4: Install Test Dependencies** + +Install the Node.js dependencies required for the UI tests. + +```bash +cd pmm-ui-tests +npm ci +npx playwright install --with-deps +``` + +### **Step 5: Run the Tests** + +Run the CodeceptJS tests using the appropriate tags. 
The setup for the services will vary depending on the test. + +#### **SSL Tests** + +```bash +# Set up the environment for MySQL SSL tests +python qa-integration/pmm_qa/pmm-framework.py --pmm-server-password=admin-password --database ssl_mysql + +# Run the MySQL SSL tests +./node_modules/.bin/codeceptjs run -c pmm-ui-tests/pr.codecept.js --grep "@ssl-mysql" +``` + +#### **Experimental Tests** + +```bash +# Set up the environment for experimental tests +python qa-integration/pmm_qa/pmm-framework.py --pmm-server-password=admin-password --database pdpgsql + +# Run the experimental tests +./node_modules/.bin/codeceptjs run -c pmm-ui-tests/pr.codecept.js --grep "@experimental" +``` + +## 📋 **Available Test Suites** + +### **Core E2E CodeceptJS Matrix Test Suites** + +| Test Suite | Test Tag(s) | Description | +|---|---|---| +| Settings and CLI | `@settings\|@cli` | General settings and CLI tests. | +| SSL Tests | `@ssl-mysql`, `@ssl-mongo`, `@ssl-postgres` | Tests for SSL connections to different databases. | +| Experimental | `@experimental` | Tests for experimental features. | +| Disconnect | `@disconnect` | Tests for disconnecting and reconnecting services. | +| Backup Management MongoDB | `@bm-mongo` | MongoDB backup and restore functionality. | +| Backup Management Common | `@bm-locations` | Backup location management and common features. | +| Exporters | `@exporters` | Various exporter functionality tests. | +| MongoDB Exporter | `@mongodb-exporter` | MongoDB-specific exporter tests. | +| Instances | `@fb-instances` | Instance management UI tests. | +| Alerting and Settings | `@fb-alerting\|@fb-settings` | Alerting and settings UI components. | +| User and Password | `@user-password` | User authentication with changed password. | +| PGSM Integration | `@pgsm-pmm-integration` | PostgreSQL pg_stat_monitor integration. | +| PGSS Integration | `@pgss-pmm-integration` | PostgreSQL pg_stat_statements integration. 
| +| PSMDB Replica | `@pmm-psmdb-replica-integration` | MongoDB replica set integration. | +| PSMDB Arbiter | `@pmm-psmdb-arbiter-integration` | MongoDB arbiter replica integration. | +| Dump Tool | `@dump` | Database dump tool functionality. | +| Service Account | `@service-account` | Service account management tests. | +| PS Integration | `@fb-pmm-ps-integration` | Percona Server integration tests. | +| RBAC | `@rbac` | Role-based access control tests. | +| Encryption | `@fb-encryption` | Encryption functionality tests. | +| Docker Configuration | `@docker-configuration` | Docker configuration tests. | +| Nomad | `@nomad` | Nomad orchestration tests. | + +### **Jenkins E2E CodeceptJS Test Suites** +| Test Suite | Test Tag(s) | Description | +|---|---|---| +| Query Analytics | `@qan` | Tests for QAN features. | +| Dashboards | `@nightly`, `@dashboards` | Tests that make sure Dashboards have data. | +| Alerting | `@ia` | Alerting tests. | +| Remote instances | `@instances` | Tests for AWS and Azure integration. | +| GCP Remote instances | `@gcp` | Tests for GCP integration. | + +## 📝 **How to Write CodeceptJS Tests** + +All paths mentioned in this section are relative to the root of the `pmm-ui-tests` repository, which can be found [here](https://github.com/percona/pmm-ui-tests/tree/v3). + +CodeceptJS tests are written in JavaScript and provide a high-level, readable syntax for UI interactions. They are built on top of WebDriver or Playwright and use a BDD-style syntax. + +### **Test Structure and Directory Layout** + +CodeceptJS tests for PMM UI are primarily located in the `pmm-ui-tests/tests` directory. Tests are organized by feature or functional area. 
+ +``` +pmm-ui-tests/ +├── tests/ # Actual test files +│ ├── pages/ # Page Object Model definitions +│ │ ├── LoginPage.js +│ │ └── DashboardPage.js +│ ├── login_test.js +│ ├── inventory_test.js +├── helpers/ # Custom helpers for common actions +├── config/ # Configuration files +└── pr.codecept.js # Main CodeceptJS configuration +``` + +- **`tests/`**: This directory contains the main test files (`_test.js`). Each file typically covers a specific feature or a logical group of functionalities. +- **`pages/`**: Similar to Playwright, CodeceptJS also supports the Page Object Model. This directory holds page object definitions, abstracting UI interactions. +- **`helpers/`**: Custom helpers can be created to encapsulate common actions or assertions, promoting reusability. +- **`pr.codecept.js`**: This is the primary configuration file for CodeceptJS, defining helpers, plugins, and test paths. + +### **Writing Conventions** + +- **BDD Style**: Tests are written using `Scenario` and `I` (the actor) to describe user interactions in a readable way. +- **Page Objects**: Utilize Page Objects for interacting with UI elements to improve maintainability. +- **Tags**: Use `@` tags in `Scenario` or `Feature` blocks to categorize tests (e.g., `@bm-mongo`, `@exporters`). These tags are used for selective test execution. +- **Comments**: Add comments for complex logic or to explain the *why* behind certain steps. 
+ +### **Basic Test Example** + +A typical CodeceptJS test file (`_test.js`) will look like this: + +```javascript +Feature('Login'); + +Scenario('should display login form', ({ I }) => { + I.amOnPage('http://localhost/'); + I.seeElement('input[name="username"]'); + I.seeElement('input[name="password"]'); + I.seeElement('button[type="submit"]'); +}); + +Scenario('should allow user to login', ({ I }) => { + I.amOnPage('http://localhost/'); + I.fillField('input[name="username"]', 'admin'); + I.fillField('input[name="password"]', 'admin'); + I.click('button[type="submit"]'); + I.see('Dashboard'); +}); +``` + +### **Key Concepts** + +- **`Feature`**: Defines a test suite. +- **`Scenario`**: Represents an individual test case. +- **`I` (the actor)**: The global object for performing UI actions (e.g., `I.amOnPage()`, `I.click()`). +- **Helpers**: Provide methods for `I` to interact with the browser. +- **Tags**: Used for categorizing and selectively running tests. + +### **Running New Tests** + +After creating a new test file, you can run it using the `codeceptjs run` command, specifying the path to your test file or using a `grep` pattern for its title or tags. + +```bash +cd pmm-ui-tests +./node_modules/.bin/codeceptjs run -c pr.codecept.js tests/my_new_feature_test.js +# Or with a grep pattern +./node_modules/.bin/codeceptjs run -c pr.codecept.js --grep="@my-new-feature" +``` + +--- + +**Related Documentation**: +- [Feature Build Tests](feature-build-tests.md) +- [Integration & CLI Tests](integration-cli-tests.md) +- [Test Parameters Reference](test-parameters.md) +- [Troubleshooting Guide](troubleshooting.md) \ No newline at end of file diff --git a/docs/feature-build-tests.md b/docs/feature-build-tests.md new file mode 100644 index 00000000..bcb4bc65 --- /dev/null +++ b/docs/feature-build-tests.md @@ -0,0 +1,115 @@ +# Feature Build Tests + +This guide provides instructions for running the PMM Feature Build (FB) tests locally. 
These tests are designed to validate Docker images built with new features before they are merged into the main codebase. + +## 💡 **What are Feature Build Tests?** + +Feature Build tests are comprehensive UI testing suites that validate new features in PMM. They ensure that: + +- **New features work correctly**: Verifying that the new functionality behaves as expected. +- **There are no regressions**: Ensuring that existing functionality is not broken by the new feature. +- **The UI remains consistent**: Checking that the new feature integrates well with the existing UI. + +## 🤖 **How to Run Feature Build E2E Tests Locally** + +You can reproduce the CI runner workflow for Feature Build E2E tests on your local machine. This is useful for debugging, development, or validating changes before pushing to CI. The steps below mirror what happens in the CI runner, with local commands and explanations. + +### **Prerequisites** +- **Docker** and **Docker Compose** installed +- **Node.js** (v18+) and **npm** +- **Python 3** and **pip** +- **Ansible**, **Clickhouse client**, and other system dependencies (see below) +- Sufficient disk space and permissions to run containers + +#### **Step-by-Step Local Execution (CI Runner Steps)** + +1. **Clone the Required Repositories** + + Clone both the UI tests and QA integration repositories at the correct branch: + ```bash + git clone --branch v3 https://github.com/percona/pmm-ui-tests.git + git clone --branch v3 https://github.com/Percona-Lab/qa-integration.git + ``` + +2. **Install System Dependencies** + + Install all required system packages and tools (Ansible, Clickhouse client, dbdeployer, etc): + ```bash + sudo apt-get update + sudo apt-get install -y apt-transport-https ca-certificates dirmngr ansible libaio1 libaio-dev libnuma-dev libncurses5 socat sysbench clickhouse-client + curl -s https://raw.githubusercontent.com/datacharmer/dbdeployer/master/scripts/dbdeployer-install.sh | sudo bash -s -- -b /usr/local/bin + ``` + +3. 
**Start PMM Server with Docker Compose** + + This step sets up the PMM Server container, changes the admin password, and runs initial DB setup scripts: + ```bash + cd pmm-ui-tests + docker network create pmm-qa || true + PMM_SERVER_IMAGE=perconalab/pmm-server-fb:feature-xyz docker compose -f docker-compose.yml up -d + sleep 60 + docker exec pmm-server change-admin-password admin-password + bash -x testdata/db_setup.sh + docker network connect pmm-qa pmm-server || true + cd .. + ``` + +4. **Prepare Python Environment and Run Setup** + + This step prepares the test environment and configures databases/services as needed for the test suite. Replace `[SETUP_ARGS]` with the appropriate setup string, e.g. `--database psmdb,SETUP_TYPE=pss`: + ```bash + cd qa-integration/pmm_qa + mkdir -m 777 -p /tmp/backup_data + python3 -m venv virtenv + source virtenv/bin/activate + pip install --upgrade pip + pip install -r requirements.txt + pip install setuptools + python3 pmm-framework.py --pmm-server-password=admin-password --verbose [SETUP_ARGS] + cd ../.. + ``` + +5. **Install Node.js Dependencies for UI Tests** + + Installs all required Node.js modules and Playwright browser dependencies for UI testing: + ```bash + cd pmm-ui-tests + npm ci + npx playwright install --with-deps + ``` + +6. **Run the Tests** + + Finally, run the E2E tests for the specific feature. Use the appropriate tag for the test suite you want to run. + +```bash +# Example for MongoDB backup management tests: +./node_modules/.bin/codeceptjs run -c pr.codecept.js --grep "@bm-mongo" + +# Example for exporter tests: +./node_modules/.bin/codeceptjs run -c pr.codecept.js --grep "@exporters" +``` + +## 📋 **[Available Test Suites](e2e-tests.md#-available-test-suites)** + +## 📝 **How to Write E2E Tests** + +Feature Build tests are essentially End-to-End (E2E) UI tests that focus on validating new features. Therefore, the principles and practices for writing these tests are the same as for general E2E UI tests. 
+ +- For writing **CodeceptJS** tests, refer to the [How to Write CodeceptJS Tests](e2e-tests.md#how-to-write-codeceptjs-tests) section in the E2E CodeceptJS Tests documentation. + +When writing Feature Build tests, pay special attention to: + +- **Targeting new features**: Ensure your tests specifically cover the new functionality. +- **Regression prevention**: Include checks for existing features that might be affected by the new changes. +- **Using appropriate tags**: Tag your tests with relevant `@fb-` tags (e.g., `@fb-instances`, `@fb-alerting`) to categorize them as feature build tests. + +--- + +**Related Documentation**: +- [E2E Tests](e2e-tests.md) +- [Integration & CLI Tests](integration-cli-tests.md) +- [Package Tests](package-tests.md) +- [Upgrade Tests](upgrade-tests.md) +- [Test Parameters Reference](test-parameters.md) +- [Troubleshooting Guide](troubleshooting.md) diff --git a/docs/infrastructure-tests.md b/docs/infrastructure-tests.md new file mode 100644 index 00000000..d79e158e --- /dev/null +++ b/docs/infrastructure-tests.md @@ -0,0 +1,177 @@ +# Infrastructure Tests + +This guide provides instructions for running the PMM infrastructure tests locally. These tests validate PMM deployments in various environments, including Kubernetes/Helm and simplified installations using the Easy Install script. + +## 💡 **What are Infrastructure Tests?** + +Infrastructure tests are designed to ensure that PMM can be deployed and configured correctly in different environments. They cover: + +- **Kubernetes/Helm**: Validating PMM deployment using Helm charts on a Kubernetes cluster. +- **Easy Install - not automated**: Testing the simplified installation script on various supported operating systems. + +## 🤖 **How to Run Infrastructure Tests Locally** + +### **Helm Tests (Kubernetes)** + +These steps will guide you through setting up a local Kubernetes cluster using Minikube and deploying PMM with Helm. 
+ +#### **Prerequisites** + +- **Minikube**: For running a local Kubernetes cluster. +- **kubectl**: The Kubernetes command-line tool. +- **Helm**: The package manager for Kubernetes. + +#### **Step 1: Start Minikube** + +Start a Minikube cluster. This will create a local single-node Kubernetes cluster. Disable the default storage provisioner and enable the CSI hostpath driver for persistent storage. + +```bash +minikube delete && \ + minikube start && \ + minikube addons disable storage-provisioner && \ + kubectl delete storageclass standard && \ + minikube addons enable csi-hostpath-driver && \ + minikube addons enable volumesnapshots && \ + kubectl patch storageclass csi-hostpath-sc -p '{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}' &&\ + kubectl wait --for=condition=Ready node --timeout=90s minikube +``` + +#### **Step 2: Run Helm Tests** + +Clone the `pmm-qa` repository and run the Helm tests using `bats`. + +```bash +git clone https://github.com/percona/pmm-qa.git +cd pmm-qa/k8s + +# Set up bats (BASH Automated Testing System) +sudo ./setup_bats_libs.sh + +# Run the tests +SERVER_IMAGE=perconalab/pmm-server:3-dev-latest bats --tap helm-test.bats +``` + +### **Easy Install Tests - not automated** + +These steps will show you how to test the Easy Install script on a supported operating system. + +#### **Prerequisites** + +- A clean installation of a supported OS (e.g., Ubuntu 24.04, Oracle Linux 9, Rocky Linux 9). +- `curl` or `wget` to download the script. + +#### **Step 1: Download the Script** + +Download the Easy Install script from the Percona website. + +```bash +curl -fsSL https://www.percona.com/get/pmm > pmm-installer.sh +``` + +#### **Step 2: Run the Script** + +Execute the script with `bash`. The script will automatically detect the OS and install PMM. 
+ +```bash +sudo bash pmm-installer.sh +``` + +#### **Step 3: Validate the Installation** + +After the script finishes, you can check the status of the PMM server and other components. + +```bash +docker ps -a +``` + +You should see the `pmm-server` and `watchtower` containers running. + +## 📝 **How to Write Helm Tests** + +All paths mentioned in this section are relative to the root of the `pmm-qa` repository, which can be found [here](https://github.com/percona/pmm-qa/tree/v3). + +Helm tests in this project are written using Bats (Bash Automated Testing System). **Note**: This is different from the deprecated BATS tests in `pmm-tests/` - Helm-specific BATS tests in `k8s/` directory are still actively maintained for Kubernetes testing. Bats provides a simple way to test shell scripts and command-line tools. Helm tests typically involve deploying a Helm chart and then asserting on the state of the Kubernetes resources or the behavior of the deployed application. + +### **Test Structure and Directory Layout** + +Helm tests are located in the `pmm-qa/k8s` directory. + +``` +pmm-qa/ +├── k8s/ +│ ├── helm-test.bats # Main Bats test file for Helm +│ ├── k8s_helper.sh # Helper functions for Kubernetes interactions +│ ├── pmm_helper.sh # Helper functions for PMM-specific actions +│ └── setup_bats_libs.sh # Script to set up Bats libraries +``` + +- **`helm-test.bats`**: This is the main Bats test file. It contains the test cases for deploying PMM using Helm and verifying its functionality. +- **`k8s_helper.sh`**: This script contains reusable Bash functions for interacting with Kubernetes, such as checking pod status, deploying resources, and running `kubectl` commands. +- **`pmm_helper.sh`**: This script provides helper functions specific to PMM, such as checking PMM server status or client registration. 
+ +### **Writing Conventions** + +- **Bats Syntax**: Tests are written using Bats syntax, which is essentially Bash scripting with special Bats commands for defining tests (`@test`), assertions (`run`, `assert_success`, `assert_output`), and setup/teardown (`setup`, `teardown`). +- **Helper Functions**: Utilize helper functions in `k8s_helper.sh` and `pmm_helper.sh` to abstract complex Kubernetes and PMM interactions. This promotes reusability and readability. +- **Clear Assertions**: Assertions should clearly define the expected outcome of a command or the state of a resource. +- **Test Isolation**: Each test should aim to be as isolated as possible, cleaning up resources after execution to prevent interference. + +### **Basic Helm Test Example** + +A typical Bats test in `helm-test.bats` might look like this: + +```bash +#!/usr/bin/env bats + +load 'test_helper/bats-support/load' +load 'test_helper/bats-assert/load' + +@test "PMM Helm chart deploys successfully" { + run helm install my-pmm ./pmm-helm-chart + assert_success + assert_output --partial "STATUS: deployed" + + run kubectl get pods -l app.kubernetes.io/instance=my-pmm + assert_success + assert_output --partial "pmm-server" +} + +@test "PMM server is reachable after deployment" { + run kubectl get service my-pmm-server -o jsonpath='{.status.loadBalancer.ingress[0].ip}' + assert_success + PMM_IP="$output" + + # Assuming a simple ping endpoint for demonstration + run curl -s "http://$PMM_IP/ping" + assert_success + assert_output "PMM Server is running" +} +``` + +During the development you may want to run only test you're working on. 
To achieve this you need to add comment `#bats test_tags=bats:focus` above the test annotation + + +```bash +#bats test_tags=bats:focus +@test "PMM server is reachable after deployment" { + run kubectl get service my-pmm-server -o jsonpath='{.status.loadBalancer.ingress[0].ip}' + assert_success + PMM_IP="$output" + + run curl -s "http://$PMM_IP/ping" + assert_success + assert_output "PMM Server is running" +} +``` + +**Note**: The actual `helm-test.bats` file in the project will be more complex, involving detailed setup, deployment, and validation steps specific to PMM. The example above is simplified to illustrate the basic structure. + +--- + +**Related Documentation**: +- [E2E Tests](e2e-tests.md) +- [Integration & CLI Tests](integration-cli-tests.md) +- [Package Tests](package-tests.md) +- [Upgrade Tests](upgrade-tests.md) +- [Test Parameters Reference](test-parameters.md) +- [Troubleshooting Guide](troubleshooting.md) \ No newline at end of file diff --git a/docs/integration-cli-tests.md b/docs/integration-cli-tests.md new file mode 100644 index 00000000..91d848be --- /dev/null +++ b/docs/integration-cli-tests.md @@ -0,0 +1,211 @@ +# Integration & CLI Tests + +This guide provides instructions for running the PMM Integration and Command-Line Interface (CLI) tests locally. These tests validate the interaction between the PMM server and client, as well as the functionality of the `pmm-admin` CLI tool. + +> **⚠️ Note**: This document covers the **current TypeScript/Playwright-based CLI testing framework**. The legacy BATS tests in `pmm-tests/` directory are deprecated and should not be used for new test development. See [main documentation](README.md#important-notice-legacy-tests-deprecation) for details. + +## 💡 **What are Integration & CLI Tests?** + +These tests are designed to: + +- **Validate client-server communication**: Ensuring that the PMM client can successfully register with and send data to the PMM server. 
+- **Test database integration**: Verifying that PMM can monitor various database technologies (MySQL, MongoDB, PostgreSQL, etc.). +- **Ensure CLI functionality**: Testing the different commands, flags, and options of the `pmm-admin` CLI. + +## 🤖 **How to Run Integration & CLI Tests Locally** + +The following steps will guide you through setting up the necessary environment and running the integration and CLI tests on your local machine. These instructions are based on the `runner-integration-cli-tests.yml` CI workflow. + +### **Prerequisites** + +- **Git**: To clone the required repositories. +- **Docker**: To run the PMM server and other services. +- **Node.js (v18+)** and **npm**: For running the test framework. +- **Python 3** and **pip**: For running setup scripts. +- **System Dependencies**: `ansible`, `clickhouse-client`, `dbdeployer`, etc. + +### **Step 1: Clone Repositories** + +Clone the `pmm-ui-tests` and `qa-integration` repositories. + +```bash +git clone --branch v3 https://github.com/percona/pmm-ui-tests.git +git clone --branch v3 https://github.com/Percona-Lab/qa-integration.git +``` + +### **Step 2: Install System Dependencies** + +Install the required system packages. The command below is for Debian/Ubuntu-based systems. + +```bash +sudo apt-get update +sudo apt-get install -y apt-transport-https ca-certificates dirmngr ansible libaio1 libaio-dev libnuma-dev libncurses5 socat sysbench clickhouse-client +curl -s https://raw.githubusercontent.com/datacharmer/dbdeployer/master/scripts/dbdeployer-install.sh | sudo bash -s -- -b /usr/local/bin +``` + +### **Step 3: Set Up PMM Server** + +Set up and start the PMM server using Docker. 
+ +```bash +docker create -v /srv --name pmm-server-data perconalab/pmm-server:3-dev-latest +docker run -d -p 80:80 -p 443:8443 --volumes-from pmm-server-data --name pmm-server --restart always perconalab/pmm-server:3-dev-latest +timeout 240 bash -c 'while [[ "$(curl -k -s -o /dev/null -w ''%{http_code}'' https://127.0.0.1:443/v1/readyz)" != "200" ]]; do sleep 2; done' || false +``` + +### **Step 4: Set Up PMM Client and Services** + +Set up the PMM client and the database services you want to monitor. + +```bash +cd qa-integration/pmm_qa + +# Install the PMM client (used only for help, unregister, generic test suites. These suites need to be moved to some db container) +sudo bash -x pmm3-client-setup.sh --pmm_server_ip 127.0.0.1 --client_version 3-dev-latest --admin_password admin --use_metrics_mode no + +# Set up the test environment and services (e.g., a single Percona Server instance) +python3 -m venv virtenv +source virtenv/bin/activate +pip install --upgrade pip +pip install setuptools +pip install -r requirements.txt +python3 pmm-framework.py --database ps + +cd ../.. +``` + +### **Step 5: Install Test Dependencies** + +Install the Node.js dependencies for the CLI tests. + +```bash +cd pmm-ui-tests/cli +npm ci +``` + +### **Step 6: Run the Tests** + +Finally, run the CLI tests using Playwright. You can run specific test files or all of them. + +```bash +cd pmm-ui-tests/cli + +# Run the help tests +npx playwright test tests/help.spec.ts + +# Run the Percona Server tests +npx playwright test tests/perconaMySqlServer.spec.ts + +# Run all tests +npx playwright test +``` + +## 🚀 **Feature Build Integration Suite** + +The Feature Build Integration Suite (`fb-integration-suite.yml`) is used to test feature builds of the PMM server and client. It runs the same set of integration and CLI tests against a specified feature build image. 
+
+To run these tests locally, follow the same steps as above, but in Step 3, use the feature build Docker image for the PMM server:
+
+```bash
+docker create -v /srv --name pmm-server-data perconalab/pmm-server-fb:feature-xyz
+docker run -d -p 80:80 -p 443:8443 --volumes-from pmm-server-data --name pmm-server --restart always perconalab/pmm-server-fb:feature-xyz
+```
+
+Replace `perconalab/pmm-server-fb:feature-xyz` with the actual tag of the feature build image.
+
+## 📝 **How to Write CLI/Integration Tests**
+
+All paths mentioned in this section are relative to the root of the `pmm-ui-tests` repository ([here](https://github.com/percona/pmm-ui-tests/tree/v3)) or the `qa-integration` repository ([here](https://github.com/Percona-Lab/qa-integration/tree/v3)).
+
+CLI/Integration tests in this project are primarily written using Playwright for interacting with the command line and asserting outputs, combined with Python scripts (`pmm-framework.py`) for setting up and managing the test environment and services.
+
+### **Test Structure and Directory Layout**
+
+CLI tests are located in the `pmm-ui-tests/cli/tests` directory. Each test file (`.spec.ts`) typically focuses on a specific `pmm-admin` command or a set of related commands.
+
+```
+pmm-ui-tests/
+├── cli/
+│ ├── tests/ # Playwright test files for CLI
+│ │ ├── help.spec.ts
+│ │ ├── inventory.spec.ts
+│ │ └── mysql.spec.ts
+│ ├── playwright.config.ts # Playwright configuration for CLI tests
+│ └── package.json # Node.js dependencies for CLI tests
+qa-integration/
+├── pmm_qa/ # Python scripts for environment setup
+│ ├── pmm-framework.py # Main script for setting up services
+│ ├── helpers/ # Helper modules for pmm-framework.py
+│ └── requirements.txt # Python dependencies
+```
+
+- **`pmm-ui-tests/cli/tests/`**: Contains the Playwright test files written in TypeScript. These files use Playwright's `expect` assertions to validate CLI output and behavior. 
+- **`qa-integration/pmm_qa/pmm-framework.py`**: This is a crucial Python script responsible for setting up the PMM server, PMM clients, and various database services required for testing. It abstracts away the complexities of environment provisioning. + +### **Writing Conventions** + +- **CLI Interaction**: Use `cliHelper` to execute CLI commands. +- **Python for Environment Setup**: Leverage `pmm-framework.py` to programmatically set up databases, PMM clients, and other services. This ensures a consistent and reproducible test environment. +- **Clear Assertions**: Assertions should clearly define the expected CLI output, service status, or data collected by PMM. +- **Test Isolation**: Each test should aim to be as isolated as possible, setting up and tearing down its own resources to prevent interference. + +### **Basic CLI Test Example** + +CLI/Integration tests in this project typically use a custom `cli-helper` module (located in `pmm-ui-tests/helpers/cli-helper.ts`) to execute `pmm-admin` commands and capture their output. The `cli-helper` returns an `ExecReturn` object, which provides convenient methods for assertions. + +```typescript +import { test } from '@playwright/test'; +import * as cli from '@helpers/cli-helper'; // Project-specific CLI helper +import ExecReturn from '@support/types/exec-return.class'; // Type definition for command output + +let addMongoHelp: ExecReturn; + +test.describe('pmm-admin help output', () => { + test.beforeAll(async () => { + // Execute a pmm-admin command silently and store its output + addMongoHelp = await cli.execSilent('sudo pmm-admin add mongodb --help'); + await addMongoHelp.assertSuccess(); // Assert that the command exited successfully + }); + + test('pmm-admin add mongodb --help should contain key options', async () => { + // Assert that the output contains specific lines or patterns + await addMongoHelp.outContainsMany([ + 'Usage: pmm-admin add mongodb [ [
]]', + '--socket=STRING', + 'metrics-mode="auto"', + 'host', + 'port', + 'service-name', + ]); + }); + + test('pmm-admin add mongodb --help should contain TLS flags', async () => { + await addMongoHelp.outContainsMany([ + 'tls Use TLS to connect to the database', + 'tls-skip-verify Skip TLS certificate verification', + 'tls-certificate-key-file=STRING', + 'tls-ca-file=STRING Path to certificate authority file', + ]); + }); +}); +``` + +**Explanation of the Example:** + +- **`import * as cli from '@helpers/cli-helper';`**: Imports the custom CLI helper module that wraps shell command execution. +- **`import ExecReturn from '@support/types/exec-return.class';`**: Imports the type definition for the object returned by the CLI helper, which includes `stdout`, `stderr`, `exitCode`, and assertion methods. +- **`cli.execSilent('sudo pmm-admin add mongodb --help')`**: Executes the `pmm-admin` command. `execSilent` runs the command without printing its output to the console, which is useful for tests where you only care about the return value or specific output assertions. +- **`await addMongoHelp.assertSuccess()`**: An assertion method provided by `ExecReturn` to verify that the command executed successfully (exit code 0). +- **`await addMongoHelp.outContainsMany([...])`**: An assertion method to check if the standard output of the command contains all the specified strings. This is a common way to verify help messages or command outputs. + +This example demonstrates how to execute a `pmm-admin` command, check its success, and assert on its output using the project's established helper functions, providing a more accurate representation of how CLI tests are written here. 
+ +--- + +**Related Documentation**: +- [E2E Tests](e2e-tests.md) +- [Infrastructure Tests](infrastructure-tests.md) +- [Package Tests](package-tests.md) +- [Upgrade Tests](upgrade-tests.md) +- [Test Parameters Reference](test-parameters.md) +- [Troubleshooting Guide](troubleshooting.md) diff --git a/docs/package-tests.md b/docs/package-tests.md new file mode 100644 index 00000000..a328bad4 --- /dev/null +++ b/docs/package-tests.md @@ -0,0 +1,222 @@ +# Package Tests + +This guide provides instructions for running the PMM client package installation tests locally. These tests validate that PMM client packages install and function correctly on various supported operating systems and configurations. + +## 💡 **What are Package Tests?** + +Package tests are designed to verify the PMM client installation process from start to finish. They ensure: + +- **Platform Compatibility**: That packages install correctly on all supported Linux distributions (Debian, Ubuntu, RHEL, etc.). +- **Installation Scenarios**: That different installation types, such as standard, custom path, and tarball, all work as expected. +- **Package Integrity**: That the packages themselves are not corrupt and contain all the necessary files and dependencies. + +## 🤖 **How to Run Package Tests Locally** + +The following steps will guide you through setting up a virtualized environment using Vagrant and running the package tests with Ansible, mirroring the process used in the `runner-package-test.yml` CI workflow. + +### **Prerequisites** + +- **Git**: To clone the required repositories. +- **Docker**: To run the PMM server. +- **Vagrant**: To create and manage virtual machine environments. +- **VirtualBox** (or another Vagrant provider): To run the virtual machines. +- **Ansible**: To automate the test execution within the VM. + +### **Step 1: Clone the `package-testing` Repository** + +First, clone the `package-testing` repository, which contains the Ansible playbooks for the tests. 
+ +```bash +git clone https://github.com/Percona-QA/package-testing.git +cd package-testing +``` + +### **Step 2: Set Up PMM Server** + +Before running the client installation tests, you need a running PMM server for the client to connect to. Start one using Docker. + +```bash +docker create -v /srv --name pmm-server-data perconalab/pmm-server:3-dev-latest +docker run -d -p 80:80 -p 443:8443 --volumes-from pmm-server-data --name pmm-server --restart always perconalab/pmm-server:3-dev-latest +timeout 240 bash -c 'while [[ "$(curl -k -s -o /dev/null -w '%{http_code}' https://127.0.0.1:443/v1/readyz)" != "200" ]]; do sleep 2; done' || false +``` + +### **Step 3: Configure and Run Vagrant** + +Vagrant will create a clean VM, install the necessary dependencies, and run the Ansible playbook to perform the test. + +1. **Create a `Vagrantfile`**: Create a file named `Vagrantfile` in the `package-testing` directory with the following content. This example is for Ubuntu 22.04 (Jammy). + + ```ruby + Vagrant.require_version ">= 1.7.0" + Vagrant.configure(2) do |config| + # Use a specific OS box for the test + config.vm.box = "generic/ubuntu2204" + + config.ssh.insert_key = false + config.vm.define :CLIENT_TEST + + # Sync the current directory to the VM + config.vm.synced_folder ".", "/package-testing/" + + # Provision the VM with a shell script + config.vm.provision "shell", privileged: true, inline: <<-SHELL + # Set environment variables for the test + export PMM_SERVER_IP=10.0.2.2:443 + export METRICS_MODE=auto + export install_repo=experimental + export install_package=pmm3-client + + # Install Ansible + apt-get update -y + apt-get install -y software-properties-common + apt-add-repository --yes --update ppa:ansible/ansible + apt-get install -y ansible git wget + + # Run the Ansible playbook for the test + cd /package-testing/playbooks + ansible-playbook --connection=local --inventory 127.0.0.1, --limit 127.0.0.1 pmm3-client_integration.yml + SHELL + end + ``` + +2. 
**Run Vagrant**: Start the VM and the provisioning process. + + ```bash + vagrant up + ``` + +### **Customizing Your Test** + +- **To test a different OS**: Change `config.vm.box` in the `Vagrantfile` to another supported box (e.g., `generic/debian11`, `generic/oracle9`). You may also need to adjust the Ansible installation commands for different package managers (e.g., `yum` or `dnf`). +- **To run a different test scenario**: Change the playbook file in the `ansible-playbook` command (e.g., to `pmm3-client_integration_custom_path.yml`). + +## 🚀 **Feature Build Tarball Suite** + +The Feature Build Tarball Suite (`fb-tarball-suite.yml`) is used to test feature builds of the PMM client distributed as a tarball. It runs the package tests against a specified tarball URL. + +To run these tests locally, follow the same steps as above, but in the `Vagrantfile`, set the `TARBALL_LINK` environment variable to the URL of the feature build tarball: + +```ruby +# ... (Vagrantfile content) ... + config.vm.provision "shell", privileged: true, inline: <<-SHELL + # Set environment variables for the test + export PMM_SERVER_IP=10.0.2.2:443 + export TARBALL_LINK="https://example.com/pmm-client-feature-xyz.tar.gz" + + # ... (rest of the script) ... + SHELL +# ... (Vagrantfile content) ... +``` + +Replace `https://example.com/pmm-client-feature-xyz.tar.gz` with the actual URL of the feature build tarball. + +## 📝 **How to Write Package Tests (Ansible)** + +All paths mentioned in this section are relative to the root of the `package-testing` repository, which can be found [here](https://github.com/Percona-QA/package-testing/tree/v3). + +Package tests are primarily written as Ansible playbooks. Ansible allows for declarative definition of system states and automates the installation, configuration, and validation of software packages across various operating systems. 
+ +### **Test Structure and Directory Layout** + +Ansible playbooks for package testing are located in the `package-testing/playbooks` directory. Each playbook (`.yml`) defines a specific test scenario (e.g., standard installation, custom path installation). + +``` +package-testing/ +├── playbooks/ # Ansible playbooks for different test scenarios +│ ├── pmm3-client_integration.yml +│ ├── pmm3-client_integration_custom_path.yml +│ └── ... +├── roles/ # Reusable Ansible roles +│ ├── pmm-client/ # Role for PMM client installation and configuration +│ └── ... +├── inventory.ini # Ansible inventory file (defines hosts) +└── Vagrantfile # Vagrant configuration for test VMs +``` + +- **`playbooks/`**: Contains the main Ansible playbooks. Each playbook orchestrates a series of tasks to perform a specific package test scenario. +- **`roles/`**: Contains reusable Ansible roles. Roles encapsulate a set of tasks, variables, and handlers for a specific purpose (e.g., installing and configuring the PMM client). +- **`inventory.ini`**: Defines the hosts that Ansible will manage. In local testing with Vagrant, this typically points to the local VM. +- **`Vagrantfile`**: Configures the virtual machine environment where the Ansible playbooks will be executed. + +### **Writing Conventions** + +- **Declarative Style**: Ansible playbooks are declarative, describing the desired state rather than the steps to achieve it. +- **Idempotency**: Playbooks should be idempotent, meaning running them multiple times will have the same result as running them once. +- **Roles**: Utilize Ansible roles to organize tasks, variables, and handlers into logical, reusable units. +- **Variables**: Use variables to make playbooks flexible and reusable across different environments or test scenarios. +- **Assertions**: Use Ansible's `assert` module or conditional tasks to validate the success of installation steps and the state of the system. 
+ +### **Basic Ansible Playbook Example** + +A simplified Ansible playbook (`pmm3-client_integration.yml`) might look like this: + +```yaml +--- +- name: Install PMM Client (Standard Integration) + hosts: all + become: yes + vars: + pmm_server_ip: "{{ lookup('env', 'PMM_SERVER_IP') }}" + metrics_mode: "{{ lookup('env', 'METRICS_MODE') }}" + install_repo: "{{ lookup('env', 'install_repo') }}" + install_package: "{{ lookup('env', 'install_package') }}" + + tasks: + - name: Ensure Percona repository is configured + ansible.builtin.shell: |- + curl -fsSL https://www.percona.com/get/percona-release | bash + percona-release enable-only {{ install_package }} {{ install_repo }} + + - name: Install PMM Client package + ansible.builtin.package: + name: "{{ install_package }}" + state: present + update_cache: yes + + - name: Configure PMM Client to connect to PMM Server + ansible.builtin.command: |- + pmm-admin config --server-url=https://{{ pmm_server_ip }}:443 --server-username=admin --server-password=admin + + - name: Add MySQL service + ansible.builtin.command: |- + pmm-admin add mysql --query-source=perfschema --username=root --password=root + + - name: Verify PMM Client status + ansible.builtin.command: pmm-admin status + register: pmm_status + until: pmm_status.stdout.find("PMM Client is running") != -1 + retries: 10 + delay: 10 + + - name: Assert MySQL service is added + ansible.builtin.command: pmm-admin list + register: pmm_list + failed_when: pmm_list.stdout.find("mysql") == -1 +``` + +### **Key Concepts** + +- **Playbook**: The entry point for an Ansible run, defining the hosts to target and the tasks to execute. +- **Hosts**: Specifies which machines the playbook will run against (e.g., `all` for all hosts in the inventory, or a specific group). +- **`become: yes`**: Instructs Ansible to escalate privileges (e.g., use `sudo`) for tasks that require root access. +- **`vars`**: Defines variables that can be used within the playbook. 
These can be sourced from environment variables (`lookup('env', ...)`), files, or command-line arguments. +- **Tasks**: Individual actions that Ansible performs. Tasks use modules (e.g., `ansible.builtin.package`, `ansible.builtin.command`, `ansible.builtin.shell`) to interact with the remote hosts. +- **Modules**: Pre-built units of code that Ansible executes. They perform specific functions like installing packages, running commands, or managing services. +- **`register`**: Captures the output of a task into a variable for later use or assertion. +- **`until` / `retries` / `delay`**: Used for retrying tasks until a certain condition is met, useful for waiting on services to start or become healthy. +- **`failed_when`**: Defines a condition under which a task should be considered failed. + +### **Running New Tests** + +After creating a new playbook or modifying an existing one, you can run it by updating your `Vagrantfile` to point to the new playbook and then running `vagrant up`. + +--- + +**Related Documentation**: +- [E2E Tests](e2e-tests.md) +- [Infrastructure Tests](infrastructure-tests.md) +- [Integration & CLI Tests](integration-cli-tests.md) +- [Upgrade Tests](upgrade-tests.md) +- [Test Parameters Reference](test-parameters.md) +- [Troubleshooting Guide](troubleshooting.md) diff --git a/docs/test-parameters.md b/docs/test-parameters.md new file mode 100644 index 00000000..09131145 --- /dev/null +++ b/docs/test-parameters.md @@ -0,0 +1,388 @@ +# Test Parameters Reference + +This comprehensive reference guide covers all parameters, configurations, and options available across PMM-QA workflows. 
+ +## 📚 **Overview** + +This document provides detailed information about: +- Common workflow parameters +- Service setup configurations +- Version and image specifications +- Test flags and categories +- Environment variables +- Platform and OS options + +## 🔧 **Common Workflow Parameters** + +### Branch Configuration +```yaml +pmm_ui_tests_branch: "v3" # PMM UI tests repository branch +pmm_qa_branch: "v3" # PMM QA repository branch +qa_integration_branch: "v3" # QA integration repository branch +package_testing_branch: "v3" # Package testing branch +``` + +### Version and Image Parameters +```yaml +# Server Configuration +pmm_server_image: "perconalab/pmm-server:3-dev-latest" +pmm_server_version: "perconalab/pmm-server:3-dev-latest" +pmm_server_start_version: "latest" + +# Client Configuration +pmm_client_image: "perconalab/pmm-client:3-dev-latest" +pmm_client_version: "3-dev-latest" +pmm_client_start_version: "pmm2-latest" +pmm_client_tarball: "" # Custom tarball URL + +# Status Reporting +sha: "null" # Commit SHA for status reporting +``` + +## 🗄️ **Database Service Setup** + +### Single Database Configurations +```yaml +# MySQL Family +--database mysql # MySQL (latest) +--database mysql=8.0 # MySQL 8.0 +--database ps # Percona Server (latest) +--database ps=5.7 # Percona Server 5.7 +--database ps=8.0 # Percona Server 8.0 + +# PostgreSQL Family +--database pdpgsql # Percona Distribution for PostgreSQL (latest) +--database pdpgsql=14 # PostgreSQL 14 +--database pdpgsql=15 # PostgreSQL 15 +--database pdpgsql=16 # PostgreSQL 16 + +# MongoDB Family +--database psmdb # Percona Server for MongoDB +--database modb # MongoDB + +# Proxy/Load Balancers +--database haproxy # HAProxy +--database proxysql # ProxySQL + +# Special Configurations +--database external # External exporter testing +--database dockerclients # Docker client testing +``` + +### Advanced Database Options +```yaml +# MongoDB with SSL +--database psmdb,SETUP_TYPE=pss + +# MongoDB with extra profiles 
+--database psmdb,COMPOSE_PROFILES=extra + +# MySQL/PS with slow query log +--database ps,QUERY_SOURCE=slowlog +--database mysql,QUERY_SOURCE=slowlog + +# PostgreSQL with extensions +--database pdpgsql,EXTENSION=pg_stat_monitor +--database pdpgsql,EXTENSION=pg_stat_statements +``` + +### Multi-Database Setups +```yaml +# Basic multi-database +--database ps --database psmdb --database pdpgsql + +# Comprehensive setup +--database ps=8.0 --database psmdb --database pdpgsql=15 --database haproxy + +# Client addition patterns +--addclient=ps,1 # Add 1 Percona Server client +--addclient=pdpgsql,1 # Add 1 PostgreSQL client +--addclient=modb,1 # Add 1 MongoDB client +``` + +## 🏷️ **Test Tags and Categories** + +### E2E Test Tags +```yaml +# Core Functionality +@portal # Portal functionality +@inventory # Inventory management +@dashboards # Dashboard functionality +@qan # Query Analytics + +# Feature-Specific +@backup-management # Backup features +@alerting # Alerting functionality +@rbac # Role-based access control +@settings-fb # Settings feature build tests + +# Security and Authentication +@security # Security features +@user-password # User authentication testing +@oauth # OAuth integration + +# API and Integration +@api # API testing +@exporters # Exporter functionality +@mongodb-exporter # MongoDB-specific exporters +``` + +### Feature Build Test Tags +```yaml +# Backup Management +@bm-mongo # MongoDB backup tests +@bm-mysql # MySQL backup tests +@bm-common # Common backup features +@bm-locations # Backup location testing + +# Database-Specific +@pgsm-pmm-integration # PostgreSQL pg_stat_monitor +@pgss-pmm-integration # PostgreSQL pg_stat_statements + +# UI Components +@fb-instances # Instance management UI +@fb-alerting # Alerting UI components +@fb-settings # Settings UI components +``` + +### Upgrade Test Tags +```yaml +# Pre-upgrade Tests +@config-pre-upgrade # Configuration documentation +@rbac-pre-upgrade # RBAC state capture +@portal-pre-upgrade # Portal state 
capture +@inventory-pre-upgrade # Inventory state capture + +# Post-upgrade Tests +@config-post-upgrade # Configuration validation +@rbac-post-upgrade # RBAC validation +@portal-post-upgrade # Portal validation +@inventory-post-upgrade # Inventory validation +``` + +## 📦 **Package Testing Parameters** + +### Package Types +```yaml +package: "original" # Legacy PMM package +package: "pmm3-client" # PMM3 client package +package: "tools" # PMM tools package +``` + +### Repository Types +```yaml +repository: "release" # Stable release repository +repository: "release candidate" # RC repository +repository: "dev-latest" # Development repository +``` + +### Metrics Modes +```yaml +metrics_mode: "auto" # Automatic mode selection +metrics_mode: "push" # Client pushes metrics +metrics_mode: "pull" # Server pulls metrics +``` + +### Installation Scenarios +```yaml +# Playbook Types +playbook: "pmm3-client_integration" +playbook: "pmm3-client_integration_custom_path" +playbook: "pmm3-client_integration_custom_port" +``` + +## ⬆️ **Upgrade Testing Parameters** + +### Upgrade Methods +```yaml +upgrade_type: "UI way" # Web interface upgrade +upgrade_type: "Docker way" # Container replacement +upgrade_type: "Podman way" # Podman-based upgrade +``` + +### Version Specifications +```yaml +# Start Versions +pmm_server_start_version: "latest" # Latest stable +pmm_server_start_version: "dev-latest" # Development +pmm_server_start_version: "2.41.0" # Specific version +pmm_server_start_version: "3.0.0-rc" # Release candidate + +# Target Repositories +repository: "release" # To stable release +repository: "release candidate" # To RC +repository: "dev-latest" # To development +``` + +## 🖥️ **Platform and OS Parameters** + +### Supported Operating Systems +```yaml +# Debian/Ubuntu Family +"bullseye" # Debian 11 +"bookworm" # Debian 12 +"jammy" # Ubuntu 22.04 LTS +"noble" # Ubuntu 24.04 LTS + +# Red Hat Family +"ol-8" # Oracle Linux 8 +"ol-9" # Oracle Linux 9 +"rocky-8" # Rocky Linux 8 
+"rocky-9" # Rocky Linux 9 +"centos-7" # CentOS 7 (legacy) +``` + +### Architecture Support +```yaml +"x86_64" # Intel/AMD 64-bit +"aarch64" # ARM 64-bit +``` + +## 🌐 **Environment Variables** + +### Authentication Variables +```yaml +OAUTH_CLIENT_ID # OAuth client identifier +OAUTH_CLIENT_SECRET # OAuth client secret +OAUTH_PMM_CLIENT_ID # PMM-specific OAuth client ID +OAUTH_PMM_CLIENT_SECRET # PMM-specific OAuth secret +ADMIN_PASSWORD # PMM admin password (default: admin) +``` + +### External Service Integration +```yaml +MAILOSAUR_API_KEY # Email testing service +MAILOSAUR_UI_TESTS_SERVER_ID # UI tests email server +MAILOSAUR_API_TESTS_SERVER_ID # API tests email server +SERVICENOW_PASSWORD # ServiceNow integration +ZEPHYR_PMM_API_KEY # Test management integration +``` + +### Testing Configuration +```yaml +PMM_BASE_URL # PMM server URL (default: https://127.0.0.1) +TIMEOUT # Test timeout settings +BROWSER # Browser selection +DOCKER_VERSION # Docker image version +CLIENT_VERSION # Client version +``` + +### Backup Testing +```yaml +BACKUP_LOCATION_ACCESS_KEY # Backup storage access key +BACKUP_LOCATION_SECRET_KEY # Backup storage secret key +``` + +## 🔧 **CLI Test Specific Parameters** + +### Test Execution Parameters +```yaml +cli_test: "help.spec.ts" # Specific test file +cli_test: "pmm-server-only" # Server-only tests +cli_test: "pmm-client-docker" # Client container tests +cli_test: "generic unregister --workers=1" # Generic tests with workers +cli_test: "postgreSql --workers=1" # PostgreSQL tests +``` + +### Service List Parameters +```yaml +services_list: "--database ps=8.0" +services_list: "--database dockerclients" +services_list: "--addclient=ps,1 --addclient=pdpgsql,1" +``` + +## 🏗️ **Infrastructure Testing Parameters** + +### Kubernetes/Helm Parameters +```yaml +server_image: "perconalab/pmm-server:3-dev-latest" +client_image: "perconalab/pmm-client:3-dev-latest" +pmm_qa_branch: "v3" +``` + +### Easy Install Parameters +```yaml +easy_install_branch: 
"v3" # Installation script branch +os: "ubuntu-noble" # Target operating system +os: "ol-9" # Oracle Linux 9 +os: "rocky-9" # Rocky Linux 9 +``` + +## 📊 **Matrix Testing Parameters** + +### Version Matrix +```yaml +matrix_range: "10" # Number of versions to test +version_matrix: ["3.0.0", "3.1.0", "3.2.0"] +pt_os_matrix: "[\"bullseye\", \"bookworm\", \"noble\"]" +``` + +### Platform Matrix +```yaml +[ + { os: "ubuntu-noble", package: "pmm3-client", metrics: "auto" }, + { os: "debian-bookworm", package: "pmm3-client", metrics: "push" }, + { os: "ol-9", package: "pmm3-client", metrics: "pull" } +] +``` + +## 🕒 **Timing and Duration Parameters** + +### Test Timeouts +```yaml +timeout-minutes: 40 # Job timeout (Integration tests) +timeout-minutes: 60 # Job timeout (E2E tests) +timeout-minutes: 1 # Job timeout (Version getter) +``` + +### Expected Durations +```yaml +Help Tests: 5 minutes +Server Container: 10 minutes +Database Tests: 20-30 minutes +E2E Portal: 30 minutes +E2E Inventory: 25 minutes +Package Installation: 20 minutes +Helm Tests: 30 minutes +Upgrade Tests: 45-60 minutes +``` + +## 🔄 **Special Configuration Patterns** + +### Setup Enhancement Flags +```yaml +--setup-portal-oauth # OAuth configuration for portal +--enable-portal-features # Portal-specific features +--enable-service-discovery # Automatic service discovery +--setup-multiple-clients # Multiple client instances +--enable-backup-management # Backup functionality +--setup-alerting # Alerting configuration +--mongo-replica-for-backup # MongoDB replica for backup testing +--setup-bm-mysql # Backup management for MySQL +``` + +### Custom Configuration Examples +```yaml +# Comprehensive test setup +--database ps=8.0,QUERY_SOURCE=slowlog \ +--database psmdb,SETUP_TYPE=pss,COMPOSE_PROFILES=extra \ +--database pdpgsql=15,EXTENSION=pg_stat_monitor \ +--database haproxy \ +--enable-backup-management \ +--setup-alerting \ +--create-test-users + +# Minimal test setup +--database ps + +# Multi-service basic 
setup +--database ps --database psmdb --database pdpgsql +``` + +--- + +**Related Documentation**: +- [Integration & CLI Tests](integration-cli-tests.md) +- [E2E Tests](e2e-tests.md) +- [Package Tests](package-tests.md) +- [Troubleshooting Guide](troubleshooting.md) \ No newline at end of file diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md new file mode 100644 index 00000000..46b5c220 --- /dev/null +++ b/docs/troubleshooting.md @@ -0,0 +1,612 @@ +# Troubleshooting Guide + +This guide provides solutions for common issues encountered when running PMM-QA tests, debugging workflows, and resolving test failures. + +## 🔍 **Overview** + +This troubleshooting guide covers: +- Common workflow issues and solutions +- Test failure debugging strategies +- Infrastructure and environment problems +- Performance and timeout issues +- Access and permission problems + +## 🚨 **Common Workflow Issues** + +### Permission and Access Issues + +#### Workflow Permission Denied +```yaml +Issue: Cannot trigger GitHub Actions workflow +Error: "Permission denied" or workflow not visible + +Solutions: +✅ Verify repository access permissions +✅ Ensure "Actions: Write" permission +✅ Check organization/repository settings +✅ Validate user account status +✅ Review branch protection rules +``` + +#### Secret Access Issues +```yaml +Issue: Workflow fails due to missing secrets +Error: "Secret not found" or authentication failures + +Solutions: +✅ Verify secrets are configured in repository settings +✅ Check secret names match workflow requirements +✅ Ensure secrets are available to the branch +✅ Validate secret values are correct +✅ Review organization-level secret settings +``` + +### Version and Image Issues + +#### Image Not Found +```yaml +Issue: Docker image pull failures +Error: "Image not found" or "Pull access denied" + +Solutions: +✅ Verify image name and tag are correct +✅ Check image exists in registry +✅ Validate registry access permissions +✅ Try alternative image tags 
+✅ Check network connectivity to registry + +Example Fix: +# Instead of non-existent version +pmm_server_image: "perconalab/pmm-server:3.1.0" +# Use available version +pmm_server_image: "perconalab/pmm-server:3-dev-latest" +``` + +#### Version Compatibility Issues +```yaml +Issue: "Upgrade to the same version is forbidden!" +Error: Start and target versions are identical + +Solutions: +✅ Choose different start/target version combinations +✅ Verify version strings are different +✅ Check version detection logic +✅ Use explicit version numbers instead of aliases + +Example Fix: +# Problematic configuration +pmm_server_start_version: "latest" +repository: "release" +# Fixed configuration +pmm_server_start_version: "latest" +repository: "dev-latest" +``` + +## ⏱️ **Timeout and Performance Issues** + +### Test Timeouts + +#### General Test Timeouts +```yaml +Issue: Tests timing out after 40 minutes +Common Causes: +- Infrastructure setup delays +- Network connectivity issues +- Resource constraints +- Database startup problems + +Solutions: +✅ Check PMM server startup logs +✅ Verify database container health +✅ Review network connectivity +✅ Monitor resource usage +✅ Check for stuck processes + +Debugging Commands: +docker ps -a # Check container status +docker logs pmm-server # Review server logs +kubectl get pods # Check K8s pod status (for Helm tests) +``` + +#### Database Setup Timeouts +```yaml +Issue: Database service setup takes too long +Common Causes: +- Image download delays +- Container resource constraints +- Network connectivity issues +- Database initialization problems + +Solutions: +✅ Check container image availability +✅ Verify adequate system resources +✅ Review database startup logs +✅ Check network connectivity +✅ Validate database configuration + +Monitoring Commands: +docker stats # Monitor resource usage +docker logs # Check container logs +netstat -tuln # Check port availability +``` + +### Performance Degradation + +#### Slow Test Execution 
+```yaml
+Issue: Tests running slower than expected
+Common Causes:
+- Resource contention
+- Network latency
+- Database performance issues
+- UI rendering delays
+
+Solutions:
+✅ Monitor system resources
+✅ Check for concurrent test runs
+✅ Optimize database configurations
+✅ Review network connectivity
+✅ Check browser/UI performance
+
+Performance Monitoring:
+top # System resource usage
+iotop # Disk I/O monitoring
+nethogs # Network usage per process
+```
+
+## 🗄️ **Database and Service Issues**
+
+### Database Connection Failures
+
+#### Service Setup Failures
+```yaml
+Issue: Database service fails to start
+Common Causes:
+- Port conflicts
+- Configuration errors
+- Resource constraints
+- Image compatibility issues
+
+Solutions:
+✅ Check port availability
+✅ Verify container health status
+✅ Review database credentials
+✅ Check resource limits
+✅ Validate image compatibility
+
+Debugging Steps:
+1. Check container status: docker ps -a
+2. Review container logs: docker logs <container-name>
+3. Verify port availability: netstat -tuln | grep <port>
+4. Check resource usage: docker stats
+5. 
Test connectivity: telnet <host> <port>
+```
+
+#### Metrics Collection Issues
+```yaml
+Issue: Metrics not being collected
+Common Causes:
+- Service registration failures
+- Network connectivity issues
+- Authentication problems
+- Exporter configuration errors
+
+Solutions:
+✅ Verify service registration
+✅ Check exporter configuration
+✅ Review database permissions
+✅ Validate metrics endpoints
+✅ Check authentication credentials
+
+Verification Commands:
+curl http://localhost:9090/metrics # Check metrics endpoint
+pmm-admin list # Verify service registration
+pmm-admin status # Check client status
+```
+
+### MongoDB-Specific Issues
+
+#### Replica Set Configuration
+```yaml
+Issue: MongoDB replica set setup fails
+Common Causes:
+- Network configuration issues
+- Timing problems in initialization
+- Resource constraints
+- Authentication issues
+
+Solutions:
+✅ Check replica set configuration
+✅ Verify network connectivity between nodes
+✅ Review MongoDB logs
+✅ Check authentication setup
+✅ Validate resource allocation
+
+MongoDB Debugging:
+mongo --eval "rs.status()" # Check replica set status
+mongo --eval "db.stats()" # Check database status
+docker exec mongo mongo --eval "rs.initiate()" # Initialize replica set
+```
+
+### PostgreSQL-Specific Issues
+
+#### Extension Loading Issues
+```yaml
+Issue: pg_stat_monitor or pg_stat_statements not working
+Common Causes:
+- Extension not installed
+- Configuration not updated
+- PostgreSQL restart required
+- Permission issues
+
+Solutions:
+✅ Install required extensions
+✅ Update postgresql.conf
+✅ Restart PostgreSQL service
+✅ Check extension permissions
+✅ Verify extension functionality
+
+PostgreSQL Debugging:
+psql -c "SELECT * FROM pg_extension;" # List installed extensions
+psql -c "SHOW shared_preload_libraries;" # Check loaded libraries
+psql -c "SELECT * FROM pg_stat_statements LIMIT 1;" # Test extension
+```
+
+## 🎭 **UI and Browser Issues**
+
+### Browser-Related Failures
+
+#### Element Not Found Errors
+```yaml
+Issue: UI tests fail with "Element not found" +Common Causes: +- UI layout changes +- Timing issues (elements not loaded) +- Browser compatibility issues +- Dynamic content loading + +Solutions: +✅ Update element selectors +✅ Add explicit waits for elements +✅ Check for dynamic content loading +✅ Verify page layout changes +✅ Test with different browsers + +Playwright Debugging: +npx playwright test --headed # Run with visible browser +npx playwright test --debug # Run in debug mode +npx playwright codegen # Generate selectors +``` + +#### Authentication Issues +```yaml +Issue: Login failures in UI tests +Common Causes: +- Incorrect credentials +- OAuth configuration issues +- Session management problems +- Authentication flow changes + +Solutions: +✅ Verify login credentials +✅ Check OAuth configuration +✅ Review session management +✅ Validate authentication flow +✅ Check for CAPTCHA or 2FA + +Authentication Debugging: +# Check PMM server authentication +curl -k https://localhost/v1/auth/login \ + -d '{"username":"admin","password":"admin"}' + +# Verify OAuth configuration +echo $OAUTH_CLIENT_ID +echo $OAUTH_CLIENT_SECRET +``` + +### Page Load Issues + +#### Slow Page Loading +```yaml +Issue: Pages load slowly or timeout +Common Causes: +- Server performance issues +- Network latency +- Large data sets +- JavaScript execution problems + +Solutions: +✅ Increase timeout settings +✅ Check server performance +✅ Optimize data queries +✅ Review JavaScript errors +✅ Monitor network requests + +Performance Debugging: +# Browser developer tools +1. Open F12 developer tools +2. Go to Network tab +3. Monitor request timing +4. Check for slow requests +5. 
Review JavaScript console for errors +``` + +## 🏗️ **Infrastructure Issues** + +### Kubernetes/Helm Issues + +#### Minikube Startup Failures +```yaml +Issue: Minikube fails to start +Common Causes: +- Insufficient system resources +- Virtualization not enabled +- Network configuration issues +- Driver compatibility problems + +Solutions: +✅ Check system resources (CPU, memory) +✅ Enable virtualization in BIOS +✅ Update Minikube version +✅ Try different drivers +✅ Clear Minikube cache + +Minikube Debugging: +minikube status # Check cluster status +minikube logs # View cluster logs +minikube delete && minikube start # Reset cluster +minikube config view # Check configuration +``` + +#### Storage Driver Issues +```yaml +Issue: CSI storage driver installation fails +Common Causes: +- Kubernetes version incompatibility +- Insufficient permissions +- Resource constraints +- Driver configuration errors + +Solutions: +✅ Check Kubernetes version compatibility +✅ Verify cluster permissions +✅ Review driver installation logs +✅ Check resource availability +✅ Validate storage class configuration + +Storage Debugging: +kubectl get pods -n kube-system # Check system pods +kubectl get storageclass # List storage classes +kubectl describe pv # Check persistent volumes +kubectl logs -n kube-system # Check CSI driver logs +``` + +### Container Issues + +#### Docker Daemon Issues +```yaml +Issue: Docker operations fail +Common Causes: +- Docker daemon not running +- Permission issues +- Disk space problems +- Network configuration issues + +Solutions: +✅ Start Docker daemon +✅ Add user to docker group +✅ Free up disk space +✅ Check Docker configuration +✅ Restart Docker service + +Docker Debugging: +systemctl status docker # Check daemon status +docker system df # Check disk usage +docker system prune # Clean up space +docker info # Check Docker info +``` + +## 📦 **Package Installation Issues** + +### Repository Configuration + +#### Package Not Found +```yaml +Issue: Package 
installation fails with "not found"
+Common Causes:
+- Repository not configured
+- Package version unavailable
+- Repository URL incorrect
+- GPG key issues
+
+Solutions:
+✅ Configure package repository
+✅ Update package cache
+✅ Verify package version exists
+✅ Check repository URL
+✅ Import GPG keys
+
+APT Debugging:
+apt update # Update package cache
+apt search pmm # Search for packages
+apt-cache policy pmm3-client # Check available versions
+apt-key list # List GPG keys
+```
+
+#### Permission Issues
+```yaml
+Issue: Package installation fails with permission errors
+Common Causes:
+- Insufficient privileges
+- SELinux/AppArmor restrictions
+- File system permissions
+- User account limitations
+
+Solutions:
+✅ Run with sudo/root privileges
+✅ Check SELinux/AppArmor settings
+✅ Verify file system permissions
+✅ Review user account capabilities
+✅ Check package manager configuration
+
+Permission Debugging:
+sudo -l # Check sudo permissions
+getenforce # Check SELinux status
+aa-status # Check AppArmor status
+ls -la /etc/apt/sources.list.d/ # Check repository files
+```
+
+## 🔄 **Network and Connectivity Issues**
+
+### Network Configuration
+
+#### Connectivity Problems
+```yaml
+Issue: Network connectivity failures
+Common Causes:
+- Firewall blocking connections
+- DNS resolution issues
+- Proxy configuration problems
+- Network routing issues
+
+Solutions:
+✅ Check firewall settings
+✅ Verify DNS resolution
+✅ Configure proxy settings
+✅ Test network connectivity
+✅ Review routing tables
+
+Network Debugging:
+ping google.com # Test internet connectivity
+nslookup pmm-server # Test DNS resolution
+telnet <host> <port> # Test port connectivity
+curl -I https://github.com # Test HTTPS connectivity
+netstat -rn # Check routing table
+```
+
+#### Port Conflicts
+```yaml
+Issue: Services fail to start due to port conflicts
+Common Causes:
+- Ports already in use
+- Multiple service instances
+- System services using ports
+- Previous test cleanup incomplete
+
+Solutions:
+✅ Check port availability +✅ Stop conflicting services +✅ Use alternative ports +✅ Complete cleanup from previous tests +✅ Configure port forwarding + +Port Debugging: +netstat -tuln | grep :80 # Check port 80 usage +lsof -i :3306 # Check MySQL port usage +ss -tuln # Modern netstat alternative +fuser 9090/tcp # Find process using port +``` + +## 🔧 **Debugging Strategies** + +### Log Analysis + +#### Collecting Logs +```yaml +Workflow Logs: +1. Go to GitHub Actions tab +2. Click on failed workflow run +3. Expand failed job steps +4. Copy/download log content + +Container Logs: +docker logs pmm-server # PMM server logs +docker logs pmm-client # PMM client logs +kubectl logs # Kubernetes pod logs + +Application Logs: +tail -f /var/log/pmm/*.log # PMM application logs +journalctl -u pmm-agent # Systemd service logs +``` + +#### Log Analysis Techniques +```yaml +Common Log Patterns to Look For: +- "ERROR" or "FATAL" messages +- "Connection refused" or "timeout" errors +- "Permission denied" messages +- "Out of memory" or resource errors +- HTTP error codes (4xx, 5xx) + +Useful Commands: +grep -i error /var/log/pmm/* # Find error messages +journalctl -f # Follow system logs +dmesg | tail # Check kernel messages +``` + +### Test Isolation + +#### Reproducing Issues Locally +```yaml +Steps to Reproduce: +1. Use same parameters as failed workflow +2. Set up identical environment +3. Run tests step by step +4. Monitor logs and resources +5. Identify failure point + +Local Testing Commands: +# Set up local environment +docker-compose up -d + +# Run specific test +npx playwright test + +# Run with debugging +npx playwright test --headed --debug +``` + +## 📞 **Getting Help** + +### Information to Collect +```yaml +When Reporting Issues: +✅ Workflow name and run ID +✅ Complete error messages +✅ Configuration parameters used +✅ Environment details +✅ Steps to reproduce +✅ Expected vs actual behavior +``` + +### Escalation Process +```yaml +1. Check this troubleshooting guide +2. 
Search existing GitHub issues +3. Review workflow logs thoroughly +4. Try reproducing locally +5. Create detailed issue report +6. Contact PMM QA team if needed +``` + +### Useful Resources +```yaml +Documentation: +- PMM Documentation: https://docs.percona.com/pmm/ +- Playwright Docs: https://playwright.dev/ +- Docker Docs: https://docs.docker.com/ +- Kubernetes Docs: https://kubernetes.io/docs/ + +Community: +- PMM GitHub Issues +- Percona Community Forums +- PMM QA Team Channels +``` + +--- + +**Related Documentation**: +- [Integration & CLI Tests](integration-cli-tests.md) +- [E2E Tests](e2e-tests.md) +- [Test Parameters Reference](test-parameters.md) +- [Main Documentation](README.md) \ No newline at end of file diff --git a/docs/upgrade-tests.md b/docs/upgrade-tests.md new file mode 100644 index 00000000..5397c652 --- /dev/null +++ b/docs/upgrade-tests.md @@ -0,0 +1,84 @@ +# Upgrade Tests + +This guide provides instructions for running the PMM upgrade tests locally. These tests validate the PMM upgrade process, ensuring data integrity and functionality are maintained across versions. + +## 💡 **What are Upgrade Tests?** + +Upgrade tests are critical for ensuring a smooth user experience when new versions of PMM are released. They verify that: + +- **The upgrade process is successful**: Whether using the UI, Docker, or Podman, the upgrade completes without errors. +- **Data is preserved**: All historical monitoring data, user configurations, and settings are maintained after the upgrade. +- **Functionality remains intact**: All features of PMM, from monitoring and alerting to QAN, continue to work correctly. + +## 🤖 **How to Run Upgrade Tests Locally** + + +### **Prerequisites** + +- **Git**: To clone the required repositories. +- **Docker**: To run the PMM server and other services. +- **Node.js (v18+)** and **npm**: For running the test framework. +- **Python 3** and **pip**: For running setup scripts. 
+
+### **Step 1: Set Up the Initial PMM Environment**
+
+First, set up the environment with the *starting* version of PMM Server and Client that you want to test the upgrade from.
+
+1. **Clone the repositories**:
+
+   ```bash
+   git clone --branch v3 https://github.com/percona/pmm-ui-tests.git
+   git clone --branch v3 https://github.com/percona/pmm-qa.git
+   ```
+
+2. **Set up the PMM Server**:
+
+
+3. **Set up the PMM Client and Services**:
+
+
+### **Step 2: Run Pre-Upgrade Tests**
+
+Before performing the upgrade, run the pre-upgrade tests. These tests capture the state of the system before the upgrade to compare it with the post-upgrade state.
+
+
+### **Step 3: Perform the PMM Upgrade**
+
+1. **Stop and replace the PMM server container** with the new version.
+
+   ```bash
+   docker stop pmm-integration-server && docker rm pmm-integration-server  # remove the old container so its name can be reused
+   docker pull perconalab/pmm-server:3-dev-latest
+   docker run --detach --restart always --network="pmm-integration-network" -p 80:80 -p 443:443 --volumes-from pmm-integration-server-data --name pmm-integration-server perconalab/pmm-server:3-dev-latest
+   ```
+
+### **Step 4: Run Post-Upgrade Tests**
+
+After the upgrade is complete, run the post-upgrade tests to validate that everything is still working as expected.
+
+By comparing the results of the pre-upgrade and post-upgrade tests, you can verify the success of the upgrade process.
+
+## 📝 **How to Write Upgrade Tests**
+
+Upgrade tests are complex and typically involve a sequence of steps across different tools and environments. They combine environment setup, UI interactions, and assertions to verify the upgrade process and data integrity.
+
+### **Test Structure and Directory Layout**
+
+### **Writing Conventions**
+
+- **Orchestration**: Playwright tests act as the orchestrator, calling external scripts (e.g., Python `pmm-framework.py` via `cli.exec` or similar helper) to set up the initial PMM environment with a specific older version.
+- **Pre-Upgrade Validation**: +- **Upgrade Execution**: Execute the upgrade process by replacing the PMM server container with the new version, ensuring all services are restarted and functional. +- **Post-Upgrade Validation**: +- **Version Management**: +- **Tags**: + +--- + +**Related Documentation**: +- [E2E Tests](e2e-tests.md) +- [Infrastructure Tests](infrastructure-tests.md) +- [Integration & CLI Tests](integration-cli-tests.md) +- [Package Tests](package-tests.md) +- [Test Parameters Reference](test-parameters.md) +- [Troubleshooting Guide](troubleshooting.md) \ No newline at end of file diff --git a/pmm-tests/DEPRECATED.md b/pmm-tests/DEPRECATED.md new file mode 100644 index 00000000..7beb2f35 --- /dev/null +++ b/pmm-tests/DEPRECATED.md @@ -0,0 +1,42 @@ +# ⚠️ DEPRECATED: PMM Tests Directory + +## This directory is deprecated and in maintenance mode only + +**Status**: 🚫 **DEPRECATED - DO NOT USE FOR NEW DEVELOPMENT** + +The BATS (Bash Automated Testing System) tests in this directory are **deprecated** and should not be used for new test development. 
+ +## What to use instead + +For new test development, use the current testing frameworks: + +### CLI Testing +- **Framework**: TypeScript/Playwright +- **Location**: `cli-tests/` directory in [pmm-ui-tests](https://github.com/percona/pmm-ui-tests/tree/v3) repository +- **Documentation**: [Integration & CLI Tests](../docs/integration-cli-tests.md) + +### UI Testing +- **Framework**: Playwright +- **Location**: `playwright-tests/` directory in [pmm-ui-tests](https://github.com/percona/pmm-ui-tests/tree/v3) repository +- **Documentation**: [End-to-End Tests](../docs/e2e-tests.md) + +### Infrastructure Setup +- **Framework**: Python/Ansible +- **Location**: `qa-integration/pmm_qa/` directory in [qa-integration](https://github.com/Percona-Lab/qa-integration/tree/v3) repository +- **Documentation**: [Adding New Environments](../docs/adding-new-environments.md) + +## Migration Timeline + +- **Current Status**: Maintenance mode only - critical bug fixes only +- **New Development**: Use TypeScript/Playwright frameworks listed above +- **Existing Tests**: Will be gradually migrated to new frameworks +- **Future**: This directory will be removed in a future release + +## For More Information + +See the main documentation: [PMM-QA Testing Documentation](../docs/README.md#important-notice-legacy-tests-deprecation) + +--- + +**Last Updated**: December 2024 +**Deprecation Notice Added**: December 2024 \ No newline at end of file