From 20ecf1a2d49303ff6210541f4136bb13a8948ba9 Mon Sep 17 00:00:00 2001
From: woblerr
Date: Sun, 17 Aug 2025 21:32:41 +0300
Subject: [PATCH 01/23] Refactor e2e tests for backup-info command.

- Replace individual test containers with single Greenplum container.
- Switch from static test data to real gpbackup-generated backups.
- Add database initialization and backup preparation scripts.
- Reorganize test structure with prepare/ and run_tests/ directories.
- Update S3 plugin path to use Greenplum built-in binary.
- Simplify Makefile targets to single backup-info test.
- Remove S3 plugin build Dockerfile (use built-in plugin).
- Update MinIO and Greenplum image versions to latest.
---
 Makefile                                      |  58 +---
 e2e_tests/.env                                |  11 +-
 e2e_tests/conf/Dockerfile.s3_plugin           |  21 --
 e2e_tests/conf/gpbackup_s3_plugin.yaml        |   2 +-
 e2e_tests/docker-compose.yml                  | 121 ++------
 .../scripts/prepare/gpdb_init/tables_init.sql |  16 ++
 .../scripts/prepare/prepare_gpdb_backups.sh   |  75 +++++
 e2e_tests/scripts/prepare/prepare_minio.sh    |   7 +
 e2e_tests/scripts/prepare_minio.sh            |  18 --
 e2e_tests/scripts/run_backup-info.sh          | 263 ------------------
 .../scripts/run_tests/run_backup-info.sh      | 113 ++++++++
 e2e_tests/scripts/run_tests/run_test.sh       |  60 ++++
 12 files changed, 311 insertions(+), 454 deletions(-)
 delete mode 100644 e2e_tests/conf/Dockerfile.s3_plugin
 create mode 100644 e2e_tests/scripts/prepare/gpdb_init/tables_init.sql
 create mode 100755 e2e_tests/scripts/prepare/prepare_gpdb_backups.sh
 create mode 100755 e2e_tests/scripts/prepare/prepare_minio.sh
 delete mode 100755 e2e_tests/scripts/prepare_minio.sh
 delete mode 100755 e2e_tests/scripts/run_backup-info.sh
 create mode 100755 e2e_tests/scripts/run_tests/run_backup-info.sh
 create mode 100755 e2e_tests/scripts/run_tests/run_test.sh

diff --git a/Makefile b/Makefile
index c70c93c..eaa4318 100755
--- a/Makefile
+++ b/Makefile
@@ -7,6 +7,8 @@ ROOT_DIR := $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST))))
 MUSL_CROSS := $(shell brew list| grep musl-cross)
 UID := $(shell id -u)
 GID := $(shell id -g)
+GPDB_CONTAINER_NAME := greenplum
+GPDB_USER := gpadmin
 
 .PHONY: test
 test:
@@ -17,53 +19,14 @@ test:
 test-e2e:
 	@echo "Run end-to-end tests for $(APP_NAME)"
 	@make docker
-	@make test-e2e_backup-clean
-	@make test-e2e_backup-delete
 	@make test-e2e_backup-info
-	@make test-e2e_history-clean
-	@make test-e2e_history-migrate
-	@make test-e2e_report-info
 
 .PHONY: test-e2e_backup-info
 test-e2e_backup-info:
 	@echo "Run end-to-end tests for $(APP_NAME) for backup-info command"
 	$(call down_docker_compose)
-	$(call run_docker_compose,backup-info)
-	$(call down_docker_compose)
-
-.PHONY: test-e2e_backup-delete
-test-e2e_backup-delete:
-	@echo "Run end-to-end tests for $(APP_NAME) for backup-delete command"
-	$(call down_docker_compose)
-	$(call run_docker_compose,backup-delete)
-	$(call down_docker_compose)
-
-.PHONY: test-e2e_backup-clean
-test-e2e_backup-clean:
-	@echo "Run end-to-end tests for $(APP_NAME) for backup-clean command"
-	$(call down_docker_compose)
-	$(call run_docker_compose,backup-clean)
-	$(call down_docker_compose)
-
-.PHONY: test-e2e_history-clean
-test-e2e_history-clean:
-	@echo "Run end-to-end tests for $(APP_NAME) for history-clean command"
-	$(call down_docker_compose)
-	$(call run_docker_compose,history-clean)
-	$(call down_docker_compose)
-
-.PHONY: test-e2e_history-migrate
-test-e2e_history-migrate:
-	@echo "Run end-to-end tests for $(APP_NAME) for history-migrate command"
-	$(call down_docker_compose)
-	$(call run_docker_compose,history-migrate)
-	$(call
down_docker_compose) - -.PHONY: test-e2e_report-info -test-e2e_report-info: - @echo "Run end-to-end tests for $(APP_NAME) for report-info command" - $(call down_docker_compose) - $(call run_docker_compose,report-info) + $(call run_docker_compose) + $(call run_e2e_tests,backup-info) $(call down_docker_compose) .PHONY: test-e2e-down @@ -116,16 +79,15 @@ docker-alpine: @echo "Version $(BRANCH)-$(GIT_REV)" DOCKER_BUILDKIT=1 docker build --pull -f Dockerfile.alpine --build-arg REPO_BUILD_TAG=$(BRANCH)-$(GIT_REV) -t $(APP_NAME)-alpine . -define e2e_command - @echo "Run end-to-end tests for $(APP_NAME) for ${1} command" - docker run --rm -v $(ROOT_DIR)/e2e_tests/:/home/gpbackman/e2e_tests --name="$(APP_NAME)" "$(APP_NAME)" /home/gpbackman/e2e_tests/run_e2e_${1}.sh -endef define run_docker_compose - GPBACKMAN_UID=$(UID) GPBACKMAN_GID=$(GID) docker compose -f e2e_tests/docker-compose.yml build --force-rm --parallel ${1} - GPBACKMAN_UID=$(UID) GPBACKMAN_GID=$(GID) docker compose -f e2e_tests/docker-compose.yml run --rm --name ${1} ${1} + docker compose -f e2e_tests/docker-compose.yml up -d endef define down_docker_compose - GPBACKMAN_UID=$(UID) GPBACKMAN_GID=$(GID) docker compose -f e2e_tests/docker-compose.yml down -v + docker compose -f e2e_tests/docker-compose.yml down -v +endef + +define run_e2e_tests + docker exec "$(GPDB_CONTAINER_NAME)" su - ${GPDB_USER} -c "/home/$(GPDB_USER)/run_tests/run_test.sh $(1)" endef \ No newline at end of file diff --git a/e2e_tests/.env b/e2e_tests/.env index 01560fa..7bede92 100644 --- a/e2e_tests/.env +++ b/e2e_tests/.env @@ -1,9 +1,7 @@ IMAGE_GPBACKMAN=gpbackman -IMAGE_TAG_MINIO=RELEASE.2023-09-07T02-05-02Z -IMAGE_TAG_MINIO_MC=RELEASE.2023-09-07T22-48-55Z -# Don't upgade s3 plugin version until https://github.com/greenplum-db/gpbackup-s3-plugin/issues/61 -# will be fixed. -S3_PLUGIN_VERSION=1.10.0 + +IMAGE_TAG_MINIO=RELEASE.2025-04-22T22-12-26Z +IMAGE_TAG_MINIO_MC=RELEASE.2025-04-16T18-13-26Z MINIO_ROOT_USER=minio MINIO_ROOT_PASSWORD=minioBackup MINIO_SITE_REGION=us-west-1 @@ -12,3 +10,6 @@ S3_MINIO_BUCKET=backup S3_MINIO_HOSTNAME=myminio S3_MINIO_KEY=demo S3_MINIO_KEY_SECRET=demoBackup + +IMAGE_TAG_GREENPLUM=6.27.1 +GREENPLUM_PASSWORD=gparray diff --git a/e2e_tests/conf/Dockerfile.s3_plugin b/e2e_tests/conf/Dockerfile.s3_plugin deleted file mode 100644 index 84bd009..0000000 --- a/e2e_tests/conf/Dockerfile.s3_plugin +++ /dev/null @@ -1,21 +0,0 @@ -ARG S3_PLUGIN_VERSION="1.10.1" - -# Starting from 25.05.2024, all Greenplum repositories (including gpbackup-s3-plugin) were transferred -# to the archive on GitHub. At the same time, all tags have been deleted from the archives. -# The fork containing the necessary tags is used for testing. 
- -FROM golang:1.24-bookworm AS s3_plugin-builder -ARG S3_PLUGIN_VERSION -RUN apt-get update \ - && DEBIAN_FRONTEND=noninteractive apt-get install -y build-essential bash perl wget ca-certificates \ - # && wget https://github.com/greenplum-db/gpbackup-s3-plugin/archive/refs/tags/${S3_PLUGIN_VERSION}.tar.gz -O /tmp/gpbackup-s3-plugin-${S3_PLUGIN_VERSION}.tar.gz \ - && wget https://github.com/woblerr/gpbackup-s3-plugin/archive/refs/tags/${S3_PLUGIN_VERSION}.tar.gz -O /tmp/gpbackup-s3-plugin-${S3_PLUGIN_VERSION}.tar.gz \ - && mkdir -p /tmp/gpbackup-s3-plugin \ - && tar -xzf /tmp/gpbackup-s3-plugin-${S3_PLUGIN_VERSION}.tar.gz --strip-components=1 -C /tmp/gpbackup-s3-plugin \ - && cd /tmp/gpbackup-s3-plugin \ - && make build \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* - -FROM gpbackman AS gpbackman-plugins -COPY --from=s3_plugin-builder /go/bin/gpbackup_s3_plugin /home/gpbackman/gpbackup_s3_plugin diff --git a/e2e_tests/conf/gpbackup_s3_plugin.yaml b/e2e_tests/conf/gpbackup_s3_plugin.yaml index 10902e9..7a7bf0a 100644 --- a/e2e_tests/conf/gpbackup_s3_plugin.yaml +++ b/e2e_tests/conf/gpbackup_s3_plugin.yaml @@ -1,5 +1,5 @@ --- -executablepath: /home/gpbackman/gpbackup_s3_plugin +executablepath: /usr/local/greenplum-db/bin/gpbackup_s3_plugin options: region: us-west-1 endpoint: minio:9000 diff --git a/e2e_tests/docker-compose.yml b/e2e_tests/docker-compose.yml index 76faee6..8badab3 100644 --- a/e2e_tests/docker-compose.yml +++ b/e2e_tests/docker-compose.yml @@ -37,123 +37,48 @@ services: minio: condition: service_healthy volumes: - - "./scripts/prepare_minio.sh:/prepare_minio.sh" - - "./src_data:/tmp/src_data" + - "./scripts/prepare/prepare_minio.sh:/prepare_minio.sh" entrypoint: /prepare_minio.sh networks: - e2e -################################################################ - # Test backup-info command. - backup-info: - image: ${IMAGE_GPBACKMAN} - container_name: backup-info - hostname: backup-info - volumes: - - "./src_data:/home/gpbackman/src_data" - - "./scripts/run_backup-info.sh:/home/gpbackman/run_backup-info.sh" - command: /home/gpbackman/run_backup-info.sh - networks: - - e2e - - ################################################################ - # Test backup-delete command. - backup-delete: - build: - context: . - dockerfile: ./conf/Dockerfile.s3_plugin - args: - S3_PLUGIN_VERSION: ${S3_PLUGIN_VERSION} - image: backup-delete - container_name: backup-delete - hostname: backup-delete - depends_on: - minio: - condition: service_started - prepare_minio: - condition: service_completed_successfully - volumes: - - "./src_data:/home/gpbackman/src_data" - - "./scripts/run_backup-delete.sh:/home/gpbackman/run_backup-delete.sh" - - "./conf/gpbackup_s3_plugin.yaml:/home/gpbackman/gpbackup_s3_plugin.yaml" - - "./conf/gpbackup_s3_plugin_invalid.yaml:/home/gpbackman/gpbackup_s3_plugin_invalid.yaml" - command: /home/gpbackman/run_backup-delete.sh - networks: - - e2e - - ################################################################ - # Test history-clean command. - history-clean: - image: ${IMAGE_GPBACKMAN} - container_name: history-clean - hostname: history-clean - volumes: - - "./src_data:/home/gpbackman/src_data" - - "./scripts/run_history-clean.sh:/home/gpbackman/run_history-clean.sh" - command: /home/gpbackman/run_history-clean.sh - networks: - - e2e - ################################################################ - # Test history-migrate command. - history-migrate: + # Export gpbackman binary to shared volume. 
+ gpbackman-export: image: ${IMAGE_GPBACKMAN} - container_name: history-migrate - hostname: history-migrate + container_name: gpbackman-export + hostname: gpbackman-export volumes: - - "./src_data:/home/gpbackman/src_data" - - "./scripts/run_history-migrate.sh:/home/gpbackman/run_history-migrate.sh" - command: /home/gpbackman/run_history-migrate.sh + - gpbackman_bin:/export + entrypoint: ["/bin/sh","-c","cp /usr/bin/gpbackman /export/gpbackman && chmod 755 /export/gpbackman && sleep infinity"] networks: - e2e ################################################################ - # Test report-info command. - report-info: - build: - context: . - dockerfile: ./conf/Dockerfile.s3_plugin - args: - S3_PLUGIN_VERSION: ${S3_PLUGIN_VERSION} - image: report-info - container_name: report-info - hostname: report-info + greenplum: + image: woblerr/greenplum:${IMAGE_TAG_GREENPLUM} + container_name: greenplum + hostname: greenplum depends_on: minio: - condition: service_started + condition: service_healthy prepare_minio: condition: service_completed_successfully - volumes: - - "./src_data:/home/gpbackman/src_data" - - "./scripts/run_report-info.sh:/home/gpbackman/run_report-info.sh" - - "./conf/gpbackup_s3_plugin.yaml:/home/gpbackman/gpbackup_s3_plugin.yaml" - command: /home/gpbackman/run_report-info.sh - networks: - - e2e - - ################################################################ - # Test backup-clean command. - backup-clean: - build: - context: . - dockerfile: ./conf/Dockerfile.s3_plugin - args: - S3_PLUGIN_VERSION: ${S3_PLUGIN_VERSION} - image: backup-clean - container_name: backup-clean - hostname: backup-clean - depends_on: - minio: + gpbackman-export: condition: service_started - prepare_minio: - condition: service_completed_successfully + environment: + - "GREENPLUM_PASSWORD" volumes: - - "./src_data:/home/gpbackman/src_data" - - "./scripts/run_backup-clean.sh:/home/gpbackman/run_backup-clean.sh" - - "./conf/gpbackup_s3_plugin.yaml:/home/gpbackman/gpbackup_s3_plugin.yaml" - command: /home/gpbackman/run_backup-clean.sh + - ./conf/gpbackup_s3_plugin.yaml:/home/gpadmin/gpbackup_s3_plugin.yaml + - ./scripts/prepare/gpdb_init:/docker-entrypoint-initdb.d + - gpbackman_bin:/home/gpadmin/gpbackman + - ./scripts/prepare/prepare_gpdb_backups.sh:/home/gpadmin/prepare_gpdb_backups.sh + - ./scripts/run_tests:/home/gpadmin/run_tests networks: - e2e networks: e2e: + +volumes: + gpbackman_bin: diff --git a/e2e_tests/scripts/prepare/gpdb_init/tables_init.sql b/e2e_tests/scripts/prepare/gpdb_init/tables_init.sql new file mode 100644 index 0000000..f01adc8 --- /dev/null +++ b/e2e_tests/scripts/prepare/gpdb_init/tables_init.sql @@ -0,0 +1,16 @@ +CREATE SCHEMA IF NOT EXISTS sch1; +CREATE SCHEMA IF NOT EXISTS sch2; + +DROP TABLE IF EXISTS sch1.tbl_a; +DROP TABLE IF EXISTS sch1.tbl_b; +DROP TABLE IF EXISTS sch2.tbl_c; +DROP TABLE IF EXISTS sch2.tbl_d; + +CREATE TABLE sch1.tbl_a AS SELECT i FROM generate_series(1,100000) AS i; +CREATE TABLE sch1.tbl_b AS SELECT i FROM generate_series(1,100000) AS i; + +CREATE TABLE sch2.tbl_c (a int, b int) WITH (appendoptimized = true) DISTRIBUTED BY (a); +INSERT INTO sch2.tbl_c SELECT i, i FROM generate_series(1,100000) i; + +CREATE TABLE sch2.tbl_d (a int, b int) WITH (appendoptimized = true, orientation = column) DISTRIBUTED BY (a); +INSERT INTO sch2.tbl_d SELECT i, i FROM generate_series(1,100000) i; diff --git a/e2e_tests/scripts/prepare/prepare_gpdb_backups.sh b/e2e_tests/scripts/prepare/prepare_gpdb_backups.sh new file mode 100755 index 0000000..f542f57 --- /dev/null +++ 
b/e2e_tests/scripts/prepare/prepare_gpdb_backups.sh
@@ -0,0 +1,75 @@
+#!/usr/bin/env bash
+set -Eeuo pipefail
+
+# Backup sequence overview:
+# 1. full_local : Full LOCAL backup (all tables)
+# 2. full_local_include_table : Full LOCAL backup including only sch1.tbl_a
+# 3. full_local_exclude_table : Full LOCAL backup excluding sch1.tbl_b
+# 4. metadata_only_s3 : Metadata-only S3 backup (no data)
+# 5. full_s3 : Full S3 backup (all tables, leaf partition data)
+# 6. full_s3_include_table : Full S3 backup including only sch2.tbl_c
+# 7. full_s3_exclude_table : Full S3 backup excluding sch2.tbl_d
+# 8. (data change) : Insert into sch2.tbl_c and sch2.tbl_d
+# 9. incr_s3 : Incremental S3 backup
+# 10. incr_s3_include_table : Incremental S3 backup including only sch2.tbl_c
+# 11. (data change) : Insert more rows into sch2.tbl_c
+# 12. incr_s3_exclude_table : Incremental S3 backup excluding sch2.tbl_d
+# 13. data_only_local : Data-only LOCAL backup (no metadata)
+# 14. full_local : Final full LOCAL backup (all tables)
+
+DB_NAME="demo"
+PLUGIN_CFG=/home/gpadmin/gpbackup_s3_plugin.yaml
+COMMON_PLUGIN_FLAGS=(--plugin-config "$PLUGIN_CFG")
+
+run_backup(){
+    local label="$1"; shift
+    echo "[INFO] Running backup: $label"
+    gpbackup --dbname ${DB_NAME} "$@" || { echo "[ERROR] Backup $label failed"; exit 1; }
+    sleep 10
+}
+
+# Full LOCAL no filters
+run_backup full_local
+
+# Full LOCAL include-table sch1.tbl_a
+run_backup full_local_include_table --include-table sch1.tbl_a
+
+# Full LOCAL exclude-table sch1.tbl_b
+run_backup full_local_exclude_table --exclude-table sch1.tbl_b
+
+# Metadata-only s3
+run_backup metadata_only_s3 "${COMMON_PLUGIN_FLAGS[@]}" --metadata-only
+
+# Full S3 no filters
+run_backup full_s3 "${COMMON_PLUGIN_FLAGS[@]}" --leaf-partition-data
+
+# Full S3 include-table sch2.tbl_c
+run_backup full_s3_include_table "${COMMON_PLUGIN_FLAGS[@]}" --include-table sch2.tbl_c --leaf-partition-data
+
+# Full S3 exclude-table sch2.tbl_d
+run_backup full_s3_exclude_table "${COMMON_PLUGIN_FLAGS[@]}" --exclude-table sch2.tbl_d --leaf-partition-data
+
+# Insert data
+psql -d demo -c "INSERT INTO sch2.tbl_c SELECT i, i FROM generate_series(1,100000) i;"
+psql -d demo -c "INSERT INTO sch2.tbl_d SELECT i, i FROM generate_series(1,100000) AS i;"
+
+# Incremental S3 no filters
+run_backup incr_s3 "${COMMON_PLUGIN_FLAGS[@]}" --incremental --leaf-partition-data
+
+# Incremental S3 include-table sch2.tbl_c
+run_backup incr_s3_include_table "${COMMON_PLUGIN_FLAGS[@]}" --incremental --include-table sch2.tbl_c --leaf-partition-data
+
+# Insert data
+psql -d demo -c "INSERT INTO sch2.tbl_c SELECT i, i FROM generate_series(1,100000) i;"
+
+# Incremental S3 exclude-table sch2.tbl_d
+run_backup incr_s3_exclude_table "${COMMON_PLUGIN_FLAGS[@]}" --incremental --exclude-table sch2.tbl_d --leaf-partition-data
+
+# Data-only LOCAL no filters
+run_backup data_only_local --data-only
+
+# Full LOCAL no filters
+run_backup full_local
+
+echo "[INFO] Backups prepared successfully"
+exit 0
diff --git a/e2e_tests/scripts/prepare/prepare_minio.sh b/e2e_tests/scripts/prepare/prepare_minio.sh
new file mode 100755
index 0000000..f5de392
--- /dev/null
+++ b/e2e_tests/scripts/prepare/prepare_minio.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+set -eu
+
+mc config host add ${S3_MINIO_HOSTNAME} http://minio:9000 ${MINIO_ROOT_USER} ${MINIO_ROOT_PASSWORD};
+mc mb ${S3_MINIO_HOSTNAME}/${S3_MINIO_BUCKET};
+mc admin user add ${S3_MINIO_HOSTNAME} ${S3_MINIO_KEY} ${S3_MINIO_KEY_SECRET};
+mc admin policy attach ${S3_MINIO_HOSTNAME} readwrite --user
${S3_MINIO_KEY} diff --git a/e2e_tests/scripts/prepare_minio.sh b/e2e_tests/scripts/prepare_minio.sh deleted file mode 100755 index 705bf45..0000000 --- a/e2e_tests/scripts/prepare_minio.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/sh - -set -e - -mc config host add ${S3_MINIO_HOSTNAME} http://minio:9000 ${MINIO_ROOT_USER} ${MINIO_ROOT_PASSWORD}; -mc mb ${S3_MINIO_HOSTNAME}/${S3_MINIO_BUCKET}; -mc admin user add ${S3_MINIO_HOSTNAME} ${S3_MINIO_KEY} ${S3_MINIO_KEY_SECRET}; -mc admin policy attach ${S3_MINIO_HOSTNAME} readwrite --user ${S3_MINIO_KEY} - -TIMESTAMP="20230724090000" -touch /tmp/test.txt -mc cp /tmp/test.txt ${S3_MINIO_HOSTNAME}/${S3_MINIO_BUCKET}/test/backups/${TIMESTAMP:0:8}/${TIMESTAMP}/test.txt -mc cp /tmp/src_data/gpbackup_${TIMESTAMP}_report ${S3_MINIO_HOSTNAME}/${S3_MINIO_BUCKET}/test/backups/${TIMESTAMP:0:8}/${TIMESTAMP}/gpbackup_${TIMESTAMP}_report - -TIMESTAMPS="20230721090000 20230722100000 20230723082000 20230725101115 20230725101152 20230725101959 20230725102831 20230725102950 20230725110051" -for i in ${TIMESTAMPS}; do - mc cp /tmp/test.txt ${S3_MINIO_HOSTNAME}/${S3_MINIO_BUCKET}/test/backups/${i:0:8}/${i}/test.txt -done diff --git a/e2e_tests/scripts/run_backup-info.sh b/e2e_tests/scripts/run_backup-info.sh deleted file mode 100755 index cee3a18..0000000 --- a/e2e_tests/scripts/run_backup-info.sh +++ /dev/null @@ -1,263 +0,0 @@ -#!/bin/sh - -# Local image for e2e tests should be built before running tests. -# See make file for details. -# This test works with files from src_data directory. -# If new file with backup info is added to src_data, it's nessary to update test cases in this script. - -GPBACKMAN_TEST_COMMAND="backup-info" - -SRC_DIR="/home/gpbackman/src_data" - -# backup-info commnad for sqlite backup history format. -GPBACKMAN_RESULT_SQLITE=$(gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${SRC_DIR}/gpbackup_history.db \ ---deleted \ ---failed) - -IFS=$'\n' -################################################################ -# Test 1. -# Simple test to check the number of provided backups. -# Format: -# status | type | object filtering| plugin | date deleted | repetitions. -# For backup without plugin info - blank line, so them skips in this test. -TEST_ID="1" - -REGEX_LIST='''Success|data-only|gpbackup_s3_plugin|1 -Success|metadata-only|gpbackup_s3_plugin|2 -Success|full|gpbackup_s3_plugin|4 -Failure|full|gpbackup_s3_plugin|3 -Success|incremental|gpbackup_s3_plugin|10''' - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -for i in ${REGEX_LIST} -do - bckp_status=$(echo "${i}" | cut -f1 -d'|') - bckp_type=$(echo "${i}" | cut -f2 -d'|') - bckp_plugin=$(echo "${i}" | cut -f3 -d'|') - cnt=$(echo "${i}" | cut -f4 -d'|') - result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | grep -w "${bckp_status}" | grep -w "${bckp_type}" | grep -w "${bckp_plugin}" | wc -l | tr -d ' ') - if [ "${result_cnt_sqlite}" != "${cnt}" ]; then\ - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\n'${i}': get_yaml=${result_cnt_yaml}, get_sqlite=${result_cnt_sqlite}, want=${cnt}" - exit 1 - fi -done -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -################################################################ -# Test 2. -# Simple test to check full info about backups. -# Format: -# timestamp| date | status | database| type| plugin | duration | repetitions. -# The match of all fields in the backup information is checked. -# Don't test backup with empty object filtering, plugin info and non-empty dete deleted fields. 
-TEST_ID="2" - -REGEX_LIST='''20230806230400|Sun Aug 06 2023 23:04:00|Failure|demo|full|gpbackup_s3_plugin|00:00:38|1 -20230725102950|Tue Jul 25 2023 10:29:50|Success|demo|incremental|gpbackup_s3_plugin|00:00:19|1 -20230725110051|Tue Jul 25 2023 11:00:51|Success|demo|incremental|gpbackup_s3_plugin|00:00:20|1 -20230725102831|Tue Jul 25 2023 10:28:31|Success|demo|incremental|gpbackup_s3_plugin|00:00:18|1 -20230725101959|Tue Jul 25 2023 10:19:59|Success|demo|incremental|gpbackup_s3_plugin|00:00:22|1 -20230725101152|Tue Jul 25 2023 10:11:52|Success|demo|incremental|gpbackup_s3_plugin|00:00:18|1 -20230725101115|Tue Jul 25 2023 10:11:15|Success|demo|full|gpbackup_s3_plugin|00:00:20|1 -20230724090000|Mon Jul 24 2023 09:00:00|Success|demo|metadata-only|gpbackup_s3_plugin|00:05:17|1 -20230723082000|Sun Jul 23 2023 08:20:00|Success|demo|data-only|gpbackup_s3_plugin|00:35:17|1 -20230722100000|Sat Jul 22 2023 10:00:00|Success|demo|full|gpbackup_s3_plugin|00:25:17|1 -20230721090000|Fri Jul 21 2023 09:00:00|Success|demo|metadata-only|gpbackup_s3_plugin|00:04:17|1''' - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -for i in ${REGEX_LIST} -do - bckp_timestamp=$(echo "${i}" | cut -f1 -d'|') - bckp_date=$(echo "${i}" | cut -f2 -d'|') - bckp_status=$(echo "${i}" | cut -f3 -d'|') - bckp_database=$(echo "${i}" | cut -f4 -d'|') - bckp_type=$(echo "${i}" | cut -f5 -d'|') - bckp_plugin=$(echo "${i}" | cut -f6 -d'|') - bckp_duration=$(echo "${i}" | cut -f7 -d'|') - cnt=$(echo "${i}" | cut -f8 -d'|') - result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | \ - grep -w "${bckp_timestamp}" | \ - grep -w "${bckp_date}" | \ - grep -w "${bckp_status}" | \ - grep -w "${bckp_database}" | \ - grep -w "${bckp_type}" | \ - grep -w "${bckp_plugin}" | \ - grep -w "${bckp_duration}" | \ - wc -l | tr -d ' ') - if [ "${result_cnt_sqlite}" != "${cnt}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\n'${i}': get_yaml=${result_cnt_yaml}, get_sqlite=${result_cnt_sqlite}, want=${cnt}" - exit 1 - fi -done -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -################################################################ -# Test 3. -# Simple test to check full info about backups with deleted field. -# Format: -# timestamp| date | status | database| type | plugin | duration | date deleted | repetitions. -# The match of all fields in the backup information is checked. -# Don't test backup with empty object filtering field. -TEST_ID="3" - -REGEX_LIST="20230725110310|Tue Jul 25 2023 11:03:10|Success|demo|incremental|gpbackup_s3_plugin|00:00:18|Wed Jul 26 2023 11:03:28|1" - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." 
-for i in ${REGEX_LIST} -do - bckp_timestamp=$(echo "${i}" | cut -f1 -d'|') - bckp_date=$(echo "${i}" | cut -f2 -d'|') - bckp_status=$(echo "${i}" | cut -f3 -d'|') - bckp_database=$(echo "${i}" | cut -f4 -d'|') - bckp_type=$(echo "${i}" | cut -f5 -d'|') - bckp_plugin=$(echo "${i}" | cut -f6 -d'|') - bckp_duration=$(echo "${i}" | cut -f7 -d'|') - bckp_date_deleted=$(echo "${i}" | cut -f8 -d'|') - cnt=$(echo "${i}" | cut -f9 -d'|') - result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | \ - grep -w "${bckp_timestamp}" | \ - grep -w "${bckp_date}" | \ - grep -w "${bckp_status}" | \ - grep -w "${bckp_database}" | \ - grep -w "${bckp_type}" | \ - grep -w "${bckp_plugin}" | \ - grep -w "${bckp_duration}" | \ - grep -w "${bckp_date_deleted}" | \ - wc -l | tr -d ' ') - if [ "${result_cnt_sqlite}" != "${cnt}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\n'${i}': get_yaml=${result_cnt_yaml}, get_sqlite=${result_cnt_sqlite}, want=${cnt}" - exit 1 - fi -done -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -################################################################ -# Test 4. -# Simple test to check full info about local backups. -# Format: -# timestamp| date | status | database| type| duration | repetitions. -# The match of all fields in the backup information is checked. -# Don't test backup with empty object filtering and date deleted fields. -# For local backups plugin field is empty. -TEST_ID="4" - -REGEX_LIST='''20240505201504|Sun May 05 2024 20:15:04|Success|demo|full|00:00:15|1 -20240506201504|Mon May 06 2024 20:15:04|Success|demo|full|00:00:15|1''' - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -for i in ${REGEX_LIST} -do - bckp_timestamp=$(echo "${i}" | cut -f1 -d'|') - bckp_date=$(echo "${i}" | cut -f2 -d'|') - bckp_status=$(echo "${i}" | cut -f3 -d'|') - bckp_database=$(echo "${i}" | cut -f4 -d'|') - bckp_type=$(echo "${i}" | cut -f5 -d'|') - bckp_duration=$(echo "${i}" | cut -f6 -d'|') - cnt=$(echo "${i}" | cut -f7 -d'|') - result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | \ - grep -w "${bckp_timestamp}" | \ - grep -w "${bckp_date}" | \ - grep -w "${bckp_status}" | \ - grep -w "${bckp_database}" | \ - grep -w "${bckp_type}" | \ - grep -w "${bckp_duration}" | \ - wc -l | tr -d ' ') - if [ "${result_cnt_sqlite}" != "${cnt}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\n'${i}': get_yaml=${result_cnt_yaml}, get_sqlite=${result_cnt_sqlite}, want=${cnt}" - exit 1 - fi -done -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -################################################################ -# Test 5. -# Simple test to check type option -# Format: -# status | type| repetitions. -# For backup without plugin info - blank line, so them skips in this test. -TEST_ID="5" - -REGEX_LIST='''Success|full|6 -Failure|full|3''' - -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -for i in ${REGEX_LIST} -do - bckp_status=$(echo "${i}" | cut -f1 -d'|') - bckp_type=$(echo "${i}" | cut -f2 -d'|') - cnt=$(echo "${i}" | cut -f3 -d'|') - result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | grep -w "${bckp_status}" | grep -w "${bckp_type}" | wc -l | tr -d ' ') - if [ "${result_cnt_sqlite}" != "${cnt}" ]; then\ - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\n'${i}': get_yaml=${result_cnt_yaml}, get_sqlite=${result_cnt_sqlite}, want=${cnt}" - exit 1 - fi -done -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." 
- -################################################################ -# Test 6. -# Simple test to check filtering by --type flag. -# Format: -# status| type| repetitions. -# Testing on incremental backup type. -TEST_ID="6" - -# backup-info commnad for sqlite backup history format. -GPBACKMAN_RESULT_SQLITE=$(gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${SRC_DIR}/gpbackup_history.db \ ---type incremental) - -REGEX_LIST='''Success|incremental|8''' - -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -for i in ${REGEX_LIST} -do - bckp_status=$(echo "${i}" | cut -f1 -d'|') - bckp_type=$(echo "${i}" | cut -f2 -d'|') - cnt=$(echo "${i}" | cut -f3 -d'|') - result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | grep -w "${bckp_status}" | grep -w "${bckp_type}" | wc -l | tr -d ' ') - if [ "${result_cnt_sqlite}" != "${cnt}" ]; then\ - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\n'${i}': get_yaml=${result_cnt_yaml}, get_sqlite=${result_cnt_sqlite}, want=${cnt}" - exit 1 - fi -done -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - - -################################################################ -# Test 7. -# Simple test to check filtering by --schema flag. -# Format: -# status| repetitions. -# Testing on include test1 schema. -TEST_ID="7" - -GPBACKMAN_RESULT_SQLITE=$(gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${SRC_DIR}/gpbackup_history.db \ ---deleted \ ---schema test1) - -REGEX_LIST='''Success|3''' - -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -for i in ${REGEX_LIST} -do - bckp_status=$(echo "${i}" | cut -f1 -d'|') - cnt=$(echo "${i}" | cut -f2 -d'|') - result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | grep -w "${bckp_status}" | wc -l | tr -d ' ') - if [ "${result_cnt_sqlite}" != "${cnt}" ]; then\ - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\n'${i}': get_yaml=${result_cnt_yaml}, get_sqlite=${result_cnt_sqlite}, want=${cnt}" - exit 1 - fi -done -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." 
- - -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} all tests passed" -exit 0 diff --git a/e2e_tests/scripts/run_tests/run_backup-info.sh b/e2e_tests/scripts/run_tests/run_backup-info.sh new file mode 100755 index 0000000..3dd8ad8 --- /dev/null +++ b/e2e_tests/scripts/run_tests/run_backup-info.sh @@ -0,0 +1,113 @@ +#!/usr/bin/env bash +set -Eeuo pipefail + +COMMAND="backup-info" +BIN_DIR="/home/gpadmin/gpbackman" +DATA_DIR="/data/master/gpseg-1" + +run_command(){ + local label="$1"; shift + echo "[INFO] Running ${COMMAND}: $label" + ${BIN_DIR}/gpbackman backup-info --history-db ${DATA_DIR}/gpbackup_history.db --deleted --failed "$@" || { echo "[ERROR] ${COMMAND} $label failed"; exit 1; } +} + +################################################################ +# Count of all backups in the history database +test_id=1 + +echo "[INFO] ${COMMAND} TEST ${test_id}" + +want=12 +got=$(run_command total_backups | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + +[ "$got" -eq "$want" ] || { echo "[ERROR] Expected $want , got $got"; exit 1; } + +echo "[INFO] ${COMMAND} TEST ${test_id} is successful" + +################################################################ +# Count of all full backups in the history database +# Compare the number of backups from the output of all backups and +# from the output with the --type full flag +test_id=2 + +echo "[INFO] ${COMMAND} TEST ${test_id}" + +want=7 +got1=$(run_command total_full_backups | grep -E '^[[:space:]][0-9]{14} ' | grep full | wc -l) +got2=$(run_command filter_full_backups --type full | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + +[ "$got1" -eq "$want" ] && [ "$got2" -eq "$want" ] || { echo "[ERROR] Expected $want , got1=$got1, got2=$got2"; exit 1; } + +echo "[INFO] ${COMMAND} TEST ${test_id} is successful" + +################################################################ +# Count of all incremental backups in the history database +# Compare the number of backups from the output of all backups and +# from the output with the --type incremental flag +test_id=3 + +echo "[INFO] ${COMMAND} TEST ${test_id}" + +want=3 +got1=$(run_command total_incremental_backups | grep -E '^[[:space:]][0-9]{14} ' | grep incremental | wc -l) +got2=$(run_command filter_incremental_backups --type incremental | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + +[ "$got1" -eq "$want" ] && [ "$got2" -eq "$want" ] || { echo "[ERROR] Expected $want , got1=$got1, got2=$got2"; exit 1; } + +echo "[INFO] ${COMMAND} TEST ${test_id} is successful" + +################################################################ +# Count of backups which include table sch2.tbl_c +test_id=4 + +echo "[INFO] ${COMMAND} TEST ${test_id}" + +want=2 +got=$(run_command total_include_table_backups --table sch2.tbl_c | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + +[ "$got" -eq "$want" ] || { echo "[ERROR] Expected $want , got $got"; exit 1; } + +echo "[INFO] ${COMMAND} TEST ${test_id} is successful" + +################################################################ +# Count of backups which exclude table sch2.tbl_d + +test_id=5 + +echo "[INFO] ${COMMAND} TEST ${test_id}" + +want=2 +got=$(run_command total_exclude_table_backups --table sch2.tbl_d --exclude | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + +[ "$got" -eq "$want" ] || { echo "[ERROR] Expected $want , got $got"; exit 1; } + +echo "[INFO] ${COMMAND} TEST ${test_id} is successful" + +################################################################ +# Count of full backups which include table sch2.tbl_c +# Use --type full to filter only full backups +test_id=6 + +echo 
"[INFO] ${COMMAND} TEST ${test_id}" + +want=1 +got=$(run_command total_include_table_full_backups --table sch2.tbl_c --type full | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + +[ "$got" -eq "$want" ] || { echo "[ERROR] Expected $want , got $got"; exit 1; } + +echo "[INFO] ${COMMAND} TEST ${test_id} is successful" + + +################################################################ +# Count of incremental backups which exclude table sch2.tbl_d +# Use --type incremental to filter only incremental backups +test_id=7 + +echo "[INFO] ${COMMAND} TEST ${test_id}" + +want=1 +got=$(run_command total_exclude_table_incremental_backups --table sch2.tbl_d --exclude --type incremental | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + +[ "$got" -eq "$want" ] || { echo "[ERROR] Expected $want , got $got"; exit 1; } + +echo "[INFO] ${COMMAND} TEST ${test_id} is successful" diff --git a/e2e_tests/scripts/run_tests/run_test.sh b/e2e_tests/scripts/run_tests/run_test.sh new file mode 100755 index 0000000..bc3c676 --- /dev/null +++ b/e2e_tests/scripts/run_tests/run_test.sh @@ -0,0 +1,60 @@ +#!/usr/bin/env bash +set -Eeuo pipefail + +TEST_COMMAND=${1:-} +GP_DB_NAME="demo" +HOME_DIR="/home/gpadmin" +SCRIPTS_DIR="${HOME_DIR}/run_tests" + +wait_for_service() { + local max_attempts=${1:-10} + + for i in $(seq 1 ${max_attempts}); do + if psql -d ${GP_DB_NAME} -t -c "SELECT 1;" >/dev/null 2>&1; then + echo "[INFO] Cluster ready" + return 0 + fi + echo "[INFO] Waiting cluster startup ($i/${max_attempts})" + sleep 10 + done + echo "[ERROR] Cluster failed to start within timeout" + return 1 +} + + +exec_test_for_command() { + case "${TEST_COMMAND}" in + backup-info) + "${SCRIPTS_DIR}/run_backup-info.sh" + ;; + report-info) + "${SCRIPTS_DIR}/run_report-info.sh" + ;; + backup-delete) + "${SCRIPTS_DIR}/run_backup-delete.sh" + ;; + backup-clean) + "${SCRIPTS_DIR}/run_backup-clean.sh" + ;; + history-clean) + "${SCRIPTS_DIR}/run_history-clean.sh" + ;; + history-migrate) + "${SCRIPTS_DIR}/run_history-migrate.sh" + ;; + *) + echo "[ERROR] Unknown test command: ${TEST_COMMAND}" + exit 1 + ;; + esac +} + +echo "[INFO] Check Greenplum cluster" +sleep 90 +wait_for_service + +echo "[INFO] Prepare Greenplum backups" +"${HOME_DIR}/prepare_gpdb_backups.sh" + +echo "[INFO] Run e2e tests for command: ${TEST_COMMAND}" +exec_test_for_command \ No newline at end of file From 3592a9e04b7ff8bbdea887c608d6b27273ca3033 Mon Sep 17 00:00:00 2001 From: woblerr Date: Tue, 26 Aug 2025 20:20:03 +0300 Subject: [PATCH 02/23] Refactor e2e tests for report-info command. - Replace individual test targets with unified Makefile test generation. - Switch from static file comparison to real Greenplum-based testing. - Move test script from run_report-info.sh to run_tests/run_report-info.sh. - Remove deprecated static file-based test approach. 
--- Makefile | 26 +-- e2e_tests/scripts/run_report-info.sh | 162 ------------------ .../scripts/run_tests/run_report-info.sh | 114 ++++++++++++ 3 files changed, 128 insertions(+), 174 deletions(-) delete mode 100755 e2e_tests/scripts/run_report-info.sh create mode 100755 e2e_tests/scripts/run_tests/run_report-info.sh diff --git a/Makefile b/Makefile index eaa4318..fa8941f 100755 --- a/Makefile +++ b/Makefile @@ -9,25 +9,27 @@ UID := $(shell id -u) GID := $(shell id -g) GPDB_CONTAINER_NAME := greenplum GPDB_USER := gpadmin +# List of all e2e test commands +E2E_COMMANDS := backup-info report-info backup-delete backup-clean history-clean history-migrate .PHONY: test test: @echo "Run tests for $(APP_NAME)" TZ="Etc/UTC" go test -mod=vendor -timeout=60s -count 1 ./... -.PHONY: test-e2e -test-e2e: - @echo "Run end-to-end tests for $(APP_NAME)" - @make docker - @make test-e2e_backup-info +# Define function to create e2e test targets +define define_e2e_test +.PHONY: test-e2e_$(1) +test-e2e_$(1): + @echo "Run end-to-end tests for $(APP_NAME) for $(1) command" + $$(call down_docker_compose) + $$(call run_docker_compose) + $$(call run_e2e_tests,$(1)) + $$(call down_docker_compose) +endef -.PHONY: test-e2e_backup-info -test-e2e_backup-info: - @echo "Run end-to-end tests for $(APP_NAME) for backup-info command" - $(call down_docker_compose) - $(call run_docker_compose) - $(call run_e2e_tests,backup-info) - $(call down_docker_compose) +# Generate e2e test targets for all commands +$(foreach cmd,$(E2E_COMMANDS),$(eval $(call define_e2e_test,$(cmd)))) .PHONY: test-e2e-down test-e2e-down: diff --git a/e2e_tests/scripts/run_report-info.sh b/e2e_tests/scripts/run_report-info.sh deleted file mode 100755 index 0d04c38..0000000 --- a/e2e_tests/scripts/run_report-info.sh +++ /dev/null @@ -1,162 +0,0 @@ -#!/bin/sh - -# Local image for e2e tests should be built before running tests. -# See make file for details. -# This test works with files from src_data directory. -# If new file with backup info is added to src_data, it's nessary to update test cases in this script. - -GPBACKMAN_TEST_COMMAND="report-info" - -HOME_DIR="/home/gpbackman" -SRC_DIR="${HOME_DIR}/src_data" -WORK_DIR="${HOME_DIR}/test_data" - -# Prepare general data. -rm -rf "${WORK_DIR}/" -mkdir -p "${WORK_DIR}" -cp ${SRC_DIR}/gpbackup_history_metadata_plugin.yaml \ -${SRC_DIR}/gpbackup_history_full_local.yaml \ -${SRC_DIR}/gpbackup_history.db \ -${WORK_DIR} - -################################################################ -# Test 1. -# Get report info for specified backup with gpbackup_s3_plugin. -TEST_ID="1" - -TIMESTAMP="20230724090000" - -# Execute report-info commnad. -GPBACKMAN_RESULT_SQLITE=$(gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---timestamp ${TIMESTAMP} \ ---plugin-config ${HOME_DIR}/gpbackup_s3_plugin.yaml | grep -v 'Reading Plugin Config') - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -bckp_report=$(cat ${SRC_DIR}/gpbackup_${TIMESTAMP}_report) -if [ "${bckp_report}" != "${GPBACKMAN_RESULT_SQLITE}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nbckp_report:\n${bckp_report}\nget_yaml:\n${GPBACKMAN_RESULT_YAML}\nget_sqlite:\n${GPBACKMAN_RESULT_SQLITE}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -################################################################ -# Test 2. -# Get report info for specified local backup with specifying backup directory without single-backup-dir format. 
-# Set backup directory from console. -TEST_ID="2" - -TIMESTAMP="20240505201504" -BACKUP_DIR="/tmp/testWithPrefix" -REPORT_DIR="${BACKUP_DIR}/segment-1/backups/${TIMESTAMP:0:8}/${TIMESTAMP}" -# Prepare data. -mkdir -p ${REPORT_DIR} - -cp ${SRC_DIR}/gpbackup_${TIMESTAMP}_report ${REPORT_DIR} - -# Execute report-info commnad. -GPBACKMAN_RESULT_SQLITE=$(gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---timestamp ${TIMESTAMP} \ ---backup-dir ${BACKUP_DIR}) - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -bckp_report=$(cat ${SRC_DIR}/gpbackup_${TIMESTAMP}_report) -if [ "${bckp_report}" != "${GPBACKMAN_RESULT_SQLITE}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nbckp_report:\n${bckp_report}\nget_yaml:\n${GPBACKMAN_RESULT_YAML}\nget_sqlite:\n${GPBACKMAN_RESULT_SQLITE}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - - -################################################################ -# Test 3. -# Get report info for specified local backup with specifying backup directory without single-backup-dir format. -# Set backup directory from history database. -TEST_ID="3" - -TIMESTAMP="20240505201504" -BACKUP_DIR="/tmp/testWithPrefix" -REPORT_DIR="${BACKUP_DIR}/segment-1/backups/${TIMESTAMP:0:8}/${TIMESTAMP}" -# Prepare data. -mkdir -p ${REPORT_DIR} - -cp ${SRC_DIR}/gpbackup_${TIMESTAMP}_report ${REPORT_DIR} - -# Execute report-info commnad. -GPBACKMAN_RESULT_SQLITE=$(gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---timestamp ${TIMESTAMP}) - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -bckp_report=$(cat ${SRC_DIR}/gpbackup_${TIMESTAMP}_report) -if [ "${bckp_report}" != "${GPBACKMAN_RESULT_SQLITE}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nbckp_report:\n${bckp_report}\nget_yaml:\n${GPBACKMAN_RESULT_YAML}\nget_sqlite:\n${GPBACKMAN_RESULT_SQLITE}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - - -################################################################ -# Test 4s. -# Get report info for specified local backup with specifying backup directory with single-backup-dir format. -# Set backup directory from console. -TEST_ID="4" - -TIMESTAMP="20240506201504" -BACKUP_DIR="/tmp/testNoPrefix" -REPORT_DIR="${BACKUP_DIR}/backups/${TIMESTAMP:0:8}/${TIMESTAMP}" -# Prepare data. -mkdir -p ${REPORT_DIR} - -cp ${SRC_DIR}/gpbackup_${TIMESTAMP}_report ${REPORT_DIR} - -# Execute report-info commnad. -GPBACKMAN_RESULT_SQLITE=$(gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---timestamp ${TIMESTAMP} \ ---backup-dir ${BACKUP_DIR}) - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -bckp_report=$(cat ${SRC_DIR}/gpbackup_${TIMESTAMP}_report) -if [ "${bckp_report}" != "${GPBACKMAN_RESULT_SQLITE}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nbckp_report:\n${bckp_report}\nget_yaml:\n${GPBACKMAN_RESULT_YAML}\nget_sqlite:\n${GPBACKMAN_RESULT_SQLITE}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - - -################################################################ -# Test 4. -# Get report info for specified local backup with specifying backup directory with single-backup-dir format. -# Set backup directory from history database. 
-TEST_ID="5" - -TIMESTAMP="20240506201504" -BACKUP_DIR="/tmp/testNoPrefix" -REPORT_DIR="${BACKUP_DIR}/backups/${TIMESTAMP:0:8}/${TIMESTAMP}" -# Prepare data. -mkdir -p ${REPORT_DIR} - -cp ${SRC_DIR}/gpbackup_${TIMESTAMP}_report ${REPORT_DIR} - -# Execute report-info commnad. -GPBACKMAN_RESULT_SQLITE=$(gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---timestamp ${TIMESTAMP}) - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -bckp_report=$(cat ${SRC_DIR}/gpbackup_${TIMESTAMP}_report) -if [ "${bckp_report}" != "${GPBACKMAN_RESULT_SQLITE}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nbckp_report:\n${bckp_report}\nget_yaml:\n${GPBACKMAN_RESULT_YAML}\nget_sqlite:\n${GPBACKMAN_RESULT_SQLITE}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} all tests passed" -exit 0 diff --git a/e2e_tests/scripts/run_tests/run_report-info.sh b/e2e_tests/scripts/run_tests/run_report-info.sh new file mode 100755 index 0000000..7940848 --- /dev/null +++ b/e2e_tests/scripts/run_tests/run_report-info.sh @@ -0,0 +1,114 @@ +#!/usr/bin/env bash +set -Eeuo pipefail + +COMMAND="report-info" +BIN_DIR="/home/gpadmin/gpbackman" +DATA_DIR="/data/master/gpseg-1" +BACKUP_DIR_PREFIX="/tmp/testWithPrefix" +BACKUP_DIR_SINGLE="/tmp/testNoPrefix" + +run_command(){ + local label="$1"; shift + echo "[INFO] Running ${COMMAND}: $label" + ${BIN_DIR}/gpbackman report-info --history-db ${DATA_DIR}/gpbackup_history.db "$@" || { echo "[ERROR] ${COMMAND} $label failed"; exit 1; } +} + +get_backup_info(){ + local label="$1"; shift + ${BIN_DIR}/gpbackman backup-info --history-db ${DATA_DIR}/gpbackup_history.db --deleted --failed "$@" || { echo "[ERROR] backup-info $label failed"; exit 1; } +} + +################################################################ +# Test 1: Get report info for full local backup (without using backup-dir) + +test_id=1 + +echo "[INFO] ${COMMAND} TEST ${test_id}" + +# Get timestamp for first full local backup +timestamp=$(get_backup_info "get_full_local" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep -v plugin | head -1 | awk '{print $1}') + +if [ -z "$timestamp" ]; then + echo "[ERROR] Could not find full local backup timestamp" + exit 1 +fi + +report_output=$(run_command "full_local_no_dir" --timestamp "$timestamp") + +echo "$report_output" | grep -q "^Greenplum Database Backup Report" || { echo "[ERROR] Expected report header"; exit 1; } +echo "$report_output" | grep -q "timestamp key:.*$timestamp" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } +echo "$report_output" | grep -q "plugin executable:.*None" || { echo "[ERROR] Expected 'plugin executable: None' for local backup"; exit 1; } + +echo "[INFO] ${COMMAND} TEST ${test_id} is successful" + +################################################################ +# Test 2: Get report info for full local backup (with using backup-dir) + +test_id=2 + +echo "[INFO] ${COMMAND} TEST ${test_id}" + +timestamp=$(get_backup_info "get_full_local_with_dir" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep -v plugin | head -1 | awk '{print $1}') + +if [ -z "$timestamp" ]; then + echo "[ERROR] Could not find full local backup timestamp for backup-dir test" + exit 1 +fi + +report_dir="/data/master/gpseg-1" + +report_output=$(run_command "local_with_backup_dir_console" --timestamp "$timestamp" --backup-dir "${report_dir}") + +echo "$report_output" | grep -q "^Greenplum Database Backup 
Report" || { echo "[ERROR] Expected report header"; exit 1; } +echo "$report_output" | grep -q "timestamp key:.*$timestamp" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } +echo "$report_output" | grep -q "plugin executable:.*None" || { echo "[ERROR] Expected 'plugin executable: None' for local backup"; exit 1; } + +echo "[INFO] ${COMMAND} TEST ${test_id} is successful" + +################################################################ +# Test 3: Get report info for full s3 backup (without using plugin-report-file-path) + +test_id=3 + +echo "[INFO] ${COMMAND} TEST ${test_id}" + +timestamp=$(get_backup_info "get_full_s3" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep plugin | head -1 | awk '{print $1}') + +if [ -z "$timestamp" ]; then + echo "[ERROR] Could not find full s3 backup timestamp" + exit 1 +fi + +report_output=$(run_command "s3_without_plugin_report_file_path" --timestamp "$timestamp" --plugin-config ~/gpbackup_s3_plugin.yaml) + +echo "$report_output" | grep -q "^Greenplum Database Backup Report" || { echo "[ERROR] Expected report header"; exit 1; } +echo "$report_output" | grep -q "timestamp key:.*$timestamp" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } +echo "$report_output" | grep -q "plugin executable:.*gpbackup_s3_plugin" || { echo "[ERROR] Expected 'plugin executable: gpbackup_s3_plugin' for s3 backup"; exit 1; } + +echo "[INFO] ${COMMAND} TEST ${test_id} is successful" + +################################################################ +# Test 4: Get report info for full s3 backup (with using plugin-report-file-path) + +test_id=4 + +echo "[INFO] ${COMMAND} TEST ${test_id}" + +timestamp=$(get_backup_info "get_full_s3" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep plugin | head -1 | awk '{print $1}') + +if [ -z "$timestamp" ]; then + echo "[ERROR] Could not find full s3 backup timestamp for plugin-report-file-path test" + exit 1 +fi + +report_dir="/backup/test/backups/${timestamp:0:8}/${timestamp}" + +report_output=$(run_command "s3_with_plugin_report_file_path" --timestamp "$timestamp" --plugin-config ~/gpbackup_s3_plugin.yaml --plugin-report-file-path ${report_dir}) + +echo "$report_output" | grep -q "^Greenplum Database Backup Report" || { echo "[ERROR] Expected report header"; exit 1; } +echo "$report_output" | grep -q "timestamp key:.*$timestamp" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } +echo "$report_output" | grep -q "plugin executable:.*gpbackup_s3_plugin" || { echo "[ERROR] Expected 'plugin executable: gpbackup_s3_plugin' for s3 backup"; exit 1; } + +echo "[INFO] ${COMMAND} TEST ${test_id} is successful" + +echo "[INFO] ${COMMAND} all tests passed" From 10135a02f41881a349dba0c0d4192155c3dd72f1 Mon Sep 17 00:00:00 2001 From: woblerr Date: Mon, 1 Sep 2025 23:39:58 +0300 Subject: [PATCH 03/23] Refactor e2e tests for backup-delete command. --- e2e_tests/scripts/run_backup-delete.sh | 92 ------------- .../scripts/run_tests/run_backup-delete.sh | 127 ++++++++++++++++++ 2 files changed, 127 insertions(+), 92 deletions(-) delete mode 100755 e2e_tests/scripts/run_backup-delete.sh create mode 100755 e2e_tests/scripts/run_tests/run_backup-delete.sh diff --git a/e2e_tests/scripts/run_backup-delete.sh b/e2e_tests/scripts/run_backup-delete.sh deleted file mode 100755 index 58ab663..0000000 --- a/e2e_tests/scripts/run_backup-delete.sh +++ /dev/null @@ -1,92 +0,0 @@ -#!/bin/sh - -# Local image for e2e tests should be built before running tests. -# See make file for details. 
-# This test works with files from src_data directory. -# If new file with backup info is added to src_data, it's nessary to update test cases in this script. - -GPBACKMAN_TEST_COMMAND="backup-delete" - -HOME_DIR="/home/gpbackman" -SRC_DIR="${HOME_DIR}/src_data" -WORK_DIR="${HOME_DIR}/test_data" - -DATE_REGEX="(Mon|Tue|Wed|Thu|Fri|Sat|Sun)\s(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s(0[1-9]|[12][0-9]|3[01])\s[0-9]{4}\s(0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9])" -TIMESTAMP="" - -# Prepare data. -rm -rf "${WORK_DIR}/" -mkdir -p "${WORK_DIR}" -cp ${SRC_DIR}/gpbackup_history_metadata_plugin.yaml \ -${SRC_DIR}/gpbackup_history.db \ -${WORK_DIR} - -################################################################ -# Test 1. -# All ther calls are executed for the same timestamp. -# At the first call, the backup is deleted from the s3. -# The yaml history file format is used. -# History file yaml format is used, there are a real s3 call and a real backup deletion. - -# At second call, there are a real s3 call and no real backup deletion. -# The sqlite history file format is used. -# Because this backup was deleted in first call, there are no files in the s3. -# But the info about deletion attempt is written to log file and DATE DELETED is updated in history file. -TEST_ID="1" - -TIMESTAMP="20230724090000" - -# Execute backup-delete commnad. -gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---timestamp ${TIMESTAMP} \ ---plugin-config ${HOME_DIR}/gpbackup_s3_plugin.yaml \ ---force \ ---ignore-errors - -GPBACKMAN_RESULT_SQLITE=$(gpbackman backup-info \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---deleted | grep -w ${TIMESTAMP}) - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -bckp_date_deleted=$(echo "${GPBACKMAN_RESULT_SQLITE}" | cut -f9 -d'|' | awk '{$1=$1};1' | grep -E ${DATE_REGEX}) -if [ $? != 0 ]; then - echo -e "[ERROR] r${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nget_sqlite:\n${bckp_date_deleted}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -################################################################ -# Test 2. -# Test cascade delete option -TEST_ID="2" - -TIMESTAMP="20230725101959" -# After successful delete, in history there should be 5 backup with dete deleted info. -# 2 from source + 1 from test 1 + 3 from this test. -TEST_CNT=6 - -# Execute backup-delete commnad. -gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---timestamp ${TIMESTAMP} \ ---plugin-config ${HOME_DIR}/gpbackup_s3_plugin.yaml \ ---cascade \ ---force \ ---ignore-errors - -GPBACKMAN_RESULT_SQLITE=$(gpbackman backup-info \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---deleted) - -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | cut -f9 -d'|' | awk '{$1=$1};1' | grep -E ${DATE_REGEX} | wc -l) -if [ "${result_cnt_sqlite}" != "${TEST_CNT}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nget_sqlite=${result_cnt_sqlite}, want=${TEST_CNT}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." 
- -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} all tests passed" -exit 0 diff --git a/e2e_tests/scripts/run_tests/run_backup-delete.sh b/e2e_tests/scripts/run_tests/run_backup-delete.sh new file mode 100755 index 0000000..97820e4 --- /dev/null +++ b/e2e_tests/scripts/run_tests/run_backup-delete.sh @@ -0,0 +1,127 @@ +#!/usr/bin/env bash +set -Eeuo pipefail + +COMMAND="backup-delete" +BIN_DIR="/home/gpadmin/gpbackman" +DATA_DIR="/data/master/gpseg-1" + +run_command(){ + local label="$1"; shift + echo "[INFO] Running ${COMMAND}: $label" + ${BIN_DIR}/gpbackman backup-delete --history-db ${DATA_DIR}/gpbackup_history.db "$@" || { echo "[ERROR] ${COMMAND} $label failed"; exit 1; } +} + +get_backup_info(){ + local label="$1"; shift + ${BIN_DIR}/gpbackman backup-info --history-db ${DATA_DIR}/gpbackup_history.db --deleted --failed "$@" || { echo "[ERROR] backup-info $label failed"; exit 1; } +} + +get_backup_info_for_timestamp(){ + local timestamp="$1" + ${BIN_DIR}/gpbackman backup-info --history-db ${DATA_DIR}/gpbackup_history.db --deleted --failed | grep "$timestamp" || echo "No info found for timestamp $timestamp" +} + +################################################################ +# Test 1: Delete local full backup + +test_id=1 + +echo "[INFO] ${COMMAND} TEST ${test_id}" + +timestamp=$(get_backup_info "get_local_full" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep -v plugin | head -1 | awk '{print $1}') + +if [ -z "$timestamp" ]; then + echo "[ERROR] Could not find full local backup timestamp" + exit 1 +fi + +run_command "delete_local_full" --timestamp "$timestamp" + +deleted_backup=$(get_backup_info_for_timestamp "$timestamp") + +date_deleted=$(echo "$deleted_backup" | grep "$timestamp" | awk -F'|' '{print $NF}' | xargs) + +if [ -n "$date_deleted" ]; then + echo "[INFO] Backup $timestamp successfully marked as deleted" +else + echo "[ERROR] Backup should be marked as deleted" + exit 1 +fi + +echo "[INFO] ${COMMAND} TEST ${test_id} is successful" + + +################################################################ +# Test 2: Delete S3 incremental backup + +test_id=2 + +echo "[INFO] ${COMMAND} TEST ${test_id}" + +timestamp=$(get_backup_info "get_s3_incremental" --type incremental | grep -E '^[[:space:]][0-9]{14} ' | grep plugin | head -1 | awk '{print $1}') + +if [ -z "$timestamp" ]; then + echo "[ERROR] Could not find S3 incremental backup" + exit 1 +fi + +run_command "delete_s3_incremental" --timestamp "$timestamp" --plugin-config /home/gpadmin/gpbackup_s3_plugin.yaml + +deleted_backup=$(get_backup_info_for_timestamp "$timestamp") + +date_deleted=$(echo "$deleted_backup" | grep "$timestamp" | awk -F'|' '{print $NF}' | xargs) +if [ -n "$date_deleted" ]; then + echo "[INFO] S3 backup $timestamp successfully marked as deleted" +else + echo "[ERROR] S3 backup should be marked as deleted" + exit 1 +fi + +echo "[INFO] ${COMMAND} TEST ${test_id} is successful" + +################################################################ +# Test 3: Delete S3 full backup with cascade + +test_id=3 + +echo "[INFO] ${COMMAND} TEST ${test_id}" + +timestamp=$(get_backup_info "get_s3_full" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep plugin | tail -1 | awk '{print $1}') + +if [ -z "$timestamp" ]; then + echo "[ERROR] Could not find S3 full backup" + exit 1 +fi + +run_command "delete_s3_full_cascade" --timestamp "$timestamp" --plugin-config /home/gpadmin/gpbackup_s3_plugin.yaml --cascade + +deleted_count=$(get_backup_info "count_deleted" --deleted | grep -E '^[[:space:]][0-9]{14} ' | awk -F'|' 'NF >= 9 && 
$NF !~ /^[[:space:]]*$/' | wc -l) + +# Delete one backup from test 1 and one from test 2 +# Plus 2 backups (incr + full) from this test +[ "$deleted_count" -eq 4 ] || { echo "[ERROR] Expected 4 backups to be deleted, but found $deleted_count"; exit 1; } + +echo "[INFO] ${COMMAND} TEST ${test_id} is successful" + +################################################################ +# Test 4: Try to delete non-existent backup (should fail) + +test_id=4 + +echo "[INFO] ${COMMAND} TEST ${test_id}" + +fake_timestamp="19990101000000" + +echo "[INFO] Attempting to delete non-existent backup: $fake_timestamp" + +if ${BIN_DIR}/gpbackman backup-delete --history-db ${DATA_DIR}/gpbackup_history.db --timestamp "$fake_timestamp" --force 2>/dev/null; then + echo "[ERROR] Expected deletion of non-existent backup to fail, but it succeeded" + exit 1 +else + echo "[INFO] Deletion of non-existent backup correctly failed as expected" +fi + +echo "[INFO] ${COMMAND} TEST ${test_id} is successful" + +################################################################ +echo "[INFO] ${COMMAND} all tests passed" From 98d36cd0e094f38f1963012b5274a8e9f47a9139 Mon Sep 17 00:00:00 2001 From: woblerr Date: Tue, 2 Sep 2025 07:39:34 +0300 Subject: [PATCH 04/23] Refactor e2e tests with unified architecture and common functions. - Add common_functions.sh with shared logging, assertions and utilities - Standardize variable format to ${var} with proper case conventions - Refactor all test scripts to use modular test functions - Reduce code duplication and improve maintainability - Unify error handling and test execution patterns --- .../scripts/run_tests/common_functions.sh | 59 ++++++ .../scripts/run_tests/run_backup-delete.sh | 187 ++++++++---------- .../scripts/run_tests/run_backup-info.sh | 151 ++++++-------- .../scripts/run_tests/run_report-info.sh | 169 +++++++--------- e2e_tests/scripts/run_tests/run_test.sh | 2 +- 5 files changed, 268 insertions(+), 300 deletions(-) create mode 100644 e2e_tests/scripts/run_tests/common_functions.sh diff --git a/e2e_tests/scripts/run_tests/common_functions.sh b/e2e_tests/scripts/run_tests/common_functions.sh new file mode 100644 index 0000000..8a15d63 --- /dev/null +++ b/e2e_tests/scripts/run_tests/common_functions.sh @@ -0,0 +1,59 @@ +#!/usr/bin/env bash + +readonly BIN_DIR="/home/gpadmin/gpbackman" +readonly DATA_DIR="/data/master/gpseg-1" + +log_test_start() { + local command="${1}" + local test_id="${2}" + echo "[INFO] ${command} TEST ${test_id}" +} + +log_test_success() { + local command="${1}" + local test_id="${2}" + echo "[INFO] ${command} TEST ${test_id} is successful" +} + +log_all_tests_passed() { + local command="${1}" + echo "[INFO] ${command} all tests passed" +} + +get_backup_info() { + local label="${1}"; shift + ${BIN_DIR}/gpbackman backup-info --history-db ${DATA_DIR}/gpbackup_history.db --deleted --failed "$@" || { + echo "[ERROR] backup-info ${label} failed"; exit 1; + } +} + +assert_equals() { + local expected="${1}" + local actual="${2}" + local message="${3:-}" + + [ "${actual}" -eq "${expected}" ] || { + echo "[ERROR] Expected ${expected}, got ${actual}${message:+ - ${message}}"; exit 1; + } +} + +assert_equals_both() { + local expected="${1}" + local actual1="${2}" + local actual2="${3}" + local message="${4:-}" + + [ "${actual1}" -eq "${expected}" ] && [ "${actual2}" -eq "${expected}" ] || { + echo "[ERROR] Expected ${expected}, got1=${actual1}, got2=${actual2}${message:+ - ${message}}"; exit 1; + } +} + +run_test() { + local command="${1}" + local test_id="${2}" + 
local test_function="${3}" + + log_test_start "${command}" "${test_id}" + ${test_function} + log_test_success "${command}" "${test_id}" +} diff --git a/e2e_tests/scripts/run_tests/run_backup-delete.sh b/e2e_tests/scripts/run_tests/run_backup-delete.sh index 97820e4..e664418 100755 --- a/e2e_tests/scripts/run_tests/run_backup-delete.sh +++ b/e2e_tests/scripts/run_tests/run_backup-delete.sh @@ -1,127 +1,100 @@ #!/usr/bin/env bash set -Eeuo pipefail +source "$(dirname "${BASH_SOURCE[0]}")/common_functions.sh" + COMMAND="backup-delete" -BIN_DIR="/home/gpadmin/gpbackman" -DATA_DIR="/data/master/gpseg-1" run_command(){ - local label="$1"; shift - echo "[INFO] Running ${COMMAND}: $label" - ${BIN_DIR}/gpbackman backup-delete --history-db ${DATA_DIR}/gpbackup_history.db "$@" || { echo "[ERROR] ${COMMAND} $label failed"; exit 1; } -} - -get_backup_info(){ - local label="$1"; shift - ${BIN_DIR}/gpbackman backup-info --history-db ${DATA_DIR}/gpbackup_history.db --deleted --failed "$@" || { echo "[ERROR] backup-info $label failed"; exit 1; } + local label="${1}"; shift + echo "[INFO] Running ${COMMAND}: ${label}" + ${BIN_DIR}/gpbackman backup-delete --history-db ${DATA_DIR}/gpbackup_history.db "$@" || { echo "[ERROR] ${COMMAND} ${label} failed"; exit 1; } } get_backup_info_for_timestamp(){ - local timestamp="$1" - ${BIN_DIR}/gpbackman backup-info --history-db ${DATA_DIR}/gpbackup_history.db --deleted --failed | grep "$timestamp" || echo "No info found for timestamp $timestamp" + local timestamp="${1}" + ${BIN_DIR}/gpbackman backup-info --history-db ${DATA_DIR}/gpbackup_history.db --deleted --failed | grep "${timestamp}" || echo "No info found for timestamp ${timestamp}" } -################################################################ # Test 1: Delete local full backup +test_delete_local_full() { + local timestamp=$(get_backup_info "get_local_full" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep -v plugin | head -1 | awk '{print $1}') + + if [ -z "${timestamp}" ]; then + echo "[ERROR] Could not find full local backup timestamp" + exit 1 + fi + + run_command "delete_local_full" --timestamp "${timestamp}" + + local deleted_backup=$(get_backup_info_for_timestamp "${timestamp}") + local date_deleted=$(echo "${deleted_backup}" | grep "${timestamp}" | awk -F'|' '{print $NF}' | xargs) + + if [ -n "${date_deleted}" ]; then + echo "[INFO] Backup ${timestamp} successfully marked as deleted" + else + echo "[ERROR] Backup should be marked as deleted" + exit 1 + fi +} -test_id=1 - -echo "[INFO] ${COMMAND} TEST ${test_id}" - -timestamp=$(get_backup_info "get_local_full" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep -v plugin | head -1 | awk '{print $1}') - -if [ -z "$timestamp" ]; then - echo "[ERROR] Could not find full local backup timestamp" - exit 1 -fi - -run_command "delete_local_full" --timestamp "$timestamp" - -deleted_backup=$(get_backup_info_for_timestamp "$timestamp") - -date_deleted=$(echo "$deleted_backup" | grep "$timestamp" | awk -F'|' '{print $NF}' | xargs) - -if [ -n "$date_deleted" ]; then - echo "[INFO] Backup $timestamp successfully marked as deleted" -else - echo "[ERROR] Backup should be marked as deleted" - exit 1 -fi - -echo "[INFO] ${COMMAND} TEST ${test_id} is successful" - - -################################################################ # Test 2: Delete S3 incremental backup +test_delete_s3_incremental() { + local timestamp=$(get_backup_info "get_s3_incremental" --type incremental | grep -E '^[[:space:]][0-9]{14} ' | grep plugin | head -1 | awk '{print $1}') + + if [ -z 
"${timestamp}" ]; then + echo "[ERROR] Could not find S3 incremental backup" + exit 1 + fi + + run_command "delete_s3_incremental" --timestamp "${timestamp}" --plugin-config /home/gpadmin/gpbackup_s3_plugin.yaml + + local deleted_backup=$(get_backup_info_for_timestamp "${timestamp}") + local date_deleted=$(echo "${deleted_backup}" | grep "${timestamp}" | awk -F'|' '{print $NF}' | xargs) + + if [ -n "${date_deleted}" ]; then + echo "[INFO] S3 backup ${timestamp} successfully marked as deleted" + else + echo "[ERROR] S3 backup should be marked as deleted" + exit 1 + fi +} -test_id=2 - -echo "[INFO] ${COMMAND} TEST ${test_id}" - -timestamp=$(get_backup_info "get_s3_incremental" --type incremental | grep -E '^[[:space:]][0-9]{14} ' | grep plugin | head -1 | awk '{print $1}') - -if [ -z "$timestamp" ]; then - echo "[ERROR] Could not find S3 incremental backup" - exit 1 -fi - -run_command "delete_s3_incremental" --timestamp "$timestamp" --plugin-config /home/gpadmin/gpbackup_s3_plugin.yaml - -deleted_backup=$(get_backup_info_for_timestamp "$timestamp") - -date_deleted=$(echo "$deleted_backup" | grep "$timestamp" | awk -F'|' '{print $NF}' | xargs) -if [ -n "$date_deleted" ]; then - echo "[INFO] S3 backup $timestamp successfully marked as deleted" -else - echo "[ERROR] S3 backup should be marked as deleted" - exit 1 -fi - -echo "[INFO] ${COMMAND} TEST ${test_id} is successful" - -################################################################ # Test 3: Delete S3 full backup with cascade +test_delete_s3_full_cascade() { + local timestamp=$(get_backup_info "get_s3_full" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep plugin | tail -1 | awk '{print $1}') + + if [ -z "${timestamp}" ]; then + echo "[ERROR] Could not find S3 full backup" + exit 1 + fi + + run_command "delete_s3_full_cascade" --timestamp "${timestamp}" --plugin-config /home/gpadmin/gpbackup_s3_plugin.yaml --cascade + + local deleted_count=$(get_backup_info "count_deleted" --deleted | grep -E '^[[:space:]][0-9]{14} ' | awk -F'|' 'NF >= 9 && $NF !~ /^[[:space:]]*$/' | wc -l) + + # Expected: 1 backup from test 1 + 1 from test 2 + 2 backups (incr + full) from this test = 4 total + local want=4 + [ "${deleted_count}" -eq "${want}" ] || { echo "[ERROR] Expected ${want} backups to be deleted, but found ${deleted_count}"; exit 1; } +} -test_id=3 - -echo "[INFO] ${COMMAND} TEST ${test_id}" - -timestamp=$(get_backup_info "get_s3_full" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep plugin | tail -1 | awk '{print $1}') - -if [ -z "$timestamp" ]; then - echo "[ERROR] Could not find S3 full backup" - exit 1 -fi - -run_command "delete_s3_full_cascade" --timestamp "$timestamp" --plugin-config /home/gpadmin/gpbackup_s3_plugin.yaml --cascade - -deleted_count=$(get_backup_info "count_deleted" --deleted | grep -E '^[[:space:]][0-9]{14} ' | awk -F'|' 'NF >= 9 && $NF !~ /^[[:space:]]*$/' | wc -l) - -# Delete one backup from test 1 and one from test 2 -# Plus 2 backups (incr + full) from this test -[ "$deleted_count" -eq 4 ] || { echo "[ERROR] Expected 4 backups to be deleted, but found $deleted_count"; exit 1; } - -echo "[INFO] ${COMMAND} TEST ${test_id} is successful" - -################################################################ # Test 4: Try to delete non-existent backup (should fail) +test_delete_nonexistent_backup() { + local fake_timestamp="19990101000000" + + echo "[INFO] Attempting to delete non-existent backup: ${fake_timestamp}" + + if ${BIN_DIR}/gpbackman backup-delete --history-db ${DATA_DIR}/gpbackup_history.db --timestamp 
"${fake_timestamp}" --force 2>/dev/null; then + echo "[ERROR] Expected deletion of non-existent backup to fail, but it succeeded" + exit 1 + else + echo "[INFO] Deletion of non-existent backup correctly failed as expected" + fi +} -test_id=4 - -echo "[INFO] ${COMMAND} TEST ${test_id}" - -fake_timestamp="19990101000000" - -echo "[INFO] Attempting to delete non-existent backup: $fake_timestamp" - -if ${BIN_DIR}/gpbackman backup-delete --history-db ${DATA_DIR}/gpbackup_history.db --timestamp "$fake_timestamp" --force 2>/dev/null; then - echo "[ERROR] Expected deletion of non-existent backup to fail, but it succeeded" - exit 1 -else - echo "[INFO] Deletion of non-existent backup correctly failed as expected" -fi - -echo "[INFO] ${COMMAND} TEST ${test_id} is successful" +run_test "${COMMAND}" 1 test_delete_local_full +run_test "${COMMAND}" 2 test_delete_s3_incremental +run_test "${COMMAND}" 3 test_delete_s3_full_cascade +run_test "${COMMAND}" 4 test_delete_nonexistent_backup -################################################################ -echo "[INFO] ${COMMAND} all tests passed" +log_all_tests_passed "${COMMAND}" diff --git a/e2e_tests/scripts/run_tests/run_backup-info.sh b/e2e_tests/scripts/run_tests/run_backup-info.sh index 3dd8ad8..dfb7497 100755 --- a/e2e_tests/scripts/run_tests/run_backup-info.sh +++ b/e2e_tests/scripts/run_tests/run_backup-info.sh @@ -1,113 +1,76 @@ #!/usr/bin/env bash set -Eeuo pipefail +source "$(dirname "${BASH_SOURCE[0]}")/common_functions.sh" + COMMAND="backup-info" -BIN_DIR="/home/gpadmin/gpbackman" -DATA_DIR="/data/master/gpseg-1" run_command(){ - local label="$1"; shift - echo "[INFO] Running ${COMMAND}: $label" - ${BIN_DIR}/gpbackman backup-info --history-db ${DATA_DIR}/gpbackup_history.db --deleted --failed "$@" || { echo "[ERROR] ${COMMAND} $label failed"; exit 1; } + local label="${1}"; shift + echo "[INFO] Running ${COMMAND}: ${label}" + ${BIN_DIR}/gpbackman backup-info --history-db ${DATA_DIR}/gpbackup_history.db --deleted --failed "$@" || { echo "[ERROR] ${COMMAND} ${label} failed"; exit 1; } } -################################################################ -# Count of all backups in the history database -test_id=1 - -echo "[INFO] ${COMMAND} TEST ${test_id}" - -want=12 -got=$(run_command total_backups | grep -E '^[[:space:]][0-9]{14} ' | wc -l) - -[ "$got" -eq "$want" ] || { echo "[ERROR] Expected $want , got $got"; exit 1; } +# Test 1: Count all backups in history database +test_count_all_backups() { + local want=12 + local got=$(run_command total_backups | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + assert_equals "${want}" "${got}" +} -echo "[INFO] ${COMMAND} TEST ${test_id} is successful" +# Test 2: Count all full backups +test_count_full_backups() { + local want=7 + local got1=$(run_command total_full_backups | grep -E '^[[:space:]][0-9]{14} ' | grep full | wc -l) + local got2=$(run_command filter_full_backups --type full | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + assert_equals_both "${want}" "${got1}" "${got2}" +} -################################################################ -# Count of all full backups in the history database +# Test 3: Count all incremental backups # Compare the number of backups from the output of all backups and # from the output with the --type full flag -test_id=2 - -echo "[INFO] ${COMMAND} TEST ${test_id}" - -want=7 -got1=$(run_command total_full_backups | grep -E '^[[:space:]][0-9]{14} ' | grep full | wc -l) -got2=$(run_command filter_full_backups --type full | grep -E '^[[:space:]][0-9]{14} ' | wc -l) - -[ "$got1" 
-eq "$want" ] && [ "$got2" -eq "$want" ] || { echo "[ERROR] Expected $want , got1=$got1, got2=$got2"; exit 1; } - -echo "[INFO] ${COMMAND} TEST ${test_id} is successful" - -################################################################ -# Count of all incremental backups in the history database -# Compare the number of backups from the output of all backups and -# from the output with the --type incremental flag -test_id=3 - -echo "[INFO] ${COMMAND} TEST ${test_id}" - -want=3 -got1=$(run_command total_incremental_backups | grep -E '^[[:space:]][0-9]{14} ' | grep incremental | wc -l) -got2=$(run_command filter_incremental_backups --type incremental | grep -E '^[[:space:]][0-9]{14} ' | wc -l) - -[ "$got1" -eq "$want" ] && [ "$got2" -eq "$want" ] || { echo "[ERROR] Expected $want , got1=$got1, got2=$got2"; exit 1; } - -echo "[INFO] ${COMMAND} TEST ${test_id} is successful" - -################################################################ -# Count of backups which include table sch2.tbl_c -test_id=4 - -echo "[INFO] ${COMMAND} TEST ${test_id}" - -want=2 -got=$(run_command total_include_table_backups --table sch2.tbl_c | grep -E '^[[:space:]][0-9]{14} ' | wc -l) - -[ "$got" -eq "$want" ] || { echo "[ERROR] Expected $want , got $got"; exit 1; } - -echo "[INFO] ${COMMAND} TEST ${test_id} is successful" - -################################################################ -# Count of backups which exclude table sch2.tbl_d - -test_id=5 - -echo "[INFO] ${COMMAND} TEST ${test_id}" - -want=2 -got=$(run_command total_exclude_table_backups --table sch2.tbl_d --exclude | grep -E '^[[:space:]][0-9]{14} ' | wc -l) +test_count_incremental_backups() { + local want=3 + local got1=$(run_command total_incremental_backups | grep -E '^[[:space:]][0-9]{14} ' | grep incremental | wc -l) + local got2=$(run_command filter_incremental_backups --type incremental | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + assert_equals_both "${want}" "${got1}" "${got2}" +} -[ "$got" -eq "$want" ] || { echo "[ERROR] Expected $want , got $got"; exit 1; } +# Test 4: Count backups that include table sch2.tbl_c +test_count_include_table_backups() { + local want=2 + local got=$(run_command total_include_table_backups --table sch2.tbl_c | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + assert_equals "${want}" "${got}" +} -echo "[INFO] ${COMMAND} TEST ${test_id} is successful" +# Test 5: Count backups that exclude table sch2.tbl_d +test_count_exclude_table_backups() { + local want=2 + local got=$(run_command total_exclude_table_backups --table sch2.tbl_d --exclude | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + assert_equals "${want}" "${got}" +} -################################################################ -# Count of full backups which include table sch2.tbl_c +# Test 6: Count full backups that include table sch2.tbl_c # Use --type full to filter only full backups -test_id=6 - -echo "[INFO] ${COMMAND} TEST ${test_id}" - -want=1 -got=$(run_command total_include_table_full_backups --table sch2.tbl_c --type full | grep -E '^[[:space:]][0-9]{14} ' | wc -l) - -[ "$got" -eq "$want" ] || { echo "[ERROR] Expected $want , got $got"; exit 1; } - -echo "[INFO] ${COMMAND} TEST ${test_id} is successful" - - -################################################################ -# Count of incremental backups which exclude table sch2.tbl_d -# Use --type incremental to filter only incremental backups -test_id=7 - -echo "[INFO] ${COMMAND} TEST ${test_id}" +test_count_include_table_full_backups() { + local want=1 + local got=$(run_command 
total_include_table_full_backups --table sch2.tbl_c --type full | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + assert_equals "${want}" "${got}" +} -want=1 -got=$(run_command total_exclude_table_incremental_backups --table sch2.tbl_d --exclude --type incremental | grep -E '^[[:space:]][0-9]{14} ' | wc -l) +# Test 7: Count incremental backups that exclude table sch2.tbl_d +test_count_exclude_table_incremental_backups() { + local want=1 + local got=$(run_command total_exclude_table_incremental_backups --table sch2.tbl_d --exclude --type incremental | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + assert_equals "${want}" "${got}" +} -[ "$got" -eq "$want" ] || { echo "[ERROR] Expected $want , got $got"; exit 1; } +run_test "${COMMAND}" 1 test_count_all_backups +run_test "${COMMAND}" 2 test_count_full_backups +run_test "${COMMAND}" 3 test_count_incremental_backups +run_test "${COMMAND}" 4 test_count_include_table_backups +run_test "${COMMAND}" 5 test_count_exclude_table_backups +run_test "${COMMAND}" 6 test_count_include_table_full_backups +run_test "${COMMAND}" 7 test_count_exclude_table_incremental_backups -echo "[INFO] ${COMMAND} TEST ${test_id} is successful" +log_all_tests_passed "${COMMAND}" diff --git a/e2e_tests/scripts/run_tests/run_report-info.sh b/e2e_tests/scripts/run_tests/run_report-info.sh index 7940848..9b03d13 100755 --- a/e2e_tests/scripts/run_tests/run_report-info.sh +++ b/e2e_tests/scripts/run_tests/run_report-info.sh @@ -1,114 +1,87 @@ #!/usr/bin/env bash set -Eeuo pipefail +source "$(dirname "${BASH_SOURCE[0]}")/common_functions.sh" + COMMAND="report-info" -BIN_DIR="/home/gpadmin/gpbackman" -DATA_DIR="/data/master/gpseg-1" BACKUP_DIR_PREFIX="/tmp/testWithPrefix" BACKUP_DIR_SINGLE="/tmp/testNoPrefix" run_command(){ - local label="$1"; shift - echo "[INFO] Running ${COMMAND}: $label" - ${BIN_DIR}/gpbackman report-info --history-db ${DATA_DIR}/gpbackup_history.db "$@" || { echo "[ERROR] ${COMMAND} $label failed"; exit 1; } + local label="${1}"; shift + echo "[INFO] Running ${COMMAND}: ${label}" + ${BIN_DIR}/gpbackman report-info --history-db ${DATA_DIR}/gpbackup_history.db "$@" || { echo "[ERROR] ${COMMAND} ${label} failed"; exit 1; } } -get_backup_info(){ - local label="$1"; shift - ${BIN_DIR}/gpbackman backup-info --history-db ${DATA_DIR}/gpbackup_history.db --deleted --failed "$@" || { echo "[ERROR] backup-info $label failed"; exit 1; } +# Test 1: Get report info for full local backup (without backup-dir) +test_report_full_local_no_dir() { + local timestamp=$(get_backup_info "get_full_local" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep -v plugin | head -1 | awk '{print $1}') + + if [ -z "${timestamp}" ]; then + echo "[ERROR] Could not find full local backup timestamp" + exit 1 + fi + + local report_output=$(run_command "full_local_no_dir" --timestamp "${timestamp}") + + echo "${report_output}" | grep -q "^Greenplum Database Backup Report" || { echo "[ERROR] Expected report header"; exit 1; } + echo "${report_output}" | grep -q "timestamp key:.*${timestamp}" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } + echo "${report_output}" | grep -q "plugin executable:.*None" || { echo "[ERROR] Expected 'plugin executable: None' for local backup"; exit 1; } } -################################################################ -# Test 1: Get report info for full local backup (without using backup-dir) - -test_id=1 - -echo "[INFO] ${COMMAND} TEST ${test_id}" - -# Get timestamp for first full local backup -timestamp=$(get_backup_info "get_full_local" --type full | 
grep -E '^[[:space:]][0-9]{14} ' | grep -v plugin | head -1 | awk '{print $1}') - -if [ -z "$timestamp" ]; then - echo "[ERROR] Could not find full local backup timestamp" - exit 1 -fi - -report_output=$(run_command "full_local_no_dir" --timestamp "$timestamp") - -echo "$report_output" | grep -q "^Greenplum Database Backup Report" || { echo "[ERROR] Expected report header"; exit 1; } -echo "$report_output" | grep -q "timestamp key:.*$timestamp" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } -echo "$report_output" | grep -q "plugin executable:.*None" || { echo "[ERROR] Expected 'plugin executable: None' for local backup"; exit 1; } - -echo "[INFO] ${COMMAND} TEST ${test_id} is successful" - -################################################################ -# Test 2: Get report info for full local backup (with using backup-dir) - -test_id=2 - -echo "[INFO] ${COMMAND} TEST ${test_id}" - -timestamp=$(get_backup_info "get_full_local_with_dir" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep -v plugin | head -1 | awk '{print $1}') - -if [ -z "$timestamp" ]; then - echo "[ERROR] Could not find full local backup timestamp for backup-dir test" - exit 1 -fi - -report_dir="/data/master/gpseg-1" - -report_output=$(run_command "local_with_backup_dir_console" --timestamp "$timestamp" --backup-dir "${report_dir}") - -echo "$report_output" | grep -q "^Greenplum Database Backup Report" || { echo "[ERROR] Expected report header"; exit 1; } -echo "$report_output" | grep -q "timestamp key:.*$timestamp" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } -echo "$report_output" | grep -q "plugin executable:.*None" || { echo "[ERROR] Expected 'plugin executable: None' for local backup"; exit 1; } - -echo "[INFO] ${COMMAND} TEST ${test_id} is successful" - -################################################################ -# Test 3: Get report info for full s3 backup (without using plugin-report-file-path) - -test_id=3 - -echo "[INFO] ${COMMAND} TEST ${test_id}" - -timestamp=$(get_backup_info "get_full_s3" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep plugin | head -1 | awk '{print $1}') - -if [ -z "$timestamp" ]; then - echo "[ERROR] Could not find full s3 backup timestamp" - exit 1 -fi - -report_output=$(run_command "s3_without_plugin_report_file_path" --timestamp "$timestamp" --plugin-config ~/gpbackup_s3_plugin.yaml) - -echo "$report_output" | grep -q "^Greenplum Database Backup Report" || { echo "[ERROR] Expected report header"; exit 1; } -echo "$report_output" | grep -q "timestamp key:.*$timestamp" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } -echo "$report_output" | grep -q "plugin executable:.*gpbackup_s3_plugin" || { echo "[ERROR] Expected 'plugin executable: gpbackup_s3_plugin' for s3 backup"; exit 1; } - -echo "[INFO] ${COMMAND} TEST ${test_id} is successful" - -################################################################ -# Test 4: Get report info for full s3 backup (with using plugin-report-file-path) - -test_id=4 - -echo "[INFO] ${COMMAND} TEST ${test_id}" - -timestamp=$(get_backup_info "get_full_s3" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep plugin | head -1 | awk '{print $1}') - -if [ -z "$timestamp" ]; then - echo "[ERROR] Could not find full s3 backup timestamp for plugin-report-file-path test" - exit 1 -fi - -report_dir="/backup/test/backups/${timestamp:0:8}/${timestamp}" +# Test 2: Get report info for full local backup (with backup-dir) +test_report_full_local_with_dir() { + local timestamp=$(get_backup_info 
"get_full_local_with_dir" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep -v plugin | head -1 | awk '{print $1}') + + if [ -z "${timestamp}" ]; then + echo "[ERROR] Could not find full local backup timestamp for backup-dir test" + exit 1 + fi + + local report_dir="/data/master/gpseg-1" + local report_output=$(run_command "local_with_backup_dir_console" --timestamp "${timestamp}" --backup-dir "${report_dir}") + + echo "${report_output}" | grep -q "^Greenplum Database Backup Report" || { echo "[ERROR] Expected report header"; exit 1; } + echo "${report_output}" | grep -q "timestamp key:.*${timestamp}" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } + echo "${report_output}" | grep -q "plugin executable:.*None" || { echo "[ERROR] Expected 'plugin executable: None' for local backup"; exit 1; } +} -report_output=$(run_command "s3_with_plugin_report_file_path" --timestamp "$timestamp" --plugin-config ~/gpbackup_s3_plugin.yaml --plugin-report-file-path ${report_dir}) +# Test 3: Get report info for full S3 backup (without plugin-report-file-path) +test_report_s3_no_plugin_path() { + local timestamp=$(get_backup_info "get_full_s3" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep plugin | head -1 | awk '{print $1}') + + if [ -z "${timestamp}" ]; then + echo "[ERROR] Could not find full s3 backup timestamp" + exit 1 + fi + + local report_output=$(run_command "s3_without_plugin_report_file_path" --timestamp "${timestamp}" --plugin-config ~/gpbackup_s3_plugin.yaml) + + echo "${report_output}" | grep -q "^Greenplum Database Backup Report" || { echo "[ERROR] Expected report header"; exit 1; } + echo "${report_output}" | grep -q "timestamp key:.*${timestamp}" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } + echo "${report_output}" | grep -q "plugin executable:.*gpbackup_s3_plugin" || { echo "[ERROR] Expected 'plugin executable: gpbackup_s3_plugin' for s3 backup"; exit 1; } +} -echo "$report_output" | grep -q "^Greenplum Database Backup Report" || { echo "[ERROR] Expected report header"; exit 1; } -echo "$report_output" | grep -q "timestamp key:.*$timestamp" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } -echo "$report_output" | grep -q "plugin executable:.*gpbackup_s3_plugin" || { echo "[ERROR] Expected 'plugin executable: gpbackup_s3_plugin' for s3 backup"; exit 1; } +# Test 4: Get report info for full S3 backup (with plugin-report-file-path) +test_report_s3_with_plugin_path() { + local timestamp=$(get_backup_info "get_full_s3" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep plugin | head -1 | awk '{print $1}') + + if [ -z "${timestamp}" ]; then + echo "[ERROR] Could not find full s3 backup timestamp for plugin-report-file-path test" + exit 1 + fi + + local report_dir="/backup/test/backups/${timestamp:0:8}/${timestamp}" + local report_output=$(run_command "s3_with_plugin_report_file_path" --timestamp "${timestamp}" --plugin-config ~/gpbackup_s3_plugin.yaml --plugin-report-file-path ${report_dir}) + + echo "${report_output}" | grep -q "^Greenplum Database Backup Report" || { echo "[ERROR] Expected report header"; exit 1; } + echo "${report_output}" | grep -q "timestamp key:.*${timestamp}" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } + echo "${report_output}" | grep -q "plugin executable:.*gpbackup_s3_plugin" || { echo "[ERROR] Expected 'plugin executable: gpbackup_s3_plugin' for s3 backup"; exit 1; } +} -echo "[INFO] ${COMMAND} TEST ${test_id} is successful" +run_test "${COMMAND}" 1 test_report_full_local_no_dir 
+run_test "${COMMAND}" 2 test_report_full_local_with_dir +run_test "${COMMAND}" 3 test_report_s3_no_plugin_path +run_test "${COMMAND}" 4 test_report_s3_with_plugin_path -echo "[INFO] ${COMMAND} all tests passed" +log_all_tests_passed "${COMMAND}" diff --git a/e2e_tests/scripts/run_tests/run_test.sh b/e2e_tests/scripts/run_tests/run_test.sh index bc3c676..67e3b18 100755 --- a/e2e_tests/scripts/run_tests/run_test.sh +++ b/e2e_tests/scripts/run_tests/run_test.sh @@ -14,7 +14,7 @@ wait_for_service() { echo "[INFO] Cluster ready" return 0 fi - echo "[INFO] Waiting cluster startup ($i/${max_attempts})" + echo "[INFO] Waiting cluster startup (${i}/${max_attempts})" sleep 10 done echo "[ERROR] Cluster failed to start within timeout" From 1c89eaef3baa7d0750e5b81c32eb250daf44b3d2 Mon Sep 17 00:00:00 2001 From: woblerr Date: Tue, 2 Sep 2025 07:58:04 +0300 Subject: [PATCH 05/23] Remove unused files. --- .../src_data/gpbackup_20230724090000_report | 59 ----------------- .../src_data/gpbackup_20240505201504_report | 61 ------------------ .../src_data/gpbackup_20240506201504_report | 61 ------------------ e2e_tests/src_data/gpbackup_history.db | Bin 36864 -> 0 bytes 4 files changed, 181 deletions(-) delete mode 100644 e2e_tests/src_data/gpbackup_20230724090000_report delete mode 100644 e2e_tests/src_data/gpbackup_20240505201504_report delete mode 100644 e2e_tests/src_data/gpbackup_20240506201504_report delete mode 100644 e2e_tests/src_data/gpbackup_history.db diff --git a/e2e_tests/src_data/gpbackup_20230724090000_report b/e2e_tests/src_data/gpbackup_20230724090000_report deleted file mode 100644 index d238141..0000000 --- a/e2e_tests/src_data/gpbackup_20230724090000_report +++ /dev/null @@ -1,59 +0,0 @@ -Greenplum Database Backup Report - -timestamp key: 20230724090000 -gpdb version: 6.23.3 -gpbackup version: 1.27.0 - -database name: demo -command line: gpbackup --dbname demo --compression-type gzip --plugin-config /tmp/gpbackup_plugin_config.yml --metadata-only -compression: gzip -plugin executable: gpbackup_s3_plugin -backup section: Metadata Only -object filtering: None -includes statistics: No -data file format: No Data Files -incremental: False - -start time: Mon Jul 24 2023 09:00:00 -end time: Mon Jul 24 2023 09:05:17 -duration: 0:05:17 - -backup status: Success - -segment count: 8 - -count of database objects in backup: -aggregates 50 -casts 8 -collations 0 -constraints 100 -conversions 0 -default privileges 60 -database gucs 0 -event triggers 0 -extensions 10 -foreign data wrappers 0 -foreign servers 0 -functions 100 -indexes 5 -operator classes 1 -operator families 1 -operators 10 -procedural languages 1 -protocols 1 -resource groups 3 -resource queues 1 -roles 200 -rules 0 -schemas 70 -sequences 15 -tables 1000 -tablespaces 0 -text search configurations 0 -text search dictionaries 0 -text search parsers 0 -text search templates 0 -triggers 0 -types 60 -user mappings 0 -views 500 diff --git a/e2e_tests/src_data/gpbackup_20240505201504_report b/e2e_tests/src_data/gpbackup_20240505201504_report deleted file mode 100644 index 18c4cd3..0000000 --- a/e2e_tests/src_data/gpbackup_20240505201504_report +++ /dev/null @@ -1,61 +0,0 @@ -Greenplum Database Backup Report - -timestamp key: 20240505201504 -gpdb version: 6.23.3 -gpbackup version: 1.27.0 - -database name: demo -command line: gpbackup --backup-dir /tmp/testWithPrefix --dbname demo -compression: gzip -plugin executable: None -backup section: All Sections -object filtering: None -includes statistics: No -data file format: Multiple Data Files Per 
Segment -incremental: False - -start time: Wed May 05 2024 20:15:04 -end time: Wed May 05 2024 20:15:19 -duration: 0:00:15 - -backup status: Success - -database size: 500 MB -segment count: 4 - -count of database objects in backup: -aggregates 0 -casts 0 -collations 0 -constraints 0 -conversions 0 -default privileges 0 -database gucs 0 -event triggers 0 -extensions 3 -foreign data wrappers 0 -foreign servers 1 -functions 0 -indexes 0 -operator classes 0 -operator families 0 -operators 0 -procedural languages 0 -protocols 1 -resource groups 2 -resource queues 1 -roles 50 -rules 0 -schemas 4 -sequences 0 -tables 100 -tablespaces 0 -text search configurations 0 -text search dictionaries 0 -text search parsers 0 -text search templates 0 -triggers 0 -types 0 -user mappings 1 -views 0 - diff --git a/e2e_tests/src_data/gpbackup_20240506201504_report b/e2e_tests/src_data/gpbackup_20240506201504_report deleted file mode 100644 index 390583a..0000000 --- a/e2e_tests/src_data/gpbackup_20240506201504_report +++ /dev/null @@ -1,61 +0,0 @@ -Greenplum Database Backup Report - -timestamp key: 20240506201504 -gpdb version: 6.23.3 -gpbackup version: 1.27.0 - -database name: demo -command line: gpbackup --backup-dir /tmp/testNoPrefix --single-backup-dir --dbname demo -compression: gzip -plugin executable: None -backup section: All Sections -object filtering: None -includes statistics: No -data file format: Multiple Data Files Per Segment -incremental: False - -start time: Wed May 06 2024 20:15:04 -end time: Wed May 06 2024 20:15:19 -duration: 0:00:15 - -backup status: Success - -database size: 500 MB -segment count: 4 - -count of database objects in backup: -aggregates 0 -casts 0 -collations 0 -constraints 0 -conversions 0 -default privileges 0 -database gucs 0 -event triggers 0 -extensions 3 -foreign data wrappers 0 -foreign servers 1 -functions 0 -indexes 0 -operator classes 0 -operator families 0 -operators 0 -procedural languages 0 -protocols 1 -resource groups 2 -resource queues 1 -roles 50 -rules 0 -schemas 4 -sequences 0 -tables 100 -tablespaces 0 -text search configurations 0 -text search dictionaries 0 -text search parsers 0 -text search templates 0 -triggers 0 -types 0 -user mappings 1 -views 0 - diff --git a/e2e_tests/src_data/gpbackup_history.db b/e2e_tests/src_data/gpbackup_history.db deleted file mode 100644 index 7614686a652e3385ed54ff6694231dd4bf021132..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 36864 zcmeI5U5p!76@cya?s~@d%+250-I_4d08QM4-I=jxe?Y28woV%*Y{O>xsi-t^*0YX< z|7wpRLGX~EN+3WWRfxW{Dm+xGD)C0eQ<0GPlaPQXgpdFMLVZF=AW?}I&K=*mo|&DQ zB&Ldz=uG0Q@44rmbM7~HuIHY~USIjdrM6?M`~6|J;i%Qrp;S7Z`k1PwQmGQWM0j~e z7T(Nz1^Aivy^niaO05iElgPU%VeyGn?vLb`x#Qwh;d|M?XB80Ofdr5M5wNpR4TUU3_p1V|6BNWtAIZ+gyb{EVw zx&w8${+V5M`&oE>^3tUtyU&z<7VD(X)Ce0`_B zy;Z-WUTy4sZaf(IYQcHs=2O{h>Ey}ut)~xkI?&yb5Zlqwd)UR9yRosDEuB7{zID>= zM!UDy88>Zfw0F(!Hb$XhHqez&$nd%TJ@3HRuBEf3ayk9d zWZbiBoc)*!1j~dFqUt;NToD}6>gAmaA3wYE9Gy?xNovyCYPN^2;+cRER{42*ID!!y zp|IEQ4q%!a*-iDr_O811OnvL3ddeql_taC`8NE^o8um26s^#1mOfI{iO0xl%O=DzR z{IZOxkB@F-x4L%EvG)4ooFiAgZa4*c z(<#BLxcI9S`7T+Je=0vO{Y(0`bh_}9!o~bw@;7q-&V4<1LVQy^C;VO*2-)nbSre4; zKmter2_OL^a32w96_+d0vbeOgEC`a|9->|nDuPfpG^47m8#QfHgF|02)>pLF7u$oT z-R-Zf7}b?(Yrt;PM^(sa(EAR3Wn)G6H8l10E91RAxKD2_g&m1P(2=QYx~`jd@5pUf zH9beiLXN~pN2W2GBSY6bM@R3((WbdMlOuqrdybCWfg@w1s?X+VqqgZeI(#RNHq99Z z1DvC(rg2AyZtqCfG;^jNxx?(R}*!qn523o_g# z7eO@1%acXPIX7RZKIJZ9)$+8uOx?0sKLB#wfN5^FSsxmL zGjG=4%Pv={!H*X~k_7hgQqxS$G&J4RYO79nuOZ8 
zvZIYz9kGT}M|0ef7(Ldi+J-UH9R@Tsr(>;^KHw;HE2GbPD?=OB_@K_g{aaDCQbZ$L zD=~LCQxVzeVEF_3{2%_)KORT`2_OL^fCP{L z5`v5+(3-Ogkc|N{UFK4Ee?3WRIX6NE7jdD7^Qm5JPTMaT(wa$h#VC_*|QLarAf6X40&)&mp!kOYX3yX*hQ^N*&;pUB(f71-VH zv*aUWS$PUSkZwsS#w)mATA(Z? zDu9b)K?Qi|>~jGnr2Azb+`=RV*x@Zf0b)4hDe#@|TnoIUkf{P& za54o}Qsg)V_7Gi>rV8vyHM77Vs>w=dQSmaX$-wuJnk^-xJ zPl4?h$P`#ffn~qvf^7oGT(A=U{{I8_!Gr-L0VIF~kN^@u0!RP}AOR$R1dsp{xN8Fc E11t{X;{X5v From 0e5b775dcbab82a52c9e9bc66094d197b28596db Mon Sep 17 00:00:00 2001 From: woblerr Date: Sat, 6 Sep 2025 00:19:50 +0300 Subject: [PATCH 06/23] Refactor e2e tests for backup-clean command. --- e2e_tests/scripts/run_backup-clean.sh | 58 ------------- .../scripts/run_tests/common_functions.sh | 6 +- .../scripts/run_tests/run_backup-clean.sh | 87 +++++++++++++++++++ 3 files changed, 91 insertions(+), 60 deletions(-) delete mode 100755 e2e_tests/scripts/run_backup-clean.sh create mode 100755 e2e_tests/scripts/run_tests/run_backup-clean.sh diff --git a/e2e_tests/scripts/run_backup-clean.sh b/e2e_tests/scripts/run_backup-clean.sh deleted file mode 100755 index cd103af..0000000 --- a/e2e_tests/scripts/run_backup-clean.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/sh - -# Local image for e2e tests should be built before running tests. -# See make file for details. -# This test works with files from src_data directory. -# If new file with backup info is added to src_data, it's nessary to update test cases in this script. - -GPBACKMAN_TEST_COMMAND="backup-clean" - -HOME_DIR="/home/gpbackman" -SRC_DIR="${HOME_DIR}/src_data" -WORK_DIR="${HOME_DIR}/test_data" - -DATE_REGEX="(Mon|Tue|Wed|Thu|Fri|Sat|Sun)\s(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s(0[1-9]|[12][0-9]|3[01])\s[0-9]{4}\s(0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9])" -TIMESTAMP="" - -# Prepare data. -rm -rf "${WORK_DIR}/" -mkdir -p "${WORK_DIR}" -cp ${SRC_DIR}/gpbackup_history_incremental_plugin.yaml \ -${SRC_DIR}/gpbackup_history.db \ -${WORK_DIR} - -################################################################ -# Test 1. -# Delete all backups older than timestamp. -# Because other backup are incermental and we don't use the option --cascade, no backup will be deleted. -TEST_ID="1" - -TIMESTAMP="20230725101500" - -# Execute backup-delete commnad. - -gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---before-timestamp ${TIMESTAMP} \ ---plugin-config ${HOME_DIR}/gpbackup_s3_plugin.yaml - -GPBACKMAN_RESULT_SQLITE=$(gpbackman backup-info \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---deleted) - -TEST_CNT_SQL=2 - -# Check results. -# In sql db there is one predifined deleted backup - 20230725110310. -# So, it's ok that one deleted backup exists. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | cut -f9 -d'|' | awk '{$1=$1};1' | grep -E ${DATE_REGEX} | wc -l) -if [ "${result_cnt_sqlite}" != "${TEST_CNT_SQL}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nget_sqlite=${result_cnt_sqlite}, want=${TEST_CNT_SQL}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." 
-
-
-echo "[INFO] ${GPBACKMAN_TEST_COMMAND} all tests passed"
-exit 0
diff --git a/e2e_tests/scripts/run_tests/common_functions.sh b/e2e_tests/scripts/run_tests/common_functions.sh
index 8a15d63..4c40847 100644
--- a/e2e_tests/scripts/run_tests/common_functions.sh
+++ b/e2e_tests/scripts/run_tests/common_functions.sh
@@ -1,7 +1,9 @@
 #!/usr/bin/env bash
 
-readonly BIN_DIR="/home/gpadmin/gpbackman"
-readonly DATA_DIR="/data/master/gpseg-1"
+BIN_DIR="/home/gpadmin/gpbackman"
+DATA_DIR="/data/master/gpseg-1"
+
+TIMESTAMP_GREP_PATTERN='^[[:space:]][0-9]{14}'
 
 log_test_start() {
     local command="${1}"
diff --git a/e2e_tests/scripts/run_tests/run_backup-clean.sh b/e2e_tests/scripts/run_tests/run_backup-clean.sh
new file mode 100755
index 0000000..08af103
--- /dev/null
+++ b/e2e_tests/scripts/run_tests/run_backup-clean.sh
@@ -0,0 +1,87 @@
+#!/usr/bin/env bash
+set -Eeuo pipefail
+
+# In this test, we consistently clean up the backups created by the prepare/prepare_gpdb_backups.sh script,
+# using the --cascade option where required.
+# If the backup creation logic in that script changes, this test may start to fail and corrections will also need to be made here.
+#
+# First, we delete all local backups older than the 9th timestamp reported by the backup-info command,
+# there should be 3 deleted backups.
+#
+# Then we delete all local backups newer than the 3rd timestamp,
+# there should be a total of 5 deleted backups.
+#
+# Then we delete all S3 backups newer than the 5th timestamp,
+# there should be a total of 7 deleted backups.
+#
+# Then we delete all S3 backups older than the 5th timestamp,
+# there should be a total of 12 deleted backups.
+
+source "$(dirname "${BASH_SOURCE[0]}")/common_functions.sh"
+
+COMMAND="backup-clean"
+
+run_command() {
+    local label="${1}"; shift
+    echo "[INFO] Running ${COMMAND}: ${label}"
+    ${BIN_DIR}/gpbackman backup-clean --history-db ${DATA_DIR}/gpbackup_history.db "$@" || {
+        echo "[ERROR] ${COMMAND} ${label} failed"; exit 1;
+    }
+}
+
+count_deleted_backups() {
+    get_backup_info "count_deleted" | grep -E "${TIMESTAMP_GREP_PATTERN}" | awk -F'|' 'NF >= 9 && $NF !~ /^[[:space:]]*$/' | wc -l
+}
+
+get_cutoff_timestamp() {
+    local line_no="$1"
+    get_backup_info "get_line_${line_no}" | grep -E "${TIMESTAMP_GREP_PATTERN}" | sed -n "${line_no}p" | awk '{print $1}'
+}
+
+
+# Test 1: Clean local backups older than timestamp (--before-timestamp)
+# Without --cascade, no dependent backups
+test_clean_local_backups_before_timestamp() {
+    local want=3
+    local cutoff_timestamp=$(get_cutoff_timestamp 9)
+    run_command "clean_local_before_${cutoff_timestamp}" --before-timestamp "${cutoff_timestamp}"
+    local got=$(count_deleted_backups)
+    assert_equals "${want}" "${got}"
+}
+
+# Test 2: Clean local backups newer than timestamp (--after-timestamp)
+# Without --cascade, no dependent backups
+test_clean_local_backups_after_timestamp() {
+    local want=5
+    local cutoff_timestamp=$(get_cutoff_timestamp 3)
+    run_command "clean_local_after_${cutoff_timestamp}" --after-timestamp "${cutoff_timestamp}"
+    local got=$(count_deleted_backups)
+    assert_equals "${want}" "${got}"
+}
+
+# Test 3: Clean S3 backups newer than timestamp (--after-timestamp)
+# Without --cascade, no dependent backups
+test_clean_s3_backups_after_timestamp() {
+    local want=7
+    local cutoff_timestamp=$(get_cutoff_timestamp 5)
+    run_command "clean_s3_after_${cutoff_timestamp}" --after-timestamp "${cutoff_timestamp}" --plugin-config /home/gpadmin/gpbackup_s3_plugin.yaml
+    local got=$(count_deleted_backups)
+    assert_equals "${want}" "${got}"
+}
+ +# Test 4: Clean S3 backups older than timestamp (--before-timestamp) +# With --cascade +test_clean_s3_backups_before_timestamp() { + local want=12 + local cutoff_timestamp=$(get_cutoff_timestamp 5) + run_command "clean_s3_before_${cutoff_timestamp}" --before-timestamp "${cutoff_timestamp}" --plugin-config /home/gpadmin/gpbackup_s3_plugin.yaml --cascade + local got=$(count_deleted_backups) + assert_equals "${want}" "${got}" +} + +run_test "${COMMAND}" 1 test_clean_local_backups_before_timestamp +run_test "${COMMAND}" 2 test_clean_local_backups_after_timestamp +run_test "${COMMAND}" 3 test_clean_s3_backups_after_timestamp +run_test "${COMMAND}" 4 test_clean_s3_backups_before_timestamp + +log_all_tests_passed "${COMMAND}" From 6a860aeb8f446c1ae5fde6def3b390e2ff725d7f Mon Sep 17 00:00:00 2001 From: woblerr Date: Sat, 6 Sep 2025 00:31:47 +0300 Subject: [PATCH 07/23] Add variable and function unification. --- .../scripts/run_tests/common_functions.sh | 4 ++++ .../scripts/run_tests/run_backup-clean.sh | 5 ----- .../scripts/run_tests/run_backup-delete.sh | 15 ++++++--------- e2e_tests/scripts/run_tests/run_backup-info.sh | 18 +++++++++--------- e2e_tests/scripts/run_tests/run_report-info.sh | 8 ++++---- 5 files changed, 23 insertions(+), 27 deletions(-) diff --git a/e2e_tests/scripts/run_tests/common_functions.sh b/e2e_tests/scripts/run_tests/common_functions.sh index 4c40847..5912360 100644 --- a/e2e_tests/scripts/run_tests/common_functions.sh +++ b/e2e_tests/scripts/run_tests/common_functions.sh @@ -29,6 +29,10 @@ get_backup_info() { } } +count_deleted_backups() { + get_backup_info "count_deleted" | grep -E "${TIMESTAMP_GREP_PATTERN}" | awk -F'|' 'NF >= 9 && $NF !~ /^[[:space:]]*$/' | wc -l +} + assert_equals() { local expected="${1}" local actual="${2}" diff --git a/e2e_tests/scripts/run_tests/run_backup-clean.sh b/e2e_tests/scripts/run_tests/run_backup-clean.sh index 08af103..1816ef2 100755 --- a/e2e_tests/scripts/run_tests/run_backup-clean.sh +++ b/e2e_tests/scripts/run_tests/run_backup-clean.sh @@ -29,16 +29,11 @@ run_command() { } } -count_deleted_backups() { - get_backup_info "count_deleted" | grep -E "${TIMESTAMP_GREP_PATTERN}" | awk -F'|' 'NF >= 9 && $NF !~ /^[[:space:]]*$/' | wc -l -} - get_cutoff_timestamp() { local line_no="$1" get_backup_info "get_line_${line_no}" | grep -E "${TIMESTAMP_GREP_PATTERN}" | sed -n "${line_no}p" | awk '{print $1}' } - # Test 1: Clean local backups older than timestamp (--before-timestamp) # Without --cascade, no dependent backups test_clean_local_backups_before_timestamp() { diff --git a/e2e_tests/scripts/run_tests/run_backup-delete.sh b/e2e_tests/scripts/run_tests/run_backup-delete.sh index e664418..be6588e 100755 --- a/e2e_tests/scripts/run_tests/run_backup-delete.sh +++ b/e2e_tests/scripts/run_tests/run_backup-delete.sh @@ -18,7 +18,7 @@ get_backup_info_for_timestamp(){ # Test 1: Delete local full backup test_delete_local_full() { - local timestamp=$(get_backup_info "get_local_full" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep -v plugin | head -1 | awk '{print $1}') + local timestamp=$(get_backup_info "get_local_full" --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep -v plugin | head -1 | awk '{print $1}') if [ -z "${timestamp}" ]; then echo "[ERROR] Could not find full local backup timestamp" @@ -40,7 +40,7 @@ test_delete_local_full() { # Test 2: Delete S3 incremental backup test_delete_s3_incremental() { - local timestamp=$(get_backup_info "get_s3_incremental" --type incremental | grep -E '^[[:space:]][0-9]{14} ' | grep plugin | 
head -1 | awk '{print $1}') + local timestamp=$(get_backup_info "get_s3_incremental" --type incremental | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep plugin | head -1 | awk '{print $1}') if [ -z "${timestamp}" ]; then echo "[ERROR] Could not find S3 incremental backup" @@ -62,20 +62,17 @@ test_delete_s3_incremental() { # Test 3: Delete S3 full backup with cascade test_delete_s3_full_cascade() { - local timestamp=$(get_backup_info "get_s3_full" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep plugin | tail -1 | awk '{print $1}') + local timestamp=$(get_backup_info "get_s3_full" --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep plugin | tail -1 | awk '{print $1}') if [ -z "${timestamp}" ]; then echo "[ERROR] Could not find S3 full backup" exit 1 fi - - run_command "delete_s3_full_cascade" --timestamp "${timestamp}" --plugin-config /home/gpadmin/gpbackup_s3_plugin.yaml --cascade - - local deleted_count=$(get_backup_info "count_deleted" --deleted | grep -E '^[[:space:]][0-9]{14} ' | awk -F'|' 'NF >= 9 && $NF !~ /^[[:space:]]*$/' | wc -l) - # Expected: 1 backup from test 1 + 1 from test 2 + 2 backups (incr + full) from this test = 4 total local want=4 - [ "${deleted_count}" -eq "${want}" ] || { echo "[ERROR] Expected ${want} backups to be deleted, but found ${deleted_count}"; exit 1; } + run_command "delete_s3_full_cascade" --timestamp "${timestamp}" --plugin-config /home/gpadmin/gpbackup_s3_plugin.yaml --cascade + local got=$(count_deleted_backups) + assert_equals "${want}" "${got}" } # Test 4: Try to delete non-existent backup (should fail) diff --git a/e2e_tests/scripts/run_tests/run_backup-info.sh b/e2e_tests/scripts/run_tests/run_backup-info.sh index dfb7497..1ea2f8d 100755 --- a/e2e_tests/scripts/run_tests/run_backup-info.sh +++ b/e2e_tests/scripts/run_tests/run_backup-info.sh @@ -14,15 +14,15 @@ run_command(){ # Test 1: Count all backups in history database test_count_all_backups() { local want=12 - local got=$(run_command total_backups | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + local got=$(run_command total_backups | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) assert_equals "${want}" "${got}" } # Test 2: Count all full backups test_count_full_backups() { local want=7 - local got1=$(run_command total_full_backups | grep -E '^[[:space:]][0-9]{14} ' | grep full | wc -l) - local got2=$(run_command filter_full_backups --type full | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + local got1=$(run_command total_full_backups | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep full | wc -l) + local got2=$(run_command filter_full_backups --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) assert_equals_both "${want}" "${got1}" "${got2}" } @@ -31,22 +31,22 @@ test_count_full_backups() { # from the output with the --type full flag test_count_incremental_backups() { local want=3 - local got1=$(run_command total_incremental_backups | grep -E '^[[:space:]][0-9]{14} ' | grep incremental | wc -l) - local got2=$(run_command filter_incremental_backups --type incremental | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + local got1=$(run_command total_incremental_backups | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep incremental | wc -l) + local got2=$(run_command filter_incremental_backups --type incremental | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) assert_equals_both "${want}" "${got1}" "${got2}" } # Test 4: Count backups that include table sch2.tbl_c test_count_include_table_backups() { local want=2 - local got=$(run_command total_include_table_backups --table sch2.tbl_c | grep -E 
'^[[:space:]][0-9]{14} ' | wc -l) + local got=$(run_command total_include_table_backups --table sch2.tbl_c | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) assert_equals "${want}" "${got}" } # Test 5: Count backups that exclude table sch2.tbl_d test_count_exclude_table_backups() { local want=2 - local got=$(run_command total_exclude_table_backups --table sch2.tbl_d --exclude | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + local got=$(run_command total_exclude_table_backups --table sch2.tbl_d --exclude | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) assert_equals "${want}" "${got}" } @@ -54,14 +54,14 @@ test_count_exclude_table_backups() { # Use --type full to filter only full backups test_count_include_table_full_backups() { local want=1 - local got=$(run_command total_include_table_full_backups --table sch2.tbl_c --type full | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + local got=$(run_command total_include_table_full_backups --table sch2.tbl_c --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) assert_equals "${want}" "${got}" } # Test 7: Count incremental backups that exclude table sch2.tbl_d test_count_exclude_table_incremental_backups() { local want=1 - local got=$(run_command total_exclude_table_incremental_backups --table sch2.tbl_d --exclude --type incremental | grep -E '^[[:space:]][0-9]{14} ' | wc -l) + local got=$(run_command total_exclude_table_incremental_backups --table sch2.tbl_d --exclude --type incremental | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) assert_equals "${want}" "${got}" } diff --git a/e2e_tests/scripts/run_tests/run_report-info.sh b/e2e_tests/scripts/run_tests/run_report-info.sh index 9b03d13..52ec165 100755 --- a/e2e_tests/scripts/run_tests/run_report-info.sh +++ b/e2e_tests/scripts/run_tests/run_report-info.sh @@ -15,7 +15,7 @@ run_command(){ # Test 1: Get report info for full local backup (without backup-dir) test_report_full_local_no_dir() { - local timestamp=$(get_backup_info "get_full_local" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep -v plugin | head -1 | awk '{print $1}') + local timestamp=$(get_backup_info "get_full_local" --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep -v plugin | head -1 | awk '{print $1}') if [ -z "${timestamp}" ]; then echo "[ERROR] Could not find full local backup timestamp" @@ -31,7 +31,7 @@ test_report_full_local_no_dir() { # Test 2: Get report info for full local backup (with backup-dir) test_report_full_local_with_dir() { - local timestamp=$(get_backup_info "get_full_local_with_dir" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep -v plugin | head -1 | awk '{print $1}') + local timestamp=$(get_backup_info "get_full_local_with_dir" --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep -v plugin | head -1 | awk '{print $1}') if [ -z "${timestamp}" ]; then echo "[ERROR] Could not find full local backup timestamp for backup-dir test" @@ -48,7 +48,7 @@ test_report_full_local_with_dir() { # Test 3: Get report info for full S3 backup (without plugin-report-file-path) test_report_s3_no_plugin_path() { - local timestamp=$(get_backup_info "get_full_s3" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep plugin | head -1 | awk '{print $1}') + local timestamp=$(get_backup_info "get_full_s3" --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep plugin | head -1 | awk '{print $1}') if [ -z "${timestamp}" ]; then echo "[ERROR] Could not find full s3 backup timestamp" @@ -64,7 +64,7 @@ test_report_s3_no_plugin_path() { # Test 4: Get report info for full S3 backup (with plugin-report-file-path) 
 test_report_s3_with_plugin_path() {
-    local timestamp=$(get_backup_info "get_full_s3" --type full | grep -E '^[[:space:]][0-9]{14} ' | grep plugin | head -1 | awk '{print $1}')
+    local timestamp=$(get_backup_info "get_full_s3" --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep plugin | head -1 | awk '{print $1}')
 
     if [ -z "${timestamp}" ]; then
         echo "[ERROR] Could not find full s3 backup timestamp for plugin-report-file-path test"
From 9cfb4ade1800bf2686df8ef0269d85c1deb321b0 Mon Sep 17 00:00:00 2001
From: woblerr
Date: Wed, 10 Sep 2025 11:27:32 +0300
Subject: [PATCH 08/23] Refactor e2e tests for history-clean command.

---
 .../scripts/run_tests/common_functions.sh     |  6 ++
 .../scripts/run_tests/run_history-clean.sh    | 60 +++++++++++++++
 2 files changed, 66 insertions(+)
 create mode 100755 e2e_tests/scripts/run_tests/run_history-clean.sh

diff --git a/e2e_tests/scripts/run_tests/common_functions.sh b/e2e_tests/scripts/run_tests/common_functions.sh
index 5912360..b85a978 100644
--- a/e2e_tests/scripts/run_tests/common_functions.sh
+++ b/e2e_tests/scripts/run_tests/common_functions.sh
@@ -2,6 +2,7 @@
 
 BIN_DIR="/home/gpadmin/gpbackman"
 DATA_DIR="/data/master/gpseg-1"
+PLUGIN_CFG="/home/gpadmin/gpbackup_s3_plugin.yaml"
 
 TIMESTAMP_GREP_PATTERN='^[[:space:]][0-9]{14}'
 
@@ -33,6 +34,11 @@ count_deleted_backups() {
     get_backup_info "count_deleted" | grep -E "${TIMESTAMP_GREP_PATTERN}" | awk -F'|' 'NF >= 9 && $NF !~ /^[[:space:]]*$/' | wc -l
 }
 
+get_cutoff_timestamp() {
+    local line_no="$1"
+    get_backup_info "get_line_${line_no}" | grep -E "${TIMESTAMP_GREP_PATTERN}" | sed -n "${line_no}p" | awk '{print $1}'
+}
+
 assert_equals() {
     local expected="${1}"
     local actual="${2}"
diff --git a/e2e_tests/scripts/run_tests/run_history-clean.sh b/e2e_tests/scripts/run_tests/run_history-clean.sh
new file mode 100755
index 0000000..febf0a8
--- /dev/null
+++ b/e2e_tests/scripts/run_tests/run_history-clean.sh
@@ -0,0 +1,60 @@
+#!/usr/bin/env bash
+set -Eeuo pipefail
+
+# During the test, we consistently clean up the backups created by the prepare/prepare_gpdb_backups.sh script.
+# We remove deleted backups from the history db and make sure that they are successfully purged.
+# It is checked that the number of deleted backups is 0.
+
+# If the backup logic in that script changes, this test may fail, and corrections will also need to be made here.
+
+# First, we delete all local backups older than the 9th timestamp reported by the backup-info command.
+
+# Then we delete all S3 backups older than the 2nd timestamp reported by the backup-info command.
+
+# After each deletion, we clean up the history db.
+ +source "$(dirname "${BASH_SOURCE[0]}")/common_functions.sh" + +COMMAND="history-clean" + +run_command(){ + local label="${1}"; shift + echo "[INFO] Running ${COMMAND}: ${label}" + ${BIN_DIR}/gpbackman history-clean --history-db ${DATA_DIR}/gpbackup_history.db "$@" || { echo "[ERROR] ${COMMAND} ${label} failed"; exit 1; } +} + +run_backup_clean() { + local label="${1}"; shift + echo "[INFO] Running ${COMMAND}: ${label}" + ${BIN_DIR}/gpbackman backup-clean --history-db ${DATA_DIR}/gpbackup_history.db "$@" || { + echo "[ERROR] ${COMMAND} ${label} failed"; exit 1; + } +} + +# Test 1: Clean from history db local backups older than timestamp (--before-timestamp) +test_history_clean_local_before_timestamp(){ + # Delete local backups + local cutoff_timestamp=$(get_cutoff_timestamp 9) + run_backup_clean "clean_before_${cutoff_timestamp}" --before-timestamp "${cutoff_timestamp}" + run_command "clean_before_${cutoff_timestamp}" --before-timestamp "${cutoff_timestamp}" + local want=0 + # Count deleted backups + local got=$(count_deleted_backups) + assert_equals "${want}" "${got}" +} + +# Test 2: Clean from history db S3 backups older than timestamp (--before-timestamp) +test_history_clean_s3_before_timestamp(){ + # Delete S3 backups + local cutoff_timestamp=$(get_cutoff_timestamp 2) + run_backup_clean "clean_before_${cutoff_timestamp}" --before-timestamp "${cutoff_timestamp}" --plugin-config "${PLUGIN_CFG}" --cascade + run_command "clean_before_${cutoff_timestamp}" --before-timestamp "${cutoff_timestamp}" + local want=0 + local got=$(count_deleted_backups) + assert_equals "${want}" "${got}" +} + +run_test "${COMMAND}" 1 test_history_clean_local_before_timestamp +run_test "${COMMAND}" 2 test_history_clean_s3_before_timestamp + +log_all_tests_passed "${COMMAND}" From 817792e1421cbeccd72dd47de3f1ad1c4fb86871 Mon Sep 17 00:00:00 2001 From: woblerr Date: Wed, 10 Sep 2025 11:28:04 +0300 Subject: [PATCH 09/23] Use common function and variable. 
--- e2e_tests/scripts/run_tests/run_backup-clean.sh | 9 ++------- e2e_tests/scripts/run_tests/run_backup-delete.sh | 8 ++++---- e2e_tests/scripts/run_tests/run_report-info.sh | 6 +++--- 3 files changed, 9 insertions(+), 14 deletions(-) diff --git a/e2e_tests/scripts/run_tests/run_backup-clean.sh b/e2e_tests/scripts/run_tests/run_backup-clean.sh index 1816ef2..caa3341 100755 --- a/e2e_tests/scripts/run_tests/run_backup-clean.sh +++ b/e2e_tests/scripts/run_tests/run_backup-clean.sh @@ -29,11 +29,6 @@ run_command() { } } -get_cutoff_timestamp() { - local line_no="$1" - get_backup_info "get_line_${line_no}" | grep -E "${TIMESTAMP_GREP_PATTERN}" | sed -n "${line_no}p" | awk '{print $1}' -} - # Test 1: Clean local backups older than timestamp (--before-timestamp) # Without --cascade, no dependent backups test_clean_local_backups_before_timestamp() { @@ -59,7 +54,7 @@ test_clean_local_backups_after_timestamp() { test_clean_s3_backups_after_timestamp() { local want=7 local cutoff_timestamp=$(get_cutoff_timestamp 5) - run_command "clean_s3_after_${cutoff_timestamp}" --after-timestamp "${cutoff_timestamp}" --plugin-config /home/gpadmin/gpbackup_s3_plugin.yaml + run_command "clean_s3_after_${cutoff_timestamp}" --after-timestamp "${cutoff_timestamp}" --plugin-config "${PLUGIN_CFG}" local got=$(count_deleted_backups) assert_equals "${want}" "${got}" } @@ -69,7 +64,7 @@ test_clean_s3_backups_after_timestamp() { test_clean_s3_backups_before_timestamp() { local want=12 local cutoff_timestamp=$(get_cutoff_timestamp 5) - run_command "clean_s3_before_${cutoff_timestamp}" --before-timestamp "${cutoff_timestamp}" --plugin-config /home/gpadmin/gpbackup_s3_plugin.yaml --cascade + run_command "clean_s3_before_${cutoff_timestamp}" --before-timestamp "${cutoff_timestamp}" --plugin-config "${PLUGIN_CFG}" --cascade local got=$(count_deleted_backups) assert_equals "${want}" "${got}" } diff --git a/e2e_tests/scripts/run_tests/run_backup-delete.sh b/e2e_tests/scripts/run_tests/run_backup-delete.sh index be6588e..fb90b8b 100755 --- a/e2e_tests/scripts/run_tests/run_backup-delete.sh +++ b/e2e_tests/scripts/run_tests/run_backup-delete.sh @@ -46,9 +46,9 @@ test_delete_s3_incremental() { echo "[ERROR] Could not find S3 incremental backup" exit 1 fi - - run_command "delete_s3_incremental" --timestamp "${timestamp}" --plugin-config /home/gpadmin/gpbackup_s3_plugin.yaml - + + run_command "delete_s3_incremental" --timestamp "${timestamp}" --plugin-config "${PLUGIN_CFG}" + local deleted_backup=$(get_backup_info_for_timestamp "${timestamp}") local date_deleted=$(echo "${deleted_backup}" | grep "${timestamp}" | awk -F'|' '{print $NF}' | xargs) @@ -70,7 +70,7 @@ test_delete_s3_full_cascade() { fi # Expected: 1 backup from test 1 + 1 from test 2 + 2 backups (incr + full) from this test = 4 total local want=4 - run_command "delete_s3_full_cascade" --timestamp "${timestamp}" --plugin-config /home/gpadmin/gpbackup_s3_plugin.yaml --cascade + run_command "delete_s3_full_cascade" --timestamp "${timestamp}" --plugin-config "${PLUGIN_CFG}" --cascade local got=$(count_deleted_backups) assert_equals "${want}" "${got}" } diff --git a/e2e_tests/scripts/run_tests/run_report-info.sh b/e2e_tests/scripts/run_tests/run_report-info.sh index 52ec165..c45bb7e 100755 --- a/e2e_tests/scripts/run_tests/run_report-info.sh +++ b/e2e_tests/scripts/run_tests/run_report-info.sh @@ -55,7 +55,7 @@ test_report_s3_no_plugin_path() { exit 1 fi - local report_output=$(run_command "s3_without_plugin_report_file_path" --timestamp "${timestamp}" --plugin-config 
~/gpbackup_s3_plugin.yaml) + local report_output=$(run_command "s3_without_plugin_report_file_path" --timestamp "${timestamp}" --plugin-config "${PLUGIN_CFG}") echo "${report_output}" | grep -q "^Greenplum Database Backup Report" || { echo "[ERROR] Expected report header"; exit 1; } echo "${report_output}" | grep -q "timestamp key:.*${timestamp}" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } @@ -72,8 +72,8 @@ test_report_s3_with_plugin_path() { fi local report_dir="/backup/test/backups/${timestamp:0:8}/${timestamp}" - local report_output=$(run_command "s3_with_plugin_report_file_path" --timestamp "${timestamp}" --plugin-config ~/gpbackup_s3_plugin.yaml --plugin-report-file-path ${report_dir}) - + local report_output=$(run_command "s3_with_plugin_report_file_path" --timestamp "${timestamp}" --plugin-config "${PLUGIN_CFG}" --plugin-report-file-path "${report_dir}") + echo "${report_output}" | grep -q "^Greenplum Database Backup Report" || { echo "[ERROR] Expected report header"; exit 1; } echo "${report_output}" | grep -q "timestamp key:.*${timestamp}" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } echo "${report_output}" | grep -q "plugin executable:.*gpbackup_s3_plugin" || { echo "[ERROR] Expected 'plugin executable: gpbackup_s3_plugin' for s3 backup"; exit 1; } From 597ea2f456dce1d98a5cd92acf606cd5b2a9e771 Mon Sep 17 00:00:00 2001 From: woblerr Date: Thu, 11 Sep 2025 00:06:34 +0300 Subject: [PATCH 10/23] Refactor e2e tests for history-migrate command. --- e2e_tests/docker-compose.yml | 3 +- e2e_tests/scripts/run_history-clean.sh | 53 --------- e2e_tests/scripts/run_history-migrate.sh | 74 ------------ .../scripts/run_tests/common_functions.sh | 7 +- .../scripts/run_tests/run_history-migrate.sh | 111 ++++++++++++++++++ 5 files changed, 117 insertions(+), 131 deletions(-) delete mode 100755 e2e_tests/scripts/run_history-clean.sh delete mode 100755 e2e_tests/scripts/run_history-migrate.sh mode change 100644 => 100755 e2e_tests/scripts/run_tests/common_functions.sh create mode 100755 e2e_tests/scripts/run_tests/run_history-migrate.sh diff --git a/e2e_tests/docker-compose.yml b/e2e_tests/docker-compose.yml index 8badab3..aaa8a44 100644 --- a/e2e_tests/docker-compose.yml +++ b/e2e_tests/docker-compose.yml @@ -71,9 +71,10 @@ services: volumes: - ./conf/gpbackup_s3_plugin.yaml:/home/gpadmin/gpbackup_s3_plugin.yaml - ./scripts/prepare/gpdb_init:/docker-entrypoint-initdb.d - - gpbackman_bin:/home/gpadmin/gpbackman - ./scripts/prepare/prepare_gpdb_backups.sh:/home/gpadmin/prepare_gpdb_backups.sh - ./scripts/run_tests:/home/gpadmin/run_tests + - ./src_data:/home/gpadmin/src_data + - gpbackman_bin:/home/gpadmin/gpbackman networks: - e2e diff --git a/e2e_tests/scripts/run_history-clean.sh b/e2e_tests/scripts/run_history-clean.sh deleted file mode 100755 index cd55329..0000000 --- a/e2e_tests/scripts/run_history-clean.sh +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# Local image for e2e tests should be built before running tests. -# See make file for details. -# This test works with files from src_data directory. -# If new file with backup info is added to src_data, it's nessary to update test cases in this script. - -GPBACKMAN_TEST_COMMAND="history-clean" - -SRC_DIR="/home/gpbackman/src_data" -WORK_DIR="/home/gpbackman/test_data" - -DATE_REGEX="(Mon|Tue|Wed|Thu|Fri|Sat|Sun)\s(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s(0[1-9]|[12][0-9]|3[01])\s[0-9]{4}\s(0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9])" - -# Prepare data. 
-rm -rf "${WORK_DIR}/" -mkdir -p "${WORK_DIR}" -cp ${SRC_DIR}/gpbackup_history_failure_plugin.yaml \ -${SRC_DIR}/gpbackup_history_incremental_plugin.yaml \ -${SRC_DIR}/gpbackup_history.db \ -${WORK_DIR} - -################################################################ -# Test 1. -# Delete backups from history database older than timestamp. -# There are no failed or deleted backups after command execution. -TEST_ID="1" - -TIMESTAMP="20231212101500" - -# Execute history-clean commnad. - -gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---before-timestamp ${TIMESTAMP} \ - -GPBACKMAN_RESULT_SQLITE=$(gpbackman backup-info \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---deleted --failed) - -TEST_CNT=0 - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | cut -f9 -d'|' | awk '{$1=$1};1' | grep -E ${DATE_REGEX} | wc -l) -if [ "${result_cnt_sqlite}" != "${TEST_CNT}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nget_yaml=${result_cnt_yaml}, get_sqlite=${result_cnt_sqlite}, want=${TEST_CNT}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} all tests passed" -exit 0 diff --git a/e2e_tests/scripts/run_history-migrate.sh b/e2e_tests/scripts/run_history-migrate.sh deleted file mode 100755 index 578af6c..0000000 --- a/e2e_tests/scripts/run_history-migrate.sh +++ /dev/null @@ -1,74 +0,0 @@ -#!/bin/sh - -# Local image for e2e tests should be built before running tests. -# See make file for details. -# This test works with files from src_data directory. -# If new file with backup info is added to src_data, it's nessary to update test cases in this script. - -GPBACKMAN_TEST_COMMAND="history-migrate" - -SRC_DIR="/home/gpbackman/src_data" -WORK_DIR="/home/gpbackman/test_data" - -# Prepare data. -rm -rf "${WORK_DIR}/" -mkdir -p "${WORK_DIR}" -cp ${SRC_DIR}/gpbackup_history_dataonly_nodata_plugin.yaml \ -${SRC_DIR}/gpbackup_history_metadata_plugin.yaml \ -${WORK_DIR} - -# Execute history-migrate commnad. -gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-file ${WORK_DIR}/gpbackup_history_dataonly_nodata_plugin.yaml \ ---history-file ${WORK_DIR}/gpbackup_history_metadata_plugin.yaml \ ---history-db ${WORK_DIR}/gpbackup_history.db - -################################################################ -# Test 1. -# Check that in source data there are files with .migrated type after migration. -# Format: -# source_file.megrated. -TEST_ID="1" - -REGEX_LIST='''gpbackup_history_dataonly_nodata_plugin.yaml.migrated -gpbackup_history_metadata_plugin.yaml.migrated -''' - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -for i in ${REGEX_LIST} -do - if [ ! -f "${WORK_DIR}/${i}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nFile ${i} not found." - exit 1 - fi -done -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -################################################################ -# Test 2. -# Compare results of backup-info command before and after migration. -TEST_ID="2" - -TEST_CNT_SQL=2 - -# backup-info commnad for sqlite backup history format. -# This result from migrated data. -GPBACKMAN_RESULT_SQLITE=$(gpbackman backup-info \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---deleted \ ---failed) - -DATE_REGEX="Success" - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." 
-result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | cut -f3 -d'|' | awk '{$1=$1};1' | grep -E ${DATE_REGEX} | wc -l) -if [ "${result_cnt_sqlite}" != "${TEST_CNT_SQL}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nget_yaml:\n${GPBACKMAN_RESULT_YAML}\nget_sqlite:\n${GPBACKMAN_RESULT_SQLITE}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} all tests passed" -exit 0 diff --git a/e2e_tests/scripts/run_tests/common_functions.sh b/e2e_tests/scripts/run_tests/common_functions.sh old mode 100644 new mode 100755 index b85a978..45e3ab0 --- a/e2e_tests/scripts/run_tests/common_functions.sh +++ b/e2e_tests/scripts/run_tests/common_functions.sh @@ -25,18 +25,19 @@ log_all_tests_passed() { get_backup_info() { local label="${1}"; shift - ${BIN_DIR}/gpbackman backup-info --history-db ${DATA_DIR}/gpbackup_history.db --deleted --failed "$@" || { + echo "[INFO] Running backup-info: ${label}" + ${BIN_DIR}/gpbackman backup-info --deleted --failed "$@" || { echo "[ERROR] backup-info ${label} failed"; exit 1; } } count_deleted_backups() { - get_backup_info "count_deleted" | grep -E "${TIMESTAMP_GREP_PATTERN}" | awk -F'|' 'NF >= 9 && $NF !~ /^[[:space:]]*$/' | wc -l + get_backup_info "count_deleted" --history-db ${DATA_DIR}/gpbackup_history.db | grep -E "${TIMESTAMP_GREP_PATTERN}" | awk -F'|' 'NF >= 9 && $NF !~ /^[[:space:]]*$/' | wc -l } get_cutoff_timestamp() { local line_no="$1" - get_backup_info "get_line_${line_no}" | grep -E "${TIMESTAMP_GREP_PATTERN}" | sed -n "${line_no}p" | awk '{print $1}' + get_backup_info "get_line_${line_no}" --history-db ${DATA_DIR}/gpbackup_history.db | grep -E "${TIMESTAMP_GREP_PATTERN}" | sed -n "${line_no}p" | awk '{print $1}' } assert_equals() { diff --git a/e2e_tests/scripts/run_tests/run_history-migrate.sh b/e2e_tests/scripts/run_tests/run_history-migrate.sh new file mode 100755 index 0000000..229bff0 --- /dev/null +++ b/e2e_tests/scripts/run_tests/run_history-migrate.sh @@ -0,0 +1,111 @@ +#!/usr/bin/env bash +set -Eeuo pipefail + +# Tests for history-migrate: +# 1) Migrate gpbackup_history_full_local.yaml into an empty DB in /tmp and expect 2 backups. +# 2) Migrate gpbackup_history_full_local.yaml into existing DB prepared by setup and expect base+2. +# 3) Migrate all files from src_data into a fresh empty DB in /tmp and expect 14 backups. +# 4) Migrate all files from src_data into existing DB (excluding already migrated full_local) and expect base+12. 
+ +source "$(dirname "${BASH_SOURCE[0]}")/common_functions.sh" + +COMMAND="history-migrate" +SRC_DIR="/home/gpadmin/src_data" +WORK_BASE="/tmp/history_migrate_tests" + +TEST_FILE_FULL_LOCAL="gpbackup_history_full_local.yaml" + +run_command(){ + local label="${1}"; shift + echo "[INFO] Running ${COMMAND}: ${label}" + ${BIN_DIR}/gpbackman history-migrate "$@" || { echo "[ERROR] ${COMMAND} ${label} failed"; exit 1; } +} + +prepare_workdir(){ + local name="${1}" + local dir="${WORK_BASE}/${name}" + rm -rf "${dir}" && mkdir -p "${dir}" + echo "${dir}" +} + +# Test 1: Single file into empty DB in /tmp +# Expect 2 backups from file +test_migrate_single_into_empty_db(){ + local workdir=$(prepare_workdir test1) + cp "${SRC_DIR}/${TEST_FILE_FULL_LOCAL}" "${workdir}/" + local db="${workdir}/gpbackup_history.db" + run_command "single_into_empty_db" --history-file "${workdir}/${TEST_FILE_FULL_LOCAL}" --history-db "${db}" + local want=2 + local got=$(get_backup_info total_full_backups --history-db ${db} | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals "${want}" "${got}" +} + +# Test 2: Single file into existing DB (prepared by setup) +# 12 backups from initial setup + 2 from file +test_migrate_single_into_existing_db(){ + local workdir=$(prepare_workdir test2) + cp "${SRC_DIR}/${TEST_FILE_FULL_LOCAL}" "${workdir}/" + local db="${DATA_DIR}/gpbackup_history.db" + run_command "single_into_existing_db" --history-file "${workdir}/${TEST_FILE_FULL_LOCAL}" --history-db "${db}" + local want=14 + local got=$(get_backup_info total_full_backups --history-db ${db} | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals "${want}" "${got}" +} + +# Test 3: Duplicate migration into the same DB must fail with UNIQUE constraint +test_migrate_duplicate_into_existing_db_fail(){ + local workdir=$(prepare_workdir test3) + cp "${SRC_DIR}/${TEST_FILE_FULL_LOCAL}" "${workdir}/" + local db="${DATA_DIR}/gpbackup_history.db" + set +e + set -x + ${BIN_DIR}/gpbackman history-migrate --history-file "${workdir}/${TEST_FILE_FULL_LOCAL}" --history-db "${db}" + if [ $? 
-eq 0 ]; then + echo "[ERROR] Expected failure, but command succeeded" + exit 1 + fi + set +x + set -e +} + +# Test 4: All files into fresh empty DB in /tmp +# 14 backups from all files +test_migrate_all_into_empty_db(){ + local workdir=$(prepare_workdir test3) + cp "${SRC_DIR}"/*.yaml "${workdir}/" + local db="${workdir}/gpbackup_history.db" + local args=() + for f in "${workdir}"/*.yaml; do + args+=(--history-file "${f}") + done + run_command "all_into_empty_db" "${args[@]}" --history-db "${db}" + local want=14 + local got=$(get_backup_info total_full_backups --history-db "${db}" | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals "${want}" "${got}" +} + +# Test 5: All files into existing DB +# 12 backups from initial setup + 12 from files +# The duplicates, already loaded in test2, should be skipped +test_migrate_all_into_existing_db(){ +local workdir=$(prepare_workdir test4) +cp "${SRC_DIR}"/*.yaml "${workdir}/" +rm -f "${workdir}/${TEST_FILE_FULL_LOCAL}" +local db="${DATA_DIR}/gpbackup_history.db" +local args=() +for f in "${workdir}"/*.yaml; do + args+=(--history-file "${f}") +done +run_command "all_into_existing_db" "${args[@]}" --history-db "${db}" +local want=24 +local got=$(get_backup_info total_full_backups --history-db "${db}" | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) +assert_equals "${want}" "${got}" +} + +run_test "${COMMAND}" 1 test_migrate_single_into_empty_db +run_test "${COMMAND}" 2 test_migrate_single_into_existing_db +run_test "${COMMAND}" 3 test_migrate_duplicate_into_existing_db_fail +run_test "${COMMAND}" 4 test_migrate_all_into_empty_db +run_test "${COMMAND}" 5 test_migrate_all_into_existing_db + +log_all_tests_passed "${COMMAND}" From 0d47e731dfbddbdc46dca5d1ca2c8b32c5d14922 Mon Sep 17 00:00:00 2001 From: woblerr Date: Thu, 11 Sep 2025 00:10:51 +0300 Subject: [PATCH 11/23] Fix e2e test for history-migrate. --- e2e_tests/scripts/run_tests/run_history-migrate.sh | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/e2e_tests/scripts/run_tests/run_history-migrate.sh b/e2e_tests/scripts/run_tests/run_history-migrate.sh index 229bff0..9af650f 100755 --- a/e2e_tests/scripts/run_tests/run_history-migrate.sh +++ b/e2e_tests/scripts/run_tests/run_history-migrate.sh @@ -1,11 +1,12 @@ #!/usr/bin/env bash set -Eeuo pipefail -# Tests for history-migrate: -# 1) Migrate gpbackup_history_full_local.yaml into an empty DB in /tmp and expect 2 backups. -# 2) Migrate gpbackup_history_full_local.yaml into existing DB prepared by setup and expect base+2. -# 3) Migrate all files from src_data into a fresh empty DB in /tmp and expect 14 backups. -# 4) Migrate all files from src_data into existing DB (excluding already migrated full_local) and expect base+12. +# Tests for history-migrate (current): +# 1) Migrate gpbackup_history_full_local.yaml into an empty DB in /tmp -> expect 2 backups. +# 2) Migrate the same file into an existing DB prepared by setup -> expect 12 base + 2 = 14 total. +# 3) Duplicate migration into the same DB -> must fail with UNIQUE constraint. +# 4) Migrate all YAML files into a fresh empty DB in /tmp -> expect 14 backups total. +# 5) Migrate all YAML files into an existing DB (excluding already migrated full_local) -> expect 12 base + 14 = 26 total; duplicates skipped. 
source "$(dirname "${BASH_SOURCE[0]}")/common_functions.sh" @@ -97,7 +98,7 @@ for f in "${workdir}"/*.yaml; do args+=(--history-file "${f}") done run_command "all_into_existing_db" "${args[@]}" --history-db "${db}" -local want=24 +local want=26 local got=$(get_backup_info total_full_backups --history-db "${db}" | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) assert_equals "${want}" "${got}" } From a5cfda3c5a92bdd59844d8c6ba6192070c294b7a Mon Sep 17 00:00:00 2001 From: woblerr Date: Thu, 11 Sep 2025 00:14:53 +0300 Subject: [PATCH 12/23] Refactor tests. --- .../scripts/run_tests/run_backup-delete.sh | 16 ++++++------- .../scripts/run_tests/run_backup-info.sh | 24 +++++++------------ .../scripts/run_tests/run_history-migrate.sh | 4 ---- .../scripts/run_tests/run_report-info.sh | 8 +++---- 4 files changed, 20 insertions(+), 32 deletions(-) diff --git a/e2e_tests/scripts/run_tests/run_backup-delete.sh b/e2e_tests/scripts/run_tests/run_backup-delete.sh index fb90b8b..11ca8fb 100755 --- a/e2e_tests/scripts/run_tests/run_backup-delete.sh +++ b/e2e_tests/scripts/run_tests/run_backup-delete.sh @@ -13,12 +13,12 @@ run_command(){ get_backup_info_for_timestamp(){ local timestamp="${1}" - ${BIN_DIR}/gpbackman backup-info --history-db ${DATA_DIR}/gpbackup_history.db --deleted --failed | grep "${timestamp}" || echo "No info found for timestamp ${timestamp}" + get_backup_info "get_specific_backup" --history-db ${DATA_DIR}/gpbackup_history.db | grep "${timestamp}" || echo "No info found for timestamp ${timestamp}" } # Test 1: Delete local full backup test_delete_local_full() { - local timestamp=$(get_backup_info "get_local_full" --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep -v plugin | head -1 | awk '{print $1}') + local timestamp=$(get_backup_info "get_local_full" --history-db ${DATA_DIR}/gpbackup_history.db --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep -v plugin | head -1 | awk '{print $1}') if [ -z "${timestamp}" ]; then echo "[ERROR] Could not find full local backup timestamp" @@ -40,7 +40,7 @@ test_delete_local_full() { # Test 2: Delete S3 incremental backup test_delete_s3_incremental() { - local timestamp=$(get_backup_info "get_s3_incremental" --type incremental | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep plugin | head -1 | awk '{print $1}') + local timestamp=$(get_backup_info "get_s3_incremental" --history-db ${DATA_DIR}/gpbackup_history.db --type incremental | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep plugin | head -1 | awk '{print $1}') if [ -z "${timestamp}" ]; then echo "[ERROR] Could not find S3 incremental backup" @@ -62,7 +62,7 @@ test_delete_s3_incremental() { # Test 3: Delete S3 full backup with cascade test_delete_s3_full_cascade() { - local timestamp=$(get_backup_info "get_s3_full" --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep plugin | tail -1 | awk '{print $1}') + local timestamp=$(get_backup_info "get_s3_full" --history-db ${DATA_DIR}/gpbackup_history.db --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep plugin | tail -1 | awk '{print $1}') if [ -z "${timestamp}" ]; then echo "[ERROR] Could not find S3 full backup" @@ -80,12 +80,10 @@ test_delete_nonexistent_backup() { local fake_timestamp="19990101000000" echo "[INFO] Attempting to delete non-existent backup: ${fake_timestamp}" - - if ${BIN_DIR}/gpbackman backup-delete --history-db ${DATA_DIR}/gpbackup_history.db --timestamp "${fake_timestamp}" --force 2>/dev/null; then - echo "[ERROR] Expected deletion of non-existent backup to fail, but it succeeded" + ${BIN_DIR}/gpbackman backup-delete 
--history-db ${DATA_DIR}/gpbackup_history.db --timestamp "${fake_timestamp}" --force + if [ $? -eq 0 ]; then + echo "[ERROR] Expected failure, but command succeeded" exit 1 - else - echo "[INFO] Deletion of non-existent backup correctly failed as expected" fi } diff --git a/e2e_tests/scripts/run_tests/run_backup-info.sh b/e2e_tests/scripts/run_tests/run_backup-info.sh index 1ea2f8d..a33ba0a 100755 --- a/e2e_tests/scripts/run_tests/run_backup-info.sh +++ b/e2e_tests/scripts/run_tests/run_backup-info.sh @@ -5,24 +5,18 @@ source "$(dirname "${BASH_SOURCE[0]}")/common_functions.sh" COMMAND="backup-info" -run_command(){ - local label="${1}"; shift - echo "[INFO] Running ${COMMAND}: ${label}" - ${BIN_DIR}/gpbackman backup-info --history-db ${DATA_DIR}/gpbackup_history.db --deleted --failed "$@" || { echo "[ERROR] ${COMMAND} ${label} failed"; exit 1; } -} - # Test 1: Count all backups in history database test_count_all_backups() { local want=12 - local got=$(run_command total_backups | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + local got=$(get_backup_info total_backups --history-db ${DATA_DIR}/gpbackup_history.db | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) assert_equals "${want}" "${got}" } # Test 2: Count all full backups test_count_full_backups() { local want=7 - local got1=$(run_command total_full_backups | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep full | wc -l) - local got2=$(run_command filter_full_backups --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + local got1=$(get_backup_info total_full_backups --history-db ${DATA_DIR}/gpbackup_history.db | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep full | wc -l) + local got2=$(get_backup_info filter_full_backups --history-db ${DATA_DIR}/gpbackup_history.db --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) assert_equals_both "${want}" "${got1}" "${got2}" } @@ -31,22 +25,22 @@ test_count_full_backups() { # from the output with the --type full flag test_count_incremental_backups() { local want=3 - local got1=$(run_command total_incremental_backups | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep incremental | wc -l) - local got2=$(run_command filter_incremental_backups --type incremental | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + local got1=$(get_backup_info total_incremental_backups --history-db ${DATA_DIR}/gpbackup_history.db | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep incremental | wc -l) + local got2=$(get_backup_info filter_incremental_backups --history-db ${DATA_DIR}/gpbackup_history.db --type incremental | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) assert_equals_both "${want}" "${got1}" "${got2}" } # Test 4: Count backups that include table sch2.tbl_c test_count_include_table_backups() { local want=2 - local got=$(run_command total_include_table_backups --table sch2.tbl_c | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + local got=$(get_backup_info total_include_table_backups --history-db ${DATA_DIR}/gpbackup_history.db --table sch2.tbl_c | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) assert_equals "${want}" "${got}" } # Test 5: Count backups that exclude table sch2.tbl_d test_count_exclude_table_backups() { local want=2 - local got=$(run_command total_exclude_table_backups --table sch2.tbl_d --exclude | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + local got=$(get_backup_info total_exclude_table_backups --history-db ${DATA_DIR}/gpbackup_history.db --table sch2.tbl_d --exclude | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) assert_equals "${want}" "${got}" } @@ -54,14 +48,14 @@ test_count_exclude_table_backups() { 
# Use --type full to filter only full backups test_count_include_table_full_backups() { local want=1 - local got=$(run_command total_include_table_full_backups --table sch2.tbl_c --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + local got=$(get_backup_info total_include_table_full_backups --history-db ${DATA_DIR}/gpbackup_history.db --table sch2.tbl_c --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) assert_equals "${want}" "${got}" } # Test 7: Count incremental backups that exclude table sch2.tbl_d test_count_exclude_table_incremental_backups() { local want=1 - local got=$(run_command total_exclude_table_incremental_backups --table sch2.tbl_d --exclude --type incremental | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + local got=$(get_backup_info total_exclude_table_incremental_backups --history-db ${DATA_DIR}/gpbackup_history.db --table sch2.tbl_d --exclude --type incremental | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) assert_equals "${want}" "${got}" } diff --git a/e2e_tests/scripts/run_tests/run_history-migrate.sh b/e2e_tests/scripts/run_tests/run_history-migrate.sh index 9af650f..d13ee1d 100755 --- a/e2e_tests/scripts/run_tests/run_history-migrate.sh +++ b/e2e_tests/scripts/run_tests/run_history-migrate.sh @@ -58,15 +58,11 @@ test_migrate_duplicate_into_existing_db_fail(){ local workdir=$(prepare_workdir test3) cp "${SRC_DIR}/${TEST_FILE_FULL_LOCAL}" "${workdir}/" local db="${DATA_DIR}/gpbackup_history.db" - set +e - set -x ${BIN_DIR}/gpbackman history-migrate --history-file "${workdir}/${TEST_FILE_FULL_LOCAL}" --history-db "${db}" if [ $? -eq 0 ]; then echo "[ERROR] Expected failure, but command succeeded" exit 1 fi - set +x - set -e } # Test 4: All files into fresh empty DB in /tmp diff --git a/e2e_tests/scripts/run_tests/run_report-info.sh b/e2e_tests/scripts/run_tests/run_report-info.sh index c45bb7e..7d48365 100755 --- a/e2e_tests/scripts/run_tests/run_report-info.sh +++ b/e2e_tests/scripts/run_tests/run_report-info.sh @@ -15,7 +15,7 @@ run_command(){ # Test 1: Get report info for full local backup (without backup-dir) test_report_full_local_no_dir() { - local timestamp=$(get_backup_info "get_full_local" --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep -v plugin | head -1 | awk '{print $1}') + local timestamp=$(get_backup_info "get_full_local" --history-db ${DATA_DIR}/gpbackup_history.db --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep -v plugin | head -1 | awk '{print $1}') if [ -z "${timestamp}" ]; then echo "[ERROR] Could not find full local backup timestamp" @@ -31,7 +31,7 @@ test_report_full_local_no_dir() { # Test 2: Get report info for full local backup (with backup-dir) test_report_full_local_with_dir() { - local timestamp=$(get_backup_info "get_full_local_with_dir" --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep -v plugin | head -1 | awk '{print $1}') + local timestamp=$(get_backup_info "get_full_local_with_dir" --history-db ${DATA_DIR}/gpbackup_history.db --history-db ${DATA_DIR}/gpbackup_history.db --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep -v plugin | head -1 | awk '{print $1}') if [ -z "${timestamp}" ]; then echo "[ERROR] Could not find full local backup timestamp for backup-dir test" @@ -48,7 +48,7 @@ test_report_full_local_with_dir() { # Test 3: Get report info for full S3 backup (without plugin-report-file-path) test_report_s3_no_plugin_path() { - local timestamp=$(get_backup_info "get_full_s3" --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep plugin | head -1 | awk '{print $1}') + local 
timestamp=$(get_backup_info "get_full_s3" --history-db ${DATA_DIR}/gpbackup_history.db --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep plugin | head -1 | awk '{print $1}') if [ -z "${timestamp}" ]; then echo "[ERROR] Could not find full s3 backup timestamp" @@ -64,7 +64,7 @@ test_report_s3_no_plugin_path() { # Test 4: Get report info for full S3 backup (with plugin-report-file-path) test_report_s3_with_plugin_path() { - local timestamp=$(get_backup_info "get_full_s3" --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep plugin | head -1 | awk '{print $1}') + local timestamp=$(get_backup_info "get_full_s3" --history-db ${DATA_DIR}/gpbackup_history.db --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep plugin | head -1 | awk '{print $1}') if [ -z "${timestamp}" ]; then echo "[ERROR] Could not find full s3 backup timestamp for plugin-report-file-path test" From 6b7f3cd7c489dcee6ce9a576bff3233a1aa1ab31 Mon Sep 17 00:00:00 2001 From: woblerr Date: Thu, 11 Sep 2025 00:19:52 +0300 Subject: [PATCH 13/23] Remove unused code. --- e2e_tests/scripts/run_tests/run_report-info.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/e2e_tests/scripts/run_tests/run_report-info.sh b/e2e_tests/scripts/run_tests/run_report-info.sh index 7d48365..b26865d 100755 --- a/e2e_tests/scripts/run_tests/run_report-info.sh +++ b/e2e_tests/scripts/run_tests/run_report-info.sh @@ -4,8 +4,6 @@ set -Eeuo pipefail source "$(dirname "${BASH_SOURCE[0]}")/common_functions.sh" COMMAND="report-info" -BACKUP_DIR_PREFIX="/tmp/testWithPrefix" -BACKUP_DIR_SINGLE="/tmp/testNoPrefix" run_command(){ local label="${1}"; shift From 26ad1e2df35f540d11ec9485f1683d8888b154e0 Mon Sep 17 00:00:00 2001 From: woblerr Date: Thu, 11 Sep 2025 00:30:26 +0300 Subject: [PATCH 14/23] Fix style and small bugs. --- .../scripts/run_tests/run_backup-clean.sh | 1 - .../scripts/run_tests/run_backup-delete.sh | 8 +- .../scripts/run_tests/run_history-migrate.sh | 105 +++++++++--------- .../scripts/run_tests/run_report-info.sh | 2 +- 4 files changed, 57 insertions(+), 59 deletions(-) diff --git a/e2e_tests/scripts/run_tests/run_backup-clean.sh b/e2e_tests/scripts/run_tests/run_backup-clean.sh index caa3341..22172de 100755 --- a/e2e_tests/scripts/run_tests/run_backup-clean.sh +++ b/e2e_tests/scripts/run_tests/run_backup-clean.sh @@ -2,7 +2,6 @@ set -Eeuo pipefail # In the test, we consistently perform cleanup for backups created within the script prepare/prepare_gpdb_backups.sh -# Using the --cascade option. # If the backup creation logic changes in the script, this test may start to fail and corrections also need to be made here. 
# # First, we delete all local backups older than the 9th timestamp from backup-info command, diff --git a/e2e_tests/scripts/run_tests/run_backup-delete.sh b/e2e_tests/scripts/run_tests/run_backup-delete.sh index 11ca8fb..d2869a7 100755 --- a/e2e_tests/scripts/run_tests/run_backup-delete.sh +++ b/e2e_tests/scripts/run_tests/run_backup-delete.sh @@ -63,7 +63,6 @@ test_delete_s3_incremental() { # Test 3: Delete S3 full backup with cascade test_delete_s3_full_cascade() { local timestamp=$(get_backup_info "get_s3_full" --history-db ${DATA_DIR}/gpbackup_history.db --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep plugin | tail -1 | awk '{print $1}') - if [ -z "${timestamp}" ]; then echo "[ERROR] Could not find S3 full backup" exit 1 @@ -78,12 +77,11 @@ test_delete_s3_full_cascade() { # Test 4: Try to delete non-existent backup (should fail) test_delete_nonexistent_backup() { local fake_timestamp="19990101000000" - - echo "[INFO] Attempting to delete non-existent backup: ${fake_timestamp}" - ${BIN_DIR}/gpbackman backup-delete --history-db ${DATA_DIR}/gpbackup_history.db --timestamp "${fake_timestamp}" --force - if [ $? -eq 0 ]; then + if ${BIN_DIR}/gpbackman backup-delete --history-db ${DATA_DIR}/gpbackup_history.db --timestamp "${fake_timestamp}" --force; then echo "[ERROR] Expected failure, but command succeeded" exit 1 + else + echo "[INFO] Expected failure occurred" fi } diff --git a/e2e_tests/scripts/run_tests/run_history-migrate.sh b/e2e_tests/scripts/run_tests/run_history-migrate.sh index d13ee1d..4446e73 100755 --- a/e2e_tests/scripts/run_tests/run_history-migrate.sh +++ b/e2e_tests/scripts/run_tests/run_history-migrate.sh @@ -17,86 +17,87 @@ WORK_BASE="/tmp/history_migrate_tests" TEST_FILE_FULL_LOCAL="gpbackup_history_full_local.yaml" run_command(){ - local label="${1}"; shift - echo "[INFO] Running ${COMMAND}: ${label}" - ${BIN_DIR}/gpbackman history-migrate "$@" || { echo "[ERROR] ${COMMAND} ${label} failed"; exit 1; } + local label="${1}"; shift + echo "[INFO] Running ${COMMAND}: ${label}" + ${BIN_DIR}/gpbackman history-migrate "$@" || { echo "[ERROR] ${COMMAND} ${label} failed"; exit 1; } } prepare_workdir(){ - local name="${1}" - local dir="${WORK_BASE}/${name}" - rm -rf "${dir}" && mkdir -p "${dir}" - echo "${dir}" + local name="${1}" + local dir="${WORK_BASE}/${name}" + rm -rf "${dir}" && mkdir -p "${dir}" + echo "${dir}" } # Test 1: Single file into empty DB in /tmp # Expect 2 backups from file test_migrate_single_into_empty_db(){ - local workdir=$(prepare_workdir test1) - cp "${SRC_DIR}/${TEST_FILE_FULL_LOCAL}" "${workdir}/" - local db="${workdir}/gpbackup_history.db" - run_command "single_into_empty_db" --history-file "${workdir}/${TEST_FILE_FULL_LOCAL}" --history-db "${db}" - local want=2 - local got=$(get_backup_info total_full_backups --history-db ${db} | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) - assert_equals "${want}" "${got}" + local workdir=$(prepare_workdir test1) + cp "${SRC_DIR}/${TEST_FILE_FULL_LOCAL}" "${workdir}/" + local db="${workdir}/gpbackup_history.db" + run_command "single_into_empty_db" --history-file "${workdir}/${TEST_FILE_FULL_LOCAL}" --history-db "${db}" + local want=2 + local got=$(get_backup_info total_full_backups --history-db ${db} | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals "${want}" "${got}" } # Test 2: Single file into existing DB (prepared by setup) # 12 backups from initial setup + 2 from file test_migrate_single_into_existing_db(){ - local workdir=$(prepare_workdir test2) - cp "${SRC_DIR}/${TEST_FILE_FULL_LOCAL}" 
"${workdir}/" - local db="${DATA_DIR}/gpbackup_history.db" - run_command "single_into_existing_db" --history-file "${workdir}/${TEST_FILE_FULL_LOCAL}" --history-db "${db}" - local want=14 - local got=$(get_backup_info total_full_backups --history-db ${db} | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) - assert_equals "${want}" "${got}" + local workdir=$(prepare_workdir test2) + cp "${SRC_DIR}/${TEST_FILE_FULL_LOCAL}" "${workdir}/" + local db="${DATA_DIR}/gpbackup_history.db" + run_command "single_into_existing_db" --history-file "${workdir}/${TEST_FILE_FULL_LOCAL}" --history-db "${db}" + local want=14 + local got=$(get_backup_info total_full_backups --history-db ${db} | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals "${want}" "${got}" } # Test 3: Duplicate migration into the same DB must fail with UNIQUE constraint test_migrate_duplicate_into_existing_db_fail(){ - local workdir=$(prepare_workdir test3) - cp "${SRC_DIR}/${TEST_FILE_FULL_LOCAL}" "${workdir}/" - local db="${DATA_DIR}/gpbackup_history.db" - ${BIN_DIR}/gpbackman history-migrate --history-file "${workdir}/${TEST_FILE_FULL_LOCAL}" --history-db "${db}" - if [ $? -eq 0 ]; then - echo "[ERROR] Expected failure, but command succeeded" - exit 1 - fi + local workdir=$(prepare_workdir test3) + cp "${SRC_DIR}/${TEST_FILE_FULL_LOCAL}" "${workdir}/" + local db="${DATA_DIR}/gpbackup_history.db" + if ${BIN_DIR}/gpbackman history-migrate --history-file "${workdir}/${TEST_FILE_FULL_LOCAL}" --history-db "${db}"; then + echo "[ERROR] Expected failure, but command succeeded" + exit 1 + else + echo "[INFO] Expected failure occurred" + fi } # Test 4: All files into fresh empty DB in /tmp # 14 backups from all files test_migrate_all_into_empty_db(){ - local workdir=$(prepare_workdir test3) - cp "${SRC_DIR}"/*.yaml "${workdir}/" - local db="${workdir}/gpbackup_history.db" - local args=() - for f in "${workdir}"/*.yaml; do - args+=(--history-file "${f}") - done - run_command "all_into_empty_db" "${args[@]}" --history-db "${db}" - local want=14 - local got=$(get_backup_info total_full_backups --history-db "${db}" | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) - assert_equals "${want}" "${got}" + local workdir=$(prepare_workdir test3) + cp "${SRC_DIR}"/*.yaml "${workdir}/" + local db="${workdir}/gpbackup_history.db" + local args=() + for f in "${workdir}"/*.yaml; do + args+=(--history-file "${f}") + done + run_command "all_into_empty_db" "${args[@]}" --history-db "${db}" + local want=14 + local got=$(get_backup_info total_full_backups --history-db "${db}" | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals "${want}" "${got}" } # Test 5: All files into existing DB # 12 backups from initial setup + 12 from files # The duplicates, already loaded in test2, should be skipped test_migrate_all_into_existing_db(){ -local workdir=$(prepare_workdir test4) -cp "${SRC_DIR}"/*.yaml "${workdir}/" -rm -f "${workdir}/${TEST_FILE_FULL_LOCAL}" -local db="${DATA_DIR}/gpbackup_history.db" -local args=() -for f in "${workdir}"/*.yaml; do - args+=(--history-file "${f}") -done -run_command "all_into_existing_db" "${args[@]}" --history-db "${db}" -local want=26 -local got=$(get_backup_info total_full_backups --history-db "${db}" | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) -assert_equals "${want}" "${got}" + local workdir=$(prepare_workdir test4) + cp "${SRC_DIR}"/*.yaml "${workdir}/" + rm -f "${workdir}/${TEST_FILE_FULL_LOCAL}" + local db="${DATA_DIR}/gpbackup_history.db" + local args=() + for f in "${workdir}"/*.yaml; do + args+=(--history-file 
"${f}") + done + run_command "all_into_existing_db" "${args[@]}" --history-db "${db}" + local want=26 + local got=$(get_backup_info total_full_backups --history-db "${db}" | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals "${want}" "${got}" } run_test "${COMMAND}" 1 test_migrate_single_into_empty_db diff --git a/e2e_tests/scripts/run_tests/run_report-info.sh b/e2e_tests/scripts/run_tests/run_report-info.sh index b26865d..b368b6e 100755 --- a/e2e_tests/scripts/run_tests/run_report-info.sh +++ b/e2e_tests/scripts/run_tests/run_report-info.sh @@ -29,7 +29,7 @@ test_report_full_local_no_dir() { # Test 2: Get report info for full local backup (with backup-dir) test_report_full_local_with_dir() { - local timestamp=$(get_backup_info "get_full_local_with_dir" --history-db ${DATA_DIR}/gpbackup_history.db --history-db ${DATA_DIR}/gpbackup_history.db --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep -v plugin | head -1 | awk '{print $1}') + local timestamp=$(get_backup_info "get_full_local_with_dir" --history-db ${DATA_DIR}/gpbackup_history.db --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep -v plugin | head -1 | awk '{print $1}') if [ -z "${timestamp}" ]; then echo "[ERROR] Could not find full local backup timestamp for backup-dir test" From 04fae8dca700f32e169efe9c3e430053ba3e7ac5 Mon Sep 17 00:00:00 2001 From: woblerr Date: Thu, 11 Sep 2025 00:45:57 +0300 Subject: [PATCH 15/23] Use common function run_gpbackman. --- e2e_tests/scripts/run_tests/common_functions.sh | 13 +++++++++---- e2e_tests/scripts/run_tests/run_backup-clean.sh | 5 +---- e2e_tests/scripts/run_tests/run_backup-delete.sh | 3 +-- e2e_tests/scripts/run_tests/run_history-clean.sh | 10 +++------- e2e_tests/scripts/run_tests/run_history-migrate.sh | 3 +-- e2e_tests/scripts/run_tests/run_report-info.sh | 5 ++--- 6 files changed, 17 insertions(+), 22 deletions(-) diff --git a/e2e_tests/scripts/run_tests/common_functions.sh b/e2e_tests/scripts/run_tests/common_functions.sh index 45e3ab0..7ee7bce 100755 --- a/e2e_tests/scripts/run_tests/common_functions.sh +++ b/e2e_tests/scripts/run_tests/common_functions.sh @@ -22,13 +22,18 @@ log_all_tests_passed() { local command="${1}" echo "[INFO] ${command} all tests passed" } +run_gpbackman() { + local subcmd="${1}"; shift + local label="${1}"; shift + echo "[INFO] Running ${subcmd}: ${label}" + ${BIN_DIR}/gpbackman "${subcmd}" "$@" || { + echo "[ERROR] ${subcmd} ${label} failed"; exit 1; + } +} get_backup_info() { local label="${1}"; shift - echo "[INFO] Running backup-info: ${label}" - ${BIN_DIR}/gpbackman backup-info --deleted --failed "$@" || { - echo "[ERROR] backup-info ${label} failed"; exit 1; - } + run_gpbackman "backup-info" "${label}" --deleted --failed "$@" } count_deleted_backups() { diff --git a/e2e_tests/scripts/run_tests/run_backup-clean.sh b/e2e_tests/scripts/run_tests/run_backup-clean.sh index 22172de..2f279e7 100755 --- a/e2e_tests/scripts/run_tests/run_backup-clean.sh +++ b/e2e_tests/scripts/run_tests/run_backup-clean.sh @@ -22,10 +22,7 @@ COMMAND="backup-clean" run_command() { local label="${1}"; shift - echo "[INFO] Running ${COMMAND}: ${label}" - ${BIN_DIR}/gpbackman backup-clean --history-db ${DATA_DIR}/gpbackup_history.db "$@" || { - echo "[ERROR] ${COMMAND} ${label} failed"; exit 1; - } + run_gpbackman "${COMMAND}" "${label}" --history-db ${DATA_DIR}/gpbackup_history.db "$@" } # Test 1: Clean local backups older than timestamp (--before-timestamp) diff --git a/e2e_tests/scripts/run_tests/run_backup-delete.sh 
b/e2e_tests/scripts/run_tests/run_backup-delete.sh index d2869a7..7163da3 100755 --- a/e2e_tests/scripts/run_tests/run_backup-delete.sh +++ b/e2e_tests/scripts/run_tests/run_backup-delete.sh @@ -7,8 +7,7 @@ COMMAND="backup-delete" run_command(){ local label="${1}"; shift - echo "[INFO] Running ${COMMAND}: ${label}" - ${BIN_DIR}/gpbackman backup-delete --history-db ${DATA_DIR}/gpbackup_history.db "$@" || { echo "[ERROR] ${COMMAND} ${label} failed"; exit 1; } + run_gpbackman "${COMMAND}" "${label}" --history-db ${DATA_DIR}/gpbackup_history.db "$@" } get_backup_info_for_timestamp(){ diff --git a/e2e_tests/scripts/run_tests/run_history-clean.sh b/e2e_tests/scripts/run_tests/run_history-clean.sh index febf0a8..d2f0154 100755 --- a/e2e_tests/scripts/run_tests/run_history-clean.sh +++ b/e2e_tests/scripts/run_tests/run_history-clean.sh @@ -18,17 +18,13 @@ source "$(dirname "${BASH_SOURCE[0]}")/common_functions.sh" COMMAND="history-clean" run_command(){ - local label="${1}"; shift - echo "[INFO] Running ${COMMAND}: ${label}" - ${BIN_DIR}/gpbackman history-clean --history-db ${DATA_DIR}/gpbackup_history.db "$@" || { echo "[ERROR] ${COMMAND} ${label} failed"; exit 1; } + local label="${1}"; shift + run_gpbackman "${COMMAND}" "${label}" --history-db ${DATA_DIR}/gpbackup_history.db "$@" } run_backup_clean() { local label="${1}"; shift - echo "[INFO] Running ${COMMAND}: ${label}" - ${BIN_DIR}/gpbackman backup-clean --history-db ${DATA_DIR}/gpbackup_history.db "$@" || { - echo "[ERROR] ${COMMAND} ${label} failed"; exit 1; - } + run_gpbackman "backup-clean" "${label}" --history-db ${DATA_DIR}/gpbackup_history.db "$@" } # Test 1: Clean from history db local backups older than timestamp (--before-timestamp) diff --git a/e2e_tests/scripts/run_tests/run_history-migrate.sh b/e2e_tests/scripts/run_tests/run_history-migrate.sh index 4446e73..53b42ed 100755 --- a/e2e_tests/scripts/run_tests/run_history-migrate.sh +++ b/e2e_tests/scripts/run_tests/run_history-migrate.sh @@ -18,8 +18,7 @@ TEST_FILE_FULL_LOCAL="gpbackup_history_full_local.yaml" run_command(){ local label="${1}"; shift - echo "[INFO] Running ${COMMAND}: ${label}" - ${BIN_DIR}/gpbackman history-migrate "$@" || { echo "[ERROR] ${COMMAND} ${label} failed"; exit 1; } + run_gpbackman "${COMMAND}" "${label}" "$@" } prepare_workdir(){ diff --git a/e2e_tests/scripts/run_tests/run_report-info.sh b/e2e_tests/scripts/run_tests/run_report-info.sh index b368b6e..ef9e213 100755 --- a/e2e_tests/scripts/run_tests/run_report-info.sh +++ b/e2e_tests/scripts/run_tests/run_report-info.sh @@ -6,9 +6,8 @@ source "$(dirname "${BASH_SOURCE[0]}")/common_functions.sh" COMMAND="report-info" run_command(){ - local label="${1}"; shift - echo "[INFO] Running ${COMMAND}: ${label}" - ${BIN_DIR}/gpbackman report-info --history-db ${DATA_DIR}/gpbackup_history.db "$@" || { echo "[ERROR] ${COMMAND} ${label} failed"; exit 1; } + local label="${1}"; shift + run_gpbackman "${COMMAND}" "${label}" --history-db ${DATA_DIR}/gpbackup_history.db "$@" } # Test 1: Get report info for full local backup (without backup-dir) From 447bf958a57452ee91136c5bda53785c35cf485c Mon Sep 17 00:00:00 2001 From: woblerr Date: Thu, 11 Sep 2025 19:12:39 +0300 Subject: [PATCH 16/23] Run all e2e tests one by one. --- Makefile | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index fa8941f..38ada30 100755 --- a/Makefile +++ b/Makefile @@ -17,7 +17,6 @@ test: @echo "Run tests for $(APP_NAME)" TZ="Etc/UTC" go test -mod=vendor -timeout=60s -count 1 ./... 
-# Define function to create e2e test targets define define_e2e_test .PHONY: test-e2e_$(1) test-e2e_$(1): @@ -31,6 +30,14 @@ endef # Generate e2e test targets for all commands $(foreach cmd,$(E2E_COMMANDS),$(eval $(call define_e2e_test,$(cmd)))) +.PHONY: test-e2e +test-e2e: + @for cmd in $(E2E_COMMANDS); do \ + echo "Running : $$cmd"; \ + $(MAKE) test-e2e_$$cmd || { echo "$$cmd failed."; exit 1; }; \ + echo "$$cmd passed"; \ + done + .PHONY: test-e2e-down test-e2e-down: @echo "Stop old containers" From a482d08edae678fac0215fc5df3c15b4ede3dc9a Mon Sep 17 00:00:00 2001 From: woblerr Date: Sat, 13 Sep 2025 00:07:55 +0300 Subject: [PATCH 17/23] Add e2e tests execution on push to master. --- .github/workflows/build.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b147a59..31847be 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -54,9 +54,10 @@ jobs: - name: Available platforms run: echo ${{ steps.buildx.outputs.platforms }} - # - name: Run end-to-end tests - # run: | - # make test-e2e + - name: Run end-to-end tests + if: github.event_name == 'push' && github.ref == 'refs/heads/master' + run: | + make test-e2e - name: Build image and push master tag to ghcr.io and Docker Hub if: github.event_name == 'push' && github.ref == 'refs/heads/master' From 05b426e820ebd8ef8f32763d633fc728276d8772 Mon Sep 17 00:00:00 2001 From: woblerr Date: Sat, 13 Sep 2025 01:02:08 +0300 Subject: [PATCH 18/23] Run e2e tests only on pull requests to master. --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 31847be..18aade2 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -55,7 +55,7 @@ jobs: run: echo ${{ steps.buildx.outputs.platforms }} - name: Run end-to-end tests - if: github.event_name == 'push' && github.ref == 'refs/heads/master' + if: github.event_name == 'pull_request' && github.base_ref == 'master' run: | make test-e2e From f63cae929eea00b41c594e82db7f9d8617dc7d38 Mon Sep 17 00:00:00 2001 From: woblerr Date: Sat, 13 Sep 2025 01:26:19 +0300 Subject: [PATCH 19/23] Add docs for e2e tests. --- e2e_tests/README.md | 42 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 e2e_tests/README.md diff --git a/e2e_tests/README.md b/e2e_tests/README.md new file mode 100644 index 0000000..bdb04e2 --- /dev/null +++ b/e2e_tests/README.md @@ -0,0 +1,42 @@ +# End-to-end tests + +The following architecture is used to run the tests: + +* Separate containers for MinIO and nginx. Official images [minio/minio](https://hub.docker.com/r/minio/minio), [minio/mc](https://hub.docker.com/r/minio/mc) and [nginx](https://hub.docker.com/_/nginx) are used. It's necessary for S3 compatible storage for WAL archiving and backups. +- Separate container gpbackman-export: runs the gpbackman image and copies the binary to a shared Docker volume (gpbackman_bin) for use inside the Greenplum container. +* Separate container for Greenplum. The [docker-greenplum image](https://github.com/woblerr/docker-greenplum) is used to run a single-node Greenplum cluster. 
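+
+The binary reaches the Greenplum container through the shared `gpbackman_bin` volume: the `gpbackman-export` container copies `gpbackman` into the volume, and the Greenplum container mounts the same volume at `/home/gpadmin/gpbackman` (see the `greenplum` service volumes in `docker-compose.yml`). A rough manual equivalent of that wiring is sketched below; the real setup lives in `docker-compose.yml`, and the in-image path of the binary is an assumption:
+
+```bash
+# Sketch only: the compose wiring expressed as plain docker commands.
+docker volume create gpbackman_bin
+# Copy the binary from the gpbackman image into the shared volume
+# (the source path inside the image is illustrative).
+docker run --rm --entrypoint sh -v gpbackman_bin:/shared gpbackman \
+  -c 'cp "$(command -v gpbackman)" /shared/'
+# The greenplum service mounts the same volume at /home/gpadmin/gpbackman,
+# which is where the test scripts call the binary from.
+```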
+ +## Running tests + +Run all tests (sequentially for all commands): + +```bash +make test-e2e +``` + +Run tests for a single command: + +```bash +make test-e2e_backup-info +make test-e2e_report-info +make test-e2e_backup-delete +make test-e2e_backup-clean +make test-e2e_history-clean +make test-e2e_history-migrate +``` + +Manually run a specific test (example for `backup-info`): + +```bash +docker compose -f e2e_tests/docker-compose.yml up -d + +docker exec greenplum bash -c 'su - gpadmin -c "/home/gpadmin/run_tests/run_test.sh backup-info"' + +docker compose -f e2e_tests/docker-compose.yml down -v +``` + +If a manual test run fails, recreate the containers before running it again. + +## Notes +- Tests are executed as `gpadmin` inside the Greenplum container. The runner waits for the cluster to become ready and then prepares the backup set before executing checks. +- Scripts exit with a non-zero code on failure. From 6c482fe096f819a54449c907315a18769c4ae62 Mon Sep 17 00:00:00 2001 From: woblerr Date: Sat, 13 Sep 2025 01:43:31 +0300 Subject: [PATCH 20/23] Fix CI. Error like: repository does not exist or may require 'docker login'. --- .github/workflows/build.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 18aade2..04b3bb5 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -56,7 +56,11 @@ jobs: - name: Run end-to-end tests if: github.event_name == 'pull_request' && github.base_ref == 'master' + env: + DOCKERHUB_USER: ${{ secrets.DOCKEHUB_USER }} + DOCKERHUB_PKG: ${{ secrets.DOCKEHUB_TOKEN }} run: | + echo ${DOCKERHUB_PKG} | docker login -u ${DOCKERHUB_USER} --password-stdin make test-e2e - name: Build image and push master tag to ghcr.io and Docker Hub From 598f26ebf2fa5c94d88f59f7268992a111fba1de Mon Sep 17 00:00:00 2001 From: woblerr Date: Sat, 13 Sep 2025 01:50:21 +0300 Subject: [PATCH 21/23] Fix CI with e2e tests. Update README for e2e tests. --- .github/workflows/build.yml | 5 +---- e2e_tests/README.md | 5 +++++ 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 04b3bb5..c468b6d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -56,11 +56,8 @@ jobs: - name: Run end-to-end tests if: github.event_name == 'pull_request' && github.base_ref == 'master' - env: - DOCKERHUB_USER: ${{ secrets.DOCKEHUB_USER }} - DOCKERHUB_PKG: ${{ secrets.DOCKEHUB_TOKEN }} run: | - echo ${DOCKERHUB_PKG} | docker login -u ${DOCKERHUB_USER} --password-stdin + make docker make test-e2e - name: Build image and push master tag to ghcr.io and Docker Hub diff --git a/e2e_tests/README.md b/e2e_tests/README.md index bdb04e2..b2c5dc6 100644 --- a/e2e_tests/README.md +++ b/e2e_tests/README.md @@ -8,6 +8,11 @@ ## Running tests +Build the gpbackman image: +```bash +make docker +``` + Run all tests (sequentially for all commands): From b62a5bae86fe8519fb6eb2051514b1257f00572e Mon Sep 17 00:00:00 2001 From: woblerr Date: Sat, 13 Sep 2025 22:34:20 +0300 Subject: [PATCH 22/23] Fix comments and remove trailing whitespaces. 
--- e2e_tests/scripts/prepare/prepare_gpdb_backups.sh | 8 ++++---- e2e_tests/scripts/run_tests/run_backup-clean.sh | 2 +- e2e_tests/scripts/run_tests/run_backup-delete.sh | 2 +- e2e_tests/scripts/run_tests/run_report-info.sh | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/e2e_tests/scripts/prepare/prepare_gpdb_backups.sh b/e2e_tests/scripts/prepare/prepare_gpdb_backups.sh index f542f57..8710a74 100755 --- a/e2e_tests/scripts/prepare/prepare_gpdb_backups.sh +++ b/e2e_tests/scripts/prepare/prepare_gpdb_backups.sh @@ -43,10 +43,10 @@ run_backup metadata_only_s3 "${COMMON_PLUGIN_FLAGS[@]}" --metadata-only # Full S3 no filters run_backup full_s3 "${COMMON_PLUGIN_FLAGS[@]}" --leaf-partition-data -# Full S3 include-table sch1.tbl_c +# Full S3 include-table sch2.tbl_c run_backup full_s3_include_table "${COMMON_PLUGIN_FLAGS[@]}" --include-table sch2.tbl_c --leaf-partition-data -# Full S3 exclude-table sch1.tbl_d +# Full S3 exclude-table sch2.tbl_d run_backup full_s3_exclude_table "${COMMON_PLUGIN_FLAGS[@]}" --exclude-table sch2.tbl_d --leaf-partition-data # Insert data @@ -56,13 +56,13 @@ psql -d demo -c "INSERT INTO sch2.tbl_d SELECT i, i FROM generate_series(1,10000 # Incremental S3 no filters run_backup incr_s3 "${COMMON_PLUGIN_FLAGS[@]}" --incremental --leaf-partition-data -# Incremental S3 include-table sch1.tbl_c +# Incremental S3 include-table sch2.tbl_c run_backup incr_s3_include_table "${COMMON_PLUGIN_FLAGS[@]}" --incremental --include-table sch2.tbl_c --leaf-partition-data # Insert data psql -d demo -c "INSERT INTO sch2.tbl_c SELECT i, i FROM generate_series(1,100000) i;" -# Incremental S3 exclude-table sch1.tbl_d +# Incremental S3 exclude-table sch2.tbl_d run_backup incr_s3_exclude_table "${COMMON_PLUGIN_FLAGS[@]}" --incremental --exclude-table sch2.tbl_d --leaf-partition-data # Data-only LOCAL no filters diff --git a/e2e_tests/scripts/run_tests/run_backup-clean.sh b/e2e_tests/scripts/run_tests/run_backup-clean.sh index 2f279e7..4708775 100755 --- a/e2e_tests/scripts/run_tests/run_backup-clean.sh +++ b/e2e_tests/scripts/run_tests/run_backup-clean.sh @@ -66,7 +66,7 @@ test_clean_s3_backups_before_timestamp() { } run_test "${COMMAND}" 1 test_clean_local_backups_before_timestamp -run_test "${COMMAND}" 2 test_clean_local_backups_after_timestamp +run_test "${COMMAND}" 2 test_clean_local_backups_after_timestamp run_test "${COMMAND}" 3 test_clean_s3_backups_after_timestamp run_test "${COMMAND}" 4 test_clean_s3_backups_before_timestamp diff --git a/e2e_tests/scripts/run_tests/run_backup-delete.sh b/e2e_tests/scripts/run_tests/run_backup-delete.sh index 7163da3..15cc0d4 100755 --- a/e2e_tests/scripts/run_tests/run_backup-delete.sh +++ b/e2e_tests/scripts/run_tests/run_backup-delete.sh @@ -85,7 +85,7 @@ test_delete_nonexistent_backup() { } run_test "${COMMAND}" 1 test_delete_local_full -run_test "${COMMAND}" 2 test_delete_s3_incremental +run_test "${COMMAND}" 2 test_delete_s3_incremental run_test "${COMMAND}" 3 test_delete_s3_full_cascade run_test "${COMMAND}" 4 test_delete_nonexistent_backup diff --git a/e2e_tests/scripts/run_tests/run_report-info.sh b/e2e_tests/scripts/run_tests/run_report-info.sh index ef9e213..942a6e5 100755 --- a/e2e_tests/scripts/run_tests/run_report-info.sh +++ b/e2e_tests/scripts/run_tests/run_report-info.sh @@ -77,7 +77,7 @@ test_report_s3_with_plugin_path() { } run_test "${COMMAND}" 1 test_report_full_local_no_dir -run_test "${COMMAND}" 2 test_report_full_local_with_dir +run_test "${COMMAND}" 2 test_report_full_local_with_dir run_test "${COMMAND}" 3 
test_report_s3_no_plugin_path run_test "${COMMAND}" 4 test_report_s3_with_plugin_path From 90111bf522f87469f895c1e90cdfd054f999969a Mon Sep 17 00:00:00 2001 From: woblerr Date: Sat, 13 Sep 2025 23:04:59 +0300 Subject: [PATCH 23/23] Fix folders in history-migrate tests. --- e2e_tests/scripts/run_tests/run_history-migrate.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/e2e_tests/scripts/run_tests/run_history-migrate.sh b/e2e_tests/scripts/run_tests/run_history-migrate.sh index 53b42ed..e23824c 100755 --- a/e2e_tests/scripts/run_tests/run_history-migrate.sh +++ b/e2e_tests/scripts/run_tests/run_history-migrate.sh @@ -68,7 +68,7 @@ test_migrate_duplicate_into_existing_db_fail(){ # Test 4: All files into fresh empty DB in /tmp # 14 backups from all files test_migrate_all_into_empty_db(){ - local workdir=$(prepare_workdir test3) + local workdir=$(prepare_workdir test4) cp "${SRC_DIR}"/*.yaml "${workdir}/" local db="${workdir}/gpbackup_history.db" local args=() @@ -85,7 +85,7 @@ test_migrate_all_into_empty_db(){ # 12 backups from initial setup + 12 from files # The duplicates, already loaded in test2, should be skipped test_migrate_all_into_existing_db(){ - local workdir=$(prepare_workdir test4) + local workdir=$(prepare_workdir test5) cp "${SRC_DIR}"/*.yaml "${workdir}/" rm -f "${workdir}/${TEST_FILE_FULL_LOCAL}" local db="${DATA_DIR}/gpbackup_history.db"