diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b147a59..c468b6d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -54,9 +54,11 @@ jobs: - name: Available platforms run: echo ${{ steps.buildx.outputs.platforms }} - # - name: Run end-to-end tests - # run: | - # make test-e2e + - name: Run end-to-end tests + if: github.event_name == 'pull_request' && github.base_ref == 'master' + run: | + make docker + make test-e2e - name: Build image and push master tag to ghcr.io and Docker Hub if: github.event_name == 'push' && github.ref == 'refs/heads/master' diff --git a/Makefile b/Makefile index c70c93c..38ada30 100755 --- a/Makefile +++ b/Makefile @@ -7,64 +7,36 @@ ROOT_DIR := $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST)))) MUSL_CROSS := $(shell brew list| grep musl-cross) UID := $(shell id -u) GID := $(shell id -g) +GPDB_CONTAINER_NAME := greenplum +GPDB_USER := gpadmin +# List of all e2e test commands +E2E_COMMANDS := backup-info report-info backup-delete backup-clean history-clean history-migrate .PHONY: test test: @echo "Run tests for $(APP_NAME)" TZ="Etc/UTC" go test -mod=vendor -timeout=60s -count 1 ./... 
-.PHONY: test-e2e -test-e2e: - @echo "Run end-to-end tests for $(APP_NAME)" - @make docker - @make test-e2e_backup-clean - @make test-e2e_backup-delete - @make test-e2e_backup-info - @make test-e2e_history-clean - @make test-e2e_history-migrate - @make test-e2e_report-info - -.PHONY: test-e2e_backup-info -test-e2e_backup-info: - @echo "Run end-to-end tests for $(APP_NAME) for backup-info command" - $(call down_docker_compose) - $(call run_docker_compose,backup-info) - $(call down_docker_compose) - -.PHONY: test-e2e_backup-delete -test-e2e_backup-delete: - @echo "Run end-to-end tests for $(APP_NAME) for backup-delete command" - $(call down_docker_compose) - $(call run_docker_compose,backup-delete) - $(call down_docker_compose) - -.PHONY: test-e2e_backup-clean -test-e2e_backup-clean: - @echo "Run end-to-end tests for $(APP_NAME) for backup-clean command" - $(call down_docker_compose) - $(call run_docker_compose,backup-clean) - $(call down_docker_compose) - -.PHONY: test-e2e_history-clean -test-e2e_history-clean: - @echo "Run end-to-end tests for $(APP_NAME) for history-clean command" - $(call down_docker_compose) - $(call run_docker_compose,history-clean) - $(call down_docker_compose) +define define_e2e_test +.PHONY: test-e2e_$(1) +test-e2e_$(1): + @echo "Run end-to-end tests for $(APP_NAME) for $(1) command" + $$(call down_docker_compose) + $$(call run_docker_compose) + $$(call run_e2e_tests,$(1)) + $$(call down_docker_compose) +endef -.PHONY: test-e2e_history-migrate -test-e2e_history-migrate: - @echo "Run end-to-end tests for $(APP_NAME) for history-migrate command" - $(call down_docker_compose) - $(call run_docker_compose,history-migrate) - $(call down_docker_compose) +# Generate e2e test targets for all commands +$(foreach cmd,$(E2E_COMMANDS),$(eval $(call define_e2e_test,$(cmd)))) -.PHONY: test-e2e_report-info -test-e2e_report-info: - @echo "Run end-to-end tests for $(APP_NAME) for report-info command" - $(call down_docker_compose) - $(call 
run_docker_compose,report-info) - $(call down_docker_compose) +.PHONY: test-e2e +test-e2e: + @for cmd in $(E2E_COMMANDS); do \ + echo "Running : $$cmd"; \ + $(MAKE) test-e2e_$$cmd || { echo "$$cmd failed."; exit 1; }; \ + echo "$$cmd passed"; \ + done .PHONY: test-e2e-down test-e2e-down: @@ -116,16 +88,15 @@ docker-alpine: @echo "Version $(BRANCH)-$(GIT_REV)" DOCKER_BUILDKIT=1 docker build --pull -f Dockerfile.alpine --build-arg REPO_BUILD_TAG=$(BRANCH)-$(GIT_REV) -t $(APP_NAME)-alpine . -define e2e_command - @echo "Run end-to-end tests for $(APP_NAME) for ${1} command" - docker run --rm -v $(ROOT_DIR)/e2e_tests/:/home/gpbackman/e2e_tests --name="$(APP_NAME)" "$(APP_NAME)" /home/gpbackman/e2e_tests/run_e2e_${1}.sh -endef define run_docker_compose - GPBACKMAN_UID=$(UID) GPBACKMAN_GID=$(GID) docker compose -f e2e_tests/docker-compose.yml build --force-rm --parallel ${1} - GPBACKMAN_UID=$(UID) GPBACKMAN_GID=$(GID) docker compose -f e2e_tests/docker-compose.yml run --rm --name ${1} ${1} + docker compose -f e2e_tests/docker-compose.yml up -d endef define down_docker_compose - GPBACKMAN_UID=$(UID) GPBACKMAN_GID=$(GID) docker compose -f e2e_tests/docker-compose.yml down -v + docker compose -f e2e_tests/docker-compose.yml down -v +endef + +define run_e2e_tests + docker exec "$(GPDB_CONTAINER_NAME)" su - ${GPDB_USER} -c "/home/$(GPDB_USER)/run_tests/run_test.sh $(1)" endef \ No newline at end of file diff --git a/e2e_tests/.env b/e2e_tests/.env index 01560fa..7bede92 100644 --- a/e2e_tests/.env +++ b/e2e_tests/.env @@ -1,9 +1,7 @@ IMAGE_GPBACKMAN=gpbackman -IMAGE_TAG_MINIO=RELEASE.2023-09-07T02-05-02Z -IMAGE_TAG_MINIO_MC=RELEASE.2023-09-07T22-48-55Z -# Don't upgade s3 plugin version until https://github.com/greenplum-db/gpbackup-s3-plugin/issues/61 -# will be fixed. 
-S3_PLUGIN_VERSION=1.10.0
+
+IMAGE_TAG_MINIO=RELEASE.2025-04-22T22-12-26Z
+IMAGE_TAG_MINIO_MC=RELEASE.2025-04-16T18-13-26Z
 MINIO_ROOT_USER=minio
 MINIO_ROOT_PASSWORD=minioBackup
 MINIO_SITE_REGION=us-west-1
@@ -12,3 +10,6 @@ S3_MINIO_BUCKET=backup
 S3_MINIO_HOSTNAME=myminio
 S3_MINIO_KEY=demo
 S3_MINIO_KEY_SECRET=demoBackup
+
+IMAGE_TAG_GREENPLUM=6.27.1
+GREENPLUM_PASSWORD=gparray
diff --git a/e2e_tests/README.md b/e2e_tests/README.md
new file mode 100644
index 0000000..b2c5dc6
--- /dev/null
+++ b/e2e_tests/README.md
@@ -0,0 +1,47 @@
+# End-to-end tests
+
+The following architecture is used to run the tests:
+
+* Separate containers for MinIO and nginx. Official images [minio/minio](https://hub.docker.com/r/minio/minio), [minio/mc](https://hub.docker.com/r/minio/mc) and [nginx](https://hub.docker.com/_/nginx) are used. It's necessary for S3-compatible storage for backups.
+* Separate container gpbackman-export: runs the gpbackman image and copies the binary to a shared Docker volume (gpbackman_bin) for use inside the Greenplum container.
+* Separate container for Greenplum. The [docker-greenplum image](https://github.com/woblerr/docker-greenplum) is used to run a single-node Greenplum cluster.
+
+## Running tests
+
+Build the gpbackman image:
+```bash
+make docker
+```
+
+Run all tests (sequentially for all commands):
+
+```bash
+make test-e2e
+```
+
+Run tests for a single command:
+
+```bash
+make test-e2e_backup-info
+make test-e2e_report-info
+make test-e2e_backup-delete
+make test-e2e_backup-clean
+make test-e2e_history-clean
+make test-e2e_history-migrate
+```
+
+Manually run a specific test (example for `backup-info`):
+
+```bash
+docker compose -f e2e_tests/docker-compose.yml up -d
+
+docker exec greenplum bash -c 'su - gpadmin -c "/home/gpadmin/run_tests/run_test.sh backup-info"'
+
+docker compose -f e2e_tests/docker-compose.yml down -v
+```
+
+If a manual test run fails, recreate the containers before retrying.
+ +## Notes +- Tests are executed as `gpadmin` inside the Greenplum container. The runner waits for the cluster to become ready and then prepares the backup set before executing checks. +- Scripts exit with a non-zero code on failure. diff --git a/e2e_tests/conf/Dockerfile.s3_plugin b/e2e_tests/conf/Dockerfile.s3_plugin deleted file mode 100644 index 84bd009..0000000 --- a/e2e_tests/conf/Dockerfile.s3_plugin +++ /dev/null @@ -1,21 +0,0 @@ -ARG S3_PLUGIN_VERSION="1.10.1" - -# Starting from 25.05.2024, all Greenplum repositories (including gpbackup-s3-plugin) were transferred -# to the archive on GitHub. At the same time, all tags have been deleted from the archives. -# The fork containing the necessary tags is used for testing. - -FROM golang:1.24-bookworm AS s3_plugin-builder -ARG S3_PLUGIN_VERSION -RUN apt-get update \ - && DEBIAN_FRONTEND=noninteractive apt-get install -y build-essential bash perl wget ca-certificates \ - # && wget https://github.com/greenplum-db/gpbackup-s3-plugin/archive/refs/tags/${S3_PLUGIN_VERSION}.tar.gz -O /tmp/gpbackup-s3-plugin-${S3_PLUGIN_VERSION}.tar.gz \ - && wget https://github.com/woblerr/gpbackup-s3-plugin/archive/refs/tags/${S3_PLUGIN_VERSION}.tar.gz -O /tmp/gpbackup-s3-plugin-${S3_PLUGIN_VERSION}.tar.gz \ - && mkdir -p /tmp/gpbackup-s3-plugin \ - && tar -xzf /tmp/gpbackup-s3-plugin-${S3_PLUGIN_VERSION}.tar.gz --strip-components=1 -C /tmp/gpbackup-s3-plugin \ - && cd /tmp/gpbackup-s3-plugin \ - && make build \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* - -FROM gpbackman AS gpbackman-plugins -COPY --from=s3_plugin-builder /go/bin/gpbackup_s3_plugin /home/gpbackman/gpbackup_s3_plugin diff --git a/e2e_tests/conf/gpbackup_s3_plugin.yaml b/e2e_tests/conf/gpbackup_s3_plugin.yaml index 10902e9..7a7bf0a 100644 --- a/e2e_tests/conf/gpbackup_s3_plugin.yaml +++ b/e2e_tests/conf/gpbackup_s3_plugin.yaml @@ -1,5 +1,5 @@ --- -executablepath: /home/gpbackman/gpbackup_s3_plugin +executablepath: 
/usr/local/greenplum-db/bin/gpbackup_s3_plugin options: region: us-west-1 endpoint: minio:9000 diff --git a/e2e_tests/docker-compose.yml b/e2e_tests/docker-compose.yml index 76faee6..aaa8a44 100644 --- a/e2e_tests/docker-compose.yml +++ b/e2e_tests/docker-compose.yml @@ -37,123 +37,49 @@ services: minio: condition: service_healthy volumes: - - "./scripts/prepare_minio.sh:/prepare_minio.sh" - - "./src_data:/tmp/src_data" + - "./scripts/prepare/prepare_minio.sh:/prepare_minio.sh" entrypoint: /prepare_minio.sh networks: - e2e -################################################################ - # Test backup-info command. - backup-info: - image: ${IMAGE_GPBACKMAN} - container_name: backup-info - hostname: backup-info - volumes: - - "./src_data:/home/gpbackman/src_data" - - "./scripts/run_backup-info.sh:/home/gpbackman/run_backup-info.sh" - command: /home/gpbackman/run_backup-info.sh - networks: - - e2e - - ################################################################ - # Test backup-delete command. - backup-delete: - build: - context: . - dockerfile: ./conf/Dockerfile.s3_plugin - args: - S3_PLUGIN_VERSION: ${S3_PLUGIN_VERSION} - image: backup-delete - container_name: backup-delete - hostname: backup-delete - depends_on: - minio: - condition: service_started - prepare_minio: - condition: service_completed_successfully - volumes: - - "./src_data:/home/gpbackman/src_data" - - "./scripts/run_backup-delete.sh:/home/gpbackman/run_backup-delete.sh" - - "./conf/gpbackup_s3_plugin.yaml:/home/gpbackman/gpbackup_s3_plugin.yaml" - - "./conf/gpbackup_s3_plugin_invalid.yaml:/home/gpbackman/gpbackup_s3_plugin_invalid.yaml" - command: /home/gpbackman/run_backup-delete.sh - networks: - - e2e - - ################################################################ - # Test history-clean command. 
- history-clean: - image: ${IMAGE_GPBACKMAN} - container_name: history-clean - hostname: history-clean - volumes: - - "./src_data:/home/gpbackman/src_data" - - "./scripts/run_history-clean.sh:/home/gpbackman/run_history-clean.sh" - command: /home/gpbackman/run_history-clean.sh - networks: - - e2e - ################################################################ - # Test history-migrate command. - history-migrate: + # Export gpbackman binary to shared volume. + gpbackman-export: image: ${IMAGE_GPBACKMAN} - container_name: history-migrate - hostname: history-migrate + container_name: gpbackman-export + hostname: gpbackman-export volumes: - - "./src_data:/home/gpbackman/src_data" - - "./scripts/run_history-migrate.sh:/home/gpbackman/run_history-migrate.sh" - command: /home/gpbackman/run_history-migrate.sh + - gpbackman_bin:/export + entrypoint: ["/bin/sh","-c","cp /usr/bin/gpbackman /export/gpbackman && chmod 755 /export/gpbackman && sleep infinity"] networks: - e2e ################################################################ - # Test report-info command. - report-info: - build: - context: . - dockerfile: ./conf/Dockerfile.s3_plugin - args: - S3_PLUGIN_VERSION: ${S3_PLUGIN_VERSION} - image: report-info - container_name: report-info - hostname: report-info + greenplum: + image: woblerr/greenplum:${IMAGE_TAG_GREENPLUM} + container_name: greenplum + hostname: greenplum depends_on: minio: - condition: service_started + condition: service_healthy prepare_minio: condition: service_completed_successfully - volumes: - - "./src_data:/home/gpbackman/src_data" - - "./scripts/run_report-info.sh:/home/gpbackman/run_report-info.sh" - - "./conf/gpbackup_s3_plugin.yaml:/home/gpbackman/gpbackup_s3_plugin.yaml" - command: /home/gpbackman/run_report-info.sh - networks: - - e2e - - ################################################################ - # Test backup-clean command. - backup-clean: - build: - context: . 
- dockerfile: ./conf/Dockerfile.s3_plugin - args: - S3_PLUGIN_VERSION: ${S3_PLUGIN_VERSION} - image: backup-clean - container_name: backup-clean - hostname: backup-clean - depends_on: - minio: + gpbackman-export: condition: service_started - prepare_minio: - condition: service_completed_successfully + environment: + - "GREENPLUM_PASSWORD" volumes: - - "./src_data:/home/gpbackman/src_data" - - "./scripts/run_backup-clean.sh:/home/gpbackman/run_backup-clean.sh" - - "./conf/gpbackup_s3_plugin.yaml:/home/gpbackman/gpbackup_s3_plugin.yaml" - command: /home/gpbackman/run_backup-clean.sh + - ./conf/gpbackup_s3_plugin.yaml:/home/gpadmin/gpbackup_s3_plugin.yaml + - ./scripts/prepare/gpdb_init:/docker-entrypoint-initdb.d + - ./scripts/prepare/prepare_gpdb_backups.sh:/home/gpadmin/prepare_gpdb_backups.sh + - ./scripts/run_tests:/home/gpadmin/run_tests + - ./src_data:/home/gpadmin/src_data + - gpbackman_bin:/home/gpadmin/gpbackman networks: - e2e networks: e2e: + +volumes: + gpbackman_bin: diff --git a/e2e_tests/scripts/prepare/gpdb_init/tables_init.sql b/e2e_tests/scripts/prepare/gpdb_init/tables_init.sql new file mode 100644 index 0000000..f01adc8 --- /dev/null +++ b/e2e_tests/scripts/prepare/gpdb_init/tables_init.sql @@ -0,0 +1,16 @@ +CREATE SCHEMA IF NOT EXISTS sch1; +CREATE SCHEMA IF NOT EXISTS sch2; + +DROP TABLE IF EXISTS sch1.tbl_a; +DROP TABLE IF EXISTS sch1.tbl_b; +DROP TABLE IF EXISTS sch2.tbl_c; +DROP TABLE IF EXISTS sch2.tbl_d; + +CREATE TABLE sch1.tbl_a AS SELECT i FROM generate_series(1,100000) AS i; +CREATE TABLE sch1.tbl_b AS SELECT i FROM generate_series(1,100000) AS i; + +CREATE TABLE sch2.tbl_c (a int, b int) WITH (appendoptimized = true) DISTRIBUTED BY (a); +INSERT INTO sch2.tbl_c SELECT i, i FROM generate_series(1,100000) i; + +CREATE TABLE sch2.tbl_d (a int, b int) WITH (appendoptimized = true, orientation = column) DISTRIBUTED BY (a); +INSERT INTO sch2.tbl_d SELECT i, i FROM generate_series(1,100000) i; diff --git 
a/e2e_tests/scripts/prepare/prepare_gpdb_backups.sh b/e2e_tests/scripts/prepare/prepare_gpdb_backups.sh new file mode 100755 index 0000000..8710a74 --- /dev/null +++ b/e2e_tests/scripts/prepare/prepare_gpdb_backups.sh @@ -0,0 +1,75 @@ +#!/usr/bin/env bash +set -Eeuo pipefail + +# Backup sequence overview: +# 1. full_local : Full LOCAL backup (all tables) +# 2. full_local_include_table : Full LOCAL backup including only sch1.tbl_a +# 3. full_local_exclude_table : Full LOCAL backup excluding sch1.tbl_b +# 4. metadata_only_s3 : Metadata-only S3 backup (no data) +# 5. full_s3 : Full S3 backup (all tables, leaf partition data) +# 6. full_s3_include_table : Full S3 backup including only sch2.tbl_c +# 7. full_s3_exclude_table : Full S3 backup excluding sch2.tbl_d +# 8. (data change) : Insert into sch2.tbl_c and sch2.tbl_d +# 9. incr_s3 : Incremental S3 backup +# 10. incr_s3_include_table : Incremental S3 backup including only sch2.tbl_c +# 11. (data change) : Insert more rows into sch2.tbl_c +# 12. incr_s3_exclude_table : Incremental S3 backup excluding sch2.tbl_d +# 13. data_only_local : Data-only LOCAL backup (no metadata) +# 14. 
full_local : Final full LOCAL backup (all tables) + +DB_NAME="demo" +PLUGIN_CFG=/home/gpadmin/gpbackup_s3_plugin.yaml +COMMON_PLUGIN_FLAGS=(--plugin-config "$PLUGIN_CFG") + +run_backup(){ + local label="$1"; shift + echo "[INFO] Running backup: $label" + gpbackup --dbname ${DB_NAME} "$@" || { echo "[ERROR] Backup $label failed"; exit 1; } + sleep 10 +} + +# Full LOCAL no filters +run_backup full_local + +# Full LOCAL include-table sch1.tbl_a +run_backup full_local_include_table --include-table sch1.tbl_a + +# Full LOCAL exclude-table sch1.tbl_b +run_backup full_local_exclude_table --exclude-table sch1.tbl_b + +# Metadata-only s3 +run_backup metadata_only_s3 "${COMMON_PLUGIN_FLAGS[@]}" --metadata-only + +# Full S3 no filters +run_backup full_s3 "${COMMON_PLUGIN_FLAGS[@]}" --leaf-partition-data + +# Full S3 include-table sch2.tbl_c +run_backup full_s3_include_table "${COMMON_PLUGIN_FLAGS[@]}" --include-table sch2.tbl_c --leaf-partition-data + +# Full S3 exclude-table sch2.tbl_d +run_backup full_s3_exclude_table "${COMMON_PLUGIN_FLAGS[@]}" --exclude-table sch2.tbl_d --leaf-partition-data + +# Insert data +psql -d demo -c "INSERT INTO sch2.tbl_c SELECT i, i FROM generate_series(1,100000) i;" +psql -d demo -c "INSERT INTO sch2.tbl_d SELECT i, i FROM generate_series(1,100000) AS i;" + +# Incremental S3 no filters +run_backup incr_s3 "${COMMON_PLUGIN_FLAGS[@]}" --incremental --leaf-partition-data + +# Incremental S3 include-table sch2.tbl_c +run_backup incr_s3_include_table "${COMMON_PLUGIN_FLAGS[@]}" --incremental --include-table sch2.tbl_c --leaf-partition-data + +# Insert data +psql -d demo -c "INSERT INTO sch2.tbl_c SELECT i, i FROM generate_series(1,100000) i;" + +# Incremental S3 exclude-table sch2.tbl_d +run_backup incr_s3_exclude_table "${COMMON_PLUGIN_FLAGS[@]}" --incremental --exclude-table sch2.tbl_d --leaf-partition-data + +# Data-only LOCAL no filters +run_backup data_only_local --data-only + +# Full LOCAL no filters +run_backup full_local + +echo "[INFO] 
Backups prepared successfully" +exit 0 diff --git a/e2e_tests/scripts/prepare/prepare_minio.sh b/e2e_tests/scripts/prepare/prepare_minio.sh new file mode 100755 index 0000000..f5de392 --- /dev/null +++ b/e2e_tests/scripts/prepare/prepare_minio.sh @@ -0,0 +1,7 @@ +#!/bin/sh +set -eu + +mc config host add ${S3_MINIO_HOSTNAME} http://minio:9000 ${MINIO_ROOT_USER} ${MINIO_ROOT_PASSWORD}; +mc mb ${S3_MINIO_HOSTNAME}/${S3_MINIO_BUCKET}; +mc admin user add ${S3_MINIO_HOSTNAME} ${S3_MINIO_KEY} ${S3_MINIO_KEY_SECRET}; +mc admin policy attach ${S3_MINIO_HOSTNAME} readwrite --user ${S3_MINIO_KEY} diff --git a/e2e_tests/scripts/prepare_minio.sh b/e2e_tests/scripts/prepare_minio.sh deleted file mode 100755 index 705bf45..0000000 --- a/e2e_tests/scripts/prepare_minio.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/sh - -set -e - -mc config host add ${S3_MINIO_HOSTNAME} http://minio:9000 ${MINIO_ROOT_USER} ${MINIO_ROOT_PASSWORD}; -mc mb ${S3_MINIO_HOSTNAME}/${S3_MINIO_BUCKET}; -mc admin user add ${S3_MINIO_HOSTNAME} ${S3_MINIO_KEY} ${S3_MINIO_KEY_SECRET}; -mc admin policy attach ${S3_MINIO_HOSTNAME} readwrite --user ${S3_MINIO_KEY} - -TIMESTAMP="20230724090000" -touch /tmp/test.txt -mc cp /tmp/test.txt ${S3_MINIO_HOSTNAME}/${S3_MINIO_BUCKET}/test/backups/${TIMESTAMP:0:8}/${TIMESTAMP}/test.txt -mc cp /tmp/src_data/gpbackup_${TIMESTAMP}_report ${S3_MINIO_HOSTNAME}/${S3_MINIO_BUCKET}/test/backups/${TIMESTAMP:0:8}/${TIMESTAMP}/gpbackup_${TIMESTAMP}_report - -TIMESTAMPS="20230721090000 20230722100000 20230723082000 20230725101115 20230725101152 20230725101959 20230725102831 20230725102950 20230725110051" -for i in ${TIMESTAMPS}; do - mc cp /tmp/test.txt ${S3_MINIO_HOSTNAME}/${S3_MINIO_BUCKET}/test/backups/${i:0:8}/${i}/test.txt -done diff --git a/e2e_tests/scripts/run_backup-clean.sh b/e2e_tests/scripts/run_backup-clean.sh deleted file mode 100755 index cd103af..0000000 --- a/e2e_tests/scripts/run_backup-clean.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/sh - -# Local image for e2e tests 
should be built before running tests. -# See make file for details. -# This test works with files from src_data directory. -# If new file with backup info is added to src_data, it's nessary to update test cases in this script. - -GPBACKMAN_TEST_COMMAND="backup-clean" - -HOME_DIR="/home/gpbackman" -SRC_DIR="${HOME_DIR}/src_data" -WORK_DIR="${HOME_DIR}/test_data" - -DATE_REGEX="(Mon|Tue|Wed|Thu|Fri|Sat|Sun)\s(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s(0[1-9]|[12][0-9]|3[01])\s[0-9]{4}\s(0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9])" -TIMESTAMP="" - -# Prepare data. -rm -rf "${WORK_DIR}/" -mkdir -p "${WORK_DIR}" -cp ${SRC_DIR}/gpbackup_history_incremental_plugin.yaml \ -${SRC_DIR}/gpbackup_history.db \ -${WORK_DIR} - -################################################################ -# Test 1. -# Delete all backups older than timestamp. -# Because other backup are incermental and we don't use the option --cascade, no backup will be deleted. -TEST_ID="1" - -TIMESTAMP="20230725101500" - -# Execute backup-delete commnad. - -gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---before-timestamp ${TIMESTAMP} \ ---plugin-config ${HOME_DIR}/gpbackup_s3_plugin.yaml - -GPBACKMAN_RESULT_SQLITE=$(gpbackman backup-info \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---deleted) - -TEST_CNT_SQL=2 - -# Check results. -# In sql db there is one predifined deleted backup - 20230725110310. -# So, it's ok that one deleted backup exists. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | cut -f9 -d'|' | awk '{$1=$1};1' | grep -E ${DATE_REGEX} | wc -l) -if [ "${result_cnt_sqlite}" != "${TEST_CNT_SQL}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nget_sqlite=${result_cnt_sqlite}, want=${TEST_CNT_SQL}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." 
- - -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} all tests passed" -exit 0 diff --git a/e2e_tests/scripts/run_backup-delete.sh b/e2e_tests/scripts/run_backup-delete.sh deleted file mode 100755 index 58ab663..0000000 --- a/e2e_tests/scripts/run_backup-delete.sh +++ /dev/null @@ -1,92 +0,0 @@ -#!/bin/sh - -# Local image for e2e tests should be built before running tests. -# See make file for details. -# This test works with files from src_data directory. -# If new file with backup info is added to src_data, it's nessary to update test cases in this script. - -GPBACKMAN_TEST_COMMAND="backup-delete" - -HOME_DIR="/home/gpbackman" -SRC_DIR="${HOME_DIR}/src_data" -WORK_DIR="${HOME_DIR}/test_data" - -DATE_REGEX="(Mon|Tue|Wed|Thu|Fri|Sat|Sun)\s(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s(0[1-9]|[12][0-9]|3[01])\s[0-9]{4}\s(0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9])" -TIMESTAMP="" - -# Prepare data. -rm -rf "${WORK_DIR}/" -mkdir -p "${WORK_DIR}" -cp ${SRC_DIR}/gpbackup_history_metadata_plugin.yaml \ -${SRC_DIR}/gpbackup_history.db \ -${WORK_DIR} - -################################################################ -# Test 1. -# All ther calls are executed for the same timestamp. -# At the first call, the backup is deleted from the s3. -# The yaml history file format is used. -# History file yaml format is used, there are a real s3 call and a real backup deletion. - -# At second call, there are a real s3 call and no real backup deletion. -# The sqlite history file format is used. -# Because this backup was deleted in first call, there are no files in the s3. -# But the info about deletion attempt is written to log file and DATE DELETED is updated in history file. -TEST_ID="1" - -TIMESTAMP="20230724090000" - -# Execute backup-delete commnad. 
-gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---timestamp ${TIMESTAMP} \ ---plugin-config ${HOME_DIR}/gpbackup_s3_plugin.yaml \ ---force \ ---ignore-errors - -GPBACKMAN_RESULT_SQLITE=$(gpbackman backup-info \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---deleted | grep -w ${TIMESTAMP}) - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -bckp_date_deleted=$(echo "${GPBACKMAN_RESULT_SQLITE}" | cut -f9 -d'|' | awk '{$1=$1};1' | grep -E ${DATE_REGEX}) -if [ $? != 0 ]; then - echo -e "[ERROR] r${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nget_sqlite:\n${bckp_date_deleted}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -################################################################ -# Test 2. -# Test cascade delete option -TEST_ID="2" - -TIMESTAMP="20230725101959" -# After successful delete, in history there should be 5 backup with dete deleted info. -# 2 from source + 1 from test 1 + 3 from this test. -TEST_CNT=6 - -# Execute backup-delete commnad. -gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---timestamp ${TIMESTAMP} \ ---plugin-config ${HOME_DIR}/gpbackup_s3_plugin.yaml \ ---cascade \ ---force \ ---ignore-errors - -GPBACKMAN_RESULT_SQLITE=$(gpbackman backup-info \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---deleted) - -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | cut -f9 -d'|' | awk '{$1=$1};1' | grep -E ${DATE_REGEX} | wc -l) -if [ "${result_cnt_sqlite}" != "${TEST_CNT}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nget_sqlite=${result_cnt_sqlite}, want=${TEST_CNT}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." 
- -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} all tests passed" -exit 0 diff --git a/e2e_tests/scripts/run_backup-info.sh b/e2e_tests/scripts/run_backup-info.sh deleted file mode 100755 index cee3a18..0000000 --- a/e2e_tests/scripts/run_backup-info.sh +++ /dev/null @@ -1,263 +0,0 @@ -#!/bin/sh - -# Local image for e2e tests should be built before running tests. -# See make file for details. -# This test works with files from src_data directory. -# If new file with backup info is added to src_data, it's nessary to update test cases in this script. - -GPBACKMAN_TEST_COMMAND="backup-info" - -SRC_DIR="/home/gpbackman/src_data" - -# backup-info commnad for sqlite backup history format. -GPBACKMAN_RESULT_SQLITE=$(gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${SRC_DIR}/gpbackup_history.db \ ---deleted \ ---failed) - -IFS=$'\n' -################################################################ -# Test 1. -# Simple test to check the number of provided backups. -# Format: -# status | type | object filtering| plugin | date deleted | repetitions. -# For backup without plugin info - blank line, so them skips in this test. -TEST_ID="1" - -REGEX_LIST='''Success|data-only|gpbackup_s3_plugin|1 -Success|metadata-only|gpbackup_s3_plugin|2 -Success|full|gpbackup_s3_plugin|4 -Failure|full|gpbackup_s3_plugin|3 -Success|incremental|gpbackup_s3_plugin|10''' - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." 
-for i in ${REGEX_LIST} -do - bckp_status=$(echo "${i}" | cut -f1 -d'|') - bckp_type=$(echo "${i}" | cut -f2 -d'|') - bckp_plugin=$(echo "${i}" | cut -f3 -d'|') - cnt=$(echo "${i}" | cut -f4 -d'|') - result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | grep -w "${bckp_status}" | grep -w "${bckp_type}" | grep -w "${bckp_plugin}" | wc -l | tr -d ' ') - if [ "${result_cnt_sqlite}" != "${cnt}" ]; then\ - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\n'${i}': get_yaml=${result_cnt_yaml}, get_sqlite=${result_cnt_sqlite}, want=${cnt}" - exit 1 - fi -done -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -################################################################ -# Test 2. -# Simple test to check full info about backups. -# Format: -# timestamp| date | status | database| type| plugin | duration | repetitions. -# The match of all fields in the backup information is checked. -# Don't test backup with empty object filtering, plugin info and non-empty dete deleted fields. 
-TEST_ID="2" - -REGEX_LIST='''20230806230400|Sun Aug 06 2023 23:04:00|Failure|demo|full|gpbackup_s3_plugin|00:00:38|1 -20230725102950|Tue Jul 25 2023 10:29:50|Success|demo|incremental|gpbackup_s3_plugin|00:00:19|1 -20230725110051|Tue Jul 25 2023 11:00:51|Success|demo|incremental|gpbackup_s3_plugin|00:00:20|1 -20230725102831|Tue Jul 25 2023 10:28:31|Success|demo|incremental|gpbackup_s3_plugin|00:00:18|1 -20230725101959|Tue Jul 25 2023 10:19:59|Success|demo|incremental|gpbackup_s3_plugin|00:00:22|1 -20230725101152|Tue Jul 25 2023 10:11:52|Success|demo|incremental|gpbackup_s3_plugin|00:00:18|1 -20230725101115|Tue Jul 25 2023 10:11:15|Success|demo|full|gpbackup_s3_plugin|00:00:20|1 -20230724090000|Mon Jul 24 2023 09:00:00|Success|demo|metadata-only|gpbackup_s3_plugin|00:05:17|1 -20230723082000|Sun Jul 23 2023 08:20:00|Success|demo|data-only|gpbackup_s3_plugin|00:35:17|1 -20230722100000|Sat Jul 22 2023 10:00:00|Success|demo|full|gpbackup_s3_plugin|00:25:17|1 -20230721090000|Fri Jul 21 2023 09:00:00|Success|demo|metadata-only|gpbackup_s3_plugin|00:04:17|1''' - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." 
-for i in ${REGEX_LIST} -do - bckp_timestamp=$(echo "${i}" | cut -f1 -d'|') - bckp_date=$(echo "${i}" | cut -f2 -d'|') - bckp_status=$(echo "${i}" | cut -f3 -d'|') - bckp_database=$(echo "${i}" | cut -f4 -d'|') - bckp_type=$(echo "${i}" | cut -f5 -d'|') - bckp_plugin=$(echo "${i}" | cut -f6 -d'|') - bckp_duration=$(echo "${i}" | cut -f7 -d'|') - cnt=$(echo "${i}" | cut -f8 -d'|') - result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | \ - grep -w "${bckp_timestamp}" | \ - grep -w "${bckp_date}" | \ - grep -w "${bckp_status}" | \ - grep -w "${bckp_database}" | \ - grep -w "${bckp_type}" | \ - grep -w "${bckp_plugin}" | \ - grep -w "${bckp_duration}" | \ - wc -l | tr -d ' ') - if [ "${result_cnt_sqlite}" != "${cnt}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\n'${i}': get_yaml=${result_cnt_yaml}, get_sqlite=${result_cnt_sqlite}, want=${cnt}" - exit 1 - fi -done -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -################################################################ -# Test 3. -# Simple test to check full info about backups with deleted field. -# Format: -# timestamp| date | status | database| type | plugin | duration | date deleted | repetitions. -# The match of all fields in the backup information is checked. -# Don't test backup with empty object filtering field. -TEST_ID="3" - -REGEX_LIST="20230725110310|Tue Jul 25 2023 11:03:10|Success|demo|incremental|gpbackup_s3_plugin|00:00:18|Wed Jul 26 2023 11:03:28|1" - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." 
-for i in ${REGEX_LIST} -do - bckp_timestamp=$(echo "${i}" | cut -f1 -d'|') - bckp_date=$(echo "${i}" | cut -f2 -d'|') - bckp_status=$(echo "${i}" | cut -f3 -d'|') - bckp_database=$(echo "${i}" | cut -f4 -d'|') - bckp_type=$(echo "${i}" | cut -f5 -d'|') - bckp_plugin=$(echo "${i}" | cut -f6 -d'|') - bckp_duration=$(echo "${i}" | cut -f7 -d'|') - bckp_date_deleted=$(echo "${i}" | cut -f8 -d'|') - cnt=$(echo "${i}" | cut -f9 -d'|') - result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | \ - grep -w "${bckp_timestamp}" | \ - grep -w "${bckp_date}" | \ - grep -w "${bckp_status}" | \ - grep -w "${bckp_database}" | \ - grep -w "${bckp_type}" | \ - grep -w "${bckp_plugin}" | \ - grep -w "${bckp_duration}" | \ - grep -w "${bckp_date_deleted}" | \ - wc -l | tr -d ' ') - if [ "${result_cnt_sqlite}" != "${cnt}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\n'${i}': get_yaml=${result_cnt_yaml}, get_sqlite=${result_cnt_sqlite}, want=${cnt}" - exit 1 - fi -done -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -################################################################ -# Test 4. -# Simple test to check full info about local backups. -# Format: -# timestamp| date | status | database| type| duration | repetitions. -# The match of all fields in the backup information is checked. -# Don't test backup with empty object filtering and date deleted fields. -# For local backups plugin field is empty. -TEST_ID="4" - -REGEX_LIST='''20240505201504|Sun May 05 2024 20:15:04|Success|demo|full|00:00:15|1 -20240506201504|Mon May 06 2024 20:15:04|Success|demo|full|00:00:15|1''' - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." 
-for i in ${REGEX_LIST} -do - bckp_timestamp=$(echo "${i}" | cut -f1 -d'|') - bckp_date=$(echo "${i}" | cut -f2 -d'|') - bckp_status=$(echo "${i}" | cut -f3 -d'|') - bckp_database=$(echo "${i}" | cut -f4 -d'|') - bckp_type=$(echo "${i}" | cut -f5 -d'|') - bckp_duration=$(echo "${i}" | cut -f6 -d'|') - cnt=$(echo "${i}" | cut -f7 -d'|') - result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | \ - grep -w "${bckp_timestamp}" | \ - grep -w "${bckp_date}" | \ - grep -w "${bckp_status}" | \ - grep -w "${bckp_database}" | \ - grep -w "${bckp_type}" | \ - grep -w "${bckp_duration}" | \ - wc -l | tr -d ' ') - if [ "${result_cnt_sqlite}" != "${cnt}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\n'${i}': get_yaml=${result_cnt_yaml}, get_sqlite=${result_cnt_sqlite}, want=${cnt}" - exit 1 - fi -done -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -################################################################ -# Test 5. -# Simple test to check type option -# Format: -# status | type| repetitions. -# For backup without plugin info - blank line, so them skips in this test. -TEST_ID="5" - -REGEX_LIST='''Success|full|6 -Failure|full|3''' - -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -for i in ${REGEX_LIST} -do - bckp_status=$(echo "${i}" | cut -f1 -d'|') - bckp_type=$(echo "${i}" | cut -f2 -d'|') - cnt=$(echo "${i}" | cut -f3 -d'|') - result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | grep -w "${bckp_status}" | grep -w "${bckp_type}" | wc -l | tr -d ' ') - if [ "${result_cnt_sqlite}" != "${cnt}" ]; then\ - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\n'${i}': get_yaml=${result_cnt_yaml}, get_sqlite=${result_cnt_sqlite}, want=${cnt}" - exit 1 - fi -done -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -################################################################ -# Test 6. -# Simple test to check filtering by --type flag. -# Format: -# status| type| repetitions. 
-# Testing on incremental backup type. -TEST_ID="6" - -# backup-info commnad for sqlite backup history format. -GPBACKMAN_RESULT_SQLITE=$(gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${SRC_DIR}/gpbackup_history.db \ ---type incremental) - -REGEX_LIST='''Success|incremental|8''' - -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -for i in ${REGEX_LIST} -do - bckp_status=$(echo "${i}" | cut -f1 -d'|') - bckp_type=$(echo "${i}" | cut -f2 -d'|') - cnt=$(echo "${i}" | cut -f3 -d'|') - result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | grep -w "${bckp_status}" | grep -w "${bckp_type}" | wc -l | tr -d ' ') - if [ "${result_cnt_sqlite}" != "${cnt}" ]; then\ - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\n'${i}': get_yaml=${result_cnt_yaml}, get_sqlite=${result_cnt_sqlite}, want=${cnt}" - exit 1 - fi -done -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - - -################################################################ -# Test 7. -# Simple test to check filtering by --schema flag. -# Format: -# status| repetitions. -# Testing on include test1 schema. -TEST_ID="7" - -GPBACKMAN_RESULT_SQLITE=$(gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${SRC_DIR}/gpbackup_history.db \ ---deleted \ ---schema test1) - -REGEX_LIST='''Success|3''' - -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -for i in ${REGEX_LIST} -do - bckp_status=$(echo "${i}" | cut -f1 -d'|') - cnt=$(echo "${i}" | cut -f2 -d'|') - result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | grep -w "${bckp_status}" | wc -l | tr -d ' ') - if [ "${result_cnt_sqlite}" != "${cnt}" ]; then\ - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\n'${i}': get_yaml=${result_cnt_yaml}, get_sqlite=${result_cnt_sqlite}, want=${cnt}" - exit 1 - fi -done -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." 
- - -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} all tests passed" -exit 0 diff --git a/e2e_tests/scripts/run_history-clean.sh b/e2e_tests/scripts/run_history-clean.sh deleted file mode 100755 index cd55329..0000000 --- a/e2e_tests/scripts/run_history-clean.sh +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# Local image for e2e tests should be built before running tests. -# See make file for details. -# This test works with files from src_data directory. -# If new file with backup info is added to src_data, it's nessary to update test cases in this script. - -GPBACKMAN_TEST_COMMAND="history-clean" - -SRC_DIR="/home/gpbackman/src_data" -WORK_DIR="/home/gpbackman/test_data" - -DATE_REGEX="(Mon|Tue|Wed|Thu|Fri|Sat|Sun)\s(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s(0[1-9]|[12][0-9]|3[01])\s[0-9]{4}\s(0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9])" - -# Prepare data. -rm -rf "${WORK_DIR}/" -mkdir -p "${WORK_DIR}" -cp ${SRC_DIR}/gpbackup_history_failure_plugin.yaml \ -${SRC_DIR}/gpbackup_history_incremental_plugin.yaml \ -${SRC_DIR}/gpbackup_history.db \ -${WORK_DIR} - -################################################################ -# Test 1. -# Delete backups from history database older than timestamp. -# There are no failed or deleted backups after command execution. -TEST_ID="1" - -TIMESTAMP="20231212101500" - -# Execute history-clean commnad. - -gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---before-timestamp ${TIMESTAMP} \ - -GPBACKMAN_RESULT_SQLITE=$(gpbackman backup-info \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---deleted --failed) - -TEST_CNT=0 - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." 
-result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | cut -f9 -d'|' | awk '{$1=$1};1' | grep -E ${DATE_REGEX} | wc -l) -if [ "${result_cnt_sqlite}" != "${TEST_CNT}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nget_yaml=${result_cnt_yaml}, get_sqlite=${result_cnt_sqlite}, want=${TEST_CNT}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} all tests passed" -exit 0 diff --git a/e2e_tests/scripts/run_history-migrate.sh b/e2e_tests/scripts/run_history-migrate.sh deleted file mode 100755 index 578af6c..0000000 --- a/e2e_tests/scripts/run_history-migrate.sh +++ /dev/null @@ -1,74 +0,0 @@ -#!/bin/sh - -# Local image for e2e tests should be built before running tests. -# See make file for details. -# This test works with files from src_data directory. -# If new file with backup info is added to src_data, it's nessary to update test cases in this script. - -GPBACKMAN_TEST_COMMAND="history-migrate" - -SRC_DIR="/home/gpbackman/src_data" -WORK_DIR="/home/gpbackman/test_data" - -# Prepare data. -rm -rf "${WORK_DIR}/" -mkdir -p "${WORK_DIR}" -cp ${SRC_DIR}/gpbackup_history_dataonly_nodata_plugin.yaml \ -${SRC_DIR}/gpbackup_history_metadata_plugin.yaml \ -${WORK_DIR} - -# Execute history-migrate commnad. -gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-file ${WORK_DIR}/gpbackup_history_dataonly_nodata_plugin.yaml \ ---history-file ${WORK_DIR}/gpbackup_history_metadata_plugin.yaml \ ---history-db ${WORK_DIR}/gpbackup_history.db - -################################################################ -# Test 1. -# Check that in source data there are files with .migrated type after migration. -# Format: -# source_file.megrated. -TEST_ID="1" - -REGEX_LIST='''gpbackup_history_dataonly_nodata_plugin.yaml.migrated -gpbackup_history_metadata_plugin.yaml.migrated -''' - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -for i in ${REGEX_LIST} -do - if [ ! 
-f "${WORK_DIR}/${i}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nFile ${i} not found." - exit 1 - fi -done -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -################################################################ -# Test 2. -# Compare results of backup-info command before and after migration. -TEST_ID="2" - -TEST_CNT_SQL=2 - -# backup-info commnad for sqlite backup history format. -# This result from migrated data. -GPBACKMAN_RESULT_SQLITE=$(gpbackman backup-info \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---deleted \ ---failed) - -DATE_REGEX="Success" - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -result_cnt_sqlite=$(echo "${GPBACKMAN_RESULT_SQLITE}" | cut -f3 -d'|' | awk '{$1=$1};1' | grep -E ${DATE_REGEX} | wc -l) -if [ "${result_cnt_sqlite}" != "${TEST_CNT_SQL}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nget_yaml:\n${GPBACKMAN_RESULT_YAML}\nget_sqlite:\n${GPBACKMAN_RESULT_SQLITE}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} all tests passed" -exit 0 diff --git a/e2e_tests/scripts/run_report-info.sh b/e2e_tests/scripts/run_report-info.sh deleted file mode 100755 index 0d04c38..0000000 --- a/e2e_tests/scripts/run_report-info.sh +++ /dev/null @@ -1,162 +0,0 @@ -#!/bin/sh - -# Local image for e2e tests should be built before running tests. -# See make file for details. -# This test works with files from src_data directory. -# If new file with backup info is added to src_data, it's nessary to update test cases in this script. - -GPBACKMAN_TEST_COMMAND="report-info" - -HOME_DIR="/home/gpbackman" -SRC_DIR="${HOME_DIR}/src_data" -WORK_DIR="${HOME_DIR}/test_data" - -# Prepare general data. 
-rm -rf "${WORK_DIR}/" -mkdir -p "${WORK_DIR}" -cp ${SRC_DIR}/gpbackup_history_metadata_plugin.yaml \ -${SRC_DIR}/gpbackup_history_full_local.yaml \ -${SRC_DIR}/gpbackup_history.db \ -${WORK_DIR} - -################################################################ -# Test 1. -# Get report info for specified backup with gpbackup_s3_plugin. -TEST_ID="1" - -TIMESTAMP="20230724090000" - -# Execute report-info commnad. -GPBACKMAN_RESULT_SQLITE=$(gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---timestamp ${TIMESTAMP} \ ---plugin-config ${HOME_DIR}/gpbackup_s3_plugin.yaml | grep -v 'Reading Plugin Config') - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -bckp_report=$(cat ${SRC_DIR}/gpbackup_${TIMESTAMP}_report) -if [ "${bckp_report}" != "${GPBACKMAN_RESULT_SQLITE}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nbckp_report:\n${bckp_report}\nget_yaml:\n${GPBACKMAN_RESULT_YAML}\nget_sqlite:\n${GPBACKMAN_RESULT_SQLITE}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - -################################################################ -# Test 2. -# Get report info for specified local backup with specifying backup directory without single-backup-dir format. -# Set backup directory from console. -TEST_ID="2" - -TIMESTAMP="20240505201504" -BACKUP_DIR="/tmp/testWithPrefix" -REPORT_DIR="${BACKUP_DIR}/segment-1/backups/${TIMESTAMP:0:8}/${TIMESTAMP}" -# Prepare data. -mkdir -p ${REPORT_DIR} - -cp ${SRC_DIR}/gpbackup_${TIMESTAMP}_report ${REPORT_DIR} - -# Execute report-info commnad. -GPBACKMAN_RESULT_SQLITE=$(gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---timestamp ${TIMESTAMP} \ ---backup-dir ${BACKUP_DIR}) - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." 
-bckp_report=$(cat ${SRC_DIR}/gpbackup_${TIMESTAMP}_report) -if [ "${bckp_report}" != "${GPBACKMAN_RESULT_SQLITE}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nbckp_report:\n${bckp_report}\nget_yaml:\n${GPBACKMAN_RESULT_YAML}\nget_sqlite:\n${GPBACKMAN_RESULT_SQLITE}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - - -################################################################ -# Test 3. -# Get report info for specified local backup with specifying backup directory without single-backup-dir format. -# Set backup directory from history database. -TEST_ID="3" - -TIMESTAMP="20240505201504" -BACKUP_DIR="/tmp/testWithPrefix" -REPORT_DIR="${BACKUP_DIR}/segment-1/backups/${TIMESTAMP:0:8}/${TIMESTAMP}" -# Prepare data. -mkdir -p ${REPORT_DIR} - -cp ${SRC_DIR}/gpbackup_${TIMESTAMP}_report ${REPORT_DIR} - -# Execute report-info commnad. -GPBACKMAN_RESULT_SQLITE=$(gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---timestamp ${TIMESTAMP}) - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -bckp_report=$(cat ${SRC_DIR}/gpbackup_${TIMESTAMP}_report) -if [ "${bckp_report}" != "${GPBACKMAN_RESULT_SQLITE}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nbckp_report:\n${bckp_report}\nget_yaml:\n${GPBACKMAN_RESULT_YAML}\nget_sqlite:\n${GPBACKMAN_RESULT_SQLITE}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - - -################################################################ -# Test 4s. -# Get report info for specified local backup with specifying backup directory with single-backup-dir format. -# Set backup directory from console. -TEST_ID="4" - -TIMESTAMP="20240506201504" -BACKUP_DIR="/tmp/testNoPrefix" -REPORT_DIR="${BACKUP_DIR}/backups/${TIMESTAMP:0:8}/${TIMESTAMP}" -# Prepare data. 
-mkdir -p ${REPORT_DIR} - -cp ${SRC_DIR}/gpbackup_${TIMESTAMP}_report ${REPORT_DIR} - -# Execute report-info commnad. -GPBACKMAN_RESULT_SQLITE=$(gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---timestamp ${TIMESTAMP} \ ---backup-dir ${BACKUP_DIR}) - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -bckp_report=$(cat ${SRC_DIR}/gpbackup_${TIMESTAMP}_report) -if [ "${bckp_report}" != "${GPBACKMAN_RESULT_SQLITE}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nbckp_report:\n${bckp_report}\nget_yaml:\n${GPBACKMAN_RESULT_YAML}\nget_sqlite:\n${GPBACKMAN_RESULT_SQLITE}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." - - -################################################################ -# Test 4. -# Get report info for specified local backup with specifying backup directory with single-backup-dir format. -# Set backup directory from history database. -TEST_ID="5" - -TIMESTAMP="20240506201504" -BACKUP_DIR="/tmp/testNoPrefix" -REPORT_DIR="${BACKUP_DIR}/backups/${TIMESTAMP:0:8}/${TIMESTAMP}" -# Prepare data. -mkdir -p ${REPORT_DIR} - -cp ${SRC_DIR}/gpbackup_${TIMESTAMP}_report ${REPORT_DIR} - -# Execute report-info commnad. -GPBACKMAN_RESULT_SQLITE=$(gpbackman ${GPBACKMAN_TEST_COMMAND} \ ---history-db ${WORK_DIR}/gpbackup_history.db \ ---timestamp ${TIMESTAMP}) - -# Check results. -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID}." -bckp_report=$(cat ${SRC_DIR}/gpbackup_${TIMESTAMP}_report) -if [ "${bckp_report}" != "${GPBACKMAN_RESULT_SQLITE}" ]; then - echo -e "[ERROR] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} failed.\nbckp_report:\n${bckp_report}\nget_yaml:\n${GPBACKMAN_RESULT_YAML}\nget_sqlite:\n${GPBACKMAN_RESULT_SQLITE}" - exit 1 -fi -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} test ${TEST_ID} passed." 
- -echo "[INFO] ${GPBACKMAN_TEST_COMMAND} all tests passed" -exit 0 diff --git a/e2e_tests/scripts/run_tests/common_functions.sh b/e2e_tests/scripts/run_tests/common_functions.sh new file mode 100755 index 0000000..7ee7bce --- /dev/null +++ b/e2e_tests/scripts/run_tests/common_functions.sh @@ -0,0 +1,77 @@ +#!/usr/bin/env bash + +BIN_DIR="/home/gpadmin/gpbackman" +DATA_DIR="/data/master/gpseg-1" +PLUGIN_CFG="/home/gpadmin/gpbackup_s3_plugin.yaml" + +TIMESTAMP_GREP_PATTERN='^[[:space:]][0-9]{14}' + +log_test_start() { + local command="${1}" + local test_id="${2}" + echo "[INFO] ${command} TEST ${test_id}" +} + +log_test_success() { + local command="${1}" + local test_id="${2}" + echo "[INFO] ${command} TEST ${test_id} is successful" +} + +log_all_tests_passed() { + local command="${1}" + echo "[INFO] ${command} all tests passed" +} +run_gpbackman() { + local subcmd="${1}"; shift + local label="${1}"; shift + echo "[INFO] Running ${subcmd}: ${label}" + ${BIN_DIR}/gpbackman "${subcmd}" "$@" || { + echo "[ERROR] ${subcmd} ${label} failed"; exit 1; + } +} + +get_backup_info() { + local label="${1}"; shift + run_gpbackman "backup-info" "${label}" --deleted --failed "$@" +} + +count_deleted_backups() { + get_backup_info "count_deleted" --history-db ${DATA_DIR}/gpbackup_history.db | grep -E "${TIMESTAMP_GREP_PATTERN}" | awk -F'|' 'NF >= 9 && $NF !~ /^[[:space:]]*$/' | wc -l +} + +get_cutoff_timestamp() { + local line_no="$1" + get_backup_info "get_line_${line_no}" --history-db ${DATA_DIR}/gpbackup_history.db | grep -E "${TIMESTAMP_GREP_PATTERN}" | sed -n "${line_no}p" | awk '{print $1}' +} + +assert_equals() { + local expected="${1}" + local actual="${2}" + local message="${3:-}" + + [ "${actual}" -eq "${expected}" ] || { + echo "[ERROR] Expected ${expected}, got ${actual}${message:+ - ${message}}"; exit 1; + } +} + +assert_equals_both() { + local expected="${1}" + local actual1="${2}" + local actual2="${3}" + local message="${4:-}" + + [ "${actual1}" -eq "${expected}" ] 
&& [ "${actual2}" -eq "${expected}" ] || { + echo "[ERROR] Expected ${expected}, got1=${actual1}, got2=${actual2}${message:+ - ${message}}"; exit 1; + } +} + +run_test() { + local command="${1}" + local test_id="${2}" + local test_function="${3}" + + log_test_start "${command}" "${test_id}" + ${test_function} + log_test_success "${command}" "${test_id}" +} diff --git a/e2e_tests/scripts/run_tests/run_backup-clean.sh b/e2e_tests/scripts/run_tests/run_backup-clean.sh new file mode 100755 index 0000000..4708775 --- /dev/null +++ b/e2e_tests/scripts/run_tests/run_backup-clean.sh @@ -0,0 +1,73 @@ +#!/usr/bin/env bash +set -Eeuo pipefail + +# In the test, we consistently perform cleanup for backups created within the script prepare/prepare_gpdb_backups.sh +# If the backup creation logic changes in the script, this test may start to fail and corrections also need to be made here. +# +# First, we delete all local backups older than the 9th timestamp from backup-info command, +# there should be 3 deleted backups. +# +# Then we delete all local backups younger than the 3th timestamp, +# there should be a total of 5 deleted backups. +# +# Then we delete all S3 backups younger than the 5th timestamp, +# there should be a total of 7 deleted backups. +# +# Then we delete all S3 backups older than the 5th timestamp, +# there should be a total of 12 deleted backups. 
+ +source "$(dirname "${BASH_SOURCE[0]}")/common_functions.sh" + +COMMAND="backup-clean" + +run_command() { + local label="${1}"; shift + run_gpbackman "${COMMAND}" "${label}" --history-db ${DATA_DIR}/gpbackup_history.db "$@" +} + +# Test 1: Clean local backups older than timestamp (--before-timestamp) +# Without --cascade, no dependent backups +test_clean_local_backups_before_timestamp() { + local want=3 + local cutoff_timestamp=$(get_cutoff_timestamp 9) + run_command "clean_local_before_${cutoff_timestamp}" --before-timestamp "${cutoff_timestamp}" + local got=$(count_deleted_backups) + assert_equals "${want}" "${got}" +} + +# Test 2: Clean local backups newer than timestamp (--after-timestamp) +# Without --cascade, no dependent backups +test_clean_local_backups_after_timestamp() { + local want=5 + local cutoff_timestamp=$(get_cutoff_timestamp 3) + run_command "clean_local_after_${cutoff_timestamp}" --after-timestamp "${cutoff_timestamp}" + local got=$(count_deleted_backups) + assert_equals "${want}" "${got}" +} + +# Test 3: Clean S3 backups newer than timestamp (--after-timestamp) +# Without --cascade, no dependent backups +test_clean_s3_backups_after_timestamp() { + local want=7 + local cutoff_timestamp=$(get_cutoff_timestamp 5) + run_command "clean_s3_after_${cutoff_timestamp}" --after-timestamp "${cutoff_timestamp}" --plugin-config "${PLUGIN_CFG}" + local got=$(count_deleted_backups) + assert_equals "${want}" "${got}" +} + +# Test 4: Clean S3 backups older than timestamp (--before-timestamp) +# With --cascade +test_clean_s3_backups_before_timestamp() { + local want=12 + local cutoff_timestamp=$(get_cutoff_timestamp 5) + run_command "clean_s3_before_${cutoff_timestamp}" --before-timestamp "${cutoff_timestamp}" --plugin-config "${PLUGIN_CFG}" --cascade + local got=$(count_deleted_backups) + assert_equals "${want}" "${got}" +} + +run_test "${COMMAND}" 1 test_clean_local_backups_before_timestamp +run_test "${COMMAND}" 2 test_clean_local_backups_after_timestamp 
+run_test "${COMMAND}" 3 test_clean_s3_backups_after_timestamp +run_test "${COMMAND}" 4 test_clean_s3_backups_before_timestamp + +log_all_tests_passed "${COMMAND}" diff --git a/e2e_tests/scripts/run_tests/run_backup-delete.sh b/e2e_tests/scripts/run_tests/run_backup-delete.sh new file mode 100755 index 0000000..15cc0d4 --- /dev/null +++ b/e2e_tests/scripts/run_tests/run_backup-delete.sh @@ -0,0 +1,92 @@ +#!/usr/bin/env bash +set -Eeuo pipefail + +source "$(dirname "${BASH_SOURCE[0]}")/common_functions.sh" + +COMMAND="backup-delete" + +run_command(){ + local label="${1}"; shift + run_gpbackman "${COMMAND}" "${label}" --history-db ${DATA_DIR}/gpbackup_history.db "$@" +} + +get_backup_info_for_timestamp(){ + local timestamp="${1}" + get_backup_info "get_specific_backup" --history-db ${DATA_DIR}/gpbackup_history.db | grep "${timestamp}" || echo "No info found for timestamp ${timestamp}" +} + +# Test 1: Delete local full backup +test_delete_local_full() { + local timestamp=$(get_backup_info "get_local_full" --history-db ${DATA_DIR}/gpbackup_history.db --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep -v plugin | head -1 | awk '{print $1}') + + if [ -z "${timestamp}" ]; then + echo "[ERROR] Could not find full local backup timestamp" + exit 1 + fi + + run_command "delete_local_full" --timestamp "${timestamp}" + + local deleted_backup=$(get_backup_info_for_timestamp "${timestamp}") + local date_deleted=$(echo "${deleted_backup}" | grep "${timestamp}" | awk -F'|' '{print $NF}' | xargs) + + if [ -n "${date_deleted}" ]; then + echo "[INFO] Backup ${timestamp} successfully marked as deleted" + else + echo "[ERROR] Backup should be marked as deleted" + exit 1 + fi +} + +# Test 2: Delete S3 incremental backup +test_delete_s3_incremental() { + local timestamp=$(get_backup_info "get_s3_incremental" --history-db ${DATA_DIR}/gpbackup_history.db --type incremental | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep plugin | head -1 | awk '{print $1}') + + if [ -z "${timestamp}" ]; 
then + echo "[ERROR] Could not find S3 incremental backup" + exit 1 + fi + + run_command "delete_s3_incremental" --timestamp "${timestamp}" --plugin-config "${PLUGIN_CFG}" + + local deleted_backup=$(get_backup_info_for_timestamp "${timestamp}") + local date_deleted=$(echo "${deleted_backup}" | grep "${timestamp}" | awk -F'|' '{print $NF}' | xargs) + + if [ -n "${date_deleted}" ]; then + echo "[INFO] S3 backup ${timestamp} successfully marked as deleted" + else + echo "[ERROR] S3 backup should be marked as deleted" + exit 1 + fi +} + +# Test 3: Delete S3 full backup with cascade +test_delete_s3_full_cascade() { + local timestamp=$(get_backup_info "get_s3_full" --history-db ${DATA_DIR}/gpbackup_history.db --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep plugin | tail -1 | awk '{print $1}') + if [ -z "${timestamp}" ]; then + echo "[ERROR] Could not find S3 full backup" + exit 1 + fi + # Expected: 1 backup from test 1 + 1 from test 2 + 2 backups (incr + full) from this test = 4 total + local want=4 + run_command "delete_s3_full_cascade" --timestamp "${timestamp}" --plugin-config "${PLUGIN_CFG}" --cascade + local got=$(count_deleted_backups) + assert_equals "${want}" "${got}" +} + +# Test 4: Try to delete non-existent backup (should fail) +test_delete_nonexistent_backup() { + local fake_timestamp="19990101000000" + if ${BIN_DIR}/gpbackman backup-delete --history-db ${DATA_DIR}/gpbackup_history.db --timestamp "${fake_timestamp}" --force; then + echo "[ERROR] Expected failure, but command succeeded" + exit 1 + else + echo "[INFO] Expected failure occurred" + fi +} + +run_test "${COMMAND}" 1 test_delete_local_full +run_test "${COMMAND}" 2 test_delete_s3_incremental +run_test "${COMMAND}" 3 test_delete_s3_full_cascade +run_test "${COMMAND}" 4 test_delete_nonexistent_backup + +log_all_tests_passed "${COMMAND}" diff --git a/e2e_tests/scripts/run_tests/run_backup-info.sh b/e2e_tests/scripts/run_tests/run_backup-info.sh new file mode 100755 index 0000000..a33ba0a --- 
/dev/null +++ b/e2e_tests/scripts/run_tests/run_backup-info.sh @@ -0,0 +1,70 @@ +#!/usr/bin/env bash +set -Eeuo pipefail + +source "$(dirname "${BASH_SOURCE[0]}")/common_functions.sh" + +COMMAND="backup-info" + +# Test 1: Count all backups in history database +test_count_all_backups() { + local want=12 + local got=$(get_backup_info total_backups --history-db ${DATA_DIR}/gpbackup_history.db | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals "${want}" "${got}" +} + +# Test 2: Count all full backups +test_count_full_backups() { + local want=7 + local got1=$(get_backup_info total_full_backups --history-db ${DATA_DIR}/gpbackup_history.db | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep full | wc -l) + local got2=$(get_backup_info filter_full_backups --history-db ${DATA_DIR}/gpbackup_history.db --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals_both "${want}" "${got1}" "${got2}" +} + +# Test 3: Count all incremental backups +# Compare the number of backups from the output of all backups and +# from the output with the --type full flag +test_count_incremental_backups() { + local want=3 + local got1=$(get_backup_info total_incremental_backups --history-db ${DATA_DIR}/gpbackup_history.db | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep incremental | wc -l) + local got2=$(get_backup_info filter_incremental_backups --history-db ${DATA_DIR}/gpbackup_history.db --type incremental | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals_both "${want}" "${got1}" "${got2}" +} + +# Test 4: Count backups that include table sch2.tbl_c +test_count_include_table_backups() { + local want=2 + local got=$(get_backup_info total_include_table_backups --history-db ${DATA_DIR}/gpbackup_history.db --table sch2.tbl_c | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals "${want}" "${got}" +} + +# Test 5: Count backups that exclude table sch2.tbl_d +test_count_exclude_table_backups() { + local want=2 + local got=$(get_backup_info 
total_exclude_table_backups --history-db ${DATA_DIR}/gpbackup_history.db --table sch2.tbl_d --exclude | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals "${want}" "${got}" +} + +# Test 6: Count full backups that include table sch2.tbl_c +# Use --type full to filter only full backups +test_count_include_table_full_backups() { + local want=1 + local got=$(get_backup_info total_include_table_full_backups --history-db ${DATA_DIR}/gpbackup_history.db --table sch2.tbl_c --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals "${want}" "${got}" +} + +# Test 7: Count incremental backups that exclude table sch2.tbl_d +test_count_exclude_table_incremental_backups() { + local want=1 + local got=$(get_backup_info total_exclude_table_incremental_backups --history-db ${DATA_DIR}/gpbackup_history.db --table sch2.tbl_d --exclude --type incremental | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals "${want}" "${got}" +} + +run_test "${COMMAND}" 1 test_count_all_backups +run_test "${COMMAND}" 2 test_count_full_backups +run_test "${COMMAND}" 3 test_count_incremental_backups +run_test "${COMMAND}" 4 test_count_include_table_backups +run_test "${COMMAND}" 5 test_count_exclude_table_backups +run_test "${COMMAND}" 6 test_count_include_table_full_backups +run_test "${COMMAND}" 7 test_count_exclude_table_incremental_backups + +log_all_tests_passed "${COMMAND}" diff --git a/e2e_tests/scripts/run_tests/run_history-clean.sh b/e2e_tests/scripts/run_tests/run_history-clean.sh new file mode 100755 index 0000000..d2f0154 --- /dev/null +++ b/e2e_tests/scripts/run_tests/run_history-clean.sh @@ -0,0 +1,56 @@ +#!/usr/bin/env bash +set -Eeuo pipefail + +# During the test, we consistently clean up the backups created within the script prepare/prepare_gpdb_backups.sh +# We clean up the history db from deleted backups and make sure that they are successfully deleted. +# It is checked that the number of deleted backups is 0. 
+ +# If the backup logic in the script changes, this test may fail, and corrections will also need to be made here. + +# First, we delete all local backups older than the 9th timestamp using the backup-info command. + +# Then we delete all S3 backups older than the 2th timestamp using the backup-info command. + +# After each deletion we cleanup history db. + +source "$(dirname "${BASH_SOURCE[0]}")/common_functions.sh" + +COMMAND="history-clean" + +run_command(){ + local label="${1}"; shift + run_gpbackman "${COMMAND}" "${label}" --history-db ${DATA_DIR}/gpbackup_history.db "$@" +} + +run_backup_clean() { + local label="${1}"; shift + run_gpbackman "backup-clean" "${label}" --history-db ${DATA_DIR}/gpbackup_history.db "$@" +} + +# Test 1: Clean from history db local backups older than timestamp (--before-timestamp) +test_history_clean_local_before_timestamp(){ + # Delete local backups + local cutoff_timestamp=$(get_cutoff_timestamp 9) + run_backup_clean "clean_before_${cutoff_timestamp}" --before-timestamp "${cutoff_timestamp}" + run_command "clean_before_${cutoff_timestamp}" --before-timestamp "${cutoff_timestamp}" + local want=0 + # Count deleted backups + local got=$(count_deleted_backups) + assert_equals "${want}" "${got}" +} + +# Test 2: Clean from history db S3 backups older than timestamp (--before-timestamp) +test_history_clean_s3_before_timestamp(){ + # Delete S3 backups + local cutoff_timestamp=$(get_cutoff_timestamp 2) + run_backup_clean "clean_before_${cutoff_timestamp}" --before-timestamp "${cutoff_timestamp}" --plugin-config "${PLUGIN_CFG}" --cascade + run_command "clean_before_${cutoff_timestamp}" --before-timestamp "${cutoff_timestamp}" + local want=0 + local got=$(count_deleted_backups) + assert_equals "${want}" "${got}" +} + +run_test "${COMMAND}" 1 test_history_clean_local_before_timestamp +run_test "${COMMAND}" 2 test_history_clean_s3_before_timestamp + +log_all_tests_passed "${COMMAND}" diff --git 
a/e2e_tests/scripts/run_tests/run_history-migrate.sh b/e2e_tests/scripts/run_tests/run_history-migrate.sh new file mode 100755 index 0000000..e23824c --- /dev/null +++ b/e2e_tests/scripts/run_tests/run_history-migrate.sh @@ -0,0 +1,108 @@ +#!/usr/bin/env bash +set -Eeuo pipefail + +# Tests for history-migrate (current): +# 1) Migrate gpbackup_history_full_local.yaml into an empty DB in /tmp -> expect 2 backups. +# 2) Migrate the same file into an existing DB prepared by setup -> expect 12 base + 2 = 14 total. +# 3) Duplicate migration into the same DB -> must fail with UNIQUE constraint. +# 4) Migrate all YAML files into a fresh empty DB in /tmp -> expect 14 backups total. +# 5) Migrate all YAML files into an existing DB (excluding already migrated full_local) -> expect 12 base + 14 = 26 total; duplicates skipped. + +source "$(dirname "${BASH_SOURCE[0]}")/common_functions.sh" + +COMMAND="history-migrate" +SRC_DIR="/home/gpadmin/src_data" +WORK_BASE="/tmp/history_migrate_tests" + +TEST_FILE_FULL_LOCAL="gpbackup_history_full_local.yaml" + +run_command(){ + local label="${1}"; shift + run_gpbackman "${COMMAND}" "${label}" "$@" +} + +prepare_workdir(){ + local name="${1}" + local dir="${WORK_BASE}/${name}" + rm -rf "${dir}" && mkdir -p "${dir}" + echo "${dir}" +} + +# Test 1: Single file into empty DB in /tmp +# Expect 2 backups from file +test_migrate_single_into_empty_db(){ + local workdir=$(prepare_workdir test1) + cp "${SRC_DIR}/${TEST_FILE_FULL_LOCAL}" "${workdir}/" + local db="${workdir}/gpbackup_history.db" + run_command "single_into_empty_db" --history-file "${workdir}/${TEST_FILE_FULL_LOCAL}" --history-db "${db}" + local want=2 + local got=$(get_backup_info total_full_backups --history-db ${db} | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals "${want}" "${got}" +} + +# Test 2: Single file into existing DB (prepared by setup) +# 12 backups from initial setup + 2 from file +test_migrate_single_into_existing_db(){ + local workdir=$(prepare_workdir 
test2) + cp "${SRC_DIR}/${TEST_FILE_FULL_LOCAL}" "${workdir}/" + local db="${DATA_DIR}/gpbackup_history.db" + run_command "single_into_existing_db" --history-file "${workdir}/${TEST_FILE_FULL_LOCAL}" --history-db "${db}" + local want=14 + local got=$(get_backup_info total_full_backups --history-db ${db} | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals "${want}" "${got}" +} + +# Test 3: Duplicate migration into the same DB must fail with UNIQUE constraint +test_migrate_duplicate_into_existing_db_fail(){ + local workdir=$(prepare_workdir test3) + cp "${SRC_DIR}/${TEST_FILE_FULL_LOCAL}" "${workdir}/" + local db="${DATA_DIR}/gpbackup_history.db" + if ${BIN_DIR}/gpbackman history-migrate --history-file "${workdir}/${TEST_FILE_FULL_LOCAL}" --history-db "${db}"; then + echo "[ERROR] Expected failure, but command succeeded" + exit 1 + else + echo "[INFO] Expected failure occurred" + fi +} + +# Test 4: All files into fresh empty DB in /tmp +# 14 backups from all files +test_migrate_all_into_empty_db(){ + local workdir=$(prepare_workdir test4) + cp "${SRC_DIR}"/*.yaml "${workdir}/" + local db="${workdir}/gpbackup_history.db" + local args=() + for f in "${workdir}"/*.yaml; do + args+=(--history-file "${f}") + done + run_command "all_into_empty_db" "${args[@]}" --history-db "${db}" + local want=14 + local got=$(get_backup_info total_full_backups --history-db "${db}" | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals "${want}" "${got}" +} + +# Test 5: All files into existing DB +# 14 backups already in the DB (12 from initial setup + 2 migrated in test 2) + 12 from the remaining files = 26 +# The duplicates, already loaded in test2, should be skipped +test_migrate_all_into_existing_db(){ + local workdir=$(prepare_workdir test5) + cp "${SRC_DIR}"/*.yaml "${workdir}/" + rm -f "${workdir}/${TEST_FILE_FULL_LOCAL}" + local db="${DATA_DIR}/gpbackup_history.db" + local args=() + for f in "${workdir}"/*.yaml; do + args+=(--history-file "${f}") + done + run_command "all_into_existing_db" "${args[@]}" --history-db "${db}" + 
local want=26 + local got=$(get_backup_info total_full_backups --history-db "${db}" | grep -E "${TIMESTAMP_GREP_PATTERN}" | wc -l) + assert_equals "${want}" "${got}" +} + +run_test "${COMMAND}" 1 test_migrate_single_into_empty_db +run_test "${COMMAND}" 2 test_migrate_single_into_existing_db +run_test "${COMMAND}" 3 test_migrate_duplicate_into_existing_db_fail +run_test "${COMMAND}" 4 test_migrate_all_into_empty_db +run_test "${COMMAND}" 5 test_migrate_all_into_existing_db + +log_all_tests_passed "${COMMAND}" diff --git a/e2e_tests/scripts/run_tests/run_report-info.sh b/e2e_tests/scripts/run_tests/run_report-info.sh new file mode 100755 index 0000000..942a6e5 --- /dev/null +++ b/e2e_tests/scripts/run_tests/run_report-info.sh @@ -0,0 +1,84 @@ +#!/usr/bin/env bash +set -Eeuo pipefail + +source "$(dirname "${BASH_SOURCE[0]}")/common_functions.sh" + +COMMAND="report-info" + +run_command(){ + local label="${1}"; shift + run_gpbackman "${COMMAND}" "${label}" --history-db ${DATA_DIR}/gpbackup_history.db "$@" +} + +# Test 1: Get report info for full local backup (without backup-dir) +test_report_full_local_no_dir() { + local timestamp=$(get_backup_info "get_full_local" --history-db ${DATA_DIR}/gpbackup_history.db --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep -v plugin | head -1 | awk '{print $1}') + + if [ -z "${timestamp}" ]; then + echo "[ERROR] Could not find full local backup timestamp" + exit 1 + fi + + local report_output=$(run_command "full_local_no_dir" --timestamp "${timestamp}") + + echo "${report_output}" | grep -q "^Greenplum Database Backup Report" || { echo "[ERROR] Expected report header"; exit 1; } + echo "${report_output}" | grep -q "timestamp key:.*${timestamp}" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } + echo "${report_output}" | grep -q "plugin executable:.*None" || { echo "[ERROR] Expected 'plugin executable: None' for local backup"; exit 1; } +} + +# Test 2: Get report info for full local backup (with backup-dir) 
+test_report_full_local_with_dir() { + local timestamp=$(get_backup_info "get_full_local_with_dir" --history-db ${DATA_DIR}/gpbackup_history.db --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep -v plugin | head -1 | awk '{print $1}') + + if [ -z "${timestamp}" ]; then + echo "[ERROR] Could not find full local backup timestamp for backup-dir test" + exit 1 + fi + + local report_dir="/data/master/gpseg-1" + local report_output=$(run_command "local_with_backup_dir_console" --timestamp "${timestamp}" --backup-dir "${report_dir}") + + echo "${report_output}" | grep -q "^Greenplum Database Backup Report" || { echo "[ERROR] Expected report header"; exit 1; } + echo "${report_output}" | grep -q "timestamp key:.*${timestamp}" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } + echo "${report_output}" | grep -q "plugin executable:.*None" || { echo "[ERROR] Expected 'plugin executable: None' for local backup"; exit 1; } +} + +# Test 3: Get report info for full S3 backup (without plugin-report-file-path) +test_report_s3_no_plugin_path() { + local timestamp=$(get_backup_info "get_full_s3" --history-db ${DATA_DIR}/gpbackup_history.db --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep plugin | head -1 | awk '{print $1}') + + if [ -z "${timestamp}" ]; then + echo "[ERROR] Could not find full s3 backup timestamp" + exit 1 + fi + + local report_output=$(run_command "s3_without_plugin_report_file_path" --timestamp "${timestamp}" --plugin-config "${PLUGIN_CFG}") + + echo "${report_output}" | grep -q "^Greenplum Database Backup Report" || { echo "[ERROR] Expected report header"; exit 1; } + echo "${report_output}" | grep -q "timestamp key:.*${timestamp}" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } + echo "${report_output}" | grep -q "plugin executable:.*gpbackup_s3_plugin" || { echo "[ERROR] Expected 'plugin executable: gpbackup_s3_plugin' for s3 backup"; exit 1; } +} + +# Test 4: Get report info for full S3 backup (with 
plugin-report-file-path) +test_report_s3_with_plugin_path() { + local timestamp=$(get_backup_info "get_full_s3" --history-db ${DATA_DIR}/gpbackup_history.db --type full | grep -E "${TIMESTAMP_GREP_PATTERN}" | grep plugin | head -1 | awk '{print $1}') + + if [ -z "${timestamp}" ]; then + echo "[ERROR] Could not find full s3 backup timestamp for plugin-report-file-path test" + exit 1 + fi + + local report_dir="/backup/test/backups/${timestamp:0:8}/${timestamp}" + local report_output=$(run_command "s3_with_plugin_report_file_path" --timestamp "${timestamp}" --plugin-config "${PLUGIN_CFG}" --plugin-report-file-path "${report_dir}") + + echo "${report_output}" | grep -q "^Greenplum Database Backup Report" || { echo "[ERROR] Expected report header"; exit 1; } + echo "${report_output}" | grep -q "timestamp key:.*${timestamp}" || { echo "[ERROR] Expected timestamp key in report"; exit 1; } + echo "${report_output}" | grep -q "plugin executable:.*gpbackup_s3_plugin" || { echo "[ERROR] Expected 'plugin executable: gpbackup_s3_plugin' for s3 backup"; exit 1; } +} + +run_test "${COMMAND}" 1 test_report_full_local_no_dir +run_test "${COMMAND}" 2 test_report_full_local_with_dir +run_test "${COMMAND}" 3 test_report_s3_no_plugin_path +run_test "${COMMAND}" 4 test_report_s3_with_plugin_path + +log_all_tests_passed "${COMMAND}" diff --git a/e2e_tests/scripts/run_tests/run_test.sh b/e2e_tests/scripts/run_tests/run_test.sh new file mode 100755 index 0000000..67e3b18 --- /dev/null +++ b/e2e_tests/scripts/run_tests/run_test.sh @@ -0,0 +1,60 @@ +#!/usr/bin/env bash +set -Eeuo pipefail + +TEST_COMMAND=${1:-} +GP_DB_NAME="demo" +HOME_DIR="/home/gpadmin" +SCRIPTS_DIR="${HOME_DIR}/run_tests" + +wait_for_service() { + local max_attempts=${1:-10} + + for i in $(seq 1 ${max_attempts}); do + if psql -d ${GP_DB_NAME} -t -c "SELECT 1;" >/dev/null 2>&1; then + echo "[INFO] Cluster ready" + return 0 + fi + echo "[INFO] Waiting cluster startup (${i}/${max_attempts})" + sleep 10 + done + echo "[ERROR] 
Cluster failed to start within timeout" + return 1 +} + + +exec_test_for_command() { + case "${TEST_COMMAND}" in + backup-info) + "${SCRIPTS_DIR}/run_backup-info.sh" + ;; + report-info) + "${SCRIPTS_DIR}/run_report-info.sh" + ;; + backup-delete) + "${SCRIPTS_DIR}/run_backup-delete.sh" + ;; + backup-clean) + "${SCRIPTS_DIR}/run_backup-clean.sh" + ;; + history-clean) + "${SCRIPTS_DIR}/run_history-clean.sh" + ;; + history-migrate) + "${SCRIPTS_DIR}/run_history-migrate.sh" + ;; + *) + echo "[ERROR] Unknown test command: ${TEST_COMMAND}" + exit 1 + ;; + esac +} + +echo "[INFO] Check Greenplum cluster" +sleep 90 +wait_for_service + +echo "[INFO] Prepare Greenplum backups" +"${HOME_DIR}/prepare_gpdb_backups.sh" + +echo "[INFO] Run e2e tests for command: ${TEST_COMMAND}" +exec_test_for_command \ No newline at end of file diff --git a/e2e_tests/src_data/gpbackup_20230724090000_report b/e2e_tests/src_data/gpbackup_20230724090000_report deleted file mode 100644 index d238141..0000000 --- a/e2e_tests/src_data/gpbackup_20230724090000_report +++ /dev/null @@ -1,59 +0,0 @@ -Greenplum Database Backup Report - -timestamp key: 20230724090000 -gpdb version: 6.23.3 -gpbackup version: 1.27.0 - -database name: demo -command line: gpbackup --dbname demo --compression-type gzip --plugin-config /tmp/gpbackup_plugin_config.yml --metadata-only -compression: gzip -plugin executable: gpbackup_s3_plugin -backup section: Metadata Only -object filtering: None -includes statistics: No -data file format: No Data Files -incremental: False - -start time: Mon Jul 24 2023 09:00:00 -end time: Mon Jul 24 2023 09:05:17 -duration: 0:05:17 - -backup status: Success - -segment count: 8 - -count of database objects in backup: -aggregates 50 -casts 8 -collations 0 -constraints 100 -conversions 0 -default privileges 60 -database gucs 0 -event triggers 0 -extensions 10 -foreign data wrappers 0 -foreign servers 0 -functions 100 -indexes 5 -operator classes 1 -operator families 1 -operators 10 -procedural 
languages 1 -protocols 1 -resource groups 3 -resource queues 1 -roles 200 -rules 0 -schemas 70 -sequences 15 -tables 1000 -tablespaces 0 -text search configurations 0 -text search dictionaries 0 -text search parsers 0 -text search templates 0 -triggers 0 -types 60 -user mappings 0 -views 500 diff --git a/e2e_tests/src_data/gpbackup_20240505201504_report b/e2e_tests/src_data/gpbackup_20240505201504_report deleted file mode 100644 index 18c4cd3..0000000 --- a/e2e_tests/src_data/gpbackup_20240505201504_report +++ /dev/null @@ -1,61 +0,0 @@ -Greenplum Database Backup Report - -timestamp key: 20240505201504 -gpdb version: 6.23.3 -gpbackup version: 1.27.0 - -database name: demo -command line: gpbackup --backup-dir /tmp/testWithPrefix --dbname demo -compression: gzip -plugin executable: None -backup section: All Sections -object filtering: None -includes statistics: No -data file format: Multiple Data Files Per Segment -incremental: False - -start time: Wed May 05 2024 20:15:04 -end time: Wed May 05 2024 20:15:19 -duration: 0:00:15 - -backup status: Success - -database size: 500 MB -segment count: 4 - -count of database objects in backup: -aggregates 0 -casts 0 -collations 0 -constraints 0 -conversions 0 -default privileges 0 -database gucs 0 -event triggers 0 -extensions 3 -foreign data wrappers 0 -foreign servers 1 -functions 0 -indexes 0 -operator classes 0 -operator families 0 -operators 0 -procedural languages 0 -protocols 1 -resource groups 2 -resource queues 1 -roles 50 -rules 0 -schemas 4 -sequences 0 -tables 100 -tablespaces 0 -text search configurations 0 -text search dictionaries 0 -text search parsers 0 -text search templates 0 -triggers 0 -types 0 -user mappings 1 -views 0 - diff --git a/e2e_tests/src_data/gpbackup_20240506201504_report b/e2e_tests/src_data/gpbackup_20240506201504_report deleted file mode 100644 index 390583a..0000000 --- a/e2e_tests/src_data/gpbackup_20240506201504_report +++ /dev/null @@ -1,61 +0,0 @@ -Greenplum Database Backup Report - 
-timestamp key: 20240506201504 -gpdb version: 6.23.3 -gpbackup version: 1.27.0 - -database name: demo -command line: gpbackup --backup-dir /tmp/testNoPrefix --single-backup-dir --dbname demo -compression: gzip -plugin executable: None -backup section: All Sections -object filtering: None -includes statistics: No -data file format: Multiple Data Files Per Segment -incremental: False - -start time: Wed May 06 2024 20:15:04 -end time: Wed May 06 2024 20:15:19 -duration: 0:00:15 - -backup status: Success - -database size: 500 MB -segment count: 4 - -count of database objects in backup: -aggregates 0 -casts 0 -collations 0 -constraints 0 -conversions 0 -default privileges 0 -database gucs 0 -event triggers 0 -extensions 3 -foreign data wrappers 0 -foreign servers 1 -functions 0 -indexes 0 -operator classes 0 -operator families 0 -operators 0 -procedural languages 0 -protocols 1 -resource groups 2 -resource queues 1 -roles 50 -rules 0 -schemas 4 -sequences 0 -tables 100 -tablespaces 0 -text search configurations 0 -text search dictionaries 0 -text search parsers 0 -text search templates 0 -triggers 0 -types 0 -user mappings 1 -views 0 - diff --git a/e2e_tests/src_data/gpbackup_history.db b/e2e_tests/src_data/gpbackup_history.db deleted file mode 100644 index 7614686..0000000 Binary files a/e2e_tests/src_data/gpbackup_history.db and /dev/null differ