diff --git a/.github/workflows/README.md b/.github/workflows/README.md new file mode 100644 index 000000000..f1587ac39 --- /dev/null +++ b/.github/workflows/README.md @@ -0,0 +1,25 @@ +### Github Actions Workflow + +This build file replaces the existing `Jenkins.ci` build process. + +`lint.yaml` replaces the `Static code validation` stage of the Jenkins build. + +`build.yaml` replaces the `Build / Test` stage of the Jenkins build. + +Many of the other stages are replaced merely by the fact we're using Github Actions, we use prebuilt Docker containers so we don't have to replicate the steps for building containers. + +The `Build result notification` stage was not moved to GHA, build failures will be reported via GHA. + +The build process for `Jenkins.nightly` was not ported to GHA. + +#### Configuring actions + +If you are cloning or forking this repo you will need to configure two secrets for Actions to run correctly. + +Secrets can be set via Settings -> Secrets -> New repository secret. + +CR_USER is your GH username. +CR_PAT can be created by following [these directions](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token) + +Once you have run the build once with those secrets, you have to make the package public. +Access the package at https://ghcr.io/USER/indy-node/indy-node-build or https://ghcr.io/USER/indy-node/indy-node-lint then change the visibility in 'Package Settings' to 'Public' then re-run the build. 
diff --git a/.github/workflows/build-u18004.yml b/.github/workflows/build-u18004.yml new file mode 100644 index 000000000..26030a4f9 --- /dev/null +++ b/.github/workflows/build-u18004.yml @@ -0,0 +1,254 @@ +name: indy-node-build +on: [ push, pull_request, workflow_dispatch ] + +jobs: + workflow-setup: + runs-on: ubuntu-latest + outputs: + CACHE_KEY_LINT: ${{ steps.cache.outputs.CACHE_KEY_LINT }} + CACHE_KEY_BUILD: ${{ steps.cache.outputs.CACHE_KEY_BUILD }} + # Expose the lowercase version of the GitHub repository name + # to all subsequent jobs that reference image repositories + # as the push and pull operations require the URL of the repository + # to be in lowercase. + GITHUB_REPOSITORY_NAME: ${{ steps.cache.outputs.GITHUB_REPOSITORY_NAME }} + steps: + - name: Git checkout + uses: actions/checkout@v2 + - name: Set outputs + id: cache + run: | + echo "::set-output name=CACHE_KEY_LINT::${{ hashFiles('.github/workflows/lint/Dockerfile') }}" + echo "::set-output name=CACHE_KEY_BUILD::${{ hashFiles('.github/workflows/build/Dockerfile') }}" + echo "::set-output name=GITHUB_REPOSITORY_NAME::$(echo ${GITHUB_REPOSITORY,,})" + + build-lint-image: + needs: workflow-setup + runs-on: ubuntu-latest + env: + DOCKER_BUILDKIT: 1 + CACHE_KEY_LINT: ${{ needs.workflow-setup.outputs.CACHE_KEY_LINT }} + GITHUB_REPOSITORY_NAME: ${{ needs.workflow-setup.outputs.GITHUB_REPOSITORY_NAME }} + steps: + - name: Git checkout + uses: actions/checkout@v2 + - name: Try load from cache. + id: cache-image-lint + uses: actions/cache@v2 + with: + path: ${GITHUB_WORKSPACE}/cache + key: ${{ env.CACHE_KEY_LINT }} + - name: If NOT found in cache, build and push image. + if: steps.cache-image-lint.outputs.cache-hit != 'true' + run: | + echo ${{ secrets.CR_PAT }} | docker login ghcr.io --username ${{ secrets.CR_USER }} --password-stdin + docker build -f .github/workflows/lint/Dockerfile --no-cache -t ${{ env.GITHUB_REPOSITORY_NAME }}/indy-node-lint:${{ env.CACHE_KEY_LINT }} . 
+ docker tag ${{ env.GITHUB_REPOSITORY_NAME }}/indy-node-lint:${{ env.CACHE_KEY_LINT }} ghcr.io/${{ env.GITHUB_REPOSITORY_NAME }}/indy-node-lint:latest + docker push ghcr.io/${{ env.GITHUB_REPOSITORY_NAME }}/indy-node-lint:latest + mkdir -p ${GITHUB_WORKSPACE}/cache + touch ${GITHUB_WORKSPACE}/cache/${{ env.CACHE_KEY_LINT }} + build-test-image: + needs: workflow-setup + runs-on: ubuntu-latest + env: + DOCKER_BUILDKIT: 1 + CACHE_KEY_BUILD: ${{ needs.workflow-setup.outputs.CACHE_KEY_BUILD }} + GITHUB_REPOSITORY_NAME: ${{ needs.workflow-setup.outputs.GITHUB_REPOSITORY_NAME }} + steps: + - name: Git checkout + uses: actions/checkout@v2 + - name: Try load from cache. + id: cache-image-build + uses: actions/cache@v2 + with: + path: ${GITHUB_WORKSPACE}/cache + key: ${{ env.CACHE_KEY_BUILD }} + - name: If NOT found in cache, build and push image. + if: steps.cache-image-build.outputs.cache-hit != 'true' + run: | + echo ${{ secrets.CR_PAT }} | docker login ghcr.io --username ${{ secrets.CR_USER }} --password-stdin + docker build -f .github/workflows/build/Dockerfile --no-cache -t ${{ env.GITHUB_REPOSITORY_NAME }}/indy-node-build:${{ env.CACHE_KEY_BUILD }} . 
+ docker tag ${{ env.GITHUB_REPOSITORY_NAME }}/indy-node-build:${{ env.CACHE_KEY_BUILD }} ghcr.io/${{ env.GITHUB_REPOSITORY_NAME }}/indy-node-build:latest + docker push ghcr.io/${{ env.GITHUB_REPOSITORY_NAME }}/indy-node-build:latest + mkdir -p ${GITHUB_WORKSPACE}/cache + touch ${GITHUB_WORKSPACE}/cache/${{ env.CACHE_KEY_BUILD }} + indy_node: + name: Build Indy Node + needs: build-test-image + runs-on: ubuntu-20.04 + container: + image: ghcr.io/${{ github.repository }}/indy-node-build + strategy: + matrix: + module: [indy_node, indy_common] + slice: [1, 2, 3, 4 ,5, 6, 7,8, 9, 10, 11] + fail-fast: false + steps: + - name: Check out code + uses: actions/checkout@v2 + + - name: Install dependencies + run: pip install .[tests] + continue-on-error: true + + - name: Run Indy Node ${{ matrix.module }} test slice ${{ matrix.slice }}/${{ strategy.job-total }} + run: RUSTPYTHONASYNCIODEBUG=0 python3 runner.py --pytest "python3 -m pytest -l -vv --junitxml=test-result-indy-node-${{ matrix.module }}-${{ matrix.slice }}.xml" --dir "${{ matrix.module }}" --output "test-result-indy-node-${{ matrix.slice }}.txt" --test-only-slice "${{ matrix.slice }}/${{ strategy.job-total }}" + + - name: Publish Test Report + uses: scacap/action-surefire-report@v1 + continue-on-error: true + with: + check_name: Indy Node ${{ matrix.module }} ${{ matrix.slice }}/${{ strategy.job-total }} Test Report + github_token: ${{ secrets.GITHUB_TOKEN }} + report_paths: test-result-indy-node-${{ matrix.module }}-${{ matrix.slice }}.xml + + lint: + name: Lint + runs-on: ubuntu-latest + container: + image: ghcr.io/adenishchenko/indy-node/indy-node-lint + needs: [workflow-setup, build-lint-image] + steps: + - name: Check out code + uses: actions/checkout@v2 + + - name: flake8 + run: python3 -m flake8 + +# name: indy-node-build-u2004 +# on: [ push, pull_request ] + +# jobs: +# workflow-setup: +# runs-on: ubuntu-20.04 +# outputs: +# CACHE_KEY_CI_IMAGE: ${{ steps.cache.outputs.CACHE_KEY_CI_IMAGE }} +# 
CACHE_KEY_BUILD_IMAGE: ${{ steps.cache.outputs.CACHE_KEY_BUILD_IMAGE }} +# GITHUB_REPOSITORY_NAME: ${{ steps.cache.outputs.GITHUB_REPOSITORY_NAME }} +# DOCKER_FILE_POSTFIX: ${{ steps.cache.outputs.DOCKER_FILE_POSTFIX }} +# steps: +# - name: Checkout +# uses: actions/checkout@v2 + +# - name: Set outputs +# id: cache +# run: | +# echo "::set-output name=CACHE_KEY_CI_IMAGE::${{ hashFiles('.github/workflows/test/Dockerfile') }}-rc1" +# echo "::set-output name=CACHE_KEY_BUILD_IMAGE::${{ hashFiles('.github/workflows/build/Dockerfile') }}-rc1" +# # Convert the GitHub repository name to lowercase +# echo "::set-output name=GITHUB_REPOSITORY_NAME::$(echo ${GITHUB_REPOSITORY,,})" +# echo "::set-output name=DOCKER_FILE_POSTFIX::ubuntu-20-04" + + +# test-image: +# # Reference to workflow-setup job is required to access its various outputs. +# needs: workflow-setup +# runs-on: ubuntu-latest +# env: +# DOCKER_BUILDKIT: 1 +# CACHE_KEY_CI_IMAGE: ${{ needs.workflow-setup.outputs.CACHE_KEY_CI_IMAGE }} +# GITHUB_REPOSITORY_NAME: ${{ needs.workflow-setup.outputs.GITHUB_REPOSITORY_NAME }} +# DOCKER_FILE_POSTFIX: ${{ needs.workflow-setup.outputs.DOCKER_FILE_POSTFIX }} +# steps: +# - name: Git checkout +# uses: actions/checkout@v2 + +# - name: Try load from cache. +# id: cache-image +# uses: actions/cache@v2 +# with: +# path: ${GITHUB_WORKSPACE}/cache +# key: ${{ env.CACHE_KEY_CI_IMAGE}} + +# - name: If NOT found in cache, build and push image. +# if: steps.cache-image.outputs.cache-hit != 'true' +# run: | +# echo ${{ secrets.CR_PAT }} | docker login ghcr.io --username ${{ secrets.CR_USER }} --password-stdin +# docker build -f .github/workflows/test/Dockerfile --no-cache -t ${{ env.GITHUB_REPOSITORY_NAME }}/node-test:${{ env.CACHE_KEY_CI_IMAGE }} . 
+# docker tag ${{ env.GITHUB_REPOSITORY_NAME }}/node-test:${{ env.CACHE_KEY_CI_IMAGE }} ghcr.io/${{ env.GITHUB_REPOSITORY_NAME }}/node-test:${{ env.DOCKER_FILE_POSTFIX }} +# docker push ghcr.io/${{ env.GITHUB_REPOSITORY_NAME }}/node-test:${{ env.DOCKER_FILE_POSTFIX }} +# mkdir -p ${GITHUB_WORKSPACE}/cache +# touch ${GITHUB_WORKSPACE}/cache/${{ env.CACHE_KEY_CI_IMAGE }} + +# build-image: +# # Reference to workflow-setup job is required to access its various outputs. +# needs: workflow-setup +# runs-on: ubuntu-latest +# env: +# DOCKER_BUILDKIT: 1 +# CACHE_KEY_BUILD_IMAGE: ${{ needs.workflow-setup.outputs.CACHE_KEY_BUILD_IMAGE }} +# GITHUB_REPOSITORY_NAME: ${{ needs.workflow-setup.outputs.GITHUB_REPOSITORY_NAME }} +# DOCKER_FILE_POSTFIX: ${{ needs.workflow-setup.outputs.DOCKER_FILE_POSTFIX }} +# steps: +# - name: Git checkout +# uses: actions/checkout@v2 + +# - name: Try load from cache. +# id: cache-image +# uses: actions/cache@v2 +# with: +# path: ${GITHUB_WORKSPACE}/cache +# key: ${{ env.CACHE_KEY_BUILD_IMAGE }} + +# - name: If NOT found in cache, build and push image. +# if: steps.cache-image.outputs.cache-hit != 'true' +# run: | +# echo ${{ secrets.CR_PAT }} | docker login ghcr.io --username ${{ secrets.CR_USER }} --password-stdin +# docker build -f .github/workflows/build/Dockerfile --no-cache -t ${{ env.GITHUB_REPOSITORY_NAME }}/node-build:${{ env.CACHE_KEY_BUILD_IMAGE }} . +# docker tag ${{ env.GITHUB_REPOSITORY_NAME }}/node-build:${{ env.CACHE_KEY_BUILD_IMAGE }} ghcr.io/${{ env.GITHUB_REPOSITORY_NAME }}/node-build:${{ env.DOCKER_FILE_POSTFIX }} +# docker push ghcr.io/${{ env.GITHUB_REPOSITORY_NAME }}/node-build:${{ env.DOCKER_FILE_POSTFIX }} +# mkdir -p ${GITHUB_WORKSPACE}/cache +# touch ${GITHUB_WORKSPACE}/cache/${{ env.CACHE_KEY_BUILD_IMAGE }} + + + + + + + + +# indy-common: +# name: Build Indy Common +# runs-on: ubuntu-latest +# #TODO: move this to hyperledger? 
+# container: udosson/indy-node-ci-u1804:latest +# steps: +# - name: Check out code +# uses: actions/checkout@v2 + +# - name: Install dependencies +# run: pip install .[tests] +# continue-on-error: true + +# - name: Run Indy Common test +# run: python3 -m pytest -l -vv --junitxml=test-result-indy-common.xml --dir indy_common --output "test-result-indy-common.txt" + +# indy-node: +# name: Build Indy Node +# runs-on: ubuntu-18.04 +# #TODO: move this to hyperledger? +# container: udosson/indy-node-ci-u1804:latest +# steps: +# - name: Check out code +# uses: actions/checkout@v2 + +# - name: Install dependencies +# run: pip install .[tests] +# continue-on-error: true + +# - name: Run Indy Node test +# run: python3 -m pytest -l -vv --junitxml=test-result-indy-node.xml --dir indy_node --output "test-result-indy-node.txt" + + + +# lint: +# name: Lint +# runs-on: ubuntu-18.04 +# #TODO: move this to hyperledger? +# container: udosson/indy-node-ci-u1804:latest +# steps: +# - name: Check out code +# uses: actions/checkout@v2 + +# - name: flake8 +# run: python3 -m flake8 diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml new file mode 100644 index 000000000..177b7e44b --- /dev/null +++ b/.github/workflows/build.yaml @@ -0,0 +1,117 @@ +name: indy-node-build +on: [ push, pull_request, workflow_dispatch ] + +jobs: + workflow-setup: + runs-on: ubuntu-latest + outputs: + CACHE_KEY_LINT: ${{ steps.cache.outputs.CACHE_KEY_LINT }} + CACHE_KEY_BUILD: ${{ steps.cache.outputs.CACHE_KEY_BUILD }} + # Expose the lowercase version of the GitHub repository name + # to all subsequent jobs that reference image repositories + # as the push and pull operations require the URL of the repository + # to be in lowercase. 
+ GITHUB_REPOSITORY_NAME: ${{ steps.cache.outputs.GITHUB_REPOSITORY_NAME }} + steps: + - name: Git checkout + uses: actions/checkout@v2 + - name: Set outputs + id: cache + run: | + echo "::set-output name=CACHE_KEY_LINT::${{ hashFiles('.github/workflows/lint/Dockerfile') }}" + echo "::set-output name=CACHE_KEY_BUILD::${{ hashFiles('.github/workflows/build/Dockerfile') }}" + echo "::set-output name=GITHUB_REPOSITORY_NAME::$(echo ${GITHUB_REPOSITORY,,})" + + build-lint-image: + needs: workflow-setup + runs-on: ubuntu-latest + env: + DOCKER_BUILDKIT: 1 + CACHE_KEY_LINT: ${{ needs.workflow-setup.outputs.CACHE_KEY_LINT }} + GITHUB_REPOSITORY_NAME: ${{ needs.workflow-setup.outputs.GITHUB_REPOSITORY_NAME }} + steps: + - name: Git checkout + uses: actions/checkout@v2 + - name: Try load from cache. + id: cache-image-lint + uses: actions/cache@v2 + with: + path: ${GITHUB_WORKSPACE}/cache + key: ${{ env.CACHE_KEY_LINT }} + - name: If NOT found in cache, build and push image. + if: steps.cache-image-lint.outputs.cache-hit != 'true' + run: | + echo ${{ secrets.CR_PAT }} | docker login ghcr.io --username ${{ secrets.CR_USER }} --password-stdin + docker build -f .github/workflows/lint/Dockerfile --no-cache -t ${{ env.GITHUB_REPOSITORY_NAME }}/indy-node-lint:${{ env.CACHE_KEY_LINT }} . + docker tag ${{ env.GITHUB_REPOSITORY_NAME }}/indy-node-lint:${{ env.CACHE_KEY_LINT }} ghcr.io/${{ env.GITHUB_REPOSITORY_NAME }}/indy-node-lint:latest + docker push ghcr.io/${{ env.GITHUB_REPOSITORY_NAME }}/indy-node-lint:latest + mkdir -p ${GITHUB_WORKSPACE}/cache + touch ${GITHUB_WORKSPACE}/cache/${{ env.CACHE_KEY_LINT }} + + build-test-image: + needs: workflow-setup + runs-on: ubuntu-latest + env: + DOCKER_BUILDKIT: 1 + CACHE_KEY_BUILD: ${{ needs.workflow-setup.outputs.CACHE_KEY_BUILD }} + GITHUB_REPOSITORY_NAME: ${{ needs.workflow-setup.outputs.GITHUB_REPOSITORY_NAME }} + steps: + - name: Git checkout + uses: actions/checkout@v2 + - name: Try load from cache. 
+ id: cache-image-build + uses: actions/cache@v2 + with: + path: ${GITHUB_WORKSPACE}/cache + key: ${{ env.CACHE_KEY_BUILD }} + - name: If NOT found in cache, build and push image. + if: steps.cache-image-build.outputs.cache-hit != 'true' + run: | + echo ${{ secrets.CR_PAT }} | docker login ghcr.io --username ${{ secrets.CR_USER }} --password-stdin + docker build -f .github/workflows/build/Dockerfile --no-cache -t ${{ env.GITHUB_REPOSITORY_NAME }}/indy-node-build:${{ env.CACHE_KEY_BUILD }} . + docker tag ${{ env.GITHUB_REPOSITORY_NAME }}/indy-node-build:${{ env.CACHE_KEY_BUILD }} ghcr.io/${{ env.GITHUB_REPOSITORY_NAME }}/indy-node-build:latest + docker push ghcr.io/${{ env.GITHUB_REPOSITORY_NAME }}/indy-node-build:latest + mkdir -p ${GITHUB_WORKSPACE}/cache + touch ${GITHUB_WORKSPACE}/cache/${{ env.CACHE_KEY_BUILD }} + + indy_node: + name: Build Indy Node + needs: build-test-image + runs-on: ubuntu-18.04 + container: + image: ghcr.io/${{ github.repository }}/indy-node-build + strategy: + matrix: + module: [indy_node, indy_common] + slice: [1, 2, 3, 4 ,5, 6, 7,8, 9, 10, 11] + fail-fast: false + steps: + - name: Check out code + uses: actions/checkout@v2 + + - name: Install dependencies + run: pip install .[tests] + continue-on-error: true + + - name: Run Indy Node ${{ matrix.module }} test slice ${{ matrix.slice }}/${{ strategy.job-total }} + run: RUSTPYTHONASYNCIODEBUG=0 python3 runner.py --pytest "python3 -m pytest -l -vv --junitxml=test-result-indy-node-${{ matrix.module }}-${{ matrix.slice }}.xml" --dir "${{ matrix.module }}" --output "test-result-indy-node-${{ matrix.slice }}.txt" --test-only-slice "${{ matrix.slice }}/${{ strategy.job-total }}" + + - name: Publish Test Report + uses: scacap/action-surefire-report@v1 + with: + check_name: Indy Node ${{ matrix.module }} ${{ matrix.slice }}/${{ strategy.job-total }} Test Report + github_token: ${{ secrets.GITHUB_TOKEN }} + report_paths: test-result-indy-node-${{ matrix.module }}-${{ matrix.slice }}.xml + + lint: + 
name: Lint + runs-on: ubuntu-latest + container: + image: ghcr.io/${{ needs.workflow-setup.outputs.GITHUB_REPOSITORY_NAME }}/indy-node-lint + needs: [workflow-setup, build-lint-image] + steps: + - name: Check out code + uses: actions/checkout@v2 + + - name: flake8 + run: python3 -m flake8 diff --git a/.github/workflows/build/Dockerfile b/.github/workflows/build/Dockerfile new file mode 100644 index 000000000..a53e03c44 --- /dev/null +++ b/.github/workflows/build/Dockerfile @@ -0,0 +1,17 @@ +FROM hyperledger/indy-core-baseci:0.0.3-master +LABEL maintainer="Hyperledger " + +RUN apt-get update -y && apt-get install -y \ + python3-nacl \ + libindy-crypto=0.4.5 \ + libindy=1.15.0~1618-xenial \ +# rocksdb python wrapper + libbz2-dev \ + zlib1g-dev \ + liblz4-dev \ + libsnappy-dev \ + rocksdb=5.8.8 \ + ursa=0.3.2-2 \ + jq + +RUN indy_image_clean diff --git a/.github/workflows/build/README.md b/.github/workflows/build/README.md new file mode 100644 index 000000000..984f201f2 --- /dev/null +++ b/.github/workflows/build/README.md @@ -0,0 +1,3 @@ +# Building the build image + +This `Dockerfile` is used as part of the workflow, any changes to it will force the docker image to be rebuilt and that new image will be used to run the downstream workflow. 
\ No newline at end of file diff --git a/.github/workflows/lint/Dockerfile b/.github/workflows/lint/Dockerfile new file mode 100644 index 000000000..4e8c659b2 --- /dev/null +++ b/.github/workflows/lint/Dockerfile @@ -0,0 +1,21 @@ +# Development +FROM ubuntu:20.04 +LABEL maintainer="Hyperledger " + +RUN apt-get update && apt-get dist-upgrade -y + +# Install environment +RUN apt-get install -y \ + git \ + wget \ + python3.5 \ + python3-pip \ + python-setuptools \ + python3-nacl + +RUN pip3 install -U \ + 'pip<10.0.0' \ + setuptools \ + pep8==1.7.1 \ + pep8-naming==0.6.1 \ + flake8==3.5.0 diff --git a/.github/workflows/lint/README.md b/.github/workflows/lint/README.md new file mode 100644 index 000000000..6b2a83fd0 --- /dev/null +++ b/.github/workflows/lint/README.md @@ -0,0 +1,3 @@ +# Building the lint image + +This `Dockerfile` is used as part of the workflow, any changes to it will force the docker image to be rebuilt and that new image will be used to run the downstream workflow. \ No newline at end of file diff --git a/.gitignore b/.gitignore index 6147ec972..ca783e72d 100644 --- a/.gitignore +++ b/.gitignore @@ -24,6 +24,9 @@ var/ *.egg *.eggs +# Needed for GitHub Actions +!.github/workflows/build + # Installer logs pip-log.txt pip-delete-this-directory.txt @@ -72,8 +75,15 @@ include/ # generated doc files docs/source/api_docs/ -# hidden files -.* +# IntelliJ specific config +*.idea +*.iml + +#vscode +.vscode # Vagrant files -.vagrant \ No newline at end of file +.vagrant + +# test output from working with GitHub actions +test-result-node.xml \ No newline at end of file diff --git a/Jenkinsfile.ci b/Jenkinsfile.ci index bdc2a1b5a..210d74386 100644 --- a/Jenkinsfile.ci +++ b/Jenkinsfile.ci @@ -73,6 +73,7 @@ def withTestEnv(body) { buildDocker("hyperledger/indy-node-ci", "ci/ubuntu.dockerfile ci").inside { echo 'Test: Install dependencies' + sh "pip install pip==10.0.0" install() body.call('python') } diff --git a/ci/pipeline.groovy b/ci/pipeline.groovy index 
22fe63a74..d03a2b216 100644 --- a/ci/pipeline.groovy +++ b/ci/pipeline.groovy @@ -148,6 +148,7 @@ def systemTests(Closure body) { def uid = sh(returnStdout: true, script: 'id -u').trim() docker.build("hyperledger/indy-node-ci", "--build-arg uid=$uid -f ci/ubuntu.dockerfile ci").inside { sh """ + pip install pip==10.0.0 pip install .[tests] >$pipLogName """ @@ -160,7 +161,7 @@ def systemTests(Closure body) { indySDKVersions.pypi = sh(returnStdout: true, script: """ grep "^Collecting python3-indy==" $pipLogName | awk '{print \$2}' | awk -F'==' '{print \$2}' """).trim() - indySDKVersions.debian = indySDKVersions.pypi.replaceAll(/-(dev|rc)-(.*)/, "~\$2") + indySDKVersions.debian = indySDKVersions.pypi.replaceAll(/-(dev|rc)-(.*)/, "~\$2-xenial") echo "indy-sdk version: ${indySDKVersions}" } diff --git a/ci/ubuntu.dockerfile b/ci/ubuntu.dockerfile index 46ea2eda4..8fa83c8b3 100644 --- a/ci/ubuntu.dockerfile +++ b/ci/ubuntu.dockerfile @@ -8,7 +8,7 @@ ARG venv=venv RUN apt-get update -y && apt-get install -y \ python3-nacl \ ursa=0.3.2-2 \ - libindy=1.13.0~1420 \ + libindy=1.15.0~1618-xenial \ # rocksdb python wrapper libbz2-dev \ zlib1g-dev \ diff --git a/dev-setup/ubuntu/ubuntu-2004/SetupVMTest.txt b/dev-setup/ubuntu/ubuntu-2004/SetupVMTest.txt new file mode 100644 index 000000000..49e9d2cc2 --- /dev/null +++ b/dev-setup/ubuntu/ubuntu-2004/SetupVMTest.txt @@ -0,0 +1,105 @@ +#VM 20.04 Setup + +##Pre-Install + + sudo apt-get update && sudo apt-get install -y apt-transport-https ca-certificates + sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys CE7709D068DB5E88 || sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys CE7709D068DB5E88 + sudo echo "deb https://repo.sovrin.org/deb bionic master" >> /etc/apt/sources.list + sudo echo "deb https://repo.sovrin.org/deb bionic stable" >> /etc/apt/sources.list + sudo echo "deb http://security.ubuntu.com/ubuntu bionic-security main" >> /etc/apt/sources.list + + sudo apt-get update && sudo apt-get install 
-y \ + git \ + wget \ + unzip \ + python3-pip \ + python3-venv \ + libsodium23 \ + iptables \ + at \ + supervisor \ + python3-nacl \ + rocksdb-tools \ + librocksdb5.17 \ + librocksdb-dev \ + libsnappy-dev \ + liblz4-dev \ + libbz2-dev \ + libssl1.0.0 \ + libindy \ + ursa + + git clone https://github.com/hyperledger/indy-node.git + git clone https://github.com/hyperledger/indy-plenum.git + # in both indy-node and indy-plenum checkout origin/ubuntu-20.04-upgrade + sudo cp /usr/lib/ursa/libursa.* /usr/lib/ + # Should be done in python env + pip install -U \ + Pygments==2.2.0 \ + Pympler==0.8 \ + apipkg==1.5 \ + attrs==20.3.0 \ + base58==2.1.0 \ + distro==1.5.0 \ + execnet==1.8.0 \ + flake8==3.8.4 \ + indy-plenum==1.13.0.dev1032 \ + indy-node==1.13.0.dev1222 \ + iniconfig==1.1.1 \ + intervaltree==2.1.0 \ + ioflo==2.0.2 \ + jsonpickle==2.0.0 \ + leveldb==0.201 \ + libnacl==1.7.2 \ + mccabe==0.6.1 \ + msgpack-python==0.5.6 \ + orderedset==2.0.3 \ + packaging==20.9 \ + pip==9.0.3 \ + pluggy==0.13.1 \ + portalocker==2.2.1 \ + prompt-toolkit==3.0.16 \ + psutil==5.6.6 \ + py==1.10.0 \ + pycodestyle==2.6.0 \ + pyflakes==2.2.0 \ + pyparsing==2.4.7 \ + pytest==6.2.2 \ + pytest-asyncio==0.14.0 \ + pytest-forked==1.3.0 \ + pytest-runner==5.3.0 \ + pytest-xdist==2.2.1 \ + python-dateutil==2.6.1 \ + python-rocksdb==0.7.0 \ + python-ursa==0.1.1 \ + python3-indy==1.13.0 \ + pyzmq==18.1.0 \ + rlp==0.6.0 \ + semver==2.13.0 \ + setuptools==53.0.0 \ + sha3==0.2.1 \ + six==1.15.0 \ + sortedcontainers==1.5.7 \ + timeout-decorator==0.5.0 \ + toml==0.10.2 \ + ujson==1.33 \ + wcwidth==0.2.5 \ + wheel==0.34.2 \ + zipp==1.2.0 + +##IDE Setup + Pycharm: + # Open indy-node + # Open indy-plenum - Link + # Create virtual env in project structure - python interpreter + # Create virtual env in project structure - python interpreter + # All pip3 commands mentioned above must be done in env + +## Base Dependencies Needed to test +### Library Dependencies: + libindy 1.15.0-bionic + libindy-crypto 0.4.5 + 
ursa 0.3.2-2 + + + diff --git a/indy_common/auth.py b/indy_common/auth.py index 176f0eaed..dbaf9ab9a 100644 --- a/indy_common/auth.py +++ b/indy_common/auth.py @@ -1,5 +1,5 @@ from indy_common.config_util import getConfig -from plenum.common.constants import TRUSTEE, STEWARD, NODE +from plenum.common.constants import TRUSTEE, STEWARD, NODE, LEDGERS_FREEZE from stp_core.common.log import getlogger from indy_common.constants import OWNER, POOL_UPGRADE, ENDORSER, NYM, \ @@ -62,6 +62,8 @@ def generate_auth_map(valid_roles): {TRUSTEE: []}, '{}___'.format(VALIDATOR_INFO): {TRUSTEE: [], STEWARD: []}, + '{}___'.format(LEDGERS_FREEZE): + {TRUSTEE: []}, } return auth_map diff --git a/indy_common/authorize/auth_map.py b/indy_common/authorize/auth_map.py index 0534acb08..9bd47c79f 100644 --- a/indy_common/authorize/auth_map.py +++ b/indy_common/authorize/auth_map.py @@ -9,7 +9,7 @@ REVOC_REG_DEF, ATTRIB, AUTH_RULES, JSON_LD_CONTEXT, RICH_SCHEMA, RICH_SCHEMA_MAPPING, \ RICH_SCHEMA_ENCODING, RICH_SCHEMA_CRED_DEF, RICH_SCHEMA_PRES_DEF from plenum.common.constants import TRUSTEE, STEWARD, VERKEY, TXN_AUTHOR_AGREEMENT, TXN_AUTHOR_AGREEMENT_AML, \ - TXN_AUTHOR_AGREEMENT_DISABLE + TXN_AUTHOR_AGREEMENT_DISABLE, LEDGERS_FREEZE edit_role_actions = {} # type: Dict[str, Dict[str, AuthActionEdit]] for role_from in accepted_roles: @@ -236,6 +236,11 @@ old_value='*', new_value='*') +edit_frozen_ledgers = AuthActionEdit(txn_type=LEDGERS_FREEZE, + field='*', + old_value='*', + new_value='*') + # Anyone constraint anyone_constraint = AuthConstraint(role='*', sig_count=1) @@ -254,6 +259,9 @@ # One Trustee constraint one_trustee_constraint = AuthConstraint(TRUSTEE, 1) +# Three Trustee constraint +three_trustee_constraint = AuthConstraint(TRUSTEE, 3) + # Steward or Trustee constraint steward_or_trustee_constraint = AuthConstraintOr([AuthConstraint(STEWARD, 1), AuthConstraint(TRUSTEE, 1)]) @@ -324,6 +332,7 @@ (add_revoc_reg_entry.get_action_id(), endorser_or_steward_or_trustee_owner_constraint), 
(edit_revoc_reg_def.get_action_id(), owner_constraint), (edit_revoc_reg_entry.get_action_id(), owner_constraint), + (edit_frozen_ledgers.get_action_id(), three_trustee_constraint), ]) # Edit Trustee: diff --git a/indy_common/test/test_strict_types.py b/indy_common/test/test_strict_types.py index 0044a0303..9ada510d7 100644 --- a/indy_common/test/test_strict_types.py +++ b/indy_common/test/test_strict_types.py @@ -12,14 +12,6 @@ def takesStr(s: str) -> int: pass -@strict_types() -def takesUnion(s: typing.Union[str, None]) -> int: - try: - return int(s) - except ValueError: - pass - - def testInvalidArgumentType(): with pytest.raises(TypeError): takesStr(1) @@ -34,10 +26,6 @@ def testValidInputAndReturn(): takesStr('1') -def testWorksWithComplexTypes(): - takesUnion('1') - - @decClassMethods(strict_types()) class TestClass: diff --git a/indy_common/types.py b/indy_common/types.py index f76e56b1a..b896fae67 100644 --- a/indy_common/types.py +++ b/indy_common/types.py @@ -496,7 +496,7 @@ class ClientOperationField(PClientOperationField): RICH_SCHEMA_CRED_DEF: ClientRichSchemaCredDefOperation(), RICH_SCHEMA_PRES_DEF: ClientRichSchemaPresDefOperation(), GET_RICH_SCHEMA_OBJECT_BY_ID: ClientGetRichSchemaObjectByIdOperation(), - GET_RICH_SCHEMA_OBJECT_BY_METADATA: ClientGetRichSchemaObjectByMetadataOperation(), + GET_RICH_SCHEMA_OBJECT_BY_METADATA: ClientGetRichSchemaObjectByMetadataOperation() } # TODO: it is a workaround because INDY-338, `operations` must be a class diff --git a/indy_node/server/node_bootstrap.py b/indy_node/server/node_bootstrap.py index bd5d2232c..89d7bca75 100644 --- a/indy_node/server/node_bootstrap.py +++ b/indy_node/server/node_bootstrap.py @@ -12,6 +12,7 @@ from indy_node.server.request_handlers.config_req_handlers.auth_rule.auth_rule_handler import AuthRuleHandler from indy_node.server.request_handlers.config_req_handlers.auth_rule.auth_rule_handler_1_9_1 import AuthRuleHandler191 from 
indy_node.server.request_handlers.config_req_handlers.auth_rule.auth_rules_handler import AuthRulesHandler +from indy_node.server.request_handlers.config_req_handlers.ledgers_freeze_handler import LedgersFreezeHandler from indy_node.server.request_handlers.config_req_handlers.node_upgrade_handler import NodeUpgradeHandler from indy_node.server.request_handlers.config_req_handlers.pool_config_handler import PoolConfigHandler from indy_node.server.request_handlers.config_req_handlers.pool_upgrade_handler import PoolUpgradeHandler @@ -45,6 +46,7 @@ from indy_node.server.request_handlers.read_req_handlers.get_attribute_handler import GetAttributeHandler from indy_node.server.request_handlers.read_req_handlers.get_auth_rule_handler import GetAuthRuleHandler from indy_node.server.request_handlers.read_req_handlers.get_claim_def_handler import GetClaimDefHandler +from plenum.server.request_handlers.ledgers_freeze.get_frozen_ledgers_handler import GetFrozenLedgersHandler from indy_node.server.request_handlers.read_req_handlers.get_nym_handler import GetNymHandler from indy_node.server.request_handlers.read_req_handlers.get_revoc_reg_def_handler import GetRevocRegDefHandler from indy_node.server.request_handlers.read_req_handlers.get_revoc_reg_delta_handler import GetRevocRegDeltaHandler @@ -197,6 +199,9 @@ def _register_config_req_handlers(self): get_taa_aml_handler = GetTxnAuthorAgreementAmlHandler(database_manager=self.node.db_manager) get_taa_handler = GetTxnAuthorAgreementHandler(database_manager=self.node.db_manager) node_upgrade_handler = NodeUpgradeHandler(database_manager=self.node.db_manager) + ledgers_freeze_handler = LedgersFreezeHandler(database_manager=self.node.db_manager, + write_req_validator=self.node.write_req_validator) + get_frozen_ledgers_handler = GetFrozenLedgersHandler(database_manager=self.node.db_manager) # Register write handlers self.node.write_manager.register_req_handler(auth_rule_handler) 
self.node.write_manager.register_req_handler(auth_rules_handler) @@ -206,10 +211,12 @@ def _register_config_req_handlers(self): self.node.write_manager.register_req_handler(taa_handler) self.node.write_manager.register_req_handler(taa_disable_handler) self.node.write_manager.register_req_handler(node_upgrade_handler) + self.node.write_manager.register_req_handler(ledgers_freeze_handler) # Register read handlers self.node.read_manager.register_req_handler(get_auth_rule_handler) self.node.read_manager.register_req_handler(get_taa_aml_handler) self.node.read_manager.register_req_handler(get_taa_handler) + self.node.read_manager.register_req_handler(get_frozen_ledgers_handler) # Register write handlers for a version self.node.write_manager.register_req_handler_with_version(auth_rule_handler_1_9_1, version="1.9.1") diff --git a/indy_node/server/request_handlers/config_req_handlers/auth_rule/auth_rule_handler.py b/indy_node/server/request_handlers/config_req_handlers/auth_rule/auth_rule_handler.py index 376c7e7e8..0bda7a0c7 100644 --- a/indy_node/server/request_handlers/config_req_handlers/auth_rule/auth_rule_handler.py +++ b/indy_node/server/request_handlers/config_req_handlers/auth_rule/auth_rule_handler.py @@ -21,7 +21,7 @@ def static_validation(self, request: Request): self._validate_request_type(request) self._static_validation_for_rule(operation, identifier, req_id) - def dynamic_validation(self, request: Request, req_pp_time: Optional[int]): + def additional_dynamic_validation(self, request: Request, req_pp_time: Optional[int]): self._validate_request_type(request) self.write_req_validator.validate(request, [AuthActionEdit(txn_type=AUTH_RULE, diff --git a/indy_node/server/request_handlers/config_req_handlers/auth_rule/auth_rules_handler.py b/indy_node/server/request_handlers/config_req_handlers/auth_rule/auth_rules_handler.py index d739c4736..807b035a0 100644 --- a/indy_node/server/request_handlers/config_req_handlers/auth_rule/auth_rules_handler.py +++ 
b/indy_node/server/request_handlers/config_req_handlers/auth_rule/auth_rules_handler.py @@ -22,7 +22,7 @@ def static_validation(self, request: Request): for rule in operation.get(RULES): self._static_validation_for_rule(rule, identifier, req_id) - def dynamic_validation(self, request: Request, req_pp_time: Optional[int]): + def additional_dynamic_validation(self, request: Request, req_pp_time: Optional[int]): self._validate_request_type(request) self.write_req_validator.validate(request, [AuthActionEdit(txn_type=AUTH_RULES, diff --git a/indy_node/server/request_handlers/config_req_handlers/ledgers_freeze_handler.py b/indy_node/server/request_handlers/config_req_handlers/ledgers_freeze_handler.py new file mode 100644 index 000000000..b189bb637 --- /dev/null +++ b/indy_node/server/request_handlers/config_req_handlers/ledgers_freeze_handler.py @@ -0,0 +1,23 @@ +from typing import Optional + +from indy_common.authorize.auth_actions import AuthActionEdit +from indy_common.authorize.auth_request_validator import WriteRequestValidator +from plenum.common.constants import LEDGERS_FREEZE +from plenum.common.request import Request +from plenum.server.database_manager import DatabaseManager +from plenum.server.request_handlers.ledgers_freeze.ledgers_freeze_handler import LedgersFreezeHandler as PLedgersFreezeHandler + + +class LedgersFreezeHandler(PLedgersFreezeHandler): + + def __init__(self, database_manager: DatabaseManager, + write_req_validator: WriteRequestValidator): + super().__init__(database_manager) + self.write_req_validator = write_req_validator + + def authorize(self, request): + self.write_req_validator.validate(request, + [AuthActionEdit(txn_type=LEDGERS_FREEZE, + field='*', + old_value='*', + new_value='*')]) diff --git a/indy_node/server/request_handlers/config_req_handlers/node_upgrade_handler.py b/indy_node/server/request_handlers/config_req_handlers/node_upgrade_handler.py index f0cb00241..28b96d8ea 100644 --- 
a/indy_node/server/request_handlers/config_req_handlers/node_upgrade_handler.py +++ b/indy_node/server/request_handlers/config_req_handlers/node_upgrade_handler.py @@ -16,7 +16,7 @@ def __init__(self, database_manager: DatabaseManager): def update_state(self, txn, prev_result, request, is_committed=False): pass - def dynamic_validation(self, request: Request, req_pp_time: Optional[int]): + def additional_dynamic_validation(self, request: Request, req_pp_time: Optional[int]): pass def static_validation(self, request: Request): diff --git a/indy_node/server/request_handlers/config_req_handlers/pool_config_handler.py b/indy_node/server/request_handlers/config_req_handlers/pool_config_handler.py index 2e046fee0..d65adbb96 100644 --- a/indy_node/server/request_handlers/config_req_handlers/pool_config_handler.py +++ b/indy_node/server/request_handlers/config_req_handlers/pool_config_handler.py @@ -22,7 +22,7 @@ def __init__(self, database_manager: DatabaseManager, def static_validation(self, request: Request): self._validate_request_type(request) - def dynamic_validation(self, request: Request, req_pp_time: Optional[int]): + def additional_dynamic_validation(self, request: Request, req_pp_time: Optional[int]): self._validate_request_type(request) action = '*' status = '*' diff --git a/indy_node/server/request_handlers/config_req_handlers/pool_upgrade_handler.py b/indy_node/server/request_handlers/config_req_handlers/pool_upgrade_handler.py index 5d31c74b4..f68bb2a69 100644 --- a/indy_node/server/request_handlers/config_req_handlers/pool_upgrade_handler.py +++ b/indy_node/server/request_handlers/config_req_handlers/pool_upgrade_handler.py @@ -48,7 +48,7 @@ def static_validation(self, request: Request): "{} not a valid schedule since {}". 
format(schedule, msg)) - def dynamic_validation(self, request: Request, req_pp_time: Optional[int]): + def additional_dynamic_validation(self, request: Request, req_pp_time: Optional[int]): self._validate_request_type(request) identifier, req_id, operation = get_request_data(request) status = '*' diff --git a/indy_node/server/request_handlers/domain_req_handlers/attribute_handler.py b/indy_node/server/request_handlers/domain_req_handlers/attribute_handler.py index 1afe389f8..b1d7147db 100644 --- a/indy_node/server/request_handlers/domain_req_handlers/attribute_handler.py +++ b/indy_node/server/request_handlers/domain_req_handlers/attribute_handler.py @@ -55,7 +55,7 @@ def static_validation(self, request: Request): 'Attribute field must be dict while adding it as a row field'. format(TARGET_NYM)) - def dynamic_validation(self, request: Request, req_pp_time: Optional[int]): + def additional_dynamic_validation(self, request: Request, req_pp_time: Optional[int]): self._validate_request_type(request) identifier, req_id, operation = get_request_data(request) diff --git a/indy_node/server/request_handlers/domain_req_handlers/claim_def_handler.py b/indy_node/server/request_handlers/domain_req_handlers/claim_def_handler.py index 5e1b1855e..b8b8528e6 100644 --- a/indy_node/server/request_handlers/domain_req_handlers/claim_def_handler.py +++ b/indy_node/server/request_handlers/domain_req_handlers/claim_def_handler.py @@ -29,7 +29,7 @@ def __init__(self, database_manager: DatabaseManager, def static_validation(self, request: Request): pass - def dynamic_validation(self, request: Request, req_pp_time: Optional[int]): + def additional_dynamic_validation(self, request: Request, req_pp_time: Optional[int]): # we can not add a Claim Def with existent ISSUER_DID # sine a Claim Def needs to be identified by seqNo self._validate_request_type(request) diff --git a/indy_node/server/request_handlers/domain_req_handlers/idr_cache_nym_handler.py 
b/indy_node/server/request_handlers/domain_req_handlers/idr_cache_nym_handler.py index b796b45ba..988f195ca 100644 --- a/indy_node/server/request_handlers/domain_req_handlers/idr_cache_nym_handler.py +++ b/indy_node/server/request_handlers/domain_req_handlers/idr_cache_nym_handler.py @@ -36,7 +36,7 @@ def update_state(self, txn, prev_result, request, is_committed=False): def static_validation(self, request): pass - def dynamic_validation(self, request, req_pp_time: Optional[int]): + def additional_dynamic_validation(self, request, req_pp_time: Optional[int]): pass def gen_state_key(self, txn): diff --git a/indy_node/server/request_handlers/domain_req_handlers/nym_handler.py b/indy_node/server/request_handlers/domain_req_handlers/nym_handler.py index 3ea822cbd..5107ee588 100644 --- a/indy_node/server/request_handlers/domain_req_handlers/nym_handler.py +++ b/indy_node/server/request_handlers/domain_req_handlers/nym_handler.py @@ -42,7 +42,7 @@ def static_validation(self, request: Request): "{} not a valid role". 
format(role)) - def dynamic_validation(self, request: Request, req_pp_time: Optional[int]): + def additional_dynamic_validation(self, request: Request, req_pp_time: Optional[int]): self._validate_request_type(request) operation = request.operation diff --git a/indy_node/server/request_handlers/domain_req_handlers/revoc_reg_def_handler.py b/indy_node/server/request_handlers/domain_req_handlers/revoc_reg_def_handler.py index f387cfe5f..2a17f5de8 100644 --- a/indy_node/server/request_handlers/domain_req_handlers/revoc_reg_def_handler.py +++ b/indy_node/server/request_handlers/domain_req_handlers/revoc_reg_def_handler.py @@ -40,7 +40,7 @@ def static_validation(self, request: Request): "Expected: 'did:marker:signature_type:schema_ref' or " "'did:marker:signature_type:schema_ref:tag'".format(CRED_DEF_ID)) - def dynamic_validation(self, request: Request, req_pp_time: Optional[int]): + def additional_dynamic_validation(self, request: Request, req_pp_time: Optional[int]): self._validate_request_type(request) operation = request.operation cred_def_id = operation.get(CRED_DEF_ID) diff --git a/indy_node/server/request_handlers/domain_req_handlers/revoc_reg_entry_handler.py b/indy_node/server/request_handlers/domain_req_handlers/revoc_reg_entry_handler.py index 05ac52613..61b7b3951 100644 --- a/indy_node/server/request_handlers/domain_req_handlers/revoc_reg_entry_handler.py +++ b/indy_node/server/request_handlers/domain_req_handlers/revoc_reg_entry_handler.py @@ -28,7 +28,7 @@ def __init__(self, database_manager: DatabaseManager, def static_validation(self, request: Request): pass - def dynamic_validation(self, request: Request, req_pp_time: Optional[int]): + def additional_dynamic_validation(self, request: Request, req_pp_time: Optional[int]): self._validate_request_type(request) rev_reg_tags = request.operation[REVOC_REG_DEF_ID] author_did, req_id, operation = get_request_data(request) diff --git 
a/indy_node/server/request_handlers/domain_req_handlers/rich_schema/abstract_rich_schema_object_handler.py b/indy_node/server/request_handlers/domain_req_handlers/rich_schema/abstract_rich_schema_object_handler.py index 0c3530fb4..b6b171ebc 100644 --- a/indy_node/server/request_handlers/domain_req_handlers/rich_schema/abstract_rich_schema_object_handler.py +++ b/indy_node/server/request_handlers/domain_req_handlers/rich_schema/abstract_rich_schema_object_handler.py @@ -72,7 +72,7 @@ def do_static_validation_json_ld(self, content_as_dict, request): raise InvalidClientRequest(request.identifier, request.reqId, "content's @id must be equal to id={}".format(request.operation[RS_ID])) - def dynamic_validation(self, request: Request, req_pp_time: Optional[int]): + def additional_dynamic_validation(self, request: Request, req_pp_time: Optional[int]): self._validate_request_type(request) rs_id = request.operation[RS_ID] diff --git a/indy_node/server/request_handlers/domain_req_handlers/schema_handler.py b/indy_node/server/request_handlers/domain_req_handlers/schema_handler.py index fe34d2039..4702bc2dd 100644 --- a/indy_node/server/request_handlers/domain_req_handlers/schema_handler.py +++ b/indy_node/server/request_handlers/domain_req_handlers/schema_handler.py @@ -27,7 +27,7 @@ def __init__(self, database_manager: DatabaseManager, def static_validation(self, request: Request): pass - def dynamic_validation(self, request: Request, req_pp_time: Optional[int]): + def additional_dynamic_validation(self, request: Request, req_pp_time: Optional[int]): # we can not add a Schema with already existent NAME and VERSION # sine a Schema needs to be identified by seqNo self._validate_request_type(request) diff --git a/indy_node/server/request_handlers/read_req_handlers/get_revoc_reg_delta_handler.py b/indy_node/server/request_handlers/read_req_handlers/get_revoc_reg_delta_handler.py index b4c84e47c..cdb16e7a8 100644 --- 
a/indy_node/server/request_handlers/read_req_handlers/get_revoc_reg_delta_handler.py +++ b/indy_node/server/request_handlers/read_req_handlers/get_revoc_reg_delta_handler.py @@ -1,4 +1,4 @@ -from collections import Callable +from collections.abc import Callable from indy_common.constants import FROM, TO, REVOC_REG_DEF_ID, ISSUANCE_TYPE, REVOKED, ISSUED, VALUE, REVOC_TYPE, \ ACCUM_TO, STATE_PROOF_FROM, ACCUM_FROM, GET_REVOC_REG_DELTA diff --git a/indy_node/test/auth_rule/auth_framework/edit_frozen_ledgers.py b/indy_node/test/auth_rule/auth_framework/edit_frozen_ledgers.py new file mode 100644 index 000000000..8dd0d3801 --- /dev/null +++ b/indy_node/test/auth_rule/auth_framework/edit_frozen_ledgers.py @@ -0,0 +1,79 @@ +import pytest +from plenum.common.constants import STEWARD, TRUSTEE_STRING, LEDGERS_FREEZE + +from indy_node.server.request_handlers.action_req_handlers.pool_restart_handler import PoolRestartHandler + +from indy_common.authorize.auth_actions import EDIT_PREFIX +from indy_common.authorize.auth_constraints import AuthConstraint +from indy_node.test.auth_rule.auth_framework.basic import AuthTest +from plenum.common.exceptions import RequestRejectedException +from plenum.test.freeze_ledgers.helper import sdk_send_freeze_ledgers +from plenum.test.pool_transactions.helper import sdk_add_new_nym + +from indy_node.test.helper import build_auth_rule_request_json, sdk_send_and_check_req_json + + +class EditFrozenLedgersTest(AuthTest): + def __init__(self, env, action_id): + super().__init__(env, action_id) + self.trustee_wallets = [self.trustee_wallet] + + def prepare(self): + for i in range(3): + wallet = sdk_add_new_nym(self.looper, + self.sdk_pool_handle, + self.trustee_wallet, + alias='trustee{}'.format(i), + role=TRUSTEE_STRING) + self.trustee_wallets.append(wallet) + self.default_auth_rule = self.get_default_auth_rule() + self.changed_auth_rule = self.get_changed_auth_rule() + for n in self.env.txnPoolNodeSet: + for h in 
n.action_manager.request_handlers.values(): + if isinstance(h, PoolRestartHandler): + h.restarter.handleRestartRequest = lambda *args, **kwargs: True + + def run(self): + frozen_ledgers_ids = [] + + # Step 1. Check default auth rule + sdk_send_freeze_ledgers(self.looper, self.sdk_pool_handle, self.trustee_wallets, frozen_ledgers_ids) + with pytest.raises(RequestRejectedException): + sdk_send_freeze_ledgers(self.looper, self.sdk_pool_handle, [self.new_default_wallet], frozen_ledgers_ids) + + # Step 2. Change auth rule + self.send_and_check(self.changed_auth_rule, wallet=self.trustee_wallet) + + # Step 3. Check that we cannot send the txn the old way + sdk_send_freeze_ledgers(self.looper, self.sdk_pool_handle, [self.new_default_wallet], frozen_ledgers_ids) + with pytest.raises(RequestRejectedException): + sdk_send_freeze_ledgers(self.looper, self.sdk_pool_handle, self.trustee_wallets, frozen_ledgers_ids) + + # Step 4. Check that we can send the freeze-ledgers txn in the changed way + sdk_send_freeze_ledgers(self.looper, self.sdk_pool_handle, [self.new_default_wallet], frozen_ledgers_ids) + + # Step 5. Return default auth rule + self.send_and_check(self.default_auth_rule, self.trustee_wallet) + + # Step 6.
Check, that default auth rule works + sdk_send_freeze_ledgers(self.looper, self.sdk_pool_handle, self.trustee_wallets, frozen_ledgers_ids) + with pytest.raises(RequestRejectedException): + sdk_send_freeze_ledgers(self.looper, self.sdk_pool_handle, [self.new_default_wallet], frozen_ledgers_ids) + + def result(self): + pass + + def get_changed_auth_rule(self): + self.new_default_wallet = sdk_add_new_nym(self.looper, self.sdk_pool_handle, self.trustee_wallet, role=STEWARD) + constraint = AuthConstraint(role=STEWARD, + sig_count=1, + need_to_be_owner=False) + return build_auth_rule_request_json( + self.looper, self.trustee_wallet[1], + auth_action=EDIT_PREFIX, + auth_type=LEDGERS_FREEZE, + field='*', + old_value='*', + new_value='*', + constraint=constraint.as_dict + ) diff --git a/indy_node/test/auth_rule/auth_framework/test_auth_rule_using.py b/indy_node/test/auth_rule/auth_framework/test_auth_rule_using.py index 2f7341461..8744fca1b 100644 --- a/indy_node/test/auth_rule/auth_framework/test_auth_rule_using.py +++ b/indy_node/test/auth_rule/auth_framework/test_auth_rule_using.py @@ -4,6 +4,7 @@ from collections import OrderedDict from indy_node.test.auth_rule.auth_framework.disable_taa import TAADisableTest +from indy_node.test.auth_rule.auth_framework.edit_frozen_ledgers import EditFrozenLedgersTest from plenum.common.constants import STEWARD, TRUSTEE, IDENTITY_OWNER from indy_common.constants import ( @@ -123,6 +124,7 @@ class TestAuthRuleUsing(): auth_map.change_client_port.get_action_id(): EditNodeClientPortTest, auth_map.change_bls_key.get_action_id(): EditNodeBlsTest, auth_map.disable_txn_author_agreement.get_action_id(): TAADisableTest, + auth_map.edit_frozen_ledgers.get_action_id(): EditFrozenLedgersTest, }) # TODO a workaround until sdk aceepts empty TAA to make possible its deactivation diff --git a/indy_node/test/auth_rule/test_auth_txn_with_deprecated_key.py b/indy_node/test/auth_rule/test_auth_txn_with_deprecated_key.py new file mode 100644 index 
000000000..d2c270b29 --- /dev/null +++ b/indy_node/test/auth_rule/test_auth_txn_with_deprecated_key.py @@ -0,0 +1,150 @@ +import shutil +from contextlib import contextmanager + +import pytest + +from indy_common.config_helper import NodeConfigHelper +from indy_node.test.helper import TestNode +from plenum.test.node_catchup.helper import ensure_all_nodes_have_same_data +from plenum.test.test_node import ensureElectionsDone, ensure_node_disconnected, checkNodesConnected +from indy_node.test.auth_rule.helper import sdk_send_and_check_auth_rule_request, sdk_send_and_check_get_auth_rule_request +from indy_common.authorize.auth_actions import ADD_PREFIX, AuthActionAdd +from indy_common.authorize.auth_constraints import AuthConstraint, ROLE +from indy_common.constants import CONSTRAINT, AUTH_TYPE, CONFIG_LEDGER_ID, NYM +from indy_common.authorize.auth_map import one_trustee_constraint +from plenum.common.constants import STEWARD, DATA +from plenum.common.exceptions import RequestNackedException + + +@contextmanager +def extend_auth_map(nodes, key, constraint): + """ + Context manager to add a new auth rule to the auth map and remove it on exit. 
+ + :param nodes: nodes list which auth maps should be changed + :param key: str gotten from AuthActionAdd(...).get_action_id() + :param constraint: AuthConstraint + """ + for node in nodes: + node.write_req_validator.auth_map[key] = constraint + yield + for node in nodes: + node.write_req_validator.auth_map.pop(key, None) + + +def test_auth_txn_with_deprecated_key(tconf, tdir, allPluginsPath, + txnPoolNodeSet, + looper, + sdk_wallet_trustee, + sdk_pool_handle): + """ + Add to the auth_map a fake rule + Send AUTH_RULE txn to change this fake rule (and set the fake key to the config state) + Send GET_AUTH_RULE txn and check that the fake rule was changed + Remove the fake auth rule from the map + Check that we can't get the fake auth rule + Restart the last node with its state regeneration + Check that nodes data is equal after changing the existing auth rule (restarted node regenerate config state) + """ + + fake_txn_type = "100002" + fake_key = AuthActionAdd(txn_type=fake_txn_type, + field="*", + value="*").get_action_id() + fake_constraint = one_trustee_constraint + new_auth_constraint = AuthConstraint(role=STEWARD, sig_count=1, need_to_be_owner=False).as_dict + + # Add to the auth_map a fake rule + with extend_auth_map(txnPoolNodeSet, + fake_key, + fake_constraint): + # Send AUTH_RULE txn to change this fake rule (and set the fake key to the config state) + sdk_send_and_check_auth_rule_request(looper, + sdk_pool_handle, + sdk_wallet_trustee, + auth_action=ADD_PREFIX, + auth_type=fake_txn_type, + field='*', + new_value='*', + constraint=new_auth_constraint) + # Send GET_AUTH_RULE txn and check that the fake rule was changed + result = sdk_send_and_check_get_auth_rule_request( + looper, + sdk_pool_handle, + sdk_wallet_trustee, + auth_type=fake_txn_type, + auth_action=ADD_PREFIX, + field="*", + new_value="*" + )[0][1]["result"][DATA][0] + assert result[AUTH_TYPE] == fake_txn_type + assert result[CONSTRAINT] == new_auth_constraint + + # Remove the fake auth rule 
from the map + # Check that we can't get the fake auth rule + with pytest.raises(RequestNackedException, match="not found in authorization map"): + sdk_send_and_check_auth_rule_request(looper, + sdk_pool_handle, + sdk_wallet_trustee, + auth_action=ADD_PREFIX, + auth_type=fake_txn_type, + field='*', + new_value='*', + constraint=AuthConstraint(role=STEWARD, sig_count=2, + need_to_be_owner=False).as_dict) + + resp = sdk_send_and_check_get_auth_rule_request(looper, + sdk_pool_handle, + sdk_wallet_trustee) + + assert all(rule[AUTH_TYPE] != fake_txn_type for rule in resp[0][1]["result"][DATA]) + + with pytest.raises(RequestNackedException, match="not found in authorization map"): + sdk_send_and_check_get_auth_rule_request( + looper, + sdk_pool_handle, + sdk_wallet_trustee, + auth_type=fake_txn_type, + auth_action=ADD_PREFIX, + field="*", + new_value="*" + ) + # Restart the last node with its state regeneration + ensure_all_nodes_have_same_data(looper, txnPoolNodeSet) + + node_to_stop = txnPoolNodeSet[-1] + node_state = node_to_stop.states[CONFIG_LEDGER_ID] + assert not node_state.isEmpty + state_db_path = node_state._kv.db_path + node_to_stop.cleanupOnStopping = False + node_to_stop.stop() + looper.removeProdable(node_to_stop) + ensure_node_disconnected(looper, node_to_stop, txnPoolNodeSet[:-1]) + + shutil.rmtree(state_db_path) + + config_helper = NodeConfigHelper(node_to_stop.name, tconf, chroot=tdir) + restarted_node = TestNode( + node_to_stop.name, + config_helper=config_helper, + config=tconf, + pluginPaths=allPluginsPath, + ha=node_to_stop.nodestack.ha, + cliha=node_to_stop.clientstack.ha) + looper.add(restarted_node) + txnPoolNodeSet[-1] = restarted_node + + # Check that nodes data is equal (restarted node regenerate config state) + looper.run(checkNodesConnected(txnPoolNodeSet)) + ensureElectionsDone(looper, txnPoolNodeSet, customTimeout=30) + sdk_send_and_check_auth_rule_request(looper, + sdk_pool_handle, + sdk_wallet_trustee, + auth_action=ADD_PREFIX, + 
auth_type=NYM, + field=ROLE, + new_value=STEWARD, + constraint=AuthConstraint(role=STEWARD, sig_count=2, + need_to_be_owner=False).as_dict) + ensure_all_nodes_have_same_data(looper, txnPoolNodeSet, custom_timeout=20) + diff --git a/indy_node/test/auth_rule/test_catching_up_auth_rule_txn.py b/indy_node/test/auth_rule/test_catching_up_auth_rule_txn.py index 0b2e09f75..45da913c9 100644 --- a/indy_node/test/auth_rule/test_catching_up_auth_rule_txn.py +++ b/indy_node/test/auth_rule/test_catching_up_auth_rule_txn.py @@ -43,12 +43,16 @@ def test_catching_up_auth_rule_txn(looper, auth_type=action.txn_type, field=action.field, new_value=action.value, old_value=None, constraint=changed_constraint.as_dict) + sdk_add_new_nym(looper, + sdk_pool_handle, + sdk_wallet_trustee, + 'newSteward2') delayed_node.start_catchup() looper.run(eventually(lambda: assertExp(delayed_node.mode == Mode.participating))) sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_steward, - 'newSteward2', + 'newSteward3', STEWARD_STRING, dest=new_steward_did, verkey=new_steward_verkey) ensure_all_nodes_have_same_data(looper, txnPoolNodeSet) diff --git a/indy_node/test/conftest.py b/indy_node/test/conftest.py index bcdc9008c..5a36bf404 100644 --- a/indy_node/test/conftest.py +++ b/indy_node/test/conftest.py @@ -97,7 +97,21 @@ def _(): return _ -@pytest.fixture(scope='module') +@pytest.fixture(scope='module', name="sdk_node_theta_added") +def sdk_node_theta_added_fixture(looper, + txnPoolNodeSet, + tdir, + tconf, + sdk_pool_handle, + sdk_wallet_trustee, + allPluginsPath, + node_config_helper_class, + testNodeClass, + name=None, + services=[VALIDATOR]): + return sdk_node_theta_added(looper, txnPoolNodeSet, tdir, tconf, sdk_pool_handle, sdk_wallet_trustee, allPluginsPath, node_config_helper_class, testNodeClass, name) + + def sdk_node_theta_added(looper, txnPoolNodeSet, tdir, @@ -244,21 +258,21 @@ def nodeIds(nodeSet): @pytest.fixture(scope="module") def pool_ledger(tconf, tmpdir_factory): - tdir = 
tmpdir_factory.mktemp('').strpath + tdir = tmpdir_factory.mktemp('tmp').strpath return Ledger(CompactMerkleTree(), dataDir=tdir) @pytest.fixture(scope="module") def domain_ledger(tconf, tmpdir_factory): - tdir = tmpdir_factory.mktemp('').strpath + tdir = tmpdir_factory.mktemp('tmp').strpath return Ledger(CompactMerkleTree(), dataDir=tdir) @pytest.fixture(scope="module") def config_ledger(tconf, tmpdir_factory): - tdir = tmpdir_factory.mktemp('').strpath + tdir = tmpdir_factory.mktemp('tmp').strpath return Ledger(CompactMerkleTree(), dataDir=tdir) diff --git a/indy_node/test/freeze_ledgers/__init__.py b/indy_node/test/freeze_ledgers/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/indy_node/test/freeze_ledgers/test_send_ledgers_freeze.py b/indy_node/test/freeze_ledgers/test_send_ledgers_freeze.py new file mode 100644 index 000000000..2f692fef7 --- /dev/null +++ b/indy_node/test/freeze_ledgers/test_send_ledgers_freeze.py @@ -0,0 +1,39 @@ +import pytest +from plenum.common.constants import DATA +from plenum.common.exceptions import RequestRejectedException +from plenum.test.freeze_ledgers.helper import sdk_get_frozen_ledgers, sdk_send_freeze_ledgers +from plenum.test.helper import freshness + +FRESHNESS_TIMEOUT = 5 + + +@pytest.fixture(scope="module") +def tconf(tconf): + with freshness(tconf, enabled=True, timeout=FRESHNESS_TIMEOUT): + yield tconf + + +def test_send_freeze_ledgers(looper, txnPoolNodeSet, sdk_pool_handle, sdk_wallet_trustee_list): + with pytest.raises(RequestRejectedException, match="Not enough TRUSTEE signatures"): + sdk_send_freeze_ledgers( + looper, sdk_pool_handle, + sdk_wallet_trustee_list[:-1], + [] + ) + + # check that the config state doesn't contain frozen ledgers records + result = sdk_get_frozen_ledgers(looper, sdk_pool_handle, + sdk_wallet_trustee_list[0])[1]["result"][DATA] + assert result is None + + # add to the config state a frozen ledgers record with an empty list + sdk_send_freeze_ledgers( + looper, 
sdk_pool_handle, + sdk_wallet_trustee_list, + [] + ) + + # check that the config state contains a frozen ledgers record with an empty list + result = sdk_get_frozen_ledgers(looper, sdk_pool_handle, + sdk_wallet_trustee_list[0])[1]["result"][DATA] + assert len(result) == 0 diff --git a/indy_node/test/node_control_utils/test_node_control_util.py b/indy_node/test/node_control_utils/test_node_control_util.py index 0ad8344fb..f104d3155 100644 --- a/indy_node/test/node_control_utils/test_node_control_util.py +++ b/indy_node/test/node_control_utils/test_node_control_util.py @@ -127,7 +127,7 @@ def test_get_latest_pkg_version_invalid_args(): (APP_NAME, None, 'Version: 1.2.3\nVersion: 1.2.4\nVersion: 1.2.5~rc1\nVersion: 1.2.5~dev1\nVersion: 1.2.3.4.5', '1.2.5rc1'), (APP_NAME, '1.2.5', 'Version: 1.2.3\nVersion: 1.2.4\nVersion: 1.2.5~rc1\nVersion: 1.2.5~dev1\nVersion: 1.2.3.4.5', None), ], - ids=lambda s: s.replace('\n', '_').replace(' ', '_') + ids=lambda s: s.replace('\n', '_').replace(' ', '_') if s else None ) def test_get_latest_pkg_version( monkeypatch, pkg_name, upstream, output, expected): diff --git a/indy_node/test/nym_txn/test_nym_additional.py b/indy_node/test/nym_txn/test_nym_additional.py index 1c589a27b..612f84b1f 100644 --- a/indy_node/test/nym_txn/test_nym_additional.py +++ b/indy_node/test/nym_txn/test_nym_additional.py @@ -25,7 +25,7 @@ def set_verkey(looper, sdk_pool_handle, sdk_wallet_sender, dest, verkey): return wh, new_did -@pytest.fixture("module") +@pytest.fixture(scope="module") def endorser_did_verkey(looper, sdk_wallet_client): wh, _ = sdk_wallet_client named_did, verkey = looper.loop.run_until_complete( diff --git a/indy_node/test/pool_restart/test_pool_restart.py b/indy_node/test/pool_restart/test_pool_restart.py index e0ef51421..876c79fb1 100644 --- a/indy_node/test/pool_restart/test_pool_restart.py +++ b/indy_node/test/pool_restart/test_pool_restart.py @@ -11,7 +11,6 @@ from indy_node.test.pool_restart.helper import _createServer, _stopServer, 
sdk_send_restart from plenum.common.constants import REPLY, TXN_TYPE from plenum.common.types import f -from plenum.test.testing_utils import FakeSomething def test_pool_restart( @@ -89,9 +88,9 @@ def test_pool_restart_cancel( def test_pool_restart_now_without_datetime( - sdk_pool_handle, sdk_wallet_trustee, looper, tdir, tconf): + sdk_pool_handle, sdk_wallet_trustee, looper, tdir, tconf, txnPoolNodeSet): pool_restart_now(sdk_pool_handle, sdk_wallet_trustee, looper, - tdir, tconf, START) + tdir, tconf, START, txnPoolNodeSet) def test_pool_restart_in_view_change(sdk_pool_handle, sdk_wallet_trustee, looper, @@ -101,11 +100,11 @@ def test_pool_restart_in_view_change(sdk_pool_handle, sdk_wallet_trustee, looper node.master_replica._consensus_data.waiting_for_new_view = True pool_restart_now(sdk_pool_handle, sdk_wallet_trustee, looper, - tdir, tconf, START) + tdir, tconf, START, txnPoolNodeSet) def pool_restart_now(sdk_pool_handle, sdk_wallet_trustee, looper, tdir, tconf, - action, datetime=None): + action, txnPoolNodeSet, use_time=True): server, indicator = looper.loop.run_until_complete( _createServer( host=tconf.controlServiceHost, @@ -113,11 +112,15 @@ def pool_restart_now(sdk_pool_handle, sdk_wallet_trustee, looper, tdir, tconf, ) ) + time = None + if use_time: + unow = datetime.utcnow().replace(tzinfo=dateutil.tz.tzutc()) + time = str(datetime.isoformat(unow + timedelta(seconds=1000))) req_obj, resp = sdk_send_restart(looper, sdk_wallet_trustee, sdk_pool_handle, action=action, - datetime=datetime) + datetime=time) _stopServer(server) _comparison_reply(resp, req_obj) diff --git a/indy_node/test/pool_restart/test_pool_restart_now_with_empty_datetime.py b/indy_node/test/pool_restart/test_pool_restart_now_with_empty_datetime.py index ac3e22215..bf807da64 100644 --- a/indy_node/test/pool_restart/test_pool_restart_now_with_empty_datetime.py +++ b/indy_node/test/pool_restart/test_pool_restart_now_with_empty_datetime.py @@ -3,6 +3,6 @@ def 
test_pool_restart_now_with_empty_datetime( - sdk_pool_handle, sdk_wallet_trustee, looper, tdir, tconf): + sdk_pool_handle, sdk_wallet_trustee, looper, tdir, tconf, txnPoolNodeSet): pool_restart_now(sdk_pool_handle, sdk_wallet_trustee, looper, - tdir, tconf, START, "") \ No newline at end of file + tdir, tconf, START, "", use_time=False) diff --git a/indy_node/test/suspension/test_node_suspension.py b/indy_node/test/suspension/test_node_suspension.py index 8352b345d..6c778079b 100644 --- a/indy_node/test/suspension/test_node_suspension.py +++ b/indy_node/test/suspension/test_node_suspension.py @@ -13,7 +13,7 @@ nodeCount = 7 -def testSuspendNode(looper, sdk_pool_handle, sdk_wallet_trustee, nodeSet, +def test_suspend_node(looper, sdk_pool_handle, sdk_wallet_trustee, nodeSet, tdir, tconf, allPluginsPath): """ Suspend a node and then cancel suspension. Suspend while suspended diff --git a/indy_node/test/tools/test_nsreplay.py b/indy_node/test/tools/test_nsreplay.py index d3d9d42f3..176955d63 100644 --- a/indy_node/test/tools/test_nsreplay.py +++ b/indy_node/test/tools/test_nsreplay.py @@ -78,7 +78,7 @@ def tconf(tconf): return tconf -@pytest.yield_fixture(scope="session", autouse=True) +@pytest.fixture(scope="session", autouse=True) def warncheck(warnfilters): pass diff --git a/indy_node/test/upgrade/test_upgrader.py b/indy_node/test/upgrade/test_upgrader.py index 52344e9a6..78f5a53fb 100644 --- a/indy_node/test/upgrade/test_upgrader.py +++ b/indy_node/test/upgrade/test_upgrader.py @@ -38,7 +38,7 @@ def wrapped(*x, **y): return pkg_version(pkg_name, version) return wrapped - marker = request.node.get_marker('pkg_info') + marker = request.node.get_closest_marker('pkg_info') if marker: assert len(marker.args) > 1 monkeypatch.setattr( @@ -50,7 +50,7 @@ def wrapped(*x, **y): ) ) - marker = request.node.get_marker('latest_pkg_ver') + marker = request.node.get_closest_marker('latest_pkg_ver') if marker: assert len(marker.args) > 0 monkeypatch.setattr( diff --git 
a/scripts/performance/perf_load/perf_utils.py b/scripts/performance/perf_load/perf_utils.py index 9753dfdbd..c02237385 100644 --- a/scripts/performance/perf_load/perf_utils.py +++ b/scripts/performance/perf_load/perf_utils.py @@ -1,7 +1,7 @@ import json import os import argparse -from collections import Sequence +from collections.abc import Sequence from typing import Dict, List from queue import Queue, Empty diff --git a/scripts/remove_ledger.py b/scripts/remove_ledger.py new file mode 100644 index 000000000..c8d36e4cf --- /dev/null +++ b/scripts/remove_ledger.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python3 + +from pathlib import Path +import shutil +from sys import argv +from indy_common.config_util import getConfig +from indy_common.config_helper import ConfigHelper + + +def warn(ledger_name, directories_path): + print('The following directories will be deleted:') + + for path in directories_path: + print(str(path)) + + print('Deleting a ledger is an irrevocable operation.\nProceed only if you know the consequences.') + answer = input('Do you want to delete ledger ' + ledger_name + '?\n Press [y/N]') + + if answer.lower() == 'yes' or answer.lower() == 'y': + return True + + return False + + +def remove(ledger_name): + exceptions = ["domain", "config", "pool", "audit"] + if ledger_name not in exceptions: + directories_path = [] + + for path in Path(config_helper.ledger_data_dir).rglob(ledger_name + "_*"): + directories_path.append(path) + + if not len(directories_path): + print('Ledger doesn`t exist: ' + ledger_name) + + elif warn(ledger_name, directories_path): + for path in directories_path: + shutil.rmtree(str(path)) + print('Ledger removed successfully!') + + else: + print('Can`t delete built in ledger: ' + ledger_name) + + +if __name__ == '__main__': + config = getConfig() + config_helper = ConfigHelper(config) + script, ledger_name = argv + remove(ledger_name) diff --git a/setup.py b/setup.py index 5e8cd71e7..40b59b904 100644 --- a/setup.py +++ b/setup.py @@ 
-28,7 +28,7 @@ BASE_DIR = os.path.join(os.path.expanduser("~"), ".indy") tests_require = ['attrs==19.1.0', 'pytest==3.3.1', 'pytest-xdist==1.22.1', 'pytest-forked==0.2', - 'python3-indy==1.13.0-dev-1420', 'pytest-asyncio==0.8.0'] + 'python3-indy==1.15.0-dev-1618', 'pytest-asyncio==0.8.0'] setup( name=metadata['__title__'], @@ -54,9 +54,7 @@ data_files=[( (BASE_DIR, ['data/nssm_original.exe']) )], - install_requires=['indy-plenum==1.13.0.dev1032', - 'timeout-decorator==0.4.0', - 'distro==1.3.0'], + install_requires=['indy-plenum==1.13.0.dev1034', 'timeout-decorator>=0.5.0', 'distro>=1.5.0'], setup_requires=['pytest-runner'], extras_require={ 'tests': tests_require