diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..65216f7 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,23 @@ +version: 2 +updates: + + # Docker + - package-ecosystem: docker + directory: "/" + schedule: + interval: "monthly" + open-pull-requests-limit: 25 + + # Python + - package-ecosystem: "pip" # See documentation for possible values + directory: "/" # Location of package manifests + schedule: + interval: "monthly" + open-pull-requests-limit: 25 + + # GitHub Actions + - package-ecosystem: "github-actions" + directory: "/" # "/" makes Dependabot scan .github/workflows automatically + schedule: + interval: "monthly" + open-pull-requests-limit: 25 \ No newline at end of file diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 0000000..7b4e224 --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,38 @@ +name: "CodeQL" + +on: + pull_request: + types: + - opened + - reopened + - synchronize + - ready_for_review + push: + # run workflow when merging to main or develop + branches: + - main + - master + - develop + +jobs: + CodeQL-Build: + # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest + runs-on: ubuntu-latest + + permissions: + # required for all workflows + security-events: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # Initializes the CodeQL tools for scanning. 
+ - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + # Override language selection by uncommenting this and choosing your languages + with: + languages: python + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 \ No newline at end of file diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml new file mode 100644 index 0000000..83e1f1c --- /dev/null +++ b/.github/workflows/docker-publish.yml @@ -0,0 +1,57 @@ +name: Docker + +on: + workflow_dispatch: + push: + branches: [ "main", "master", "develop" ] + # Publish semver tags as releases. + tags: + - 'v[0-9]+.[0-9]+.[0-9]+' + - '[0-9]+.[0-9]+.[0-9]+' + - '[0-9]+.[0-9]+.[0-9]+-*' + pull_request: + branches: [ "main", "master", "develop" ] + release: + types: [published] + +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + +jobs: + build: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Extract Docker metadata + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + + - name: Build and push Docker image + id: build-and-push + uses: docker/build-push-action@v5 + with: + context: . 
+ push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml new file mode 100644 index 0000000..cd921df --- /dev/null +++ b/.github/workflows/test.yaml @@ -0,0 +1,100 @@ +name: ID Mapping Service test + +on: + pull_request: + types: + - opened + - reopened + - synchronize + - ready_for_review + push: + # run workflow when merging to main or develop + branches: + - main + - master + - develop + +jobs: + id_mapping_service_container_tests: + runs-on: ubuntu-latest + steps: + - name: Check out GitHub repo + uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.9.19" + + - name: Install dependencies and set up test config + shell: bash + run: | + # set up python dependencies + pip install pytest requests + + # set up deploy.cfg + cp -n deploy.cfg.example deploy.cfg + sed -i "s#^mongo-host.*#mongo-host=mongo#" deploy.cfg + sed -i "s#^mongo-db.*#mongo-db=idmapping#" deploy.cfg + + - name: Run tests + shell: bash + run: | + sh container_test/run_tests.sh + + id_mapping_service_tests: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + include: + - python: '3.9.19' + mongo: 'mongodb-linux-x86_64-3.6.23' + wired_tiger: 'false' + - python: '3.9.19' + mongo: 'mongodb-linux-x86_64-ubuntu2204-7.0.4' + wired_tiger: 'true' + + steps: + - name: Check out GitHub repo + uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{matrix.python}} + + - name: Install dependencies and set up test config + shell: bash + run: | + + # install python libraries + python -m pip install --upgrade pip + pip install pipenv + pipenv sync --system --dev + + # set up mongo + cd .. 
+ wget -q http://fastdl.mongodb.org/linux/${{matrix.mongo}}.tgz + tar xfz ${{matrix.mongo}}.tgz + export MONGOD=`pwd`/${{matrix.mongo}}/bin/mongod + cd - + + # set up test config + cp -n test.cfg.example test.cfg + sed -i "s#^test.temp.dir =.*#test.temp.dir=temp_test_dir#" test.cfg + sed -i "s#^test.mongo.exe.*#test.mongo.exe=$MONGOD#" test.cfg + sed -i "s#^test.mongo.wired_tiger.*#test.mongo.wired_tiger=${{matrix.wired_tiger}}#" test.cfg + + - name: Run tests + shell: bash + run: | + HOMEDIR=`pwd` + PYTHONPATH=$HOMEDIR/src:$PYTHONPATH + make test + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v3 + with: + token: ${{ secrets.CODECOV_TOKEN }} + fail_ci_if_error: true diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 35ea190..0000000 --- a/.travis.yml +++ /dev/null @@ -1,44 +0,0 @@ -sudo: required -language: python -python: "3.6" - -env: - # Runs parallel jobs, 1 for each line. Tests different DB configurations for compatibility. - - MONGODB_VER=mongodb-linux-x86_64-2.6.12 WIRED_TIGER=false MAKE=test - - MONGODB_VER=mongodb-linux-x86_64-3.4.16 WIRED_TIGER=false MAKE=test-mongo - - MONGODB_VER=mongodb-linux-x86_64-3.4.16 WIRED_TIGER=true MAKE=test-mongo - - MONGODB_VER=mongodb-linux-x86_64-3.6.6 WIRED_TIGER=false MAKE=test-mongo - - MONGODB_VER=mongodb-linux-x86_64-3.6.6 WIRED_TIGER=true MAKE=test-mongo - -install: - - pip install -r requirements.txt - - pip install -r dev-requirements.txt - -script: - - - wget http://fastdl.mongodb.org/linux/$MONGODB_VER.tgz - - tar xfz $MONGODB_VER.tgz - - export MONGOD=`pwd`/$MONGODB_VER/bin/mongod - - cp -n test.cfg.example test.cfg - - sed -i "s#^test.temp.dir=.*#test.temp.dir=temp_test_dir#" test.cfg - - sed -i "s#^test.mongo.exe.*#test.mongo.exe=$MONGOD#" test.cfg - - sed -i "s#^test.mongo.wired_tiger.*#test.mongo.wired_tiger=$WIRED_TIGER#" test.cfg - - cat test.cfg - - - make $MAKE - -jobs: - include: - - stage: deploy - env: # The following are secure declarations for DOCKER_USER, 
DOCKER_PASS - - secure: "XjD5QORyBxRLfDe15DCz0R75b/e0ySUVOGABjmS2O4wcGYUFnz0b7LZ5P5p4cQQNVVOE3rZAp5Mce6450E4TvdiKxk/f2J9NOjpgw+P5IHgN+MGUDtfcskPAJtQ2v02YKX5eQp4/UEKBurvqQLYiQHoJR5o/HM4TCpZEwKK/Ee3fZygJYuo2Z7/aId5sJEw9Bb+AJsXL6Z+5Xu3Fzi+zex442M8z0Qk+ylI3GJp4al1e8gko6GbZHldI4/TnZ6WGVQPnBDers13HYnpacbW0n8ol4+jGY94wAWzOOpwmpCpp+JgjfBwzkVcjTBWmnRkrpZaWPIOp3NfUXX3Ovqk6qsPurDbz2ct8t2ST4sP/GbgfGpBH/aby5KfsgUU34+moYQ+JzxhT3DucW5hJONvhmDDc2adgM4qynxBRTVb9HCXKeDVzUERnGWs/GlWQwSQGCFL/nUpyho1NQJc1rE+A2d99vuegJwLGFaI0HuYf7cHv0dgx6zNKDC/B81jMiNuHOAgSeD7yqpOUxS9RzEoI4/IT2BTwP5/Mkp/4TcgIBBktyXgUhWFOugzWgFe1FzrhM7drTvnevNXPMsOIzVCJ4aGVyOTJGgmAlEK5OddAhuvZ5HpoL4zY1QWk7PZxFMBAiem6UAxPVAYKZeyMdNxem2NhsxiRYx7bS8/bFxkGoR8=" - - secure: "yDYnBJPhIsTHr5QJUMuAzA/DstBX1mEGjUKw/KFPz6MeZ8W16KBwbE8Jv9VHuxHkkoZeewztbUzRyF/XW/VIBfLaCVu6L8gm/+FXOz3RcLSc/0BbaUhAtaB0yXg+Z4m86QsK8ZmTgGAPliq2GNyw3NWYLSHxFZ+rFS/83Z3cuRoqHciot22/lbiC+OfrpufGw7jcU18XG3MWA4eAfhAFj/x0lAqd6yOOARvGhx81s9UWk5w49tFvueHaWJRph5egfEVtpk395ia/e4COzF9m6fUBfUal/NvPBTOhmmijN5U2unJ/XJOwsMtLFc73a0g5nGncSpJWpqhUR4s8teBwk+farTIylZpEnh4f9pw5oRakTb9CkR2TvD8uAbRRYwo5xgzwtDoRcS42qIOlLl9mTiqrEU6ypn0fVMrbndrou3GY56Va7AHqbiC0ru1uUEQeHTDO2uuSoXyu5KjxsfrTPD3F6lkX2E7zyAUwwE4fgXJztbWgBqJYl+JgKDsrTRaq+VSVHh6poykI8Ke6Sqzdyizs3dYqGF7FLUH7ZQqJJhxPDKc5adzily5xKx0uqbV9xhtL5LJR6Nr0HtoscNg24NR3wTsmY6TTrtR3x7Lx6QSgvIMl0FNV6e01kygYH/C9I9lXNrI7L3GdrJSwwcYiftKHLAynL+RhBa6fDU1FvJg=" - script: # Only push to dockerhub if this isn't a PR and we're updating master or develop - - make docker_image - - IMAGE_NAME=kbase/id_mapper build/push2dockerhub.sh - -after_success: - - | - if [ "${MAKE}" == "test" ]; then - coveralls - fi diff --git a/Dockerfile b/Dockerfile index 107e576..8e7b782 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,6 @@ FROM kbase/kb_jre:latest as dockerize -FROM python:3.6-alpine +FROM python:3.9.19-alpine # These ARGs values are passed in via the docker build command ARG BUILD_DATE @@ -10,13 +10,16 @@ ARG BRANCH=develop RUN apk add gcc 
linux-headers libc-dev make git COPY --from=dockerize /kb/deployment/bin/dockerize /usr/bin/ -ADD requirements.txt /tmp/ - -RUN pip install -r /tmp/requirements.txt +# install pipenv +RUN pip install --upgrade pip && \ + pip install pipenv +WORKDIR /kb ADD . /kb -RUN cd /kb && make +# install deps +RUN pipenv sync --system +RUN make # The BUILD_DATE value seem to bust the docker cache when the timestamp changes, move to # the end @@ -27,7 +30,6 @@ LABEL org.label-schema.build-date=$BUILD_DATE \ us.kbase.vcs-branch=$BRANCH \ maintainer="Steve Chan sychan@lbl.gov" -WORKDIR /kb/ ENV KB_DEPLOYMENT_CONFIG=/kb/deploy.cfg ENV PYTHONPATH=$PYTHONPATH:/kb/src diff --git a/Makefile b/Makefile index 587a2a3..60348b9 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ test: all flake8 src mypy src IDMAP_TEST_FILE=$(TEST_CFG) pytest --verbose src --cov src/jgikbase/idmapping - bandit --recursive src --exclude src/jgikbase/test + bandit --configfile bandit.yaml --recursive src --exclude src/jgikbase/test # Only test the MongoDB related code. 
test-mongo: diff --git a/Pipfile b/Pipfile new file mode 100644 index 0000000..b619b66 --- /dev/null +++ b/Pipfile @@ -0,0 +1,28 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[packages] +cacheout = "==0.10.2" +flask = "==2.0.0" +gevent = "==24.2.1" +gunicorn = "==22.0.0" +pymongo = "==4.7.2" +requests = "==2.20.0" +types-requests = "==2.25.0" +werkzeug = "==2.0.3" + +[dev-packages] +bandit = "==1.7.9" +coverage = "==7.5.3" +flake8 = "==7.1.0" +mypy = "==1.10.0" +pytest-cov = "==2.5.1" +requests-mock = "==1.5.2" +semver = "==2.8.1" +sphinx = "==1.7.4" +sphinx-autodoc-typehints = "==1.3.0" + +[requires] +python_version = "3.9" diff --git a/Pipfile.lock b/Pipfile.lock new file mode 100644 index 0000000..e32c9e6 --- /dev/null +++ b/Pipfile.lock @@ -0,0 +1,976 @@ +{ + "_meta": { + "hash": { + "sha256": "64e459eb467c3ae215a980b94f5d4fc418dceb8dd1343b3b2014b61b2927a541" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.9" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "cacheout": { + "hashes": [ + "sha256:0832ec705bc3c642e04a793d52c61d7a9578957b6e82f48b563013d594c2e543", + "sha256:c888c328bfc2cfa3f5dd12f88dd8885fefc05566a8bff25d2df75028cf8b5deb" + ], + "index": "pypi", + "version": "==0.10.2" + }, + "certifi": { + "hashes": [ + "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b", + "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90" + ], + "markers": "python_version >= '3.6'", + "version": "==2024.7.4" + }, + "chardet": { + "hashes": [ + "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", + "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + ], + "version": "==3.0.4" + }, + "click": { + "hashes": [ + "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", + "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de" 
+ ], + "markers": "python_version >= '3.7'", + "version": "==8.1.7" + }, + "dnspython": { + "hashes": [ + "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50", + "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc" + ], + "markers": "python_version >= '3.8'", + "version": "==2.6.1" + }, + "flask": { + "hashes": [ + "sha256:168e8507792cb8a3aa06afbe5d4d431d3e07c6318bc3893ceecb81aff09f848d", + "sha256:1833a4b36ace08dfa1510d86f1bb6fc595d990ec1b838e03ac8dd80ac0705954" + ], + "index": "pypi", + "markers": "python_version >= '3.6'", + "version": "==2.0.0" + }, + "gevent": { + "hashes": [ + "sha256:03aa5879acd6b7076f6a2a307410fb1e0d288b84b03cdfd8c74db8b4bc882fc5", + "sha256:117e5837bc74a1673605fb53f8bfe22feb6e5afa411f524c835b2ddf768db0de", + "sha256:141a2b24ad14f7b9576965c0c84927fc85f824a9bb19f6ec1e61e845d87c9cd8", + "sha256:14532a67f7cb29fb055a0e9b39f16b88ed22c66b96641df8c04bdc38c26b9ea5", + "sha256:1dffb395e500613e0452b9503153f8f7ba587c67dd4a85fc7cd7aa7430cb02cc", + "sha256:2955eea9c44c842c626feebf4459c42ce168685aa99594e049d03bedf53c2800", + "sha256:2ae3a25ecce0a5b0cd0808ab716bfca180230112bb4bc89b46ae0061d62d4afe", + "sha256:2e9ac06f225b696cdedbb22f9e805e2dd87bf82e8fa5e17756f94e88a9d37cf7", + "sha256:368a277bd9278ddb0fde308e6a43f544222d76ed0c4166e0d9f6b036586819d9", + "sha256:3adfb96637f44010be8abd1b5e73b5070f851b817a0b182e601202f20fa06533", + "sha256:3d5325ccfadfd3dcf72ff88a92fb8fc0b56cacc7225f0f4b6dcf186c1a6eeabc", + "sha256:432fc76f680acf7cf188c2ee0f5d3ab73b63c1f03114c7cd8a34cebbe5aa2056", + "sha256:44098038d5e2749b0784aabb27f1fcbb3f43edebedf64d0af0d26955611be8d6", + "sha256:5a1df555431f5cd5cc189a6ee3544d24f8c52f2529134685f1e878c4972ab026", + "sha256:6c47ae7d1174617b3509f5d884935e788f325eb8f1a7efc95d295c68d83cce40", + "sha256:6f947a9abc1a129858391b3d9334c45041c08a0f23d14333d5b844b6e5c17a07", + "sha256:782a771424fe74bc7e75c228a1da671578c2ba4ddb2ca09b8f959abdf787331e", + 
"sha256:7899a38d0ae7e817e99adb217f586d0a4620e315e4de577444ebeeed2c5729be", + "sha256:7b00f8c9065de3ad226f7979154a7b27f3b9151c8055c162332369262fc025d8", + "sha256:8f4b8e777d39013595a7740b4463e61b1cfe5f462f1b609b28fbc1e4c4ff01e5", + "sha256:90cbac1ec05b305a1b90ede61ef73126afdeb5a804ae04480d6da12c56378df1", + "sha256:918cdf8751b24986f915d743225ad6b702f83e1106e08a63b736e3a4c6ead789", + "sha256:9202f22ef811053077d01f43cc02b4aaf4472792f9fd0f5081b0b05c926cca19", + "sha256:94138682e68ec197db42ad7442d3cf9b328069c3ad8e4e5022e6b5cd3e7ffae5", + "sha256:968581d1717bbcf170758580f5f97a2925854943c45a19be4d47299507db2eb7", + "sha256:9d8d0642c63d453179058abc4143e30718b19a85cbf58c2744c9a63f06a1d388", + "sha256:a7ceb59986456ce851160867ce4929edaffbd2f069ae25717150199f8e1548b8", + "sha256:b9913c45d1be52d7a5db0c63977eebb51f68a2d5e6fd922d1d9b5e5fd758cc98", + "sha256:bde283313daf0b34a8d1bab30325f5cb0f4e11b5869dbe5bc61f8fe09a8f66f3", + "sha256:bf5b9c72b884c6f0c4ed26ef204ee1f768b9437330422492c319470954bc4cc7", + "sha256:ca80b121bbec76d7794fcb45e65a7eca660a76cc1a104ed439cdbd7df5f0b060", + "sha256:cdf66977a976d6a3cfb006afdf825d1482f84f7b81179db33941f2fc9673bb1d", + "sha256:d4faf846ed132fd7ebfbbf4fde588a62d21faa0faa06e6f468b7faa6f436b661", + "sha256:d7f87c2c02e03d99b95cfa6f7a776409083a9e4d468912e18c7680437b29222c", + "sha256:dd23df885318391856415e20acfd51a985cba6919f0be78ed89f5db9ff3a31cb", + "sha256:f5de3c676e57177b38857f6e3cdfbe8f38d1cd754b63200c0615eaa31f514b4f", + "sha256:f5e8e8d60e18d5f7fd49983f0c4696deeddaf6e608fbab33397671e2fcc6cc91", + "sha256:f7cac622e11b4253ac4536a654fe221249065d9a69feb6cdcd4d9af3503602e0", + "sha256:f8a04cf0c5b7139bc6368b461257d4a757ea2fe89b3773e494d235b7dd51119f", + "sha256:f8bb35ce57a63c9a6896c71a285818a3922d8ca05d150fd1fe49a7f57287b836", + "sha256:fbfdce91239fe306772faab57597186710d5699213f4df099d1612da7320d682" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==24.2.1" + }, + "greenlet": { + "hashes": [ + 
"sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67", + "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6", + "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257", + "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4", + "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676", + "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61", + "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc", + "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca", + "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7", + "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728", + "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305", + "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6", + "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379", + "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414", + "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04", + "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a", + "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf", + "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491", + "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559", + "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e", + "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274", + "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb", + "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b", + "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9", + "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b", + "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be", 
+ "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506", + "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405", + "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113", + "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f", + "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5", + "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230", + "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d", + "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f", + "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a", + "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e", + "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61", + "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6", + "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d", + "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71", + "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22", + "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2", + "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3", + "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067", + "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc", + "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881", + "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3", + "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e", + "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac", + "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53", + "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0", + 
"sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b", + "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83", + "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41", + "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c", + "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf", + "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da", + "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33" + ], + "markers": "python_version < '3.11' and platform_python_implementation == 'CPython'", + "version": "==3.0.3" + }, + "gunicorn": { + "hashes": [ + "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9", + "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==22.0.0" + }, + "idna": { + "hashes": [ + "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e", + "sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16" + ], + "version": "==2.7" + }, + "itsdangerous": { + "hashes": [ + "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", + "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173" + ], + "markers": "python_version >= '3.8'", + "version": "==2.2.0" + }, + "jinja2": { + "hashes": [ + "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369", + "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d" + ], + "markers": "python_version >= '3.7'", + "version": "==3.1.4" + }, + "markupsafe": { + "hashes": [ + "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf", + "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff", + "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f", + "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3", + 
"sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532", + "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f", + "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617", + "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df", + "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4", + "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906", + "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f", + "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4", + "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8", + "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371", + "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2", + "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465", + "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52", + "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6", + "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169", + "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad", + "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2", + "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0", + "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029", + "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f", + "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a", + "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced", + "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5", + "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c", + "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf", + "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9", 
+ "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb", + "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad", + "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3", + "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1", + "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46", + "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc", + "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a", + "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee", + "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900", + "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5", + "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea", + "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f", + "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5", + "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e", + "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a", + "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f", + "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50", + "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a", + "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b", + "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4", + "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff", + "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2", + "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46", + "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b", + "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf", + 
"sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5", + "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5", + "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab", + "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd", + "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68" + ], + "markers": "python_version >= '3.7'", + "version": "==2.1.5" + }, + "packaging": { + "hashes": [ + "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002", + "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124" + ], + "markers": "python_version >= '3.8'", + "version": "==24.1" + }, + "pymongo": { + "hashes": [ + "sha256:02efd1bb3397e24ef2af45923888b41a378ce00cb3a4259c5f4fc3c70497a22f", + "sha256:0d833651f1ba938bb7501f13e326b96cfbb7d98867b2d545ca6d69c7664903e0", + "sha256:12c466e02133b7f8f4ff1045c6b5916215c5f7923bc83fd6e28e290cba18f9f6", + "sha256:12d1fef77d25640cb78893d07ff7d2fac4c4461d8eec45bd3b9ad491a1115d6e", + "sha256:194065c9d445017b3c82fb85f89aa2055464a080bde604010dc8eb932a6b3c95", + "sha256:1c78f156edc59b905c80c9003e022e1a764c54fd40ac4fea05b0764f829790e2", + "sha256:1e37faf298a37ffb3e0809e77fbbb0a32b6a2d18a83c59cfc2a7b794ea1136b0", + "sha256:25eeb2c18ede63891cbd617943dd9e6b9cbccc54f276e0b2e693a0cc40f243c5", + "sha256:268d8578c0500012140c5460755ea405cbfe541ef47c81efa9d6744f0f99aeca", + "sha256:2cb77d09bd012cb4b30636e7e38d00b5f9be5eb521c364bde66490c45ee6c4b4", + "sha256:347c49cf7f0ba49ea87c1a5a1984187ecc5516b7c753f31938bf7b37462824fd", + "sha256:35b3f0c7d49724859d4df5f0445818d525824a6cd55074c42573d9b50764df67", + "sha256:37e9ea81fa59ee9274457ed7d59b6c27f6f2a5fe8e26f184ecf58ea52a019cb8", + "sha256:47a1a4832ef2f4346dcd1a10a36ade7367ad6905929ddb476459abb4fd1b98cb", + "sha256:4bdb5ffe1cd3728c9479671a067ef44dacafc3743741d4dc700c377c4231356f", + "sha256:4ffd1519edbe311df73c74ec338de7d294af535b2748191c866ea3a7c484cd15", + 
"sha256:5239776633f7578b81207e5646245415a5a95f6ae5ef5dff8e7c2357e6264bfc", + "sha256:5239ef7e749f1326ea7564428bf861d5250aa39d7f26d612741b1b1273227062", + "sha256:56bf8b706946952acdea0fe478f8e44f1ed101c4b87f046859e6c3abe6c0a9f4", + "sha256:65b4c00dedbd333698b83cd2095a639a6f0d7c4e2a617988f6c65fb46711f028", + "sha256:6a87eef394039765679f75c6a47455a4030870341cb76eafc349c5944408c882", + "sha256:727ad07952c155cd20045f2ce91143c7dc4fb01a5b4e8012905a89a7da554b0c", + "sha256:730778b6f0964b164c187289f906bbc84cb0524df285b7a85aa355bbec43eb21", + "sha256:743552033c63f0afdb56b9189ab04b5c1dbffd7310cf7156ab98eebcecf24621", + "sha256:7e9d9d2c0aae73aa4369bd373ac2ac59f02c46d4e56c4b6d6e250cfe85f76802", + "sha256:82102e353be13f1a6769660dd88115b1da382447672ba1c2662a0fbe3df1d861", + "sha256:827611beb6c483260d520cfa6a49662d980dfa5368a04296f65fa39e78fccea7", + "sha256:84bc00200c3cbb6c98a2bb964c9e8284b641e4a33cf10c802390552575ee21de", + "sha256:87032f818bf5052ab742812c715eff896621385c43f8f97cdd37d15b5d394e95", + "sha256:87832d6076c2c82f42870157414fd876facbb6554d2faf271ffe7f8f30ce7bed", + "sha256:87bb453ac3eb44db95cb6d5a616fbc906c1c00661eec7f55696253a6245beb8a", + "sha256:9024e1661c6e40acf468177bf90ce924d1bc681d2b244adda3ed7b2f4c4d17d7", + "sha256:9349f0bb17a31371d4cacb64b306e4ca90413a3ad1fffe73ac7cd495570d94b5", + "sha256:9385654f01a90f73827af4db90c290a1519f7d9102ba43286e187b373e9a78e9", + "sha256:9a8bd37f5dabc86efceb8d8cbff5969256523d42d08088f098753dba15f3b37a", + "sha256:9d892fb91e81cccb83f507cdb2ea0aa026ec3ced7f12a1d60f6a5bf0f20f9c1f", + "sha256:a754e366c404d19ff3f077ddeed64be31e0bb515e04f502bf11987f1baa55a16", + "sha256:b48a5650ee5320d59f6d570bd99a8d5c58ac6f297a4e9090535f6561469ac32e", + "sha256:bcf337d1b252405779d9c79978d6ca15eab3cdaa2f44c100a79221bddad97c8a", + "sha256:c44efab10d9a3db920530f7bcb26af8f408b7273d2f0214081d3891979726328", + "sha256:c72d16fede22efe7cdd1f422e8da15760e9498024040429362886f946c10fe95", + "sha256:cb6e00a79dff22c9a72212ad82021b54bdb3b85f38a85f4fc466bde581d7d17a", 
+ "sha256:ce1a374ea0e49808e0380ffc64284c0ce0f12bd21042b4bef1af3eb7bdf49054", + "sha256:cecd2df037249d1c74f0af86fb5b766104a5012becac6ff63d85d1de53ba8b98", + "sha256:cf17ea9cea14d59b0527403dd7106362917ced7c4ec936c4ba22bd36c912c8e0", + "sha256:cf28430ec1924af1bffed37b69a812339084697fd3f3e781074a0148e6475803", + "sha256:d1bcd58669e56c08f1e72c5758868b5df169fe267501c949ee83c418e9df9155", + "sha256:d275596f840018858757561840767b39272ac96436fcb54f5cac6d245393fd97", + "sha256:d2dcf608d35644e8d276d61bf40a93339d8d66a0e5f3e3f75b2c155a421a1b71", + "sha256:d4d59776f435564159196d971aa89422ead878174aff8fe18e06d9a0bc6d648c", + "sha256:d9b6cbc037108ff1a0a867e7670d8513c37f9bcd9ee3d2464411bfabf70ca002", + "sha256:db4380d1e69fdad1044a4b8f3bb105200542c49a0dde93452d938ff9db1d6d29", + "sha256:e004527ea42a6b99a8b8d5b42b42762c3bdf80f88fbdb5c3a9d47f3808495b86", + "sha256:e6eab12c6385526d386543d6823b07187fefba028f0da216506e00f0e1855119", + "sha256:eb0642e5f0dd7e86bb358749cc278e70b911e617f519989d346f742dc9520dfb", + "sha256:f91073049c43d14e66696970dd708d319b86ee57ef9af359294eee072abaac79", + "sha256:fadc6e8db7707c861ebe25b13ad6aca19ea4d2c56bf04a26691f46c23dadf6e4", + "sha256:fc5af24fcf5fc6f7f40d65446400d45dd12bea933d0299dc9e90c5b22197f1e9", + "sha256:fcaf8c911cb29316a02356f89dbc0e0dfcc6a712ace217b6b543805690d2aefd", + "sha256:ffd4d7cb2e6c6e100e2b39606d38a9ffc934e18593dc9bb326196afc7d93ce3d" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==4.7.2" + }, + "requests": { + "hashes": [ + "sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c", + "sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279" + ], + "index": "pypi", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.20.0" + }, + "setuptools": { + "hashes": [ + "sha256:3d8531791a27056f4a38cd3e54084d8b1c4228ff9cf3f2d7dd075ec99f9fd70d", + "sha256:f501b6e6db709818dc76882582d9c516bf3b67b948864c5fa1d1624c09a49207" + ], + 
"markers": "python_version >= '3.8'", + "version": "==71.0.3" + }, + "types-requests": { + "hashes": [ + "sha256:ee0d0c507210141b7d5b8639cc43eaa726084178775db2a5fb06fbf85c185808", + "sha256:fa5c1e5e832ff6193507d8da7e1159281383908ee193a2f4b37bc08140b51844" + ], + "index": "pypi", + "version": "==2.25.0" + }, + "urllib3": { + "hashes": [ + "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", + "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' and python_version < '4'", + "version": "==1.24.3" + }, + "werkzeug": { + "hashes": [ + "sha256:1421ebfc7648a39a5c58c601b154165d05cf47a3cd0ccb70857cbdacf6c8f2b8", + "sha256:b863f8ff057c522164b6067c9e28b041161b4be5ba4d0daceeaa50a163822d3c" + ], + "index": "pypi", + "markers": "python_version >= '3.6'", + "version": "==2.0.3" + }, + "zope.event": { + "hashes": [ + "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26", + "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd" + ], + "markers": "python_version >= '3.7'", + "version": "==5.0" + }, + "zope.interface": { + "hashes": [ + "sha256:00b5c3e9744dcdc9e84c24ed6646d5cf0cf66551347b310b3ffd70f056535854", + "sha256:0e4fa5d34d7973e6b0efa46fe4405090f3b406f64b6290facbb19dcbf642ad6b", + "sha256:136cacdde1a2c5e5bc3d0b2a1beed733f97e2dad8c2ad3c2e17116f6590a3827", + "sha256:1730c93a38b5a18d24549bc81613223962a19d457cfda9bdc66e542f475a36f4", + "sha256:1a62fd6cd518693568e23e02f41816adedfca637f26716837681c90b36af3671", + "sha256:1c207e6f6dfd5749a26f5a5fd966602d6b824ec00d2df84a7e9a924e8933654e", + "sha256:2eccd5bef45883802848f821d940367c1d0ad588de71e5cabe3813175444202c", + "sha256:33ee982237cffaf946db365c3a6ebaa37855d8e3ca5800f6f48890209c1cfefc", + "sha256:3d136e5b8821073e1a09dde3eb076ea9988e7010c54ffe4d39701adf0c303438", + "sha256:47654177e675bafdf4e4738ce58cdc5c6d6ee2157ac0a78a3fa460942b9d64a8", + 
"sha256:47937cf2e7ed4e0e37f7851c76edeb8543ec9b0eae149b36ecd26176ff1ca874", + "sha256:4ac46298e0143d91e4644a27a769d1388d5d89e82ee0cf37bf2b0b001b9712a4", + "sha256:4c0b208a5d6c81434bdfa0f06d9b667e5de15af84d8cae5723c3a33ba6611b82", + "sha256:551db2fe892fcbefb38f6f81ffa62de11090c8119fd4e66a60f3adff70751ec7", + "sha256:599f3b07bde2627e163ce484d5497a54a0a8437779362395c6b25e68c6590ede", + "sha256:5ef8356f16b1a83609f7a992a6e33d792bb5eff2370712c9eaae0d02e1924341", + "sha256:5fe919027f29b12f7a2562ba0daf3e045cb388f844e022552a5674fcdf5d21f1", + "sha256:6f0a6be264afb094975b5ef55c911379d6989caa87c4e558814ec4f5125cfa2e", + "sha256:706efc19f9679a1b425d6fa2b4bc770d976d0984335eaea0869bd32f627591d2", + "sha256:73f9752cf3596771c7726f7eea5b9e634ad47c6d863043589a1c3bb31325c7eb", + "sha256:762e616199f6319bb98e7f4f27d254c84c5fb1c25c908c2a9d0f92b92fb27530", + "sha256:866a0f583be79f0def667a5d2c60b7b4cc68f0c0a470f227e1122691b443c934", + "sha256:86a94af4a88110ed4bb8961f5ac72edf782958e665d5bfceaab6bf388420a78b", + "sha256:8e0343a6e06d94f6b6ac52fbc75269b41dd3c57066541a6c76517f69fe67cb43", + "sha256:97e615eab34bd8477c3f34197a17ce08c648d38467489359cb9eb7394f1083f7", + "sha256:a96e6d4074db29b152222c34d7eec2e2db2f92638d2b2b2c704f9e8db3ae0edc", + "sha256:b912750b13d76af8aac45ddf4679535def304b2a48a07989ec736508d0bbfbde", + "sha256:bc2676312cc3468a25aac001ec727168994ea3b69b48914944a44c6a0b251e79", + "sha256:cebff2fe5dc82cb22122e4e1225e00a4a506b1a16fafa911142ee124febf2c9e", + "sha256:d22fce0b0f5715cdac082e35a9e735a1752dc8585f005d045abb1a7c20e197f9", + "sha256:d3f7e001328bd6466b3414215f66dde3c7c13d8025a9c160a75d7b2687090d15", + "sha256:d3fe667935e9562407c2511570dca14604a654988a13d8725667e95161d92e9b", + "sha256:dabb70a6e3d9c22df50e08dc55b14ca2a99da95a2d941954255ac76fd6982bc5", + "sha256:e2fb8e8158306567a3a9a41670c1ff99d0567d7fc96fa93b7abf8b519a46b250", + "sha256:e96ac6b3169940a8cd57b4f2b8edcad8f5213b60efcd197d59fbe52f0accd66e", + "sha256:fbf649bc77510ef2521cf797700b96167bb77838c40780da7ea3edd8b78044d1" 
+ ], + "markers": "python_version >= '3.7'", + "version": "==6.4.post2" + } + }, + "develop": { + "alabaster": { + "hashes": [ + "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65", + "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92" + ], + "markers": "python_version >= '3.9'", + "version": "==0.7.16" + }, + "babel": { + "hashes": [ + "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb", + "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413" + ], + "markers": "python_version >= '3.8'", + "version": "==2.15.0" + }, + "bandit": { + "hashes": [ + "sha256:52077cb339000f337fb25f7e045995c4ad01511e716e5daac37014b9752de8ec", + "sha256:7c395a436743018f7be0a4cbb0a4ea9b902b6d87264ddecf8cfdc73b4f78ff61" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==1.7.9" + }, + "certifi": { + "hashes": [ + "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b", + "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90" + ], + "markers": "python_version >= '3.6'", + "version": "==2024.7.4" + }, + "chardet": { + "hashes": [ + "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", + "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + ], + "version": "==3.0.4" + }, + "coverage": { + "hashes": [ + "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523", + "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f", + "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d", + "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb", + "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0", + "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c", + "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98", + "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83", 
+ "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8", + "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7", + "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac", + "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84", + "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb", + "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3", + "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884", + "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614", + "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd", + "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807", + "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd", + "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8", + "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc", + "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db", + "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0", + "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08", + "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232", + "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d", + "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a", + "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1", + "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286", + "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303", + "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341", + "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84", + "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45", + 
"sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc", + "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec", + "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd", + "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155", + "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52", + "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d", + "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485", + "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31", + "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d", + "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d", + "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d", + "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85", + "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce", + "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb", + "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974", + "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24", + "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56", + "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9", + "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==7.5.3" + }, + "docutils": { + "hashes": [ + "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", + "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2" + ], + "markers": "python_version >= '3.9'", + "version": "==0.21.2" + }, + "exceptiongroup": { + "hashes": [ + "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", + "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc" + 
], + "markers": "python_version < '3.11'", + "version": "==1.2.2" + }, + "flake8": { + "hashes": [ + "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a", + "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5" + ], + "index": "pypi", + "markers": "python_full_version >= '3.8.1'", + "version": "==7.1.0" + }, + "idna": { + "hashes": [ + "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e", + "sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16" + ], + "version": "==2.7" + }, + "imagesize": { + "hashes": [ + "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", + "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.4.1" + }, + "iniconfig": { + "hashes": [ + "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", + "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" + ], + "markers": "python_version >= '3.7'", + "version": "==2.0.0" + }, + "jinja2": { + "hashes": [ + "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369", + "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d" + ], + "markers": "python_version >= '3.7'", + "version": "==3.1.4" + }, + "markdown-it-py": { + "hashes": [ + "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", + "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb" + ], + "markers": "python_version >= '3.8'", + "version": "==3.0.0" + }, + "markupsafe": { + "hashes": [ + "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf", + "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff", + "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f", + "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3", + 
"sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532", + "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f", + "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617", + "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df", + "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4", + "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906", + "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f", + "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4", + "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8", + "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371", + "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2", + "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465", + "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52", + "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6", + "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169", + "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad", + "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2", + "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0", + "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029", + "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f", + "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a", + "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced", + "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5", + "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c", + "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf", + "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9", 
+ "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb", + "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad", + "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3", + "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1", + "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46", + "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc", + "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a", + "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee", + "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900", + "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5", + "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea", + "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f", + "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5", + "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e", + "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a", + "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f", + "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50", + "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a", + "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b", + "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4", + "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff", + "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2", + "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46", + "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b", + "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf", + 
"sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5", + "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5", + "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab", + "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd", + "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68" + ], + "markers": "python_version >= '3.7'", + "version": "==2.1.5" + }, + "mccabe": { + "hashes": [ + "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", + "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" + ], + "markers": "python_version >= '3.6'", + "version": "==0.7.0" + }, + "mdurl": { + "hashes": [ + "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", + "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba" + ], + "markers": "python_version >= '3.7'", + "version": "==0.1.2" + }, + "mypy": { + "hashes": [ + "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061", + "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99", + "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de", + "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a", + "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9", + "sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec", + "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1", + "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131", + "sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f", + "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821", + "sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5", + "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee", + "sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e", + 
"sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746", + "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2", + "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0", + "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b", + "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53", + "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30", + "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda", + "sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051", + "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2", + "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7", + "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee", + "sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727", + "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976", + "sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==1.10.0" + }, + "mypy-extensions": { + "hashes": [ + "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", + "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782" + ], + "markers": "python_version >= '3.5'", + "version": "==1.0.0" + }, + "packaging": { + "hashes": [ + "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002", + "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124" + ], + "markers": "python_version >= '3.8'", + "version": "==24.1" + }, + "pbr": { + "hashes": [ + "sha256:4a7317d5e3b17a3dccb6a8cfe67dab65b20551404c52c8ed41279fa4f0cb4cda", + "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9" + ], + "markers": "python_version >= '2.6'", + "version": "==6.0.0" + }, + "pluggy": { + "hashes": [ + 
"sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", + "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669" + ], + "markers": "python_version >= '3.8'", + "version": "==1.5.0" + }, + "pycodestyle": { + "hashes": [ + "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c", + "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4" + ], + "markers": "python_version >= '3.8'", + "version": "==2.12.0" + }, + "pyflakes": { + "hashes": [ + "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f", + "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a" + ], + "markers": "python_version >= '3.8'", + "version": "==3.2.0" + }, + "pygments": { + "hashes": [ + "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", + "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a" + ], + "markers": "python_version >= '3.8'", + "version": "==2.18.0" + }, + "pytest": { + "hashes": [ + "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343", + "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977" + ], + "markers": "python_version >= '3.8'", + "version": "==8.2.2" + }, + "pytest-cov": { + "hashes": [ + "sha256:03aa752cf11db41d281ea1d807d954c4eda35cfa1b21d6971966cc041bbf6e2d", + "sha256:890fe5565400902b0c78b5357004aab1c814115894f4f21370e2433256a3eeec" + ], + "index": "pypi", + "version": "==2.5.1" + }, + "pyyaml": { + "hashes": [ + "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5", + "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc", + "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df", + "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741", + "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206", + "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27", + 
"sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595", + "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62", + "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98", + "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696", + "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290", + "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9", + "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d", + "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6", + "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867", + "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47", + "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486", + "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6", + "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3", + "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007", + "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938", + "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0", + "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c", + "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735", + "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d", + "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28", + "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4", + "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba", + "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8", + "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef", + "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5", + "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd", 
+ "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3", + "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0", + "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515", + "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c", + "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c", + "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924", + "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34", + "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43", + "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859", + "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673", + "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54", + "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a", + "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b", + "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab", + "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa", + "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c", + "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585", + "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d", + "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f" + ], + "markers": "python_version >= '3.6'", + "version": "==6.0.1" + }, + "requests": { + "hashes": [ + "sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c", + "sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279" + ], + "index": "pypi", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.20.0" + }, + "requests-mock": { + "hashes": [ + "sha256:7a5fa99db5e3a2a961b6f20ed40ee6baeff73503cf0a553cc4d679409e6170fb", + 
"sha256:8ca0628dc66d3f212878932fd741b02aa197ad53fd2228164800a169a4a826af" + ], + "index": "pypi", + "version": "==1.5.2" + }, + "rich": { + "hashes": [ + "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222", + "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432" + ], + "markers": "python_full_version >= '3.7.0'", + "version": "==13.7.1" + }, + "semver": { + "hashes": [ + "sha256:41c9aa26c67dc16c54be13074c352ab666bce1fa219c7110e8f03374cd4206b0", + "sha256:5b09010a66d9a3837211bb7ae5a20d10ba88f8cb49e92cb139a69ef90d5060d8" + ], + "index": "pypi", + "version": "==2.8.1" + }, + "setuptools": { + "hashes": [ + "sha256:3d8531791a27056f4a38cd3e54084d8b1c4228ff9cf3f2d7dd075ec99f9fd70d", + "sha256:f501b6e6db709818dc76882582d9c516bf3b67b948864c5fa1d1624c09a49207" + ], + "markers": "python_version >= '3.8'", + "version": "==71.0.3" + }, + "six": { + "hashes": [ + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.16.0" + }, + "snowballstemmer": { + "hashes": [ + "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1", + "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a" + ], + "version": "==2.2.0" + }, + "sphinx": { + "hashes": [ + "sha256:2e7ad92e96eff1b2006cf9f0cdb2743dacbae63755458594e9e8238b0c3dc60b", + "sha256:e9b1a75a3eae05dded19c80eb17325be675e0698975baae976df603b6ed1eb10" + ], + "index": "pypi", + "version": "==1.7.4" + }, + "sphinx-autodoc-typehints": { + "hashes": [ + "sha256:1a9df6cb3ba72453ea4bfbe96ea887abc0d796b2ce9508c2189217a1bb69b366", + "sha256:46cc9e985ee6d8bbbd07fffd95b815c39a72df6afb600f59671f85f7340e7d0d" + ], + "index": "pypi", + "markers": "python_full_version not in '3.5.0, 3.5.1'", + "version": "==1.3.0" + }, + "sphinxcontrib-serializinghtml": { + "hashes": [ + 
"sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7", + "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f" + ], + "markers": "python_version >= '3.9'", + "version": "==1.1.10" + }, + "sphinxcontrib-websupport": { + "hashes": [ + "sha256:4edf0223a0685a7c485ae5a156b6f529ba1ee481a1417817935b20bde1956232", + "sha256:6fc9287dfc823fe9aa432463edd6cea47fa9ebbf488d7f289b322ffcfca075c7" + ], + "markers": "python_version >= '3.5'", + "version": "==1.2.4" + }, + "stevedore": { + "hashes": [ + "sha256:1c15d95766ca0569cad14cb6272d4d31dae66b011a929d7c18219c176ea1b5c9", + "sha256:46b93ca40e1114cea93d738a6c1e365396981bb6bb78c27045b7587c9473544d" + ], + "markers": "python_version >= '3.8'", + "version": "==5.2.0" + }, + "tomli": { + "hashes": [ + "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", + "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" + ], + "markers": "python_version < '3.11'", + "version": "==2.0.1" + }, + "typing-extensions": { + "hashes": [ + "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", + "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8" + ], + "markers": "python_version >= '3.8'", + "version": "==4.12.2" + }, + "urllib3": { + "hashes": [ + "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", + "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' and python_version < '4'", + "version": "==1.24.3" + } + } +} diff --git a/README.md b/README.md index de731fe..a170e44 100644 --- a/README.md +++ b/README.md @@ -247,8 +247,8 @@ A maximum of 10000 ids may be supplied. 
## Requirements -* Python 3.6+ -* MongoDB 2.6+ +* Python 3.9+ +* MongoDB 3.6+ * Make * git @@ -257,7 +257,8 @@ The system is tested on Ubuntu, but should probably work on other operating syst ## Setup * Install the runtime dependencies - * `pip install -r requirements.txt` + * `pipenv sync` + * `pipenv shell` * Start MongoDB * From the IDMappingService repo: * `make` @@ -425,8 +426,6 @@ Anything else is mapped to 500. ## TODO -* travis - * try mongo 4 - maybe wait for a couple bugfix versions * integration tests with KBase auth server? - lot of work for little gain * if performance becomes an issue * push the bulk operations further down the stack diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 5a745f4..4a33e7e 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -1,5 +1,15 @@ # ID Mapping Service release notes +## 0.1.2 +* The MongoDB clients have been updated to the most recent version and the service tested against Mongo 7. +* Added id mapping service container test in GHA +* Added the mongo-retrywrites configuration setting in deployment.cfg.templ and deploy.cfg.example, defaulting to false. +* Updated the docker-compose file to start an id mapping service server. +* Added pipenv to handle dependencies. +* Added Dependabot, CodeQL, and release image build. +* Replaced Travis CI with GitHub Actions workflows. 
+* Updated Python to 3.9.19, Flask to 2.0.0, and werkzeug to 2.0.3 + ## 0.1.1 * Updated `pymongo` to fix mongo authentication issues diff --git a/bandit.yaml b/bandit.yaml new file mode 100644 index 0000000..5e34719 --- /dev/null +++ b/bandit.yaml @@ -0,0 +1,5 @@ +# bandit.yaml + +# Configuration for Bandit +skips: + - B113 # Requests call without timeout \ No newline at end of file diff --git a/container_test/README.md b/container_test/README.md new file mode 100644 index 0000000..b46a0a1 --- /dev/null +++ b/container_test/README.md @@ -0,0 +1 @@ +This directory contains a very small test suite that runs against the id mapping service in a docker container. \ No newline at end of file diff --git a/container_test/id_mapping_service_container_test.py b/container_test/id_mapping_service_container_test.py new file mode 100644 index 0000000..ec8fc11 --- /dev/null +++ b/container_test/id_mapping_service_container_test.py @@ -0,0 +1,139 @@ +import json +import os +import pytest +import requests +import time + + +""" id_mapping_service_container_test.py + +Very simple tests to ensure that local id mapping server is functioning correctly. +Requires the python libraries `pytest` and `requests` to be installed. + +Assumes that the id mapping service is running locally on port 8080. 
+ +Use the wrapper shell script, `run_tests.sh`, to create the necessary set up and run the tests: + +sh container_test/run_tests.sh + +""" + +SERVICE = "ID Mapping Service" +ID_MAPPING_SERVICE_VERSION = "0.1.2" + +ID_MAPPING_URL = "http://localhost:8080" +WAIT_TIMES = [1, 2, 5, 10, 30] + +NAMESPACE_1 = "sijie" +NAMESPACE_2 = "gavin" + + +@pytest.fixture(scope="module") +def ready(): + wait_for_id_mapping_service() + + yield + + +def wait_for_id_mapping_service(): + print("waiting for id mapping service...") + + attempt = 1 + max_attempts = len(WAIT_TIMES) + 1 + while attempt <= max_attempts: + print(f"Attempt {attempt} of {max_attempts}") + try: + res = requests.get(ID_MAPPING_URL) + res.raise_for_status() + return + except Exception as e: + if attempt < max_attempts: + t = WAIT_TIMES[attempt - 1] + print( + f"Failed to connect to id mapping service, waiting {t} sec " + f"and trying again:\n\t{e}" + ) + time.sleep(t) + attempt += 1 + raise Exception( + f"Couldn't connect to the id mapping service after {max_attempts} attempts" + ) + + +def test_id_mapping_service(ready) -> None: + """create two namespaces, add admins, create mappings, and list mappings""" + user, token = get_user_and_token() + test_id_mapping_version() + create_namespaces(token) + add_admins(user, token) + create_mappings(token) + list_mappings() + + +def create_namespaces(token) -> None: + response_1 = requests.put( + ID_MAPPING_URL + f"/api/v1/namespace/{NAMESPACE_1}", + headers={"authorization": f"local {token}"}, + ) + + response_2 = requests.put( + ID_MAPPING_URL + f"/api/v1/namespace/{NAMESPACE_2}", + headers={"authorization": f"local {token}"}, + ) + + assert response_1.status_code == 204 + assert response_2.status_code == 204 + + +def add_admins(user: str, token: str) -> None: + response_1 = requests.put( + ID_MAPPING_URL + f"/api/v1/namespace/{NAMESPACE_1}/user/local/{user}", + headers={"authorization": f"local {token}"}, + ) + + response_2 = requests.put( + ID_MAPPING_URL + 
f"/api/v1/namespace/{NAMESPACE_2}/user/local/{user}", + headers={"authorization": f"local {token}"}, + ) + + assert response_1.status_code == 204 + assert response_2.status_code == 204 + + +def create_mappings(token: str) -> None: + response = requests.put( + ID_MAPPING_URL + f"/api/v1/mapping/{NAMESPACE_1}/{NAMESPACE_2}", + headers={"Authorization": "local " + token}, + json={"id1": "id2", "id3": "id4", "id5": "id6"}, + ) + + assert response.status_code == 204 + + +def list_mappings() -> None: + response = requests.get( + ID_MAPPING_URL + f"/api/v1/mapping/{NAMESPACE_2}?separate", + data=json.dumps({"ids": ["id2", "id4", "id8"]}), + ) + + assert response.status_code == 200 + assert response.json() == { + "id2": {"admin": [], "other": [{"id": "id1", "ns": NAMESPACE_1}]}, + "id4": {"admin": [], "other": [{"id": "id3", "ns": NAMESPACE_1}]}, + "id8": {"admin": [], "other": []}, + } + + +def test_id_mapping_version() -> None: + """get the current id mapping service version""" + res = requests.get(ID_MAPPING_URL) + assert res.status_code == 200 + data = res.json() + assert data["service"] == SERVICE + assert data["version"] == ID_MAPPING_SERVICE_VERSION + + +def get_user_and_token() -> tuple[str, str]: + user = os.environ["USER"] + token = os.environ["ID_MAPPER_OUTPUT"].split("\n")[1] + return user, token diff --git a/container_test/run_tests.sh b/container_test/run_tests.sh new file mode 100644 index 0000000..e257330 --- /dev/null +++ b/container_test/run_tests.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash +# +# Script to run the python id_mapping_service_container_test.py locally or on GitHub Actions. +# Builds and mounts id mapping service and mongo docker containers, and then calls +# the python script. +# +# See .github/workflows/test.yml for GitHub Actions implementation. + +# build and start the containers +docker compose up -d --build +compose_up_exit_code=$? 
+if [ $compose_up_exit_code -ne 0 ]; then + echo "Error: docker compose up -d --build command failed with exit code $compose_up_exit_code." + exit $compose_up_exit_code +fi + +export USER=myname +# create user myname with token +export ID_MAPPER_OUTPUT=$(docker exec id_mapper_container /bin/sh ./id_mapper --user $USER --create) +# set user myname's admin state to true +docker exec id_mapper_container /bin/sh ./id_mapper --user $USER --admin true + +# get the path to the current directory and add it to the python execution path +current_dir="$( dirname -- "$( readlink -f -- "$0"; )"; )" +PYTHONPATH="$current_dir":$PYTHONPATH python -m pytest container_test/id_mapping_service_container_test.py +exit_code=$? + +docker compose down +exit $exit_code \ No newline at end of file diff --git a/deploy.cfg.example b/deploy.cfg.example index c3fdf37..a384681 100644 --- a/deploy.cfg.example +++ b/deploy.cfg.example @@ -9,6 +9,10 @@ mongo-db= mongo-user= mongo-pwd= +# Whether to enable ('true') the MongoDB retryWrites parameter or not (anything other than 'true'). +# See https://www.mongodb.com/docs/manual/core/retryable-writes/ +mongo-retrywrites=false + # If "true", make the server ignore the X-Forwarded-For and X-Real-IP headers. Otherwise # (the default behavior), the logged IP address for a request, in order of precedence, is # 1) the first address in X-Forwarded-For, 2) X-Real-IP, and 3) the address of the client. 
diff --git a/deployment/conf/.templates/deployment.cfg.templ b/deployment/conf/.templates/deployment.cfg.templ index c8de6de..578e43a 100644 --- a/deployment/conf/.templates/deployment.cfg.templ +++ b/deployment/conf/.templates/deployment.cfg.templ @@ -3,6 +3,7 @@ mongo-host={{ default .Env.mongo_host "ci-mongo" }} mongo-db={{ default .Env.mongo_db "idmapping" }} mongo-user={{ default .Env.mongo_user "" }} mongo-pwd={{ default .Env.mongo_pwd "" }} +mongo-retrywrites={{ default .Env.mongo_retrywrites "false" }} authentication-enabled={{ default .Env.authentication_enabled "local, kbase" }} authentication-admin-enabled={{ default .Env.authentication_admin_enabled "local, kbase" }} diff --git a/dev-requirements.txt b/dev-requirements.txt deleted file mode 100644 index 40c87cf..0000000 --- a/dev-requirements.txt +++ /dev/null @@ -1,10 +0,0 @@ -bandit==1.4.0 -mypy==0.610 -flake8==3.5.0 -coverage==4.0.3 -pytest-cov==2.5.1 -sphinx==1.7.4 -sphinx-autodoc-typehints==1.3.0 -python-coveralls==2.9.1 -semver==2.8.1 -requests-mock==1.5.2 \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 914b9f0..6164695 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,16 +5,18 @@ version: "3.1" # that is started up and polled services: id_mapper: - image: kbase/id_mapper:latest + build: + context: . 
+ dockerfile: Dockerfile + container_name: id_mapper_container ports: - "8080:8080" environment: + - mongo_host=mongo:27017 - ID_MAPPING_CONFIG=/kb/deployment/conf/deployment.cfg command: - "-wait" - - "tcp://ci-mongo:27017" - - "-wait" - - "tcp://mongoinit:8080" + - "tcp://mongo:27017" - "-timeout" - "120s" - "-template" @@ -42,22 +44,10 @@ services: # If your server is using self-signed certs, or otherwise problematic for cert validation # you can add the following flag: # - "-validateCert=false" - depends_on: ["ci-mongo", "mongoinit"] + depends_on: ["mongo"] - mongoinit: - image: kbase/db_initialize:latest - entrypoint: - - "/kb/deployment/bin/dockerize.sh" - - "-wait" - - "tcp://ci-mongo:27017" - - "-timeout" - - "120s" - depends_on: [ "ci-mongo" ] - - ci-mongo: - image: mongo:2 - command: - - "--smallfiles" + mongo: + image: mongo:3.6 ports: - "27017:27017" diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 6ae302d..0000000 --- a/requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -pymongo==3.8.0 -requests==2.20.0 -cacheout==0.10.2 -Flask==1.0.2 -gunicorn==19.9.0 -gevent==1.3.5 \ No newline at end of file diff --git a/src/jgikbase/idmapping/builder.py b/src/jgikbase/idmapping/builder.py index 0bc14d6..a9889d1 100644 --- a/src/jgikbase/idmapping/builder.py +++ b/src/jgikbase/idmapping/builder.py @@ -1,11 +1,18 @@ """ Contains code for building the core ID mapping code given a configuration. 
""" + from jgikbase.idmapping.config import KBaseConfig -from jgikbase.idmapping.core.user_lookup import LocalUserLookup, UserLookupSet, UserLookup,\ - LookupInitializationError +from jgikbase.idmapping.core.user_lookup import ( + LocalUserLookup, + UserLookupSet, + UserLookup, + LookupInitializationError, +) from pymongo.mongo_client import MongoClient -from jgikbase.idmapping.storage.mongo.id_mapping_mongo_storage import IDMappingMongoStorage +from jgikbase.idmapping.storage.mongo.id_mapping_mongo_storage import ( + IDMappingMongoStorage, +) from pymongo.errors import ConnectionFailure from jgikbase.idmapping.core.mapper import IDMapper from pathlib import Path @@ -13,17 +20,17 @@ import importlib from jgikbase.idmapping.core.arg_check import not_none from jgikbase.idmapping.storage.id_mapping_storage import IDMappingStorage -from typing import Dict, Set # @UnusedImport pydev +from typing import Dict, Set, Optional # @UnusedImport pydev from typing import cast class IDMappingBuildException(Exception): - """ Thrown when the build fails. """ + """Thrown when the build fails.""" class _SometimesMyPyIsReallyStupid: # pragma: no cover @staticmethod - def build_lookup(config: Dict[str, str]) -> UserLookup: + def build_lookup(config: Dict[str, str]) -> UserLookup: # type: ignore[empty-body] pass @@ -48,7 +55,7 @@ def __init__(self) -> None: Create a builder. """ - def build_local_user_lookup(self, cfgpath: Path=None) -> LocalUserLookup: + def build_local_user_lookup(self, cfgpath: Optional[Path] = None) -> LocalUserLookup: """ Build a local user lookup handler. 
@@ -60,37 +67,44 @@ def build_local_user_lookup(self, cfgpath: Path=None) -> LocalUserLookup: return LocalUserLookup(self._build_storage()) def _set_cfg(self, cfgpath) -> KBaseConfig: - if not hasattr(self, 'cfg'): + if not hasattr(self, "cfg"): self.cfg = KBaseConfig(cfgpath) return self.cfg - def get_cfg(self, cfgpath: Path=None) -> KBaseConfig: + def get_cfg(self, cfgpath: Optional[Path] = None) -> KBaseConfig: """ Get the system configuration. :param cfgpath: the the path to the build configuration file. The configuration is memoized and used in any future builds, and any other configurations are ignored. - """ + """ return self._set_cfg(cfgpath) def _build_storage(self) -> IDMappingStorage: - if not hasattr(self, '_storage'): + if not hasattr(self, "_storage"): if self.cfg.mongo_user: # NOTE this is currently only tested manually. - client = MongoClient(self.cfg.mongo_host, authSource=self.cfg.mongo_db, - username=self.cfg.mongo_user, password=self.cfg.mongo_pwd) + client: MongoClient = MongoClient( + self.cfg.mongo_host, + authSource=self.cfg.mongo_db, + username=self.cfg.mongo_user, + password=self.cfg.mongo_pwd, + retryWrites=self.cfg.mongo_retrywrites, + ) else: - client = MongoClient(self.cfg.mongo_host) + client = MongoClient( + self.cfg.mongo_host, retryWrites=self.cfg.mongo_retrywrites + ) try: # The ismaster command is cheap and does not require auth. - client.admin.command('ismaster') + client.admin.command("ismaster") except ConnectionFailure as e: - raise IDMappingBuildException('Connection to database failed') from e - db = client[self.cfg.mongo_db] + raise IDMappingBuildException("Connection to database failed") from e + db = client[self.cfg.mongo_db] # type: ignore self._storage: IDMappingStorage = IDMappingMongoStorage(db) return self._storage - def build_id_mapping_system(self, cfgpath: Path=None) -> IDMapper: + def build_id_mapping_system(self, cfgpath: Optional[Path] = None) -> IDMapper: """ Build the ID Mapping system. 
@@ -105,31 +119,40 @@ def build_id_mapping_system(self, cfgpath: Path=None) -> IDMapper: lookups.add(self.build_local_user_lookup(cfgpath)) else: lookups.add(self.build_user_lookup(asID, *cfg.lookup_configs[asID])) - return IDMapper(UserLookupSet(lookups), cfg.auth_admin_enabled, self._build_storage()) + return IDMapper( + UserLookupSet(lookups), cfg.auth_admin_enabled, self._build_storage() + ) def build_user_lookup( - self, - config_authsource_id: AuthsourceID, - factory_module: str, - config: Dict[str, str] - ) -> UserLookup: - not_none(config_authsource_id, 'config_authsource_id') - not_none(factory_module, 'factory_module') - not_none(config, 'config') + self, + config_authsource_id: AuthsourceID, + factory_module: str, + config: Dict[str, str], + ) -> UserLookup: + not_none(config_authsource_id, "config_authsource_id") + not_none(factory_module, "factory_module") + not_none(config, "config") try: - mod = cast(_SometimesMyPyIsReallyStupid, importlib.import_module(factory_module)) + mod = cast( + _SometimesMyPyIsReallyStupid, importlib.import_module(factory_module) + ) except Exception as e: - raise IDMappingBuildException('Could not import module {}: {}'.format( - factory_module, str(e))) from e + raise IDMappingBuildException( + "Could not import module {}: {}".format(factory_module, str(e)) + ) from e try: lookup = mod.build_lookup(config) except LookupInitializationError as e: raise e except Exception as e: - raise IDMappingBuildException('Could not build module {}: {}'.format( - factory_module, str(e))) from e + raise IDMappingBuildException( + "Could not build module {}: {}".format(factory_module, str(e)) + ) from e if config_authsource_id != lookup.get_authsource_id(): raise IDMappingBuildException( - 'User lookup authsource ID mismatch: configuration ID is {}, module reports ID {}' - .format(config_authsource_id.id, lookup.get_authsource_id().id)) + "User lookup authsource ID mismatch: configuration ID is {}, " + "module reports ID {}".format( + 
config_authsource_id.id, lookup.get_authsource_id().id + ) + ) return lookup diff --git a/src/jgikbase/idmapping/cli.py b/src/jgikbase/idmapping/cli.py index 093b203..433d7b8 100644 --- a/src/jgikbase/idmapping/cli.py +++ b/src/jgikbase/idmapping/cli.py @@ -24,7 +24,7 @@ class IDMappingCLI: _USER = '--user' _LIST = '--list-users' _CREATE = '--create' - _NEW_TOKEN = '--new-token' + _NEW_TOKEN = '--new-token' # nosec _ADMIN = '--admin' _TRUE = 'true' @@ -164,7 +164,7 @@ def _parse_args(self) -> argparse.Namespace: help='Create a user. Requires the {} option.'.format(self._USER)) parser.add_argument(self._NEW_TOKEN, action='store_true', help='Make a new token for a user. Requires the {} option.'.format( - self._USER, self._CREATE)) + self._USER)) parser.add_argument(self._ADMIN, help=( "Set whether the user is an admin ('{}') or not ('{}'). Any other values are " + 'not permitted. Requires the {} option.').format(self._TRUE, self._FALSE, self._USER)) diff --git a/src/jgikbase/idmapping/config.py b/src/jgikbase/idmapping/config.py index c086405..adc766b 100644 --- a/src/jgikbase/idmapping/config.py +++ b/src/jgikbase/idmapping/config.py @@ -1,6 +1,7 @@ """ Configuration handlers for the ID Mapping system. """ + from typing import Dict, Optional, Set, Tuple from pathlib import Path import os @@ -8,12 +9,14 @@ from jgikbase.idmapping.core.user import AuthsourceID from jgikbase.idmapping.core.errors import MissingParameterError from jgikbase.idmapping.core.user_lookup import LocalUserLookup + # May want different configuration implementations based on the deployment environment. # YAGNI for now. class IDMappingConfigError(Exception): - """ Thrown when there's an error in the ID Mapping system configuration. 
""" + """Thrown when there's an error in the ID Mapping system configuration.""" + pass @@ -27,6 +30,7 @@ class KBaseConfig: mongo-db mongo-user (optional) mongo-pwd (optional) + mongo-retrywrites (optional) authentication-enabled (optional) authentication-admin-enabled (optional) keys specific to each authentication source. See the example deploy.cfg file in this repo @@ -40,6 +44,7 @@ class KBaseConfig: :ivar mongo_db: the MongoDB database to use for the ID mapping service. :ivar mongo_user: the username to use with MongoDB, if any. :ivar mongo_pwd: the password to use with MongoDB, if any. + :ivar mongo_retrywrites: whether to enable retryWrites parameter with MongoDB. :ivar auth_enabled: the set of authentication sources that are enabled. :ivar auth_admin_enabled: the set of authentication sources that are trusted to define system administrators. @@ -52,68 +57,71 @@ class KBaseConfig: lookup instance. """ - ENV_VAR_IDMAPPING = 'ID_MAPPING_CONFIG' + ENV_VAR_IDMAPPING = "ID_MAPPING_CONFIG" """ The first environment variable where the system will look for the path to the config file. """ - ENV_VAR_KB_DEP = 'KB_DEPLOYMENT_CONFIG' + ENV_VAR_KB_DEP = "KB_DEPLOYMENT_CONFIG" """ The second environment variable where the system will look for the path to the config file. """ - CFG_SEC = 'idmapping' + CFG_SEC = "idmapping" """ The section of the config file where the configuration is located. """ - _TEMP_KEY_CFG_FILE = 'temp-key-config-file' + _TEMP_KEY_CFG_FILE = "temp-key-config-file" - KEY_MONGO_HOST = 'mongo-host' + KEY_MONGO_HOST = "mongo-host" """ The key corresponding to the value containing the MongoDB host. """ - KEY_MONGO_DB = 'mongo-db' + KEY_MONGO_DB = "mongo-db" """ The key corresponding to the value containing the MongoDB database. """ - KEY_MONGO_USER = 'mongo-user' + KEY_MONGO_USER = "mongo-user" """ The key corresponding to the value containing the MongoDB username. 
""" - KEY_MONGO_PWD = 'mongo-pwd' + KEY_MONGO_PWD = "mongo-pwd" # nosec """ The key corresponding to the value containing the MongoDB user password. """ - KEY_AUTH_ENABLED = 'authentication-enabled' + KEY_MONGO_RETRYWRITES = "mongo-retrywrites" + """ The key corresponding to the value containing the MongoDB retrywrites. """ + + KEY_AUTH_ENABLED = "authentication-enabled" """ The key corresponding to the value containing a comma separated list of authentication sources that should be enabled on system start up. """ - KEY_AUTH_ADMIN_ENABLED = 'authentication-admin-enabled' + KEY_AUTH_ADMIN_ENABLED = "authentication-admin-enabled" """ The key corresponding to the value containing a comma separated list of authentication sources that are trusted to define system administrators. """ - KEY_IGNORE_IP_HEADERS = 'dont-trust-x-ip-headers' + KEY_IGNORE_IP_HEADERS = "dont-trust-x-ip-headers" """ The key corresponding to the value containing a boolean designating whether the X-Real_IP and X-Forwarded-For headers should be ignored. """ - AUTH_PREFIX = 'auth-source-' + AUTH_PREFIX = "auth-source-" """ The prefix for keys for specific authentication sources. """ - FACTORY_MODULE = '-factory-module' + FACTORY_MODULE = "-factory-module" """ The suffix for the key for a specific authentication source that defines the python module containing the factory for the user lookup instance. """ - INIT = '-init-' + INIT = "-init-" """ The portion of the key after the authentication source name that defines the key as a key-value configuration item. """ - _TRUE = 'true' + _TRUE = "true" - def __init__(self, cfgfile: Path=None) -> None: + def __init__(self, cfgfile: Optional[Path] = None) -> None: """ Load the configuration. 
@@ -129,31 +137,45 @@ def __init__(self, cfgfile: Path=None) -> None: self.mongo_db = self._get_string(self.KEY_MONGO_DB, cfg) self.mongo_user = self._get_string(self.KEY_MONGO_USER, cfg, False) mongo_pwd = self._get_string(self.KEY_MONGO_PWD, cfg, False) + mongo_retrywrites_value = self._get_string(self.KEY_MONGO_RETRYWRITES, cfg, False) + self.mongo_retrywrites = self._TRUE == mongo_retrywrites_value if bool(self.mongo_user) ^ bool(mongo_pwd): # xor mongo_pwd = None raise IDMappingConfigError( - ('Must provide both {} and {} params in config file ' + - '{} section {} if MongoDB authentication is to be used').format( - self.KEY_MONGO_USER, self.KEY_MONGO_PWD, cfg[self._TEMP_KEY_CFG_FILE], - self.CFG_SEC)) + ( + "Must provide both {} and {} params in config file " + + "{} section {} if MongoDB authentication is to be used" + ).format( + self.KEY_MONGO_USER, + self.KEY_MONGO_PWD, + cfg[self._TEMP_KEY_CFG_FILE], + self.CFG_SEC, + ) + ) self.mongo_pwd = mongo_pwd self.auth_enabled = self._get_authsource_ids(self.KEY_AUTH_ENABLED, cfg) - self.auth_admin_enabled = self._get_authsource_ids(self.KEY_AUTH_ADMIN_ENABLED, cfg) + self.auth_admin_enabled = self._get_authsource_ids( + self.KEY_AUTH_ADMIN_ENABLED, cfg + ) self.lookup_configs = self._get_lookup_configs(cfg) def _get_cfg(self, cfgfile: Path) -> Dict[str, str]: if not cfgfile.is_file(): - raise IDMappingConfigError('{} does not exist or is not a file'.format(cfgfile)) + raise IDMappingConfigError( + "{} does not exist or is not a file".format(cfgfile) + ) config = configparser.ConfigParser() with cfgfile.open() as cfg: try: config.read_file(cfg) except configparser.Error as e: - raise IDMappingConfigError('Error parsing config file {}: {}'.format( - cfgfile, e)) from e + raise IDMappingConfigError( + "Error parsing config file {}: {}".format(cfgfile, e) + ) from e if self.CFG_SEC not in config: - raise IDMappingConfigError('No section {} found in config file {}'.format( - self.CFG_SEC, cfgfile)) + raise 
IDMappingConfigError( + "No section {} found in config file {}".format(self.CFG_SEC, cfgfile) + ) sec = config[self.CFG_SEC] # a section is not a real map and is missing methods c = {x: sec[x] for x in sec.keys()} @@ -165,43 +187,64 @@ def _get_cfg_from_env(self) -> Path: return Path(os.environ[self.ENV_VAR_IDMAPPING]) if os.environ.get(self.ENV_VAR_KB_DEP): return Path(os.environ[self.ENV_VAR_KB_DEP]) - raise IDMappingConfigError('Could not find deployment configuration file from either ' + - 'permitted environment variable: {}, {}'.format( - self.ENV_VAR_IDMAPPING, self.ENV_VAR_KB_DEP)) + raise IDMappingConfigError( + "Could not find deployment configuration file from either " + + "permitted environment variable: {}, {}".format( + self.ENV_VAR_IDMAPPING, self.ENV_VAR_KB_DEP + ) + ) - def _get_string(self, param_name: str, config: Dict[str, str], raise_on_err: bool=True - ) -> Optional[str]: + def _get_string( + self, param_name: str, config: Dict[str, str], raise_on_err: bool = True + ) -> Optional[str]: s = config.get(param_name) if s and s.strip(): return s.strip() elif raise_on_err: raise IDMappingConfigError( - 'Required parameter {} not provided in configuration file {}, section {}'.format( - param_name, config[self._TEMP_KEY_CFG_FILE], self.CFG_SEC)) + "Required parameter {} not provided in configuration file {}, section {}".format( + param_name, config[self._TEMP_KEY_CFG_FILE], self.CFG_SEC + ) + ) else: return None - def _get_authsource_ids(self, param_name: str, config: Dict[str, str]) -> Set[AuthsourceID]: + def _get_authsource_ids( + self, param_name: str, config: Dict[str, str] + ) -> Set[AuthsourceID]: s = self._get_string(param_name, config, False) ret: Set[AuthsourceID] = set() if not s: return ret - ids = s.split(',') + ids = s.split(",") for id_ in ids: try: ret.add(AuthsourceID(id_.strip())) except MissingParameterError as e: raise IDMappingConfigError( - ('Parameter {} in configuration file {}, section {}, has whitespace-only entry' - 
).format(param_name, config[self._TEMP_KEY_CFG_FILE], self.CFG_SEC, str(e)) - ) from e + ( + "Parameter {} in configuration file {}, section {}, " + "has whitespace-only entry" + ).format( + param_name, + config[self._TEMP_KEY_CFG_FILE], + self.CFG_SEC, + ) + ) from e except Exception as e: raise IDMappingConfigError( - 'Parameter {} in configuration file {}, section {}, is invalid: {}'.format( - param_name, config[self._TEMP_KEY_CFG_FILE], self.CFG_SEC, str(e))) from e + "Parameter {} in configuration file {}, section {}, is invalid: {}".format( + param_name, + config[self._TEMP_KEY_CFG_FILE], + self.CFG_SEC, + str(e), + ) + ) from e return ret - def _get_lookup_configs(self, cfg) -> Dict[AuthsourceID, Tuple[str, Dict[str, str]]]: + def _get_lookup_configs( + self, cfg + ) -> Dict[AuthsourceID, Tuple[str, Dict[str, str]]]: ret = {} for asID in self.auth_enabled: if asID == LocalUserLookup.LOCAL: @@ -217,12 +260,18 @@ def _get_lookup_configs(self, cfg) -> Dict[AuthsourceID, Tuple[str, Dict[str, st lookupcfg[key[len(prefix + self.INIT):]] = val.strip() else: raise IDMappingConfigError( - 'Unexpected parameter {} in configuration file {}, section {}' - .format(key, cfg[self._TEMP_KEY_CFG_FILE], self.CFG_SEC)) + "Unexpected parameter {} in configuration file {}, section {}".format( + key, cfg[self._TEMP_KEY_CFG_FILE], self.CFG_SEC + ) + ) if not factory: raise IDMappingConfigError( - 'Required parameter {} not provided in configuration file {}, section {}' - .format(prefix + self.FACTORY_MODULE, cfg[self._TEMP_KEY_CFG_FILE], - self.CFG_SEC)) + "Required parameter {} not provided in configuration file {}, " + "section {}".format( + prefix + self.FACTORY_MODULE, + cfg[self._TEMP_KEY_CFG_FILE], + self.CFG_SEC, + ) + ) ret[asID] = (factory, lookupcfg) return ret diff --git a/src/jgikbase/idmapping/core/arg_check.py b/src/jgikbase/idmapping/core/arg_check.py index 9f02640..8ae1d32 100644 --- a/src/jgikbase/idmapping/core/arg_check.py +++ 
b/src/jgikbase/idmapping/core/arg_check.py @@ -3,11 +3,16 @@ Utility functions """ -from typing import Dict as _Dict # @UnusedImport PyDev thinks it's unused, flake & mypy get it -from typing import Pattern as _Pattern # @UnusedImport PyDev sez it's unused, flake & mypy get it + +from typing import ( + Dict as _Dict, +) # @UnusedImport PyDev thinks it's unused, flake & mypy get it +from typing import ( + Pattern as _Pattern, +) # @UnusedImport PyDev sez it's unused, flake & mypy get it import re as _re from jgikbase.idmapping.core.errors import MissingParameterError, IllegalParameterError -from typing import Iterable, Any +from typing import Any, Iterable, Optional def not_none(obj: object, name: str): @@ -19,14 +24,16 @@ def not_none(obj: object, name: str): :raises TypeError: if the object is None. """ if obj is None: - raise TypeError(name + ' cannot be None') + raise TypeError(name + " cannot be None") _REGEX_CACHE: _Dict[str, _Pattern] = {} -def check_string(string: str, name: str, legal_characters: str=None, max_len: int=None) -> None: - ''' +def check_string( + string: str, name: str, legal_characters: Optional[str] = None, max_len: Optional[int] = None +) -> None: + """ Check that a string meets a set of criteria: - it is not None or whitespace only - (optional) it is less than some specified maximum length @@ -39,31 +46,33 @@ def check_string(string: str, name: str, legal_characters: str=None, max_len: in :param max_len: the maximum length of the string. :raises MissingParameterError: if the string is None or whitespace only. :raises IllegalParameterError: if the string is too long or contains illegal characters. 
- ''' + """ if not string or not string.strip(): raise MissingParameterError(name) if max_len and len(string) > max_len: - raise IllegalParameterError('{} {} exceeds maximum length of {}' - .format(name, string, max_len)) + raise IllegalParameterError( + "{} {} exceeds maximum length of {}".format(name, string, max_len) + ) if legal_characters: global _REGEX_CACHE if legal_characters not in _REGEX_CACHE: - _REGEX_CACHE[legal_characters] = _re.compile('[^' + legal_characters + ']') + _REGEX_CACHE[legal_characters] = _re.compile("[^" + legal_characters + "]") match = _REGEX_CACHE[legal_characters].search(string) if match: - raise IllegalParameterError('Illegal character in {} {}: {}' - .format(name, string, match.group())) + raise IllegalParameterError( + "Illegal character in {} {}: {}".format(name, string, match.group()) + ) def no_Nones_in_iterable(iterable: Iterable[Any], name: str) -> None: - ''' + """ Check that an iterable is not None and contains no None items. :param iterable: the iterable to check. :param name: the name of the iterable to be used in error messages. :raises TypeError: if the iterable is None or contains None. - ''' + """ not_none(iterable, name) for item in iterable: if item is None: - raise TypeError('None item in ' + name) + raise TypeError("None item in " + name) diff --git a/src/jgikbase/idmapping/core/errors.py b/src/jgikbase/idmapping/core/errors.py index 662175c..abf75a6 100644 --- a/src/jgikbase/idmapping/core/errors.py +++ b/src/jgikbase/idmapping/core/errors.py @@ -1,7 +1,9 @@ """ Exceptions thrown by the ID mapping system. """ + from enum import Enum +from typing import Optional class ErrorType(Enum): @@ -64,20 +66,20 @@ class IDMappingError(Exception): :ivar message: the message for this error. """ - def __init__(self, error_type: ErrorType, message: str=None) -> None: - ''' + def __init__(self, error_type: ErrorType, message: Optional[str] = None) -> None: + """ Create an ID mapping error. 
:param error_type: the error type of this error. :param message: an error message. :raises TypeError: if error_type is None - ''' + """ if not error_type: # don't use not_none here, causes circular import - raise TypeError('error_type cannot be None') - msg = '{} {}'.format(error_type.error_code, error_type.error_type) + raise TypeError("error_type cannot be None") + msg = "{} {}".format(error_type.error_code, error_type.error_type) message = message.strip() if message and message.strip() else None if message: - msg += ': ' + message + msg += ": " + message super().__init__(msg) self.error_type = error_type self.message = message @@ -142,8 +144,11 @@ class AuthenticationError(IDMappingError): An error thrown when authentication of a user fails. """ - def __init__(self, error_type: ErrorType=ErrorType.AUTHENTICATION_FAILED, message: str=None - ) -> None: + def __init__( + self, + error_type: ErrorType = ErrorType.AUTHENTICATION_FAILED, + message: Optional[str] = None, + ) -> None: super().__init__(error_type, message) @@ -152,7 +157,7 @@ class NoTokenError(AuthenticationError): An error thrown when a token is required but not provided. """ - def __init__(self, message: str=None) -> None: + def __init__(self, message: Optional[str] = None) -> None: super().__init__(ErrorType.NO_TOKEN, message) @@ -161,7 +166,7 @@ class InvalidTokenError(AuthenticationError): An error thrown when a provided token is invalid. """ - def __init__(self, message: str=None) -> None: + def __init__(self, message: Optional[str] = None) -> None: super().__init__(ErrorType.INVALID_TOKEN, message) @@ -170,7 +175,7 @@ class UnauthorizedError(IDMappingError): An error thrown when a user attempts a disallowed action. """ - def __init__(self, message: str=None) -> None: + def __init__(self, message: Optional[str] = None) -> None: super().__init__(ErrorType.UNAUTHORIZED, message) @@ -179,7 +184,7 @@ class MissingParameterError(IDMappingError): An error thrown when a required parameter is missing. 
""" - def __init__(self, message: str=None) -> None: + def __init__(self, message: Optional[str] = None) -> None: super().__init__(ErrorType.MISSING_PARAMETER, message) @@ -188,7 +193,7 @@ class IllegalParameterError(IDMappingError): An error thrown when a provided parameter is illegal. """ - def __init__(self, message: str=None) -> None: + def __init__(self, message: Optional[str] = None) -> None: super().__init__(ErrorType.ILLEGAL_PARAMETER, message) @@ -197,5 +202,5 @@ class IllegalUsernameError(IDMappingError): An error thrown when a provided username is illegal. """ - def __init__(self, message: str=None) -> None: + def __init__(self, message: Optional[str] = None) -> None: super().__init__(ErrorType.ILLEGAL_USER_NAME, message) diff --git a/src/jgikbase/idmapping/core/mapper.py b/src/jgikbase/idmapping/core/mapper.py index 5a1a870..b087320 100644 --- a/src/jgikbase/idmapping/core/mapper.py +++ b/src/jgikbase/idmapping/core/mapper.py @@ -1,9 +1,10 @@ """ The core ID mapping code. """ + from jgikbase.idmapping.storage.id_mapping_storage import IDMappingStorage from jgikbase.idmapping.core.user_lookup import UserLookupSet -from typing import Set, cast, Tuple, Iterable +from typing import Set, cast, Tuple, Iterable, Optional from jgikbase.idmapping.core.arg_check import not_none, no_Nones_in_iterable from jgikbase.idmapping.core.object_id import NamespaceID, Namespace, ObjectID from jgikbase.idmapping.core.user import User, AuthsourceID @@ -23,11 +24,11 @@ class IDMapper: """ def __init__( - self, - user_lookup: UserLookupSet, - admin_authsources: Set[AuthsourceID], - storage: IDMappingStorage - ) -> None: + self, + user_lookup: UserLookupSet, + admin_authsources: Set[AuthsourceID], + storage: IDMappingStorage, + ) -> None: """ Create the mapper. @@ -37,9 +38,9 @@ def __init__( The admin state returned by other auth sources will be ignored. :param storage: the mapping storage system. 
""" - not_none(user_lookup, 'user_lookup') - no_Nones_in_iterable(admin_authsources, 'admin_authsources') - not_none(storage, 'storage') + not_none(user_lookup, "user_lookup") + no_Nones_in_iterable(admin_authsources, "admin_authsources") + not_none(storage, "storage") self._storage = storage self._lookup = user_lookup self._admin_authsources = admin_authsources @@ -50,22 +51,26 @@ def _check_sys_admin(self, authsource_id: AuthsourceID, token: Token) -> User: :raises InvalidTokenError: if the token is invalid. :raises UnauthorizedError: if the user is not a system administrator. """ - not_none(token, 'token') + not_none(token, "token") if authsource_id not in self._admin_authsources: - raise UnauthorizedError(('Auth source {} is not configured as a provider of ' + - 'system administration status').format(authsource_id.id)) + raise UnauthorizedError( + ( + "Auth source {} is not configured as a provider of " + + "system administration status" + ).format(authsource_id.id) + ) user, admin = self._lookup.get_user(authsource_id, token) if not admin: - raise UnauthorizedError('User {}/{} is not a system administrator'.format( - user.authsource_id.id, user.username.name)) + raise UnauthorizedError( + "User {}/{} is not a system administrator".format( + user.authsource_id.id, user.username.name + ) + ) return user def create_namespace( - self, - authsource_id: AuthsourceID, - token: Token, - namespace_id: NamespaceID - ) -> None: + self, authsource_id: AuthsourceID, token: Token, namespace_id: NamespaceID + ) -> None: """ Create a namespace. @@ -78,28 +83,34 @@ def create_namespace( :raises InvalidTokenError: if the token is invalid. :raises UnauthorizedError: if the user is not a system administrator. 
""" - not_none(namespace_id, 'namespace_id') + not_none(namespace_id, "namespace_id") admin = self._check_sys_admin(authsource_id, token) self._storage.create_namespace(namespace_id) - _log('Admin %s/%s created namespace %s', admin.authsource_id.id, admin.username.name, - namespace_id.id) + _log( + "Admin %s/%s created namespace %s", + admin.authsource_id.id, + admin.username.name, + namespace_id.id, + ) def _check_valid_user(self, user): """ :raises NoSuchAuthsourceError: if there's no handler for the user's authsource. :raises NoSuchUserError: if the user is invalid according to the appropriate user handler. """ - not_none(user, 'user') + not_none(user, "user") if not self._lookup.is_valid_user(user): - raise NoSuchUserError('{}/{}'.format(user.authsource_id.id, user.username.name)) + raise NoSuchUserError( + "{}/{}".format(user.authsource_id.id, user.username.name) + ) def add_user_to_namespace( - self, - authsource_id: AuthsourceID, - token: Token, - namespace_id: NamespaceID, - user: User - ) -> None: + self, + authsource_id: AuthsourceID, + token: Token, + namespace_id: NamespaceID, + user: User, + ) -> None: """ Add a user to a namespace. @@ -116,23 +127,27 @@ def add_user_to_namespace( :raises InvalidTokenError: if the token is invalid. :raises UnauthorizedError: if the user is not a system administrator. 
""" - not_none(namespace_id, 'namespace_id') - not_none(user, 'user') + not_none(namespace_id, "namespace_id") + not_none(user, "user") admin = self._check_sys_admin(authsource_id, token) self._check_valid_user(user) self._storage.add_user_to_namespace(namespace_id, user) - _log('Admin %s/%s added user %s/%s to namespace %s', - admin.authsource_id.id, admin.username.name, - user.authsource_id.id, user.username.name, - namespace_id.id) + _log( + "Admin %s/%s added user %s/%s to namespace %s", + admin.authsource_id.id, + admin.username.name, + user.authsource_id.id, + user.username.name, + namespace_id.id, + ) def remove_user_from_namespace( - self, - authsource_id: AuthsourceID, - token: Token, - namespace_id: NamespaceID, - user: User - ) -> None: + self, + authsource_id: AuthsourceID, + token: Token, + namespace_id: NamespaceID, + user: User, + ) -> None: """ Remove a user from a namespace. @@ -147,14 +162,18 @@ def remove_user_from_namespace( :raises InvalidTokenError: if the token is invalid. :raises UnauthorizedError: if the user is not a system administrator. """ - not_none(namespace_id, 'namespace_id') - not_none(user, 'user') + not_none(namespace_id, "namespace_id") + not_none(user, "user") admin = self._check_sys_admin(authsource_id, token) self._storage.remove_user_from_namespace(namespace_id, user) - _log('Admin %s/%s removed user %s/%s from namespace %s', - admin.authsource_id.id, admin.username.name, - user.authsource_id.id, user.username.name, - namespace_id.id) + _log( + "Admin %s/%s removed user %s/%s from namespace %s", + admin.authsource_id.id, + admin.username.name, + user.authsource_id.id, + user.username.name, + namespace_id.id, + ) def _check_authed_for_ns_get(self, user: User, namespace_id: NamespaceID) -> None: """ @@ -168,16 +187,19 @@ def _check_authed_for_ns(self, user: User, ns: Namespace) -> None: :raises UnauthorizedError: if the user is not authorized to administrate the namespace. 
""" if user not in ns.authed_users: - raise UnauthorizedError('User {}/{} may not administrate namespace {}'.format( - user.authsource_id.id, user.username.name, ns.namespace_id.id)) + raise UnauthorizedError( + "User {}/{} may not administrate namespace {}".format( + user.authsource_id.id, user.username.name, ns.namespace_id.id + ) + ) def set_namespace_publicly_mappable( - self, - authsource_id: AuthsourceID, - token: Token, - namespace_id: NamespaceID, - publicly_mappable: bool - ) -> None: + self, + authsource_id: AuthsourceID, + token: Token, + namespace_id: NamespaceID, + publicly_mappable: bool, + ) -> None: """ Set a namespace to be publicly mappable, or remove that state. A publicly mappable namespace may have ID mappings added to it without the user being an administrator @@ -194,21 +216,25 @@ def set_namespace_publicly_mappable( :raises NoSuchNamespaceError: if the namespace does not exist. :raises UnauthorizedError: if the user is not authorized to administrate the namespace. """ - not_none(token, 'token') - not_none(namespace_id, 'namespace_id') + not_none(token, "token") + not_none(namespace_id, "namespace_id") user, _ = self._lookup.get_user(authsource_id, token) self._check_authed_for_ns_get(user, namespace_id) self._storage.set_namespace_publicly_mappable(namespace_id, publicly_mappable) - _log('User %s/%s set namespace %s public map property to %s', - user.authsource_id.id, user.username.name, - namespace_id.id, publicly_mappable) + _log( + "User %s/%s set namespace %s public map property to %s", + user.authsource_id.id, + user.username.name, + namespace_id.id, + publicly_mappable, + ) def get_namespace( - self, - namespace_id: NamespaceID, - authsource_id: AuthsourceID=None, - token: Token=None - ) -> Namespace: + self, + namespace_id: NamespaceID, + authsource_id: Optional[AuthsourceID] = None, + token: Optional[Token] = None, + ) -> Namespace: """ Get a namespace. 
If user credentials are provided and the user is a system admin or an admin of the namespace, the namespace user list will be returned. Otherwise, the user @@ -223,12 +249,16 @@ def get_namespace( :raises NoSuchAuthsourceError: if there's no lookup handler for the provided authsource. :raises InvalidTokenError: if the token is invalid. """ - not_none(namespace_id, 'namespace_id') + not_none(namespace_id, "namespace_id") if bool(authsource_id) ^ bool(token): # xor - raise TypeError('If token or authsource_id is specified, both must be specified') + raise TypeError( + "If token or authsource_id is specified, both must be specified" + ) ns = self._storage.get_namespace(namespace_id) if token: - authsource_id = cast(AuthsourceID, authsource_id) # mypy doesn't understand the xor + authsource_id = cast( + AuthsourceID, authsource_id + ) # mypy doesn't understand the xor user, admin = self._lookup.get_user(authsource_id, token) if admin or user in ns.authed_users: return ns @@ -254,12 +284,12 @@ def get_namespaces(self) -> Tuple[Set[NamespaceID], Set[NamespaceID]]: return public, private def create_mapping( - self, - authsource_id: AuthsourceID, - token: Token, - administrative_oid: ObjectID, - oid: ObjectID - ) -> None: + self, + authsource_id: AuthsourceID, + token: Token, + administrative_oid: ObjectID, + oid: ObjectID, + ) -> None: """ Create a mapping. The user must be an administrator of the namespace in the administrative_oid and an administrator of the namespace in the oid if it is not @@ -277,9 +307,9 @@ def create_mapping( :raises UnauthorizedError: if the user is not authorized to administrate either of the namespaces. 
""" - not_none(token, 'token') - not_none(administrative_oid, 'administrative_oid') - not_none(oid, 'oid') + not_none(token, "token") + not_none(administrative_oid, "administrative_oid") + not_none(oid, "oid") user, _ = self._lookup.get_user(authsource_id, token) adminns = self._storage.get_namespace(administrative_oid.namespace_id) self._check_authed_for_ns(user, adminns) @@ -290,18 +320,23 @@ def create_mapping( # this might be too much of a performance hit. If so, push the bulk operations down to # this level and do... what exactly? Log 10000 entries? # Maybe need to add an id or timestamp or something to the mappings and just log that. - _log('User %s/%s created mapping %s/%s <---> %s/%s', - user.authsource_id.id, user.username.name, - administrative_oid.namespace_id.id, administrative_oid.id, - oid.namespace_id.id, oid.id) + _log( + "User %s/%s created mapping %s/%s <---> %s/%s", + user.authsource_id.id, + user.username.name, + administrative_oid.namespace_id.id, + administrative_oid.id, + oid.namespace_id.id, + oid.id, + ) def remove_mapping( - self, - authsource_id: AuthsourceID, - token: Token, - administrative_oid: ObjectID, - oid: ObjectID - ) -> None: + self, + authsource_id: AuthsourceID, + token: Token, + administrative_oid: ObjectID, + oid: ObjectID, + ) -> None: """ Delete a mapping. The user must be an administrator of the namespace in the administrative_oid. @@ -318,9 +353,9 @@ def remove_mapping( :raises UnauthorizedError: if the user is not authorized to administrate the administrative namespace. 
""" - not_none(token, 'token') - not_none(administrative_oid, 'administrative_oid') - not_none(oid, 'oid') + not_none(token, "token") + not_none(administrative_oid, "administrative_oid") + not_none(oid, "oid") user, _ = self._lookup.get_user(authsource_id, token) adminns = self._storage.get_namespace(administrative_oid.namespace_id) self._check_authed_for_ns(user, adminns) @@ -329,13 +364,19 @@ def remove_mapping( # this might be too much of a performance hit. If so, push the bulk operations down to # this level and do... what exactly? Log 10000 entries? # Maybe need to add an id or timestamp or something to the mappings and just log that. - _log('User %s/%s removed mapping %s/%s <---> %s/%s', - user.authsource_id.id, user.username.name, - administrative_oid.namespace_id.id, administrative_oid.id, - oid.namespace_id.id, oid.id) + _log( + "User %s/%s removed mapping %s/%s <---> %s/%s", + user.authsource_id.id, + user.username.name, + administrative_oid.namespace_id.id, + administrative_oid.id, + oid.namespace_id.id, + oid.id, + ) - def get_mappings(self, oid: ObjectID, ns_filter: Iterable[NamespaceID]=None - ) -> Tuple[Set[ObjectID], Set[ObjectID]]: + def get_mappings( + self, oid: ObjectID, ns_filter: Optional[Iterable[NamespaceID]] = None + ) -> Tuple[Set[ObjectID], Set[ObjectID]]: """ Find mappings given a namespace / id combination. @@ -350,10 +391,10 @@ def get_mappings(self, oid: ObjectID, ns_filter: Iterable[NamespaceID]=None :raise TypeError: if the object ID is None or the filter contains None. :raise NoSuchNamespaceError: if any of the namespaces do not exist. 
""" - not_none(oid, 'oid') + not_none(oid, "oid") check = [oid.namespace_id] if ns_filter: - no_Nones_in_iterable(ns_filter, 'ns_filter') + no_Nones_in_iterable(ns_filter, "ns_filter") check.extend(ns_filter) self._storage.get_namespaces(check) # check for existence return self._storage.find_mappings(oid, ns_filter=ns_filter) diff --git a/src/jgikbase/idmapping/core/object_id.py b/src/jgikbase/idmapping/core/object_id.py index 1a14f40..8087bb7 100644 --- a/src/jgikbase/idmapping/core/object_id.py +++ b/src/jgikbase/idmapping/core/object_id.py @@ -4,9 +4,14 @@ ID might be 'GCF_001598195.1'. The ID is expected to be unique and immutable within a particular namespace. """ -from jgikbase.idmapping.core.arg_check import check_string, not_none, no_Nones_in_iterable + +from jgikbase.idmapping.core.arg_check import ( + check_string, + not_none, + no_Nones_in_iterable, +) from jgikbase.idmapping.core.user import User -from typing import Set +from typing import Optional, Set # may want to consider a superclass for simple IDs that does checking & implements hash & eq @@ -18,18 +23,18 @@ class NamespaceID: :ivar id: the namespace ID. """ - __slots__ = ['id'] + __slots__ = ["id"] def __init__(self, id_: str) -> None: - ''' + """ Create a namespace ID. :param id_: A string identifier for a namespace, consisting of the characters a-zA-Z_0-9 and no longer than 256 characters. :raises MissingParameterError: if the id is None or whitespace only. :raises IllegalParameterError: if the id does not match the requirements. - ''' - check_string(id_, 'namespace id', 'a-zA-Z0-9_', 256) + """ + check_string(id_, "namespace id", "a-zA-Z0-9_", 256) self.id = id_ def __eq__(self, other): @@ -53,36 +58,38 @@ class Namespace: # TODO NS add user def/updatable attributes: free text desc, source (kbase/jgi), env (ci), db. 
def __init__( - self, - namespace_id: NamespaceID, - is_publicly_mappable: bool, - authed_users: Set[User]=None - ) -> None: - ''' + self, + namespace_id: NamespaceID, + is_publicly_mappable: bool, + authed_users: Optional[Set[User]] = None, + ) -> None: + """ Create a namespace. :param namespace_id: the ID of the namespace. :param is_publicly_mappable: whether the namespace is publicly mappable or not. :param authed_users: users that are authorized to administer the namespace. :raises TypeError: if namespace_id is None or authed_users contains None - ''' - not_none(namespace_id, 'namespace_id') + """ + not_none(namespace_id, "namespace_id") self.namespace_id = namespace_id self.is_publicly_mappable = is_publicly_mappable self.authed_users = frozenset(authed_users) if authed_users else frozenset() - no_Nones_in_iterable(self.authed_users, 'authed_users') + no_Nones_in_iterable(self.authed_users, "authed_users") def without_users(self): - ''' + """ Returns a copy of this namespace with an empty authed_users field. - ''' + """ return Namespace(self.namespace_id, self.is_publicly_mappable, None) def __eq__(self, other): if type(self) is type(other): - return (self.namespace_id == other.namespace_id and - self.is_publicly_mappable == other.is_publicly_mappable and - self.authed_users == other.authed_users) + return ( + self.namespace_id == other.namespace_id + and self.is_publicly_mappable == other.is_publicly_mappable + and self.authed_users == other.authed_users + ) return False def __hash__(self): @@ -90,14 +97,14 @@ def __hash__(self): class ObjectID: - ''' + """ An object ID consisting of a namespace ID and the ID of the data object within the namespace. :ivar namespace_id: The ID of the namespace. :ivar id: The ID of the data object. 
- ''' + """ - __slots__ = ['namespace_id', 'id'] + __slots__ = ["namespace_id", "id"] def __init__(self, namespace_id: NamespaceID, data_id: str) -> None: """ @@ -109,8 +116,10 @@ def __init__(self, namespace_id: NamespaceID, data_id: str) -> None: :raises MissingParameterError: if the data ID is None or whitespace only. :raises IllegalParameterError: if the data ID does not meet the requirements. """ - not_none(namespace_id, 'namespace_id') - check_string(data_id, 'data id', max_len=1000) # should maybe check for control chars + not_none(namespace_id, "namespace_id") + check_string( + data_id, "data id", max_len=1000 + ) # should maybe check for control chars self.namespace_id = namespace_id self.id = data_id diff --git a/src/jgikbase/idmapping/core/user_lookup.py b/src/jgikbase/idmapping/core/user_lookup.py index 8965764..a4e5177 100644 --- a/src/jgikbase/idmapping/core/user_lookup.py +++ b/src/jgikbase/idmapping/core/user_lookup.py @@ -14,6 +14,7 @@ class UserLookup: # pragma: no cover """ An interface for a handler for user information, including authentication. """ + __metaclass__ = _ABCMeta @_abstractmethod @@ -41,7 +42,9 @@ def get_user(self, token: Token) -> Tuple[User, bool, Optional[int], Optional[in raise NotImplementedError() @_abstractmethod - def is_valid_user(self, username: Username) -> Tuple[bool, Optional[int], Optional[int]]: + def is_valid_user( + self, username: Username + ) -> Tuple[bool, Optional[int], Optional[int]]: """ Check if a username is valid, which implies the user exists. 
@@ -62,13 +65,13 @@ class UserLookupSet: """ def __init__( - self, - user_lookup: Set[UserLookup], - cache_timer: Callable[[], int]=None, - cache_max_size: int=10000, - cache_user_expiration: int=300, - cache_is_valid_expiration: int=3600 - ) -> None: + self, + user_lookup: Set[UserLookup], + cache_timer: Optional[Callable[[], int]] = None, + cache_max_size: int = 10000, + cache_user_expiration: int = 300, + cache_is_valid_expiration: int = 3600, + ) -> None: """ Create the handler set. @@ -84,19 +87,23 @@ def __init__( :param cache_is_valid_expiration: the default expiration time for the username -> validity cache. This time can be overridden by a user handler on a per user basis. """ - no_Nones_in_iterable(user_lookup, 'user_lookup') - self._lookup = {l.get_authsource_id(): l for l in user_lookup} + no_Nones_in_iterable(user_lookup, "user_lookup") + self._lookup = {lookup.get_authsource_id(): lookup for lookup in user_lookup} self._cache_timer = time.time if not cache_timer else cache_timer - self._user_cache = LRUCache(timer=self._cache_timer, maxsize=cache_max_size, - ttl=cache_user_expiration) - self._valid_cache = LRUCache(timer=self._cache_timer, maxsize=cache_max_size, - ttl=cache_is_valid_expiration) + self._user_cache = LRUCache( + timer=self._cache_timer, maxsize=cache_max_size, ttl=cache_user_expiration + ) + self._valid_cache = LRUCache( + timer=self._cache_timer, + maxsize=cache_max_size, + ttl=cache_is_valid_expiration, + ) def _check_authsource_id(self, authsource_id: AuthsourceID) -> None: """ :raises NoSuchAuthsourceError: if there's no handler for the provided authsource. 
""" - not_none(authsource_id, 'authsource_id') + not_none(authsource_id, "authsource_id") if authsource_id not in self._lookup: raise NoSuchAuthsourceError(authsource_id.id) @@ -121,14 +128,16 @@ def get_user(self, authsource_id: AuthsourceID, token: Token) -> Tuple[User, boo :returns: a tuple of the user and a boolean indicating whether the authsource claims the user is a mapping service system admin. """ - not_none(token, 'token') + not_none(token, "token") self._check_authsource_id(authsource_id) # None default causes a key error cacheres = self._user_cache.get((authsource_id, token), default=False) if cacheres: return cacheres user, admin, epoch, rel = self._lookup[authsource_id].get_user(token) - self._user_cache.set((authsource_id, token), (user, admin), ttl=self._calc_ttl(epoch, rel)) + self._user_cache.set( + (authsource_id, token), (user, admin), ttl=self._calc_ttl(epoch, rel) + ) return (user, admin) def is_valid_user(self, user: User) -> bool: @@ -138,12 +147,14 @@ def is_valid_user(self, user: User) -> bool: :param user: the user to check. :raises NoSuchAuthsourceError: if there's no handler for the user's authsource. """ - not_none(user, 'user') + not_none(user, "user") self._check_authsource_id(user.authsource_id) # None default causes a key error exists = self._valid_cache.get(user, default=False) if not exists: - exists, epoch, rel = self._lookup[user.authsource_id].is_valid_user(user.username) + exists, epoch, rel = self._lookup[user.authsource_id].is_valid_user( + user.username + ) if exists: self._valid_cache.set(user, True, ttl=self._calc_ttl(epoch, rel)) return exists @@ -155,52 +166,54 @@ class LocalUserLookup(UserLookup): stored in the local database. """ - LOCAL = AuthsourceID('local') + LOCAL = AuthsourceID("local") """ The ID of the authentication source for local users. """ def __init__(self, storage: IDMappingStorage) -> None: - ''' + """ Create a local user handler. :param storage: the storage system in which users are stored. 
- ''' - not_none(storage, 'storage') + """ + not_none(storage, "storage") self._store = storage def get_authsource_id(self) -> AuthsourceID: return self.LOCAL def get_user(self, token: Token) -> Tuple[User, bool, Optional[int], Optional[int]]: - not_none(token, 'token') + not_none(token, "token") username, admin = self._store.get_user(token.get_hashed_token()) return (User(self.LOCAL, username), admin, None, 300) - def is_valid_user(self, username: Username) -> Tuple[bool, Optional[int], Optional[int]]: - not_none(username, 'username') + def is_valid_user( + self, username: Username + ) -> Tuple[bool, Optional[int], Optional[int]]: + not_none(username, "username") return (self._store.user_exists(username), None, 3600) def create_user(self, username: Username) -> Token: - ''' + """ Create a new user in the local storage system. Returns a new token for that user. :param username: The name of the user to create. :raises TypeError: if the user name is None. :raises UserExistsError: if the user already exists. - ''' - not_none(username, 'username') + """ + not_none(username, "username") t = tokens.generate_token() self._store.create_local_user(username, t.get_hashed_token()) return t def new_token(self, username: Username) -> Token: - ''' + """ Generate a new token for a user in the local storage system. :param username: The name of the user to update. :raises TypeError: if the user name is None. :raises NoSuchUserError: if the user does not exist. - ''' - not_none(username, 'username') + """ + not_none(username, "username") t = tokens.generate_token() self._store.update_local_user_token(username, t.get_hashed_token()) return t @@ -214,17 +227,17 @@ def set_user_as_admin(self, username: Username, admin: bool) -> None: is already in the given state, no further action is taken. :raises TypeError: if the username is None. 
""" - not_none(username, 'username') + not_none(username, "username") self._store.set_local_user_as_admin(username, admin) def get_users(self) -> Dict[Username, bool]: - ''' + """ Get the users in the local storage system. :returns: a mapping of username to a boolean denoting whether the user is an admin or not. - ''' + """ return self._store.get_users() class LookupInitializationError(Exception): - """ Thrown when a user lookup handler could not be initialized. """ + """Thrown when a user lookup handler could not be initialized.""" diff --git a/src/jgikbase/idmapping/service/mapper_service.py b/src/jgikbase/idmapping/service/mapper_service.py index ecc993a..f68c380 100644 --- a/src/jgikbase/idmapping/service/mapper_service.py +++ b/src/jgikbase/idmapping/service/mapper_service.py @@ -1,13 +1,22 @@ from jgikbase.idmapping.builder import IDMappingBuilder from flask.app import Flask from flask import request -from jgikbase.idmapping.core.errors import NoTokenError, AuthenticationError,\ - ErrorType, IllegalParameterError, IDMappingError, NoDataException, UnauthorizedError,\ - MissingParameterError +from jgikbase.idmapping.core.errors import ( + NoTokenError, + AuthenticationError, + ErrorType, + IllegalParameterError, + IDMappingError, + NoDataException, + UnauthorizedError, + MissingParameterError, +) from jgikbase.idmapping.core.user import AuthsourceID, User, Username from jgikbase.idmapping.core.tokens import Token from jgikbase.idmapping.core.object_id import NamespaceID, ObjectID -from http.client import responses # @UnresolvedImport dunno why pydev cries here, it's stdlib +from http.client import ( + responses, +) # @UnresolvedImport dunno why pydev cries here, it's stdlib import flask from flask import g as flask_req_global from typing import List, Tuple, Optional, Set, Dict, IO @@ -21,15 +30,17 @@ import logging from logging import StreamHandler, Formatter -VERSION = '0.1.1' +VERSION = "0.1.2" try: from jgikbase.idmapping import gitcommit except ImportError: 
# pragma: no cover # tested manually - raise ValueError('Did not find git commit file at ' + # pragma: no cover - 'src/jgikbase/idmapping/gitcommit.py. ' + # pragma: no cover - 'The build may not have completed correctly.') # pragma: no cover + raise ValueError( + "Did not find git commit file at " # pragma: no cover + + "src/jgikbase/idmapping/gitcommit.py. " # pragma: no cover + + "The build may not have completed correctly." + ) # pragma: no cover # TODO CODE try getting rid of src dir and see what happens @@ -41,15 +52,15 @@ # by peformance needs. -_APP = 'ID_MAPPER' -_IGNORE_IP_HEADERS = 'IGNORE_IP_HEADERS' +_APP = "ID_MAPPER" +_IGNORE_IP_HEADERS = "IGNORE_IP_HEADERS" -_X_REAL_IP = 'X-Real-IP' -_X_FORWARDED_FOR = 'X-Forwarded-For' -_USER_AGENT = 'User-Agent' +_X_REAL_IP = "X-Real-IP" +_X_FORWARDED_FOR = "X-Forwarded-For" +_USER_AGENT = "User-Agent" -_TRUE = 'true' -_FALSE = 'false' +_TRUE = "true" +_FALSE = "false" def epoch_ms(): @@ -62,7 +73,7 @@ def get_ip_address(request, ignore_ip_headers): real_ip = request.headers.get(_X_REAL_IP) if xff and xff.strip(): - return xff.split(',')[0].strip() + return xff.split(",")[0].strip() if real_ip and real_ip.strip(): return real_ip.strip() return request.remote_addr.strip() @@ -74,23 +85,29 @@ def _log(msg, *args): def _format_exception(err): # seriously what the fuck - return ''.join(traceback.format_exception(etype=type(err), value=err, tb=err.__traceback__)) + return "".join( + traceback.format_exception(etype=type(err), value=err, tb=err.__traceback__) + ) def _log_exception(err: Exception): - logging.getLogger(__name__).error('Logging exception:\n' + _format_exception(err)) - - -def _format_error(err: Exception, httpcode: int, errtype: ErrorType=None, errprefix: str=''): - errjson = {'httpcode': httpcode, - 'httpstatus': responses[httpcode], - 'message': errprefix + str(err), - 'callid': flask_req_global.req_id, - 'time': epoch_ms()} + logging.getLogger(__name__).error("Logging exception:\n" + 
_format_exception(err)) + + +def _format_error( + err: Exception, httpcode: int, errtype: Optional[ErrorType] = None, errprefix: str = "" +): + errjson = { + "httpcode": httpcode, + "httpstatus": responses[httpcode], + "message": errprefix + str(err), + "callid": flask_req_global.req_id, # type: ignore[attr-defined] + "time": epoch_ms(), + } if errtype: - errjson['appcode'] = errtype.error_code - errjson['apperror'] = errtype.error_type - return (flask.jsonify({'error': errjson}), httpcode) + errjson["appcode"] = errtype.error_code + errjson["apperror"] = errtype.error_type + return (flask.jsonify({"error": errjson}), httpcode) def format_ip_headers(request, ignore_ip_headers): @@ -100,12 +117,12 @@ def format_ip_headers(request, ignore_ip_headers): xff = request.headers.get(_X_FORWARDED_FOR) real_ip = request.headers.get(_X_REAL_IP) if xff and xff.strip(): - log.append(_X_FORWARDED_FOR + ': ' + xff.strip()) + log.append(_X_FORWARDED_FOR + ": " + xff.strip()) if real_ip and real_ip.strip(): - log.append(_X_REAL_IP + ': ' + real_ip.strip()) + log.append(_X_REAL_IP + ": " + real_ip.strip()) if log: - log.append('Remote IP: ' + request.remote_addr.strip()) - return ', '.join(log) + log.append("Remote IP: " + request.remote_addr.strip()) + return ", ".join(log) return None @@ -116,44 +133,48 @@ def _get_auth(request, required=True) -> Tuple[Optional[AuthsourceID], Optional[ :raises InvalidTokenError: if the authorization header is malformed. :raises IllegalParameterError: if the authsource is illegal. 
""" - auth = request.headers.get('Authorization') + auth = request.headers.get("Authorization") if not auth: if required: raise NoTokenError() return (None, None) auth = auth.strip().split() if len(auth) != 2: - raise IllegalParameterError('Expected authsource and token in header.') + raise IllegalParameterError("Expected authsource and token in header.") return AuthsourceID(auth[0]), Token(auth[1]) def _users_to_jsonable(users: List[User]) -> List[str]: - return sorted([u.authsource_id.id + '/' + u.username.name for u in users]) + return sorted([u.authsource_id.id + "/" + u.username.name for u in users]) def _objids_to_jsonable(oids: Set[ObjectID]): - return sorted([{'ns': o.namespace_id.id, 'id': o.id} for o in oids], - key=itemgetter('ns', 'id')) + return sorted( + [{"ns": o.namespace_id.id, "id": o.id} for o in oids], + key=itemgetter("ns", "id"), + ) def _get_object_id_dict_from_json(request) -> Dict[str, str]: # flask has a built in get_json() method but the errors it throws suck. 
ids = json.loads(request.get_data()) if not isinstance(ids, dict): - raise IllegalParameterError('Expected JSON mapping in request body') + raise IllegalParameterError("Expected JSON mapping in request body") if not ids: - raise MissingParameterError('No ids supplied') + raise MissingParameterError("No ids supplied") for id_ in ids: # json keys must be strings if not id_.strip(): - raise MissingParameterError('whitespace only key in input JSON') + raise MissingParameterError("whitespace only key in input JSON") val = ids[id_] if not isinstance(val, str): - raise IllegalParameterError('value for key {} in input JSON is not string: {}'.format( - id_, val)) + raise IllegalParameterError( + "value for key {} in input JSON is not string: {}".format(id_, val) + ) if not val.strip(): - raise MissingParameterError('value for key {} in input JSON is whitespace only'.format( - id_)) + raise MissingParameterError( + "value for key {} in input JSON is whitespace only".format(id_) + ) return ids @@ -161,53 +182,57 @@ def _get_object_id_list_from_json(request) -> List[str]: # flask has a built in get_json() method but the errors it throws suck. body = json.loads(request.get_data()) if not isinstance(body, dict): - raise IllegalParameterError('Expected JSON mapping in request body') - ids = body.get('ids') + raise IllegalParameterError("Expected JSON mapping in request body") + ids = body.get("ids") if not isinstance(ids, list): - raise IllegalParameterError('Expected list at /ids in request body') + raise IllegalParameterError("Expected list at /ids in request body") if not ids: - raise MissingParameterError('No ids supplied') + raise MissingParameterError("No ids supplied") for id_ in ids: if not id_ or not id_.strip(): - raise MissingParameterError('null or whitespace-only id in list') + raise MissingParameterError("null or whitespace-only id in list") return ids class JSONFlaskLogFormatter(Formatter): - """ A JSON formatter for service logs. 
""" + """A JSON formatter for service logs.""" def __init__(self, service_name): super().__init__() self.service_name = service_name def format(self, record): - log = {'service': self.service_name, - 'level': record.levelname, - 'time': epoch_ms(), - 'source': record.name, - 'ip': flask_req_global.ip, - 'method': flask_req_global.method, - 'callid': flask_req_global.req_id, - 'msg': record.getMessage() - } + log = { + "service": self.service_name, + "level": record.levelname, + "time": epoch_ms(), + "source": record.name, + "msg": record.getMessage(), + } # https://docs.python.org/3.6/library/sys.html#sys.exc_info if record.exc_info and record.exc_info != (None, None, None): - log['excep'] = _format_exception(record.exc_info[1]) + log["excep"] = _format_exception(record.exc_info[1]) + if flask_req_global: + log["ip"] = flask_req_global.ip + log["method"] = flask_req_global.method + log["callid"] = flask_req_global.req_id return json.dumps(log) -def _configure_loggers(logstream: IO[str]=None): +def _configure_loggers(logstream: Optional[IO[str]] = None): # make some of this configurable if needed handler = StreamHandler(logstream) - handler.setFormatter(JSONFlaskLogFormatter('IDMappingService')) + handler.setFormatter(JSONFlaskLogFormatter("IDMappingService")) logging.getLogger().addHandler(handler) - logging.getLogger().setLevel('INFO') - logging.getLogger('werkzeug').setLevel('WARNING') - logging.getLogger('flask.app').setLevel('WARNING') + logging.getLogger().setLevel("INFO") + logging.getLogger("werkzeug").setLevel("WARNING") + logging.getLogger("flask.app").setLevel("WARNING") -def create_app(builder: IDMappingBuilder=IDMappingBuilder(), logstream: IO[str]=None): - """ Create the flask app. 
""" +def create_app( + builder: IDMappingBuilder = IDMappingBuilder(), logstream: Optional[IO[str]] = None +): + """Create the flask app.""" _configure_loggers(logstream) app = Flask(__name__) app.url_map.strict_slashes = False # otherwise GET /loc/ won't match GET /loc @@ -226,127 +251,168 @@ def preprocess_request(): @app.after_request def postprocess_request(response): - _log('%s %s %s %s', request.method, request.path, response.status_code, - request.headers.get(_USER_AGENT)) + _log( + "%s %s %s %s", + request.method, + request.path, + response.status_code, + request.headers.get(_USER_AGENT), + ) return response ########### # Endpoints ########### - @app.route('/', methods=['GET']) + @app.route("/", methods=["GET"]) def root(): - """ Get information about the service. """ + """Get information about the service.""" # TODO ROOT add paths and a configurable contact email at some point. - return flask.jsonify({'service': 'ID Mapping Service', - 'version': VERSION, - 'gitcommithash': gitcommit.commit, - 'servertime': epoch_ms()}) - - @app.route('/api/v1/namespace/', methods=['PUT', 'POST']) + return flask.jsonify( + { + "service": "ID Mapping Service", + "version": VERSION, + "gitcommithash": gitcommit.commit, + "servertime": epoch_ms(), + } + ) + + @app.route("/api/v1/namespace/", methods=["PUT", "POST"]) def create_namespace(namespace): - """ Create a namespace. """ + """Create a namespace.""" authsource, token = _get_auth(request) app.config[_APP].create_namespace(authsource, token, NamespaceID(namespace)) - return ('', 204) + return ("", 204) - @app.route('/api/v1/namespace//user//', methods=['PUT']) + @app.route( + "/api/v1/namespace//user//", methods=["PUT"] + ) def add_user_to_namespace(namespace, authsource, user): - """ Add a user to a namespace. 
""" + """Add a user to a namespace.""" admin_authsource, token = _get_auth(request) - app.config[_APP].add_user_to_namespace(admin_authsource, token, NamespaceID(namespace), - User(AuthsourceID(authsource), Username(user))) - return ('', 204) - - @app.route('/api/v1/namespace//user//', methods=['DELETE']) + app.config[_APP].add_user_to_namespace( + admin_authsource, + token, + NamespaceID(namespace), + User(AuthsourceID(authsource), Username(user)), + ) + return ("", 204) + + @app.route( + "/api/v1/namespace//user//", methods=["DELETE"] + ) def remove_user_from_namespace(namespace, authsource, user): """ Remove a user from a namespace. Removing a non-existant user throws an error. """ admin_authsource, token = _get_auth(request) app.config[_APP].remove_user_from_namespace( - admin_authsource, token, NamespaceID(namespace), - User(AuthsourceID(authsource), Username(user))) - return ('', 204) - - @app.route('/api/v1/namespace//set', methods=['PUT']) + admin_authsource, + token, + NamespaceID(namespace), + User(AuthsourceID(authsource), Username(user)), + ) + return ("", 204) + + @app.route("/api/v1/namespace//set", methods=["PUT"]) def set_namespace_params(namespace): - """ Change settings on a namespace. 
""" + """Change settings on a namespace.""" authsource, token = _get_auth(request) - pubmap = request.args.get('publicly_mappable') + pubmap = request.args.get("publicly_mappable") if pubmap: # expand later if more settings are allowed if pubmap not in [_TRUE, _FALSE]: raise IllegalParameterError( - "Expected value of 'true' or 'false' for publicly_mappable") + "Expected value of 'true' or 'false' for publicly_mappable" + ) app.config[_APP].set_namespace_publicly_mappable( - authsource, token, NamespaceID(namespace), pubmap == _TRUE) + authsource, token, NamespaceID(namespace), pubmap == _TRUE + ) else: - raise MissingParameterError('No settings provided.') - return ('', 204) + raise MissingParameterError("No settings provided.") + return ("", 204) - @app.route('/api/v1/namespace/', methods=['GET']) + @app.route("/api/v1/namespace/", methods=["GET"]) def get_namespace(namespace): - """ Get a namespace. """ + """Get a namespace.""" authsource, token = _get_auth(request, False) ns = app.config[_APP].get_namespace(NamespaceID(namespace), authsource, token) - return flask.jsonify({'namespace': ns.namespace_id.id, - 'publicly_mappable': ns.is_publicly_mappable, - 'users': _users_to_jsonable(ns.authed_users)}) - - @app.route('/api/v1/namespace', methods=['GET']) + return flask.jsonify( + { + "namespace": ns.namespace_id.id, + "publicly_mappable": ns.is_publicly_mappable, + "users": _users_to_jsonable(ns.authed_users), + } + ) + + @app.route("/api/v1/namespace", methods=["GET"]) def get_namespaces(): - """ Get all namespaces. 
""" + """Get all namespaces.""" public, private = app.config[_APP].get_namespaces() - return flask.jsonify({'publicly_mappable': sorted([ns.id for ns in public]), - 'privately_mappable': sorted([ns.id for ns in private])}) - - @app.route('/api/v1/mapping//', methods=['PUT', 'POST']) + return flask.jsonify( + { + "publicly_mappable": sorted([ns.id for ns in public]), + "privately_mappable": sorted([ns.id for ns in private]), + } + ) + + @app.route("/api/v1/mapping//", methods=["PUT", "POST"]) def create_mapping(admin_ns, other_ns): - """ Create a mapping. """ + """Create a mapping.""" authsource, token = _get_auth(request) ids = _get_object_id_dict_from_json(request) if len(ids) > 10000: - raise IllegalParameterError('A maximum of 10000 ids are allowed') + raise IllegalParameterError("A maximum of 10000 ids are allowed") for id_ in ids: - app.config[_APP].create_mapping(authsource, token, - ObjectID(NamespaceID(admin_ns), id_.strip()), - ObjectID(NamespaceID(other_ns), ids[id_].strip())) - return ('', 204) - - @app.route('/api/v1/mapping//', methods=['DELETE']) + app.config[_APP].create_mapping( + authsource, + token, + ObjectID(NamespaceID(admin_ns), id_.strip()), + ObjectID(NamespaceID(other_ns), ids[id_].strip()), + ) + return ("", 204) + + @app.route("/api/v1/mapping//", methods=["DELETE"]) def remove_mapping(admin_ns, other_ns): - """ Remove a mapping. 
""" + """Remove a mapping.""" authsource, token = _get_auth(request) ids = _get_object_id_dict_from_json(request) if len(ids) > 10000: - raise IllegalParameterError('A maximum of 10000 ids are allowed') + raise IllegalParameterError("A maximum of 10000 ids are allowed") for id_ in ids: - app.config[_APP].remove_mapping(authsource, token, - ObjectID(NamespaceID(admin_ns), id_.strip()), - ObjectID(NamespaceID(other_ns), ids[id_].strip())) - return ('', 204) - - @app.route('/api/v1/mapping//', methods=['GET']) + app.config[_APP].remove_mapping( + authsource, + token, + ObjectID(NamespaceID(admin_ns), id_.strip()), + ObjectID(NamespaceID(other_ns), ids[id_].strip()), + ) + return ("", 204) + + @app.route("/api/v1/mapping//", methods=["GET"]) def get_mappings(ns): - """ Find mappings. """ - ns_filter = request.args.get('namespace_filter') - separate = request.args.get('separate') + """Find mappings.""" + ns_filter = request.args.get("namespace_filter") + separate = request.args.get("separate") if ns_filter and ns_filter.strip(): - ns_filter = [NamespaceID(n.strip()) for n in ns_filter.split(',')] + ns_filter = [NamespaceID(n.strip()) for n in ns_filter.split(",")] else: ns_filter = [] ids = _get_object_id_list_from_json(request) if len(ids) > 1000: - raise IllegalParameterError('A maximum of 1000 ids are allowed') + raise IllegalParameterError("A maximum of 1000 ids are allowed") ret = {} for id_ in ids: id_ = id_.strip() - a, o = app.config[_APP].get_mappings(ObjectID(NamespaceID(ns), id_), ns_filter) + a, o = app.config[_APP].get_mappings( + ObjectID(NamespaceID(ns), id_), ns_filter + ) if separate is not None: # empty string if in query with no value - ret[id_] = {'admin': _objids_to_jsonable(a), 'other': _objids_to_jsonable(o)} + ret[id_] = { + "admin": _objids_to_jsonable(a), + "other": _objids_to_jsonable(o), + } else: a.update(o) - ret[id_] = {'mappings': _objids_to_jsonable(a)} + ret[id_] = {"mappings": _objids_to_jsonable(a)} return flask.jsonify(ret) 
################ @@ -355,49 +421,49 @@ def get_mappings(ns): @app.errorhandler(IDMappingError) def general_app_errors(err): - """ Handle general application errors. These are user-caused and always map to 400. """ + """Handle general application errors. These are user-caused and always map to 400.""" _log_exception(err) return _format_error(err, 400, err.error_type) @app.errorhandler(JSONDecodeError) def json_errors(err): - """ Handle invalid input JSON. """ + """Handle invalid input JSON.""" _log_exception(err) - return _format_error(err, 400, errprefix='Input JSON decode error: ') + return _format_error(err, 400, errprefix="Input JSON decode error: ") @app.errorhandler(AuthenticationError) def authentication_errors(err): - """ Handle authentication errors. """ + """Handle authentication errors.""" _log_exception(err) return _format_error(err, 401, err.error_type) @app.errorhandler(UnauthorizedError) def authorization_errors(err): - """ Handle authorization errors. """ + """Handle authorization errors.""" _log_exception(err) return _format_error(err, 403, err.error_type) @app.errorhandler(NoDataException) def no_data_errors(err): - """ Handle requests for data, such as namespaces, that don't exist. """ + """Handle requests for data, such as namespaces, that don't exist.""" _log_exception(err) return _format_error(err, 404, err.error_type) @app.errorhandler(NotFound) def not_found_errors(err): - """ Handle plain old not found errors thrown by Flask. """ + """Handle plain old not found errors thrown by Flask.""" _log_exception(err) return _format_error(err, 404) @app.errorhandler(MethodNotAllowed) def method_not_allowed(err): - """ Handle invalid method requests. 
""" + """Handle invalid method requests.""" _log_exception(err) return _format_error(err, 405) @app.errorhandler(Exception) def all_errors(err): - """ Catch-all error handler of last resort """ + """Catch-all error handler of last resort""" _log_exception(err) return _format_error(err, 500) diff --git a/src/jgikbase/idmapping/storage/id_mapping_storage.py b/src/jgikbase/idmapping/storage/id_mapping_storage.py index 571ce62..ae21d1d 100644 --- a/src/jgikbase/idmapping/storage/id_mapping_storage.py +++ b/src/jgikbase/idmapping/storage/id_mapping_storage.py @@ -2,6 +2,7 @@ Interface for a storage system for ID mappings. """ + # it'd be nice if you could just pragma: no cover the entire file, but that doesn't seem to work from abc import abstractmethod as _abstractmethod # pragma: no cover from abc import ABCMeta as _ABCMeta # pragma: no cover @@ -11,13 +12,14 @@ from jgikbase.idmapping.core.object_id import Namespace # pragma: no cover from typing import Iterable, Set, Tuple # pragma: no cover from jgikbase.idmapping.core.object_id import ObjectID # pragma: no cover -from typing import Dict +from typing import Dict, Optional class IDMappingStorage: # pragma: no cover """ An interface for a storage system for ID mappings. All methods are abstract. """ + __metaclass__ = _ABCMeta @_abstractmethod @@ -38,14 +40,14 @@ def create_local_user(self, username: Username, token: HashedToken) -> None: @_abstractmethod def set_local_user_as_admin(self, username: Username, admin: bool) -> None: - ''' + """ Mark a user as a system admin. Or not. :param username: the name of the user to alter. :param admin: True to give the user admin privileges, False to remove them. If the user is already in the given state, no further action is taken. :raises TypeError: if the usename is None. 
- ''' + """ raise NotImplementedError() @_abstractmethod @@ -88,12 +90,12 @@ def get_users(self) -> Dict[Username, bool]: @_abstractmethod def user_exists(self, username: Username) -> bool: - ''' + """ Check if a user exist in the system. Returns True if so. :param username: the username to check. :raises TypeError: if the username is None. - ''' + """ raise NotImplementedError() @_abstractmethod @@ -108,7 +110,9 @@ def create_namespace(self, namespace_id: NamespaceID) -> None: raise NotImplementedError() @_abstractmethod - def add_user_to_namespace(self, namespace_id: NamespaceID, admin_user: User) -> None: + def add_user_to_namespace( + self, namespace_id: NamespaceID, admin_user: User + ) -> None: """ Add a user to a namespace, giving them administration rights. A noop occurs if the user is already an administrator for the namespace. @@ -122,7 +126,9 @@ def add_user_to_namespace(self, namespace_id: NamespaceID, admin_user: User) -> raise NotImplementedError() @_abstractmethod - def remove_user_from_namespace(self, namespace_id: NamespaceID, admin_user: User) -> None: + def remove_user_from_namespace( + self, namespace_id: NamespaceID, admin_user: User + ) -> None: """ Remove a user from a namespace, removing their administration rights. @@ -135,8 +141,9 @@ def remove_user_from_namespace(self, namespace_id: NamespaceID, admin_user: User raise NotImplementedError() @_abstractmethod - def set_namespace_publicly_mappable(self, namespace_id: NamespaceID, publicly_mappable: bool - ) -> None: + def set_namespace_publicly_mappable( + self, namespace_id: NamespaceID, publicly_mappable: bool + ) -> None: """ Set the publicly mappable flag on a namespace. 
@@ -149,7 +156,7 @@ def set_namespace_publicly_mappable(self, namespace_id: NamespaceID, publicly_ma raise NotImplementedError() @_abstractmethod - def get_namespaces(self, nids: Iterable[NamespaceID]=None) -> Set[Namespace]: + def get_namespaces(self, nids: Optional[Iterable[NamespaceID]] = None) -> Set[Namespace]: """ Get all the namespaces in the system. @@ -198,8 +205,9 @@ def remove_mapping(self, primary_OID: ObjectID, secondary_OID: ObjectID) -> bool raise NotImplementedError() @_abstractmethod - def find_mappings(self, oid: ObjectID, ns_filter: Iterable[NamespaceID]=None - ) -> Tuple[Set[ObjectID], Set[ObjectID]]: + def find_mappings( + self, oid: ObjectID, ns_filter: Optional[Iterable[NamespaceID]] = None + ) -> Tuple[Set[ObjectID], Set[ObjectID]]: """ Find mappings given a namespace / id combination. diff --git a/src/jgikbase/idmapping/storage/mongo/id_mapping_mongo_storage.py b/src/jgikbase/idmapping/storage/mongo/id_mapping_mongo_storage.py index 4ff0884..b1b304d 100644 --- a/src/jgikbase/idmapping/storage/mongo/id_mapping_mongo_storage.py +++ b/src/jgikbase/idmapping/storage/mongo/id_mapping_mongo_storage.py @@ -1,17 +1,36 @@ """ A MongoDB based storage system for ID mapping. 
""" -from jgikbase.idmapping.storage.id_mapping_storage import IDMappingStorage as _IDMappingStorage + +from jgikbase.idmapping.storage.id_mapping_storage import ( + IDMappingStorage as _IDMappingStorage, +) from jgikbase.idmapping.core.tokens import HashedToken from jgikbase.idmapping.core.user import User, AuthsourceID, Username from pymongo.database import Database from jgikbase.idmapping.core.arg_check import not_none, no_Nones_in_iterable from pymongo.errors import DuplicateKeyError, PyMongoError import re -from jgikbase.idmapping.storage.errors import IDMappingStorageError, StorageInitException -from jgikbase.idmapping.core.errors import NoSuchUserError, UserExistsError, InvalidTokenError,\ - NamespaceExistsError, NoSuchNamespaceError -from typing import Set, Iterable, Tuple, Dict, Any, List # @UnusedImport pydev gets confused here +from jgikbase.idmapping.storage.errors import ( + IDMappingStorageError, + StorageInitException, +) +from jgikbase.idmapping.core.errors import ( + NoSuchUserError, + UserExistsError, + InvalidTokenError, + NamespaceExistsError, + NoSuchNamespaceError, +) +from typing import ( + Set, + Iterable, + Tuple, + Dict, + Any, + List, + Optional, +) # @UnusedImport pydev gets confused here from jgikbase.idmapping.core.object_id import NamespaceID, Namespace, ObjectID # Testing the (many) catch blocks for the general mongo exception is pretty hard, since it @@ -25,71 +44,68 @@ # schema version checking constants. # the schema version collection -_COL_CONFIG = 'config' +_COL_CONFIG = "config" # the current version of the database schema. _SCHEMA_VERSION = 1 # the key for the schema document used to ensure a singleton. -_FLD_SCHEMA_KEY = 'schema' +_FLD_SCHEMA_KEY = "schema" # the value for the schema key. -_SCHEMA_VALUE = 'schema' +_SCHEMA_VALUE = "schema" # whether the schema is in the process of an update. Value is a boolean. -_FLD_SCHEMA_UPDATE = 'inupdate' +_FLD_SCHEMA_UPDATE = "inupdate" # the version of the schema. 
Value is _SCHEMA_VERSION. -_FLD_SCHEMA_VERSION = 'schemaver' +_FLD_SCHEMA_VERSION = "schemaver" # database collections -_COL_USERS = 'users' -_COL_NAMESPACES = 'ns' -_COL_MAPPINGS = 'map' +_COL_USERS = "users" +_COL_NAMESPACES = "ns" +_COL_MAPPINGS = "map" # user collection fields -_FLD_AUTHSOURCE = 'auth' -_FLD_USER = 'user' -_FLD_TOKEN = 'hshtkn' -_FLD_ADMIN = 'admin' +_FLD_AUTHSOURCE = "auth" +_FLD_USER = "user" +_FLD_TOKEN = "hshtkn" # nosec +_FLD_ADMIN = "admin" # namespace collection fields -_FLD_NS_ID = 'nsid' -_FLD_PUB_MAP = 'pubmap' -_FLD_USERS = 'users' -_FLD_AUTHSOURCE = 'auth' -_FLD_NAME = 'name' +_FLD_NS_ID = "nsid" +_FLD_PUB_MAP = "pubmap" +_FLD_USERS = "users" +_FLD_AUTHSOURCE = "auth" +_FLD_NAME = "name" # mapping collection fields: -_FLD_PRIMARY_NS = 'pnsid' -_FLD_SECONDARY_NS = 'snsid' -_FLD_PRIMARY_ID = 'pid' -_FLD_SECONDARY_ID = 'sid' - -_INDEXES = {_COL_USERS: [{'idx': _FLD_USER, - 'kw': {'unique': True}, - }, - {'idx': _FLD_TOKEN, - 'kw': {'unique': True} - }], - _COL_NAMESPACES: [{'idx': _FLD_NS_ID, - 'kw': {'unique': True} - } - ], - _COL_MAPPINGS: [{'idx': [(_FLD_PRIMARY_NS, 1), - (_FLD_PRIMARY_ID, 1), - (_FLD_SECONDARY_NS, 1), - (_FLD_SECONDARY_ID, 1)], - 'kw': {'unique': True} - }, - # index for 'backwards' queries - # could improve performance by including the primary IDs for covered - # queries. Not sure if that's worth the index size increase. 
- {'idx': [(_FLD_SECONDARY_NS, 1), - (_FLD_SECONDARY_ID, 1)], - 'kw': {} - } - ], - _COL_CONFIG: [{'idx': _FLD_SCHEMA_KEY, - 'kw': {'unique': True} - } - ] - } +_FLD_PRIMARY_NS = "pnsid" +_FLD_SECONDARY_NS = "snsid" +_FLD_PRIMARY_ID = "pid" +_FLD_SECONDARY_ID = "sid" + +_INDEXES = { + _COL_USERS: [ + { + "idx": _FLD_USER, + "kw": {"unique": True}, + }, + {"idx": _FLD_TOKEN, "kw": {"unique": True}}, + ], + _COL_NAMESPACES: [{"idx": _FLD_NS_ID, "kw": {"unique": True}}], + _COL_MAPPINGS: [ + { + "idx": [ + (_FLD_PRIMARY_NS, 1), + (_FLD_PRIMARY_ID, 1), + (_FLD_SECONDARY_NS, 1), + (_FLD_SECONDARY_ID, 1), + ], + "kw": {"unique": True}, + }, + # index for 'backwards' queries + # could improve performance by including the primary IDs for covered + # queries. Not sure if that's worth the index size increase. + {"idx": [(_FLD_SECONDARY_NS, 1), (_FLD_SECONDARY_ID, 1)], "kw": {}}, + ], + _COL_CONFIG: [{"idx": _FLD_SCHEMA_KEY, "kw": {"unique": True}}], +} class IDMappingMongoStorage(_IDMappingStorage): @@ -107,7 +123,7 @@ def __init__(self, db: Database) -> None: :raises StorageInitException: if the storage system could not be initialized properly. :raises TypeError: if the Mongo database is None. 
""" - not_none(db, 'db') + not_none(db, "db") self._db = db self._ensure_indexes() self._check_schema() # MUST happen after ensuring indexes @@ -116,64 +132,90 @@ def _ensure_indexes(self): try: for col in _INDEXES: for idxinfo in _INDEXES[col]: - self._db[col].create_index(idxinfo['idx'], **idxinfo['kw']) + self._db[col].create_index(idxinfo["idx"], **idxinfo["kw"]) except PyMongoError as e: - raise StorageInitException('Failed to create index: ' + str(e)) from e + raise StorageInitException("Failed to create index: " + str(e)) from e def _check_schema(self): col = self._db[_COL_CONFIG] try: - col.insert_one({_FLD_SCHEMA_KEY: _SCHEMA_VALUE, - _FLD_SCHEMA_UPDATE: False, - _FLD_SCHEMA_VERSION: _SCHEMA_VERSION}) - except DuplicateKeyError as e: + col.insert_one( + { + _FLD_SCHEMA_KEY: _SCHEMA_VALUE, + _FLD_SCHEMA_UPDATE: False, + _FLD_SCHEMA_VERSION: _SCHEMA_VERSION, + } + ) + except DuplicateKeyError: # ok, the schema version document is already there, this isn't the first time this # database as been used. Now check the document is ok. - if col.count() != 1: + docs = list(col.find({})) + if len(docs) != 1: raise StorageInitException( - 'Multiple config objects found in the database. ' + - 'This should not happen, something is very wrong.') + "Multiple config objects found in the database. " + + "This should not happen, something is very wrong." + ) cfgdoc = col.find_one({_FLD_SCHEMA_KEY: _SCHEMA_VALUE}) if cfgdoc[_FLD_SCHEMA_VERSION] != _SCHEMA_VERSION: raise StorageInitException( - 'Incompatible database schema. Server is v{}, DB is v{}'.format( - _SCHEMA_VERSION, cfgdoc[_FLD_SCHEMA_VERSION])) + "Incompatible database schema. Server is v{}, DB is v{}".format( + _SCHEMA_VERSION, cfgdoc[_FLD_SCHEMA_VERSION] + ) + ) if cfgdoc[_FLD_SCHEMA_UPDATE]: raise StorageInitException( - 'The database is in the middle of an update from ' + - 'v{} of the schema. 
Aborting startup.'.format(cfgdoc[_FLD_SCHEMA_VERSION])) + "The database is in the middle of an update from " + + "v{} of the schema. Aborting startup.".format( + cfgdoc[_FLD_SCHEMA_VERSION] + ) + ) except PyMongoError as e: - raise StorageInitException('Connection to database failed: ' + str(e)) from e + raise StorageInitException( + "Connection to database failed: " + str(e) + ) from e def create_local_user(self, username: Username, token: HashedToken) -> None: - not_none(username, 'username') - not_none(token, 'token') + not_none(username, "username") + not_none(token, "token") try: - self._db[_COL_USERS].insert_one({_FLD_USER: username.name, - _FLD_TOKEN: token.token_hash, - _FLD_ADMIN: False}) + self._db[_COL_USERS].insert_one( + { + _FLD_USER: username.name, + _FLD_TOKEN: token.token_hash, + _FLD_ADMIN: False, + } + ) except DuplicateKeyError as e: coll, index = self._get_duplicate_location(e) if coll == _COL_USERS: - if index == _FLD_USER + '_1': + if index == _FLD_USER + "_1": raise UserExistsError(username.name) - elif index == _FLD_TOKEN + '_1': - raise ValueError('The provided token already exists in the database') + elif index == _FLD_TOKEN + "_1": + raise ValueError( + "The provided token already exists in the database" + ) # this is impossible to test - raise IDMappingStorageError('Unexpected duplicate key exception') + raise IDMappingStorageError("Unexpected duplicate key exception") except PyMongoError as e: - raise IDMappingStorageError('Connection to database failed: ' + str(e)) from e + raise IDMappingStorageError( + "Connection to database failed: " + str(e) + ) from e def set_local_user_as_admin(self, username: Username, admin: bool) -> None: - not_none(username, 'username') + not_none(username, "username") admin = True if admin else False # more readable than admin and True try: - res = self._db[_COL_USERS].update_one({_FLD_USER: username.name}, - {'$set': {_FLD_ADMIN: admin}}) - if res.matched_count != 1: # don't care if user was updated or 
not, just found + res = self._db[_COL_USERS].update_one( + {_FLD_USER: username.name}, {"$set": {_FLD_ADMIN: admin}} + ) + if ( + res.matched_count != 1 + ): # don't care if user was updated or not, just found raise NoSuchUserError(username.name) except PyMongoError as e: - raise IDMappingStorageError('Connection to database failed: ' + str(e)) from e + raise IDMappingStorageError( + "Connection to database failed: " + str(e) + ) from e # this regex is gross, but matches duplicate key error text across mongo versions 2 & 3 at # least. Example strings: @@ -182,8 +224,10 @@ def set_local_user_as_admin(self, username: Username, admin: bool) -> None: # 3.2+ # E11000 duplicate key error collection: test_id_mapping.users index: hshtkn_1 dup key: # { : "t" } - _DUPLICATE_KEY_REGEX = re.compile('duplicate key error (index|collection): ' + - r'\w+\.(\w+)( index: |\.\$)([\.\w]+)\s+') + _DUPLICATE_KEY_REGEX = re.compile( + "duplicate key error (index|collection): " + + r"\w+\.(\w+)( index: |\.\$)([\.\w]+)\s+" + ) def _get_duplicate_location(self, e: DuplicateKeyError): # To know where the duplicate key conflict occurred, we need the collection name and @@ -198,31 +242,40 @@ def _get_duplicate_location(self, e: DuplicateKeyError): else: # should never happen # the key value may be sensitive (e.g. 
a token) so remove it - raise IDMappingStorageError('unable to parse duplicate key error: ' + - e.args[0].split('dup key')[0]) + raise IDMappingStorageError( + "unable to parse duplicate key error: " + e.args[0].split("dup key")[0] + ) def update_local_user_token(self, username: Username, token: HashedToken) -> None: - not_none(username, 'username') - not_none(token, 'token') + not_none(username, "username") + not_none(token, "token") try: - res = self._db[_COL_USERS].update_one({_FLD_USER: username.name}, - {'$set': {_FLD_TOKEN: token.token_hash}}) - if res.matched_count != 1: # don't care if user was updated or not, just found + res = self._db[_COL_USERS].update_one( + {_FLD_USER: username.name}, {"$set": {_FLD_TOKEN: token.token_hash}} + ) + if ( + res.matched_count != 1 + ): # don't care if user was updated or not, just found raise NoSuchUserError(username.name) - except DuplicateKeyError as e: + except DuplicateKeyError: # since only the token can cause a duplicate key error here, we assume something # crazy isn't going and just raise that exception - raise ValueError('The provided token already exists in the database') + raise ValueError("The provided token already exists in the database") except PyMongoError as e: - raise IDMappingStorageError('Connection to database failed: ' + str(e)) from e + raise IDMappingStorageError( + "Connection to database failed: " + str(e) + ) from e def get_user(self, token: HashedToken) -> Tuple[Username, bool]: - not_none(token, 'token') + not_none(token, "token") try: userdoc = self._db[_COL_USERS].find_one( - {_FLD_TOKEN: token.token_hash}, {_FLD_TOKEN: 0}) + {_FLD_TOKEN: token.token_hash}, {_FLD_TOKEN: 0} + ) except PyMongoError as e: - raise IDMappingStorageError('Connection to database failed: ' + str(e)) from e + raise IDMappingStorageError( + "Connection to database failed: " + str(e) + ) from e if not userdoc: raise InvalidTokenError() @@ -233,88 +286,123 @@ def get_users(self) -> Dict[Username, bool]: userdocs = 
self._db[_COL_USERS].find({}, {_FLD_TOKEN: 0}) return {Username(u[_FLD_USER]): u[_FLD_ADMIN] for u in userdocs} except PyMongoError as e: - raise IDMappingStorageError('Connection to database failed: ' + str(e)) from e + raise IDMappingStorageError( + "Connection to database failed: " + str(e) + ) from e def user_exists(self, username: Username) -> bool: - not_none(username, 'username') + not_none(username, "username") try: return self._db[_COL_USERS].count_documents({_FLD_USER: username.name}) == 1 except PyMongoError as e: - raise IDMappingStorageError('Connection to database failed: ' + str(e)) from e + raise IDMappingStorageError( + "Connection to database failed: " + str(e) + ) from e def create_namespace(self, namespace_id: NamespaceID) -> None: - not_none(namespace_id, 'namespace_id') + not_none(namespace_id, "namespace_id") try: - self._db[_COL_NAMESPACES].insert_one({_FLD_NS_ID: namespace_id.id, - _FLD_PUB_MAP: False, - _FLD_USERS: []}) - except DuplicateKeyError as e: + self._db[_COL_NAMESPACES].insert_one( + {_FLD_NS_ID: namespace_id.id, _FLD_PUB_MAP: False, _FLD_USERS: []} + ) + except DuplicateKeyError: raise NamespaceExistsError(namespace_id.id) except PyMongoError as e: - raise IDMappingStorageError('Connection to database failed: ' + str(e)) from e + raise IDMappingStorageError( + "Connection to database failed: " + str(e) + ) from e def get_namespace(self, namespace_id: NamespaceID) -> Namespace: - not_none(namespace_id, 'namespace_id') + not_none(namespace_id, "namespace_id") try: nsdoc = self._db[_COL_NAMESPACES].find_one({_FLD_NS_ID: namespace_id.id}) except PyMongoError as e: - raise IDMappingStorageError('Connection to database failed: ' + str(e)) from e + raise IDMappingStorageError( + "Connection to database failed: " + str(e) + ) from e if not nsdoc: raise NoSuchNamespaceError(namespace_id.id) return self._to_ns(nsdoc) def _to_user_set(self, userdocs) -> Set[User]: - return {User(AuthsourceID(u[_FLD_AUTHSOURCE]), Username(u[_FLD_NAME])) for 
u in userdocs} - - def add_user_to_namespace(self, namespace_id: NamespaceID, admin_user: User) -> None: + return { + User(AuthsourceID(u[_FLD_AUTHSOURCE]), Username(u[_FLD_NAME])) + for u in userdocs + } + + def add_user_to_namespace( + self, namespace_id: NamespaceID, admin_user: User + ) -> None: self._modify_namespace_users(True, namespace_id, admin_user) - def remove_user_from_namespace(self, namespace_id: NamespaceID, admin_user: User) -> None: + def remove_user_from_namespace( + self, namespace_id: NamespaceID, admin_user: User + ) -> None: self._modify_namespace_users(False, namespace_id, admin_user) def _modify_namespace_users(self, add: bool, namespace_id, admin_user): """ :param add: True to add the user to the namespace, False to remove. """ - not_none(namespace_id, 'namespace_id') - not_none(admin_user, 'admin_user') - op = '$addToSet' if add else '$pull' + not_none(namespace_id, "namespace_id") + not_none(admin_user, "admin_user") + op = "$addToSet" if add else "$pull" try: res = self._db[_COL_NAMESPACES].update_one( {_FLD_NS_ID: namespace_id.id}, - {op: {_FLD_USERS: {_FLD_AUTHSOURCE: admin_user.authsource_id.id, - _FLD_NAME: admin_user.username.name}}}) + { + op: { + _FLD_USERS: { + _FLD_AUTHSOURCE: admin_user.authsource_id.id, + _FLD_NAME: admin_user.username.name, + } + } + }, + ) if res.matched_count != 1: raise NoSuchNamespaceError(namespace_id.id) if res.modified_count != 1: - action = 'already administrates' if add else 'does not administrate' - ex = UserExistsError if add else NoSuchUserError # might want diff exceps here - raise ex('User {}/{} {} namespace {}'.format( - admin_user.authsource_id.id, admin_user.username.name, action, - namespace_id.id)) + action = "already administrates" if add else "does not administrate" + ex = ( + UserExistsError if add else NoSuchUserError + ) # might want diff exceps here + raise ex( + "User {}/{} {} namespace {}".format( + admin_user.authsource_id.id, + admin_user.username.name, + action, + 
namespace_id.id, + ) + ) except PyMongoError as e: - raise IDMappingStorageError('Connection to database failed: ' + str(e)) from e - - def set_namespace_publicly_mappable(self, namespace_id: NamespaceID, publicly_mappable: bool - ) -> None: - not_none(namespace_id, 'namespace_id') + raise IDMappingStorageError( + "Connection to database failed: " + str(e) + ) from e + + def set_namespace_publicly_mappable( + self, namespace_id: NamespaceID, publicly_mappable: bool + ) -> None: + not_none(namespace_id, "namespace_id") pm = True if publicly_mappable else False # more readable than 'and True' try: - res = self._db[_COL_NAMESPACES].update_one({_FLD_NS_ID: namespace_id.id}, - {'$set': {_FLD_PUB_MAP: pm}}) + res = self._db[_COL_NAMESPACES].update_one( + {_FLD_NS_ID: namespace_id.id}, {"$set": {_FLD_PUB_MAP: pm}} + ) if res.matched_count != 1: # don't care if modified or not raise NoSuchNamespaceError(namespace_id.id) except PyMongoError as e: - raise IDMappingStorageError('Connection to database failed: ' + str(e)) from e + raise IDMappingStorageError( + "Connection to database failed: " + str(e) + ) from e - def get_namespaces(self, nids: Iterable[NamespaceID]=None) -> Set[Namespace]: + def get_namespaces(self, nids: Optional[Iterable[NamespaceID]] = None) -> Set[Namespace]: query = {} nidstr: List[str] = [] if nids: - no_Nones_in_iterable(nids, 'nids') + no_Nones_in_iterable(nids, "nids") nidstr = [nid.id for nid in nids] - query[_FLD_NS_ID] = {'$in': nidstr} + query[_FLD_NS_ID] = {"$in": nidstr} try: nsdocs = self._db[_COL_NAMESPACES].find(query) nsobjs = {self._to_ns(nsdoc) for nsdoc in nsdocs} @@ -323,64 +411,88 @@ def get_namespaces(self, nids: Iterable[NamespaceID]=None) -> Set[Namespace]: raise NoSuchNamespaceError(str(sorted(missing))) return nsobjs except PyMongoError as e: - raise IDMappingStorageError('Connection to database failed: ' + str(e)) from e + raise IDMappingStorageError( + "Connection to database failed: " + str(e) + ) from e def _to_ns(self, 
nsdoc): return Namespace( NamespaceID(nsdoc[_FLD_NS_ID]), nsdoc[_FLD_PUB_MAP], - self._to_user_set(nsdoc[_FLD_USERS])) + self._to_user_set(nsdoc[_FLD_USERS]), + ) def add_mapping(self, primary_OID: ObjectID, secondary_OID: ObjectID) -> None: - not_none(primary_OID, 'primary_OID') - not_none(secondary_OID, 'secondary_OID') + not_none(primary_OID, "primary_OID") + not_none(secondary_OID, "secondary_OID") try: self._db[_COL_MAPPINGS].insert_one( - self.to_mapping_mongo_doc(primary_OID, secondary_OID)) - except DuplicateKeyError as e: + self.to_mapping_mongo_doc(primary_OID, secondary_OID) + ) + except DuplicateKeyError: pass # don't care, record is already there except PyMongoError as e: - raise IDMappingStorageError('Connection to database failed: ' + str(e)) from e + raise IDMappingStorageError( + "Connection to database failed: " + str(e) + ) from e def to_mapping_mongo_doc(self, primary_OID, secondary_OID): - return {_FLD_PRIMARY_NS: primary_OID.namespace_id.id, - _FLD_PRIMARY_ID: primary_OID.id, - _FLD_SECONDARY_NS: secondary_OID.namespace_id.id, - _FLD_SECONDARY_ID: secondary_OID.id} + return { + _FLD_PRIMARY_NS: primary_OID.namespace_id.id, + _FLD_PRIMARY_ID: primary_OID.id, + _FLD_SECONDARY_NS: secondary_OID.namespace_id.id, + _FLD_SECONDARY_ID: secondary_OID.id, + } def remove_mapping(self, primary_OID: ObjectID, secondary_OID: ObjectID) -> bool: - not_none(primary_OID, 'primary_OID') - not_none(secondary_OID, 'secondary_OID') + not_none(primary_OID, "primary_OID") + not_none(secondary_OID, "secondary_OID") try: res = self._db[_COL_MAPPINGS].delete_one( - self.to_mapping_mongo_doc(primary_OID, secondary_OID)) + self.to_mapping_mongo_doc(primary_OID, secondary_OID) + ) return res.deleted_count == 1 except PyMongoError as e: - raise IDMappingStorageError('Connection to database failed: ' + str(e)) from e - - def find_mappings(self, oid: ObjectID, ns_filter: Iterable[NamespaceID]=None - ) -> Tuple[Set[ObjectID], Set[ObjectID]]: - not_none(oid, 'oid') + raise 
IDMappingStorageError( + "Connection to database failed: " + str(e) + ) from e + + def find_mappings( + self, oid: ObjectID, ns_filter: Optional[Iterable[NamespaceID]] = None + ) -> Tuple[Set[ObjectID], Set[ObjectID]]: + not_none(oid, "oid") # could probably make a method & run it twice here but not worth the trouble - primary_query: Dict[str, Any] = {_FLD_PRIMARY_NS: oid.namespace_id.id, - _FLD_PRIMARY_ID: oid.id} - secondary_query: Dict[str, Any] = {_FLD_SECONDARY_NS: oid.namespace_id.id, - _FLD_SECONDARY_ID: oid.id} + primary_query: Dict[str, Any] = { + _FLD_PRIMARY_NS: oid.namespace_id.id, + _FLD_PRIMARY_ID: oid.id, + } + secondary_query: Dict[str, Any] = { + _FLD_SECONDARY_NS: oid.namespace_id.id, + _FLD_SECONDARY_ID: oid.id, + } if ns_filter: - no_Nones_in_iterable(ns_filter, 'ns_filter') + no_Nones_in_iterable(ns_filter, "ns_filter") fil = [ns.id for ns in ns_filter] - primary_query[_FLD_SECONDARY_NS] = {'$in': fil} - secondary_query[_FLD_PRIMARY_NS] = {'$in': fil} + primary_query[_FLD_SECONDARY_NS] = {"$in": fil} + secondary_query[_FLD_PRIMARY_NS] = {"$in": fil} try: mappings = self._db[_COL_MAPPINGS].find( - primary_query, {_FLD_PRIMARY_NS: 0, _FLD_PRIMARY_ID: 0}) - primary = {ObjectID(NamespaceID(m[_FLD_SECONDARY_NS]), m[_FLD_SECONDARY_ID]) - for m in mappings} + primary_query, {_FLD_PRIMARY_NS: 0, _FLD_PRIMARY_ID: 0} + ) + primary = { + ObjectID(NamespaceID(m[_FLD_SECONDARY_NS]), m[_FLD_SECONDARY_ID]) + for m in mappings + } mappings = self._db[_COL_MAPPINGS].find( - secondary_query, {_FLD_SECONDARY_NS: 0, _FLD_SECONDARY_ID: 0}) - secondary = {ObjectID(NamespaceID(m[_FLD_PRIMARY_NS]), m[_FLD_PRIMARY_ID]) - for m in mappings} + secondary_query, {_FLD_SECONDARY_NS: 0, _FLD_SECONDARY_ID: 0} + ) + secondary = { + ObjectID(NamespaceID(m[_FLD_PRIMARY_NS]), m[_FLD_PRIMARY_ID]) + for m in mappings + } return primary, secondary # nothing to check here. 
As long as the op doesn't fail we're good except PyMongoError as e: - raise IDMappingStorageError('Connection to database failed: ' + str(e)) from e + raise IDMappingStorageError( + "Connection to database failed: " + str(e) + ) from e diff --git a/src/jgikbase/idmapping/userlookup/kbase_user_lookup.py b/src/jgikbase/idmapping/userlookup/kbase_user_lookup.py index fe4a5ce..229e95b 100644 --- a/src/jgikbase/idmapping/userlookup/kbase_user_lookup.py +++ b/src/jgikbase/idmapping/userlookup/kbase_user_lookup.py @@ -61,7 +61,7 @@ def _check_error(self, r): if r.status_code != 200: try: j = r.json() - except Exception as e: # @UnusedVariable + except Exception: err = ('Non-JSON response from KBase auth server, status code: ' + str(r.status_code)) logging.getLogger(__name__).info('%s, response:\n%s', err, r.text) diff --git a/src/jgikbase/test/idmapping/config_test.py b/src/jgikbase/test/idmapping/config_test.py index 389a546..ef765b7 100644 --- a/src/jgikbase/test/idmapping/config_test.py +++ b/src/jgikbase/test/idmapping/config_test.py @@ -59,6 +59,7 @@ def test_kb_config_minimal_config(): assert c.auth_enabled == set() assert c.auth_admin_enabled == set() assert c.ignore_ip_headers is False + assert c.mongo_retrywrites is False def test_kb_config_minimal_config_whitespace(): @@ -66,6 +67,7 @@ 'mongo-host=foo', 'mongo-db=bar', 'mongo-user= \t ', 'mongo-pwd= \t ', 'dont-trust-x-ip-headers= crap', + 'mongo-retrywrites= another crap', 'authentication-enabled= \t ', 'authentication-admin-enabled= \t ']) c = KBaseConfig(p) @@ -77,12 +79,14 @@ def test_kb_config_minimal_config_whitespace(): assert c.auth_enabled == set() assert c.auth_admin_enabled == set() assert c.ignore_ip_headers is False + assert c.mongo_retrywrites is False def test_kb_config_maximal_config(): p = mock_path_to_file('path', [ '[idmapping]', 'mongo-host=foo', 'mongo-db=bar', 'mongo-user=u', 'mongo-pwd=p', 'dont-trust-x-ip-headers=true', +
'mongo-retrywrites=true', 'authentication-enabled= authone, auththree, \t authtwo , local ', 'authentication-admin-enabled= authone, autha, \t authbcd ', 'auth-source-authone-factory-module= some.module \t ', @@ -106,6 +110,7 @@ def test_kb_config_maximal_config(): 'whee': 'whoo'}), AuthsourceID('auththree'): ('some.other.other.module', {'x': 'Y'})} assert c.ignore_ip_headers is True + assert c.mongo_retrywrites is True def test_kb_config_fail_not_file(): diff --git a/src/jgikbase/test/idmapping/core/tokens_test.py b/src/jgikbase/test/idmapping/core/tokens_test.py index 5dee145..49b73e9 100644 --- a/src/jgikbase/test/idmapping/core/tokens_test.py +++ b/src/jgikbase/test/idmapping/core/tokens_test.py @@ -7,13 +7,13 @@ def test_hashed_token_init_pass(): - ht = HashedToken('foo') - assert ht.token_hash == 'foo' + ht = HashedToken("foo") + assert ht.token_hash == "foo" def test_hashed_token_init_fail(): - fail_hashed_token_init(None, MissingParameterError('token_hash')) - fail_hashed_token_init(' \t \n ', MissingParameterError('token_hash')) + fail_hashed_token_init(None, MissingParameterError("token_hash")) + fail_hashed_token_init(" \t \n ", MissingParameterError("token_hash")) def fail_hashed_token_init(htoken: str, expected: Exception): @@ -23,28 +23,28 @@ def fail_hashed_token_init(htoken: str, expected: Exception): def test_hashed_token_equals(): - assert HashedToken('foo') == HashedToken('foo') - assert HashedToken('foo') != HashedToken('bar') - assert HashedToken('foo') != 'foo' + assert HashedToken("foo") == HashedToken("foo") + assert HashedToken("foo") != HashedToken("bar") + assert HashedToken("foo") != "foo" def test_hashed_token_hash(): # string hashes will change from instance to instance of the python interpreter, and therefore # tests can't be written that directly test the hash value. 
See # https://docs.python.org/3/reference/datamodel.html#object.__hash__ - assert hash(HashedToken('foo')) == hash(HashedToken('foo')) - assert hash(HashedToken('bar')) == hash(HashedToken('bar')) - assert hash(HashedToken('foo')) != hash(HashedToken('bar')) + assert hash(HashedToken("foo")) == hash(HashedToken("foo")) + assert hash(HashedToken("bar")) == hash(HashedToken("bar")) + assert hash(HashedToken("foo")) != hash(HashedToken("bar")) def test_token_init_pass(): - t = Token('foo') - assert t.token == 'foo' + t = Token("foo") + assert t.token == "foo" def test_token_init_fail(): - fail_token_init(None, MissingParameterError('token')) - fail_token_init(' \t \n ', MissingParameterError('token')) + fail_token_init(None, MissingParameterError("token")) + fail_token_init(" \t \n ", MissingParameterError("token")) def fail_token_init(token: str, expected: Exception): @@ -54,30 +54,33 @@ def fail_token_init(token: str, expected: Exception): def test_hash_token(): - t = Token('foo') + t = Token("foo") ht = t.get_hashed_token() - assert ht.token_hash == '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae' + assert ( + ht.token_hash + == "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae" + ) def test_token_equals(): - assert Token('foo') == Token('foo') - assert Token('foo') != Token('bar') - assert Token('foo') != 'foo' + assert Token("foo") == Token("foo") + assert Token("foo") != Token("bar") + assert Token("foo") != "foo" def test_token_hash(): # string hashes will change from instance to instance of the python interpreter, and therefore # tests can't be written that directly test the hash value. 
See # https://docs.python.org/3/reference/datamodel.html#object.__hash__ - assert hash(Token('foo')) == hash(Token('foo')) - assert hash(Token('bar')) == hash(Token('bar')) - assert hash(Token('foo')) != hash(Token('bar')) + assert hash(Token("foo")) == hash(Token("foo")) + assert hash(Token("bar")) == hash(Token("bar")) + assert hash(Token("foo")) != hash(Token("bar")) def test_generate_token(): t = tokens.generate_token() assert is_base64(t.token) is True - assert len(t.token) is 28 + assert len(t.token) == 28 def is_base64(s: str): diff --git a/src/jgikbase/test/idmapping/core/user_lookup_test.py b/src/jgikbase/test/idmapping/core/user_lookup_test.py index 30c8261..8b62400 100644 --- a/src/jgikbase/test/idmapping/core/user_lookup_test.py +++ b/src/jgikbase/test/idmapping/core/user_lookup_test.py @@ -460,7 +460,7 @@ def test_local_create_user(): t = LocalUserLookup(storage).create_user(Username('foo')) assert is_base64(t.token) is True - assert len(t.token) is 28 + assert len(t.token) == 28 assert storage.create_local_user.call_args_list == \ [((Username('foo'), t.get_hashed_token()), {})] @@ -479,7 +479,7 @@ def test_local_new_token(): t = LocalUserLookup(storage).new_token(Username('bar')) assert is_base64(t.token) is True - assert len(t.token) is 28 + assert len(t.token) == 28 assert storage.update_local_user_token.call_args_list == \ [((Username('bar'), t.get_hashed_token()), {})] diff --git a/src/jgikbase/test/idmapping/core/user_test.py b/src/jgikbase/test/idmapping/core/user_test.py index 4002b64..20da8fe 100644 --- a/src/jgikbase/test/idmapping/core/user_test.py +++ b/src/jgikbase/test/idmapping/core/user_test.py @@ -1,28 +1,36 @@ from jgikbase.idmapping.core.user import AuthsourceID, User, Username from pytest import raises from jgikbase.test.idmapping.test_utils import assert_exception_correct -from jgikbase.idmapping.core.errors import IllegalUsernameError, MissingParameterError,\ - IllegalParameterError +from jgikbase.idmapping.core.errors import ( + 
IllegalUsernameError, + MissingParameterError, + IllegalParameterError, +) -LONG_STR = 'a' * 100 +LONG_STR = "a" * 100 def test_authsource_init_pass(): - as_ = AuthsourceID('abcdefghijklmnopqrst') - assert as_.id == 'abcdefghijklmnopqrst' + as_ = AuthsourceID("abcdefghijklmnopqrst") + assert as_.id == "abcdefghijklmnopqrst" - as_ = AuthsourceID('uvwxyz') - assert as_.id == 'uvwxyz' + as_ = AuthsourceID("uvwxyz") + assert as_.id == "uvwxyz" def test_authsource_init_fail(): - fail_authsource_init(None, MissingParameterError('authsource id')) - fail_authsource_init(' \t \n ', - MissingParameterError('authsource id')) - fail_authsource_init('abcdefghijklmnopqrstu', IllegalParameterError( - 'authsource id abcdefghijklmnopqrstu exceeds maximum length of 20')) - fail_authsource_init('fooo1b&', - IllegalParameterError('Illegal character in authsource id fooo1b&: 1')) + fail_authsource_init(None, MissingParameterError("authsource id")) + fail_authsource_init(" \t \n ", MissingParameterError("authsource id")) + fail_authsource_init( + "abcdefghijklmnopqrstu", + IllegalParameterError( + "authsource id abcdefghijklmnopqrstu exceeds maximum length of 20" + ), + ) + fail_authsource_init( + "fooo1b&", + IllegalParameterError("Illegal character in authsource id fooo1b&: 1"), + ) def fail_authsource_init(source: str, expected: Exception): @@ -32,37 +40,42 @@ def fail_authsource_init(source: str, expected: Exception): def test_authsource_equals(): - assert AuthsourceID('foo') == AuthsourceID('foo') - assert AuthsourceID('foo') != AuthsourceID('bar') - assert AuthsourceID('foo') != 'foo' + assert AuthsourceID("foo") == AuthsourceID("foo") + assert AuthsourceID("foo") != AuthsourceID("bar") + assert AuthsourceID("foo") != "foo" def test_authsource_hash(): # string hashes will change from instance to instance of the python interpreter, and therefore # tests can't be written that directly test the hash value. 
See # https://docs.python.org/3/reference/datamodel.html#object.__hash__ - assert hash(AuthsourceID('foo')) == hash(AuthsourceID('foo')) - assert hash(AuthsourceID('bar')) == hash(AuthsourceID('bar')) - assert hash(AuthsourceID('foo')) != hash(AuthsourceID('bar')) + assert hash(AuthsourceID("foo")) == hash(AuthsourceID("foo")) + assert hash(AuthsourceID("bar")) == hash(AuthsourceID("bar")) + assert hash(AuthsourceID("foo")) != hash(AuthsourceID("bar")) def test_username_init_pass(): - u = Username(LONG_STR[0:64] + 'abcdefghijklmnopqrstuvwxyz0123456789') - assert u.name == LONG_STR[0:64] + 'abcdefghijklmnopqrstuvwxyz0123456789' + u = Username(LONG_STR[0:64] + "abcdefghijklmnopqrstuvwxyz0123456789") + assert u.name == LONG_STR[0:64] + "abcdefghijklmnopqrstuvwxyz0123456789" - u = Username('0123456789' * 10) - assert u.name == '0123456789' * 10 + u = Username("0123456789" * 10) + assert u.name == "0123456789" * 10 def test_username_init_fail(): - fail_username_init(None, MissingParameterError('username')) - fail_username_init(' \t \n ', MissingParameterError('username')) - fail_username_init(LONG_STR + 'b', IllegalUsernameError( - 'username ' + LONG_STR + 'b exceeds maximum length of 100')) - for c in '*&@-+\n\t~_': + fail_username_init(None, MissingParameterError("username")) + fail_username_init(" \t \n ", MissingParameterError("username")) + fail_username_init( + LONG_STR + "b", + IllegalUsernameError( + "username " + LONG_STR + "b exceeds maximum length of 100" + ), + ) + for c in "*&@-+\n\t~_": fail_username_init( - 'foo1d' + c, - IllegalUsernameError('Illegal character in username foo1d' + c + ': ' + c)) + "foo1d" + c, + IllegalUsernameError("Illegal character in username foo1d" + c + ": " + c), + ) def fail_username_init(username, expected): @@ -72,29 +85,29 @@ def fail_username_init(username, expected): def test_username_equals(): - assert Username('foo') == Username('foo') - assert Username('foo') != Username('bar') - assert Username('foo') != 'foo' + assert 
Username("foo") == Username("foo") + assert Username("foo") != Username("bar") + assert Username("foo") != "foo" def test_username_hash(): # string hashes will change from instance to instance of the python interpreter, and therefore # tests can't be written that directly test the hash value. See # https://docs.python.org/3/reference/datamodel.html#object.__hash__ - assert hash(Username('foo')) == hash(Username('foo')) - assert hash(Username('bar')) == hash(Username('bar')) - assert hash(Username('foo')) != hash(Username('bar')) + assert hash(Username("foo")) == hash(Username("foo")) + assert hash(Username("bar")) == hash(Username("bar")) + assert hash(Username("foo")) != hash(Username("bar")) def test_user_init_pass(): - u = User(AuthsourceID('foo'), Username('bar')) - assert u.authsource_id == AuthsourceID('foo') - assert u.username == Username('bar') + u = User(AuthsourceID("foo"), Username("bar")) + assert u.authsource_id == AuthsourceID("foo") + assert u.username == Username("bar") def test_user_init_fail(): - fail_user_init(None, Username('foo'), TypeError('authsource_id cannot be None')) - fail_user_init(AuthsourceID('bar'), None, TypeError('username cannot be None')) + fail_user_init(None, Username("foo"), TypeError("authsource_id cannot be None")) + fail_user_init(AuthsourceID("bar"), None, TypeError("username cannot be None")) def fail_user_init(authsource: AuthsourceID, username: Username, expected: Exception): @@ -104,21 +117,31 @@ def fail_user_init(authsource: AuthsourceID, username: Username, expected: Excep def test_user_equals(): - assert User(AuthsourceID('foo'), Username('baz')) == User(AuthsourceID('foo'), Username('baz')) - assert User(AuthsourceID('foo'), Username('baz')) != User(AuthsourceID('bar'), Username('baz')) - assert User(AuthsourceID('foo'), Username('baz')) != User(AuthsourceID('foo'), Username('bar')) - assert User(AuthsourceID('foo'), Username('baz')) != AuthsourceID('foo') + assert User(AuthsourceID("foo"), Username("baz")) == 
User( + AuthsourceID("foo"), Username("baz") + ) + assert User(AuthsourceID("foo"), Username("baz")) != User( + AuthsourceID("bar"), Username("baz") + ) + assert User(AuthsourceID("foo"), Username("baz")) != User( + AuthsourceID("foo"), Username("bar") + ) + assert User(AuthsourceID("foo"), Username("baz")) != AuthsourceID("foo") def test_user_hash(): # string hashes will change from instance to instance of the python interpreter, and therefore # tests can't be written that directly test the hash value. See # https://docs.python.org/3/reference/datamodel.html#object.__hash__ - assert hash(User(AuthsourceID('foo'), Username('bar'))) == hash( - User(AuthsourceID('foo'), Username('bar'))) - assert hash(User(AuthsourceID('bar'), Username('foo'))) == hash( - User(AuthsourceID('bar'), Username('foo'))) - assert hash(User(AuthsourceID('baz'), Username('foo'))) != hash( - User(AuthsourceID('bar'), Username('foo'))) - assert hash(User(AuthsourceID('bar'), Username('fob'))) != hash( - User(AuthsourceID('bar'), Username('foo'))) + assert hash(User(AuthsourceID("foo"), Username("bar"))) == hash( + User(AuthsourceID("foo"), Username("bar")) + ) + assert hash(User(AuthsourceID("bar"), Username("foo"))) == hash( + User(AuthsourceID("bar"), Username("foo")) + ) + assert hash(User(AuthsourceID("baz"), Username("foo"))) != hash( + User(AuthsourceID("bar"), Username("foo")) + ) + assert hash(User(AuthsourceID("bar"), Username("fob"))) != hash( + User(AuthsourceID("bar"), Username("foo")) + ) diff --git a/src/jgikbase/test/idmapping/integration/service_test.py b/src/jgikbase/test/idmapping/integration/service_test.py index 8520a78..60fdb3f 100644 --- a/src/jgikbase/test/idmapping/integration/service_test.py +++ b/src/jgikbase/test/idmapping/integration/service_test.py @@ -13,10 +13,14 @@ import logging import time import re -from jgikbase.test.idmapping.test_utils import assert_ms_epoch_close_to_now,\ - assert_json_error_correct +from jgikbase.test.idmapping.test_utils import ( + 
assert_ms_epoch_close_to_now, + assert_json_error_correct, +) from pymongo.mongo_client import MongoClient -from jgikbase.idmapping.storage.mongo.id_mapping_mongo_storage import IDMappingMongoStorage +from jgikbase.idmapping.storage.mongo.id_mapping_mongo_storage import ( + IDMappingMongoStorage, +) from jgikbase.idmapping.core.user import Username, AuthsourceID, User from jgikbase.idmapping.core.tokens import Token from jgikbase.idmapping.core.object_id import NamespaceID @@ -30,39 +34,42 @@ # Should test logging here...? Skip for now. Maybe add later. -VERSION = '0.1.1' +VERSION = "0.1.2" -DB_NAME = 'test_db_idmapping_service_integration' +DB_NAME = "test_db_idmapping_service_integration" -KBASE_URL = 'http://fake_url_for_mocking.com' -KBASE_ADMIN_ROLE = 'fake_role_for_mocking' -KBASE_TOKEN = 'fake_token_for_mocking' +KBASE_URL = "http://fake_url_for_mocking.com" +KBASE_ADMIN_ROLE = "fake_role_for_mocking" +KBASE_TOKEN = "fake_token_for_mocking" def create_deploy_cfg(mongo_port): cfg = ConfigParser() - cfg.add_section('idmapping') - cfg['idmapping']['mongo-host'] = 'localhost:' + str(mongo_port) - cfg['idmapping']['mongo-db'] = DB_NAME - - cfg['idmapping']['authentication-enabled'] = 'local, kbase' - cfg['idmapping']['authentication-admin-enabled'] = 'local, kbase' - - cfg['idmapping']['auth-source-kbase-factory-module'] = ( - 'jgikbase.idmapping.userlookup.kbase_user_lookup') - cfg['idmapping']['auth-source-kbase-init-token'] = KBASE_TOKEN - cfg['idmapping']['auth-source-kbase-init-url'] = KBASE_URL - cfg['idmapping']['auth-source-kbase-init-admin-role'] = KBASE_ADMIN_ROLE - _, path = tempfile.mkstemp('.cfg', 'deploy-', dir=test_utils.get_temp_dir(), text=True) - - with open(path, 'w') as handle: + cfg.add_section("idmapping") + cfg["idmapping"]["mongo-host"] = "localhost:" + str(mongo_port) + cfg["idmapping"]["mongo-db"] = DB_NAME + + cfg["idmapping"]["authentication-enabled"] = "local, kbase" + cfg["idmapping"]["authentication-admin-enabled"] = "local, kbase" 
+ + cfg["idmapping"][ + "auth-source-kbase-factory-module" + ] = "jgikbase.idmapping.userlookup.kbase_user_lookup" + cfg["idmapping"]["auth-source-kbase-init-token"] = KBASE_TOKEN + cfg["idmapping"]["auth-source-kbase-init-url"] = KBASE_URL + cfg["idmapping"]["auth-source-kbase-init-admin-role"] = KBASE_ADMIN_ROLE + _, path = tempfile.mkstemp( + ".cfg", "deploy-", dir=test_utils.get_temp_dir(), text=True + ) + + with open(path, "w") as handle: cfg.write(handle) return path -@fixture(scope='module') +@fixture(scope="module") def mongo(): # remove any current handlers, since tests run in one process logging.getLogger().handlers.clear() @@ -71,13 +78,19 @@ def mongo(): tempdir = test_utils.get_temp_dir() wt = test_utils.get_use_wired_tiger() mongo = MongoController(mongoexe, tempdir, wt) - print('running mongo {}{} on port {} in dir {}'.format( - mongo.db_version, ' with WiredTiger' if wt else '', mongo.port, mongo.temp_dir)) + print( + "running mongo {}{} on port {} in dir {}".format( + mongo.db_version, + " with WiredTiger" if wt else "", + mongo.port, + mongo.temp_dir, + ) + ) yield mongo del_temp = test_utils.get_delete_temp_files() - print('shutting down mongo, delete_temp_files={}'.format(del_temp)) + print("shutting down mongo, delete_temp_files={}".format(del_temp)) mongo.destroy(del_temp) if del_temp: shutil.rmtree(test_utils.get_temp_dir()) @@ -87,348 +100,448 @@ def mongo(): def service_port(mongo): mongo.clear_database(DB_NAME, drop_indexes=True) - os.environ['ID_MAPPING_CONFIG'] = create_deploy_cfg(mongo.port) + os.environ["ID_MAPPING_CONFIG"] = create_deploy_cfg(mongo.port) with requests_mock.Mocker() as m: - m.get('http://fake_url_for_mocking.com/', - request_headers={'Accept': 'application/json'}, - json={'version': '0.1.2', 'gitcommithash': 'hashyhash', 'servertime': 3}) + m.get( + "http://fake_url_for_mocking.com/", + request_headers={"Accept": "application/json"}, + json={"version": "0.1.2", "gitcommithash": "hashyhash", "servertime": 3}, + ) app = 
create_app() # this is probably the dumbest thing I've ever seen - @app.route('/ohgodnothehumanity') + @app.route("/ohgodnothehumanity") def kill(): - request.environ.get('werkzeug.server.shutdown')() - return ('', 200) + request.environ.get("werkzeug.server.shutdown")() + return ("", 200) portint = test_utils.find_free_port() - Thread(target=app.run, kwargs={'port': portint}).start() + Thread(target=app.run, kwargs={"port": portint}).start() time.sleep(0.05) port = str(portint) - print('running id mapping service at localhost:' + port) + print("running id mapping service at localhost:" + port) yield port # shutdown the server - requests.get('http://localhost:' + port + '/ohgodnothehumanity') + requests.get("http://localhost:" + port + "/ohgodnothehumanity") def get_storage_instance(mongo) -> IDMappingStorage: - client = MongoClient('localhost:' + str(mongo.port)) + client: MongoClient = MongoClient("localhost:" + str(mongo.port)) return IDMappingMongoStorage(client[DB_NAME]) def test_root(service_port): - r = requests.get('http://localhost:' + service_port) + r = requests.get("http://localhost:" + service_port) j = r.json() - time_ = j['servertime'] - commit = j['gitcommithash'] - del j['servertime'] - del j['gitcommithash'] + time_ = j["servertime"] + commit = j["gitcommithash"] + del j["servertime"] + del j["gitcommithash"] - assert j == {'service': 'ID Mapping Service', 'version': VERSION} - assert re.match('[a-f\d]{40}', commit) is not None + assert j == {"service": "ID Mapping Service", "version": VERSION} + assert re.match(r"[a-f\d]{40}", commit) is not None assert_ms_epoch_close_to_now(time_) assert r.status_code == 200 def test_create_and_get_namespace(service_port, mongo): storage = get_storage_instance(mongo) - t = Token('foobar') + t = Token("foobar") # fail to create a namespace - r = requests.put('http://localhost:' + service_port + '/api/v1/namespace/myns', - headers={'Authorization': 'local ' + t.token}) + r = requests.put( + "http://localhost:" + 
service_port + "/api/v1/namespace/myns", + headers={"Authorization": "local " + t.token}, + ) assert_json_error_correct( r.json(), - {'error': {'httpcode': 401, - 'httpstatus': 'Unauthorized', - 'appcode': 10020, - 'apperror': 'Invalid token', - 'message': '10020 Invalid token' - } - }) + { + "error": { + "httpcode": 401, + "httpstatus": "Unauthorized", + "appcode": 10020, + "apperror": "Invalid token", + "message": "10020 Invalid token", + } + }, + ) assert r.status_code == 401 # succeed at creating a namespace - storage.create_local_user(Username('user1'), t.get_hashed_token()) - storage.set_local_user_as_admin(Username('user1'), True) + storage.create_local_user(Username("user1"), t.get_hashed_token()) + storage.set_local_user_as_admin(Username("user1"), True) - r = requests.put('http://localhost:' + service_port + '/api/v1/namespace/myns', - headers={'Authorization': 'local ' + t.token}) + r = requests.put( + "http://localhost:" + service_port + "/api/v1/namespace/myns", + headers={"Authorization": "local " + t.token}, + ) assert r.status_code == 204 # get the namespace with a populated user list - r = requests.get('http://localhost:' + service_port + '/api/v1/namespace/myns', - headers={'Authorization': 'local ' + t.token}) + r = requests.get( + "http://localhost:" + service_port + "/api/v1/namespace/myns", + headers={"Authorization": "local " + t.token}, + ) - assert r.json() == {'namespace': 'myns', 'publicly_mappable': False, 'users': []} + assert r.json() == {"namespace": "myns", "publicly_mappable": False, "users": []} assert r.status_code == 200 # fail getting a namespace - r = requests.get('http://localhost:' + service_port + '/api/v1/namespace/myns1') + r = requests.get("http://localhost:" + service_port + "/api/v1/namespace/myns1") assert_json_error_correct( r.json(), - {'error': {'httpcode': 404, - 'httpstatus': 'Not Found', - 'appcode': 50010, - 'apperror': 'No such namespace', - 'message': '50010 No such namespace: myns1' - } - }) + { + "error": { 
+ "httpcode": 404, + "httpstatus": "Not Found", + "appcode": 50010, + "apperror": "No such namespace", + "message": "50010 No such namespace: myns1", + } + }, + ) assert r.status_code == 404 def test_add_remove_user(service_port, mongo): storage = get_storage_instance(mongo) - lut = Token('foobar') + lut = Token("foobar") - storage.create_local_user(Username('lu'), lut.get_hashed_token()) - storage.set_local_user_as_admin(Username('lu'), True) - storage.create_namespace(NamespaceID('myns')) + storage.create_local_user(Username("lu"), lut.get_hashed_token()) + storage.set_local_user_as_admin(Username("lu"), True) + storage.create_namespace(NamespaceID("myns")) # add a user # tests integration with all parts of the kbase user handler with requests_mock.Mocker(real_http=True) as m: - m.get(KBASE_URL + '/api/V2/token', request_headers={'Authorization': 'mytoken'}, - json={'user': 'u1', 'expires': 4800, 'cachefor': 5600}) - - m.get(KBASE_URL + '/api/V2/me', request_headers={'Authorization': 'mytoken'}, - json={'customroles': [KBASE_ADMIN_ROLE]}) - - m.get(KBASE_URL + '/api/V2/users/?list=imauser', - request_headers={'Authorization': KBASE_TOKEN}, - json={'imauser': 'im totally a user omg'}) - - r = requests.put('http://localhost:' + service_port + - '/api/v1/namespace/myns/user/kbase/imauser', - headers={'Authorization': 'kbase mytoken'}) + m.get( + KBASE_URL + "/api/V2/token", + request_headers={"Authorization": "mytoken"}, + json={"user": "u1", "expires": 4800, "cachefor": 5600}, + ) + + m.get( + KBASE_URL + "/api/V2/me", + request_headers={"Authorization": "mytoken"}, + json={"customroles": [KBASE_ADMIN_ROLE]}, + ) + + m.get( + KBASE_URL + "/api/V2/users/?list=imauser", + request_headers={"Authorization": KBASE_TOKEN}, + json={"imauser": "im totally a user omg"}, + ) + + r = requests.put( + "http://localhost:" + + service_port + + "/api/v1/namespace/myns/user/kbase/imauser", + headers={"Authorization": "kbase mytoken"}, + ) assert r.status_code == 204 # check the 
user is there - r = requests.get('http://localhost:' + service_port + '/api/v1/namespace/myns', - headers={'Authorization': 'local ' + lut.token}) + r = requests.get( + "http://localhost:" + service_port + "/api/v1/namespace/myns", + headers={"Authorization": "local " + lut.token}, + ) - assert r.json() == {'namespace': 'myns', - 'publicly_mappable': False, - 'users': ['kbase/imauser']} + assert r.json() == { + "namespace": "myns", + "publicly_mappable": False, + "users": ["kbase/imauser"], + } # fail adding the same user. The KBase info is cached now so we don't need to mock it again - r = requests.put('http://localhost:' + service_port + - '/api/v1/namespace/myns/user/kbase/imauser', - headers={'Authorization': 'kbase mytoken'}) + r = requests.put( + "http://localhost:" + + service_port + + "/api/v1/namespace/myns/user/kbase/imauser", + headers={"Authorization": "kbase mytoken"}, + ) assert_json_error_correct( r.json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'appcode': 40000, - 'apperror': 'User already exists', - 'message': ('40000 User already exists: User kbase/imauser already ' + - 'administrates namespace myns') - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "appcode": 40000, + "apperror": "User already exists", + "message": ( + "40000 User already exists: User kbase/imauser already " + + "administrates namespace myns" + ), + } + }, + ) assert r.status_code == 400 # remove the user using a local admin - r = requests.delete('http://localhost:' + service_port + - '/api/v1/namespace/myns/user/kbase/imauser', - headers={'Authorization': 'local ' + lut.token}) + r = requests.delete( + "http://localhost:" + + service_port + + "/api/v1/namespace/myns/user/kbase/imauser", + headers={"Authorization": "local " + lut.token}, + ) assert r.status_code == 204 # check the user is gone - r = requests.get('http://localhost:' + service_port + '/api/v1/namespace/myns', - headers={'Authorization': 'local ' + lut.token}) + r 
= requests.get( + "http://localhost:" + service_port + "/api/v1/namespace/myns", + headers={"Authorization": "local " + lut.token}, + ) - assert r.json() == {'namespace': 'myns', 'publicly_mappable': False, 'users': []} + assert r.json() == {"namespace": "myns", "publicly_mappable": False, "users": []} # fail removing the user with a kbase admin - r = requests.delete('http://localhost:' + service_port + - '/api/v1/namespace/myns/user/kbase/imauser', - headers={'Authorization': 'kbase mytoken'}) + r = requests.delete( + "http://localhost:" + + service_port + + "/api/v1/namespace/myns/user/kbase/imauser", + headers={"Authorization": "kbase mytoken"}, + ) assert_json_error_correct( r.json(), - {'error': {'httpcode': 404, - 'httpstatus': 'Not Found', - 'appcode': 50000, - 'apperror': 'No such user', - 'message': ('50000 No such user: User kbase/imauser does not ' + - 'administrate namespace myns') - } - }) + { + "error": { + "httpcode": 404, + "httpstatus": "Not Found", + "appcode": 50000, + "apperror": "No such user", + "message": ( + "50000 No such user: User kbase/imauser does not " + + "administrate namespace myns" + ), + } + }, + ) assert r.status_code == 404 def test_set_public_and_list_namespaces(service_port, mongo): storage = get_storage_instance(mongo) - lut = Token('foobar') + lut = Token("foobar") - u = Username('lu') + u = Username("lu") storage.create_local_user(u, lut.get_hashed_token()) - priv = NamespaceID('priv') + priv = NamespaceID("priv") storage.create_namespace(priv) - storage.add_user_to_namespace(priv, User(AuthsourceID('local'), u)) + storage.add_user_to_namespace(priv, User(AuthsourceID("local"), u)) storage.set_namespace_publicly_mappable(priv, True) - pub = NamespaceID('pub') + pub = NamespaceID("pub") storage.create_namespace(pub) - storage.add_user_to_namespace(pub, User(AuthsourceID('local'), u)) + storage.add_user_to_namespace(pub, User(AuthsourceID("local"), u)) - r = requests.put('http://localhost:' + service_port + - 
'/api/v1/namespace/priv/set?publicly_mappable=false', - headers={'Authorization': 'local ' + lut.token}) + r = requests.put( + "http://localhost:" + + service_port + + "/api/v1/namespace/priv/set?publicly_mappable=false", + headers={"Authorization": "local " + lut.token}, + ) assert r.status_code == 204 - r = requests.put('http://localhost:' + service_port + - '/api/v1/namespace/pub/set?publicly_mappable=true', - headers={'Authorization': 'local ' + lut.token}) + r = requests.put( + "http://localhost:" + + service_port + + "/api/v1/namespace/pub/set?publicly_mappable=true", + headers={"Authorization": "local " + lut.token}, + ) assert r.status_code == 204 - r = requests.get('http://localhost:' + service_port + '/api/v1/namespace') + r = requests.get("http://localhost:" + service_port + "/api/v1/namespace") - assert r.json() == {'publicly_mappable': ['pub'], 'privately_mappable': ['priv']} + assert r.json() == {"publicly_mappable": ["pub"], "privately_mappable": ["priv"]} - r = requests.put('http://localhost:' + service_port + - '/api/v1/namespace/missing/set?publicly_mappable=false', - headers={'Authorization': 'local ' + lut.token}) + r = requests.put( + "http://localhost:" + + service_port + + "/api/v1/namespace/missing/set?publicly_mappable=false", + headers={"Authorization": "local " + lut.token}, + ) assert_json_error_correct( r.json(), - {'error': {'httpcode': 404, - 'httpstatus': 'Not Found', - 'appcode': 50010, - 'apperror': 'No such namespace', - 'message': '50010 No such namespace: missing' - } - }) + { + "error": { + "httpcode": 404, + "httpstatus": "Not Found", + "appcode": 50010, + "apperror": "No such namespace", + "message": "50010 No such namespace: missing", + } + }, + ) assert r.status_code == 404 def test_mapping(service_port, mongo): storage = get_storage_instance(mongo) - lut = Token('foobar') + lut = Token("foobar") - u = Username('lu') + u = Username("lu") storage.create_local_user(u, lut.get_hashed_token()) - priv = NamespaceID('priv') + 
priv = NamespaceID("priv") storage.create_namespace(priv) - storage.add_user_to_namespace(priv, User(AuthsourceID('local'), u)) - pub = NamespaceID('pub') + storage.add_user_to_namespace(priv, User(AuthsourceID("local"), u)) + pub = NamespaceID("pub") storage.create_namespace(pub) storage.set_namespace_publicly_mappable(pub, True) # create mappings # test that the service ignores incorrect headers - r = requests.put('http://localhost:' + service_port + '/api/v1/mapping/priv/pub', - headers={'Authorization': 'local ' + lut.token, - 'content-type': 'x-www-form-urlencoded'}, - data=json.dumps({'id1': 'id2', 'id3': 'id4', 'id5': 'id6'})) + r = requests.put( + "http://localhost:" + service_port + "/api/v1/mapping/priv/pub", + headers={ + "Authorization": "local " + lut.token, + "content-type": "x-www-form-urlencoded", + }, + data=json.dumps({"id1": "id2", "id3": "id4", "id5": "id6"}), + ) assert r.status_code == 204 # fail create mappings - r = requests.put('http://localhost:' + service_port + '/api/v1/mapping/priv/pub', - headers={'Authorization': 'focal ' + lut.token}, - data=json.dumps({'id10': 'id11'})) + r = requests.put( + "http://localhost:" + service_port + "/api/v1/mapping/priv/pub", + headers={"Authorization": "focal " + lut.token}, + data=json.dumps({"id10": "id11"}), + ) assert_json_error_correct( r.json(), - {'error': {'httpcode': 404, - 'httpstatus': 'Not Found', - 'appcode': 50020, - 'apperror': 'No such authentication source', - 'message': '50020 No such authentication source: focal' - } - }) + { + "error": { + "httpcode": 404, + "httpstatus": "Not Found", + "appcode": 50020, + "apperror": "No such authentication source", + "message": "50020 No such authentication source: focal", + } + }, + ) assert r.status_code == 404 # get mappings - r = requests.get('http://localhost:' + service_port + '/api/v1/mapping/pub?separate', - headers={'Authorization': 'local ' + lut.token}, - data=json.dumps({'ids': ['id2', 'id4', 'id8']})) - - assert r.json() == {'id2': 
{'other': [{'ns': 'priv', 'id': 'id1'}], 'admin': []}, - 'id4': {'other': [{'ns': 'priv', 'id': 'id3'}], 'admin': []}, - 'id8': {'other': [], 'admin': []} - } + r = requests.get( + "http://localhost:" + service_port + "/api/v1/mapping/pub?separate", + headers={"Authorization": "local " + lut.token}, + data=json.dumps({"ids": ["id2", "id4", "id8"]}), + ) + + assert r.json() == { + "id2": {"other": [{"ns": "priv", "id": "id1"}], "admin": []}, + "id4": {"other": [{"ns": "priv", "id": "id3"}], "admin": []}, + "id8": {"other": [], "admin": []}, + } # fail get mappings - r = requests.get('http://localhost:' + service_port + '/api/v1/mapping/plub?separate', - headers={'Authorization': 'local ' + lut.token}, - data=json.dumps({'ids': ['id2', 'id4', 'id8']})) + r = requests.get( + "http://localhost:" + service_port + "/api/v1/mapping/plub?separate", + headers={"Authorization": "local " + lut.token}, + data=json.dumps({"ids": ["id2", "id4", "id8"]}), + ) assert_json_error_correct( r.json(), - {'error': {'httpcode': 404, - 'httpstatus': 'Not Found', - 'appcode': 50010, - 'apperror': 'No such namespace', - 'message': "50010 No such namespace: ['plub']" - } - }) + { + "error": { + "httpcode": 404, + "httpstatus": "Not Found", + "appcode": 50010, + "apperror": "No such namespace", + "message": "50010 No such namespace: ['plub']", + } + }, + ) assert r.status_code == 404 # delete mappings - r = requests.delete('http://localhost:' + service_port + '/api/v1/mapping/priv/pub', - headers={'Authorization': 'local ' + lut.token, - 'content-type': 'x-www-form-urlencoded'}, - data=json.dumps({'id1': 'id7', 'id3': 'id4', 'id5': 'id6'})) + r = requests.delete( + "http://localhost:" + service_port + "/api/v1/mapping/priv/pub", + headers={ + "Authorization": "local " + lut.token, + "content-type": "x-www-form-urlencoded", + }, + data=json.dumps({"id1": "id7", "id3": "id4", "id5": "id6"}), + ) assert r.status_code == 204 # get mappings - r = requests.get('http://localhost:' + service_port + 
'/api/v1/mapping/pub', - headers={'Authorization': 'local ' + lut.token}, - data=json.dumps({'ids': ['id2', 'id4']})) + r = requests.get( + "http://localhost:" + service_port + "/api/v1/mapping/pub", + headers={"Authorization": "local " + lut.token}, + data=json.dumps({"ids": ["id2", "id4"]}), + ) - assert r.json() == {'id2': {'mappings': [{'ns': 'priv', 'id': 'id1'}]}, - 'id4': {'mappings': []} - } + assert r.json() == { + "id2": {"mappings": [{"ns": "priv", "id": "id1"}]}, + "id4": {"mappings": []}, + } # fail delete mappings - r = requests.delete('http://localhost:' + service_port + '/api/v1/mapping/pub/priv', - headers={'Authorization': 'local ' + lut.token, - 'content-type': 'x-www-form-urlencoded'}, - data=json.dumps({'id2': 'id1'})) + r = requests.delete( + "http://localhost:" + service_port + "/api/v1/mapping/pub/priv", + headers={ + "Authorization": "local " + lut.token, + "content-type": "x-www-form-urlencoded", + }, + data=json.dumps({"id2": "id1"}), + ) assert_json_error_correct( r.json(), - {'error': {'httpcode': 403, - 'httpstatus': 'Forbidden', - 'appcode': 20000, - 'apperror': 'Unauthorized', - 'message': ('20000 Unauthorized: User local/lu may not administrate ' + - 'namespace pub') - } - }) + { + "error": { + "httpcode": 403, + "httpstatus": "Forbidden", + "appcode": 20000, + "apperror": "Unauthorized", + "message": ( + "20000 Unauthorized: User local/lu may not administrate " + + "namespace pub" + ), + } + }, + ) assert r.status_code == 403 # test mapping to same namespace - r = requests.put('http://localhost:' + service_port + '/api/v1/mapping/priv/priv', - headers={'Authorization': 'local ' + lut.token}, - data=json.dumps({'id20': 'id21'})) + r = requests.put( + "http://localhost:" + service_port + "/api/v1/mapping/priv/priv", + headers={"Authorization": "local " + lut.token}, + data=json.dumps({"id20": "id21"}), + ) assert r.status_code == 204 # get mappings - r = requests.get('http://localhost:' + service_port + 
'/api/v1/mapping/priv?separate', - headers={'Authorization': 'local ' + lut.token}, - data=json.dumps({'ids': ['id1', 'id21', 'id20']})) - - assert r.json() == {'id1': {'admin': [{'ns': 'pub', 'id': 'id2'}], 'other': []}, - 'id21': {'other': [{'ns': 'priv', 'id': 'id20'}], 'admin': []}, - 'id20': {'other': [], 'admin': [{'ns': 'priv', 'id': 'id21'}]} - } + r = requests.get( + "http://localhost:" + service_port + "/api/v1/mapping/priv?separate", + headers={"Authorization": "local " + lut.token}, + data=json.dumps({"ids": ["id1", "id21", "id20"]}), + ) + + assert r.json() == { + "id1": {"admin": [{"ns": "pub", "id": "id2"}], "other": []}, + "id21": {"other": [{"ns": "priv", "id": "id20"}], "admin": []}, + "id20": {"other": [], "admin": [{"ns": "priv", "id": "id21"}]}, + } diff --git a/src/jgikbase/test/idmapping/mongo_controller.py b/src/jgikbase/test/idmapping/mongo_controller.py index b2ba755..9b0e326 100644 --- a/src/jgikbase/test/idmapping/mongo_controller.py +++ b/src/jgikbase/test/idmapping/mongo_controller.py @@ -3,6 +3,7 @@ Production use is not recommended. """ + from pathlib import Path from jgikbase.test.idmapping.test_utils import TestException import os @@ -14,6 +15,9 @@ from pymongo.mongo_client import MongoClient import semver +# semver parser +sver = semver.VersionInfo.parse + class MongoController: """ @@ -30,8 +34,10 @@ class MongoController: indexes, false otherwise. """ - def __init__(self, mongoexe: Path, root_temp_dir: Path, use_wired_tiger: bool=False) -> None: - ''' + def __init__( + self, mongoexe: Path, root_temp_dir: Path, use_wired_tiger: bool = False + ) -> None: + """ Create and start a new MongoDB database. An unused port will be selected for the server. :param mongoexe: The path to the MongoDB server executable (e.g. mongod) to run. @@ -39,42 +45,78 @@ def __init__(self, mongoexe: Path, root_temp_dir: Path, use_wired_tiger: bool=Fa The files will be stored inside a child directory that is unique per invocation. 
:param use_wired_tiger: For MongoDB versions > 3.0, specify that the Wired Tiger storage engine should be used. Setting this to true for other versions will cause an error. - ''' + """ if not mongoexe or not os.access(mongoexe, os.X_OK): - raise TestException('mongod executable path {} does not exist or is not executable.' - .format(mongoexe)) + raise TestException( + "mongod executable path {} does not exist or is not executable.".format( + mongoexe + ) + ) if not root_temp_dir: - raise ValueError('root_temp_dir is None') + raise ValueError("root_temp_dir is None") # make temp dirs root_temp_dir = root_temp_dir.absolute() os.makedirs(root_temp_dir, exist_ok=True) - self.temp_dir = Path(tempfile.mkdtemp(prefix='MongoController-', dir=str(root_temp_dir))) - data_dir = self.temp_dir.joinpath('data') + self.temp_dir = Path( + tempfile.mkdtemp(prefix="MongoController-", dir=str(root_temp_dir)) + ) + data_dir = self.temp_dir.joinpath("data") os.makedirs(data_dir) self.port = test_utils.find_free_port() + mongodb_ver = self.get_mongodb_version(mongoexe) + + command = [ + str(mongoexe), + "--port", + str(self.port), + "--dbpath", + str(data_dir), + ] + + if sver(mongodb_ver) < sver('6.1.0'): + command.extend(['--nojournal']) - command = [str(mongoexe), '--port', str(self.port), '--dbpath', str(data_dir), - '--nojournal'] if use_wired_tiger: - command.extend(['--storageEngine', 'wiredTiger']) + command.extend(["--storageEngine", "wiredTiger"]) - self._outfile = open(self.temp_dir.joinpath('mongo.log'), 'w') + self._outfile = open(self.temp_dir.joinpath("mongo.log"), "w") - self._proc = subprocess.Popen(command, stdout=self._outfile, stderr=subprocess.STDOUT) + self._proc = subprocess.Popen( + command, stdout=self._outfile, stderr=subprocess.STDOUT + ) time.sleep(1) # wait for server to start up - self.client = MongoClient('localhost', self.port) - # check that the server is up. 
See - # https://api.mongodb.com/python/3.7.0/api/pymongo/mongo_client.html - # #pymongo.mongo_client.MongoClient - self.client.admin.command('ismaster') + + try: + self.client: MongoClient = MongoClient('localhost', self.port) + # This line will raise an exception if the server is down + server_info = self.client.server_info() + except Exception as e: + raise ValueError("MongoDB server is down") from e # get some info about the db - self.db_version = self.client.server_info()['version'] - self.index_version = 2 if (semver.compare(self.db_version, '3.4.0') >= 0) else 1 - self.includes_system_indexes = (semver.compare(self.db_version, '3.2.0') < 0 - and not use_wired_tiger) + self.db_version = server_info['version'] + self.index_version = 2 if (semver.compare(self.db_version, "3.4.0") >= 0) else 1 + self.includes_system_indexes = ( + semver.compare(self.db_version, "3.2.0") < 0 and not use_wired_tiger + ) + + def get_mongodb_version(self, mongoexe: Path) -> str: + try: + process = subprocess.Popen( + [str(mongoexe), '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + stdout, stderr = process.communicate() + + if process.returncode == 0: + version_line = stdout.decode().split('\n')[0] + mongodb_version = version_line.split()[2][1:] + return mongodb_version.strip() + else: + raise ValueError(f"Error: {stderr.decode()}") + except Exception as e: + raise ValueError("Failed to get MongoDB version") from e def destroy(self, delete_temp_files: bool) -> None: """ @@ -93,19 +135,19 @@ def destroy(self, delete_temp_files: bool) -> None: shutil.rmtree(self.temp_dir) def clear_database(self, db_name, drop_indexes=False): - ''' + """ Remove all data from a database. :param db_name: the name of the db to clear. :param drop_indexes: drop all indexes if true, retain indexes (which will be empty) if false. 
- ''' + """ if drop_indexes: self.client.drop_database(db_name) else: db = self.client[db_name] for name in db.list_collection_names(): - if not name.startswith('system.'): + if not name.startswith("system."): # don't drop collection since that drops indexes db.get_collection(name).delete_many({}) @@ -115,16 +157,16 @@ def main(): root_temp_dir = test_utils.get_temp_dir() mc = MongoController(mongoexe, root_temp_dir, False) - print('port: ' + str(mc.port)) - print('temp_dir: ' + str(mc.temp_dir)) - print('db_version: ' + mc.db_version) - print('index_version: ' + str(mc.index_version)) - print('includes_system_indexes: ' + str(mc.includes_system_indexes)) - mc.client['foo']['bar'].insert_one({'foo': 'bar'}) - mc.clear_database('foo') - input('press enter to shut down') + print("port: " + str(mc.port)) + print("temp_dir: " + str(mc.temp_dir)) + print("db_version: " + mc.db_version) + print("index_version: " + str(mc.index_version)) + print("includes_system_indexes: " + str(mc.includes_system_indexes)) + mc.client["foo"]["bar"].insert_one({"foo": "bar"}) + mc.clear_database("foo") + input("press enter to shut down") mc.destroy(True) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/src/jgikbase/test/idmapping/service/mapper_service_test.py b/src/jgikbase/test/idmapping/service/mapper_service_test.py index 37aeb3e..b9572e0 100644 --- a/src/jgikbase/test/idmapping/service/mapper_service_test.py +++ b/src/jgikbase/test/idmapping/service/mapper_service_test.py @@ -5,23 +5,30 @@ from jgikbase.idmapping.core.object_id import Namespace, NamespaceID, ObjectID from jgikbase.idmapping.core.user import AuthsourceID, User, Username from jgikbase.idmapping.core.tokens import Token -from jgikbase.idmapping.core.errors import InvalidTokenError, NoSuchNamespaceError,\ - UnauthorizedError, NoSuchUserError +from jgikbase.idmapping.core.errors import ( + InvalidTokenError, + NoSuchNamespaceError, + UnauthorizedError, + NoSuchUserError, +) import re from 
jgikbase.idmapping.service import mapper_service from logging import LogRecord import json from flask.app import Flask from flask import g -from typing import IO -from jgikbase.test.idmapping.test_utils import assert_ms_epoch_close_to_now, CALLID_PATTERN,\ - assert_json_error_correct +from typing import IO, Optional +from jgikbase.test.idmapping.test_utils import ( + assert_ms_epoch_close_to_now, + CALLID_PATTERN, + assert_json_error_correct, +) -VERSION = '0.1.1' -WERKZEUG = 'werkzeug/0.16.0' +VERSION = "0.1.2" +WERKZEUG = "werkzeug/2.0.3" -def build_app(ignore_ip_headers=False, logstream: IO[str]=None): +def build_app(ignore_ip_headers=False, logstream: Optional[IO[str]] = None): builder = create_autospec(IDMappingBuilder, spec_set=True, instance=True) mapper = create_autospec(IDMapper, spec_set=True, instance=True) cfg = Mock() @@ -43,134 +50,227 @@ def mock_request_for_ip_headers(remote_addr, xff, real_ip): def test_get_ip_address_no_headers_with_ignore(): - req = mock_request_for_ip_headers('4.5.6.7', None, None) + req = mock_request_for_ip_headers("4.5.6.7", None, None) - assert mapper_service.get_ip_address(req, True) == '4.5.6.7' + assert mapper_service.get_ip_address(req, True) == "4.5.6.7" assert req.headers.get.call_args_list == [] def test_get_ip_address_no_headers_no_ignore(): - req = mock_request_for_ip_headers(' \t 4.5.6.7 ', None, None) + req = mock_request_for_ip_headers(" \t 4.5.6.7 ", None, None) - assert mapper_service.get_ip_address(req, False) == '4.5.6.7' - assert req.headers.get.call_args_list == [(('X-Forwarded-For',), {}), (('X-Real-IP',), {})] + assert mapper_service.get_ip_address(req, False) == "4.5.6.7" + assert req.headers.get.call_args_list == [ + (("X-Forwarded-For",), {}), + (("X-Real-IP",), {}), + ] def test_get_ip_address_whitespace_headers_no_ignore(): - req = mock_request_for_ip_headers('4.5.6.7', ' \t ', ' \t ') + req = mock_request_for_ip_headers("4.5.6.7", " \t ", " \t ") - assert mapper_service.get_ip_address(req, False) == 
'4.5.6.7' - assert req.headers.get.call_args_list == [(('X-Forwarded-For',), {}), (('X-Real-IP',), {})] + assert mapper_service.get_ip_address(req, False) == "4.5.6.7" + assert req.headers.get.call_args_list == [ + (("X-Forwarded-For",), {}), + (("X-Real-IP",), {}), + ] def test_get_ip_address_with_headers_with_ignore(): - req = mock_request_for_ip_headers('4.5.6.7', '1.2.3.4, 5.6.7.8', '9.10.11.12') + req = mock_request_for_ip_headers("4.5.6.7", "1.2.3.4, 5.6.7.8", "9.10.11.12") - assert mapper_service.get_ip_address(req, True) == '4.5.6.7' + assert mapper_service.get_ip_address(req, True) == "4.5.6.7" assert req.headers.get.call_args_list == [] def test_get_ip_address_with_xff_and_real(): - req = mock_request_for_ip_headers('4.5.6.7', ' 1.2.3.4 , 5.6.7.8 ', '9.10.11.12') + req = mock_request_for_ip_headers( + "4.5.6.7", " 1.2.3.4 , 5.6.7.8 ", "9.10.11.12" + ) - assert mapper_service.get_ip_address(req, False) == '1.2.3.4' - assert req.headers.get.call_args_list == [(('X-Forwarded-For',), {}), (('X-Real-IP',), {})] + assert mapper_service.get_ip_address(req, False) == "1.2.3.4" + assert req.headers.get.call_args_list == [ + (("X-Forwarded-For",), {}), + (("X-Real-IP",), {}), + ] def test_get_ip_address_with_xff(): - req = mock_request_for_ip_headers('4.5.6.7', '1.2.3.4, 5.6.7.8', None) + req = mock_request_for_ip_headers("4.5.6.7", "1.2.3.4, 5.6.7.8", None) - assert mapper_service.get_ip_address(req, False) == '1.2.3.4' - assert req.headers.get.call_args_list == [(('X-Forwarded-For',), {}), (('X-Real-IP',), {})] + assert mapper_service.get_ip_address(req, False) == "1.2.3.4" + assert req.headers.get.call_args_list == [ + (("X-Forwarded-For",), {}), + (("X-Real-IP",), {}), + ] def test_get_ip_address_with_real(): - req = mock_request_for_ip_headers('4.5.6.7', None, ' 9.10.11.12 ') + req = mock_request_for_ip_headers("4.5.6.7", None, " 9.10.11.12 ") - assert mapper_service.get_ip_address(req, False) == '9.10.11.12' - assert req.headers.get.call_args_list == 
[(('X-Forwarded-For',), {}), (('X-Real-IP',), {})] + assert mapper_service.get_ip_address(req, False) == "9.10.11.12" + assert req.headers.get.call_args_list == [ + (("X-Forwarded-For",), {}), + (("X-Real-IP",), {}), + ] def test_format_ip_headers_no_headers_with_ignore(): - req = mock_request_for_ip_headers(' \t 4.5.6.7 ', None, None) + req = mock_request_for_ip_headers(" \t 4.5.6.7 ", None, None) assert mapper_service.format_ip_headers(req, True) is None assert req.headers.get.call_args_list == [] def test_format_ip_headers_no_headers_no_ignore(): - req = mock_request_for_ip_headers(' \t 4.5.6.7 ', None, None) + req = mock_request_for_ip_headers(" \t 4.5.6.7 ", None, None) assert mapper_service.format_ip_headers(req, False) is None - assert req.headers.get.call_args_list == [(('X-Forwarded-For',), {}), (('X-Real-IP',), {})] + assert req.headers.get.call_args_list == [ + (("X-Forwarded-For",), {}), + (("X-Real-IP",), {}), + ] def test_format_ip_headers_whitespace_headers_no_ignore(): - req = mock_request_for_ip_headers(' \t 4.5.6.7 ', ' \t ', ' \t ') + req = mock_request_for_ip_headers(" \t 4.5.6.7 ", " \t ", " \t ") assert mapper_service.format_ip_headers(req, False) is None - assert req.headers.get.call_args_list == [(('X-Forwarded-For',), {}), (('X-Real-IP',), {})] + assert req.headers.get.call_args_list == [ + (("X-Forwarded-For",), {}), + (("X-Real-IP",), {}), + ] def test_format_ip_headers_with_headers_with_ignore(): - req = mock_request_for_ip_headers('4.5.6.7', '1.2.3.4, 5.6.7.8', '9.10.11.12') + req = mock_request_for_ip_headers("4.5.6.7", "1.2.3.4, 5.6.7.8", "9.10.11.12") assert mapper_service.format_ip_headers(req, True) is None assert req.headers.get.call_args_list == [] def test_format_ip_headers_with_xff_and_real(): - req = mock_request_for_ip_headers(' 4.5.6.7 \t ', ' 1.2.3.4 , 5.6.7.8 ', - ' 9.10.11.12 \t ') + req = mock_request_for_ip_headers( + " 4.5.6.7 \t ", " 1.2.3.4 , 5.6.7.8 ", " 9.10.11.12 \t " + ) assert 
mapper_service.format_ip_headers(req, False) == ( - 'X-Forwarded-For: 1.2.3.4 , 5.6.7.8, X-Real-IP: 9.10.11.12, Remote IP: 4.5.6.7') - assert req.headers.get.call_args_list == [(('X-Forwarded-For',), {}), (('X-Real-IP',), {})] + "X-Forwarded-For: 1.2.3.4 , 5.6.7.8, X-Real-IP: 9.10.11.12, Remote IP: 4.5.6.7" + ) + assert req.headers.get.call_args_list == [ + (("X-Forwarded-For",), {}), + (("X-Real-IP",), {}), + ] def test_format_ip_headers_with_xff(): - req = mock_request_for_ip_headers(' 4.5.6.7 \t ', ' 1.2.3.4 , 5.6.7.8 ', None) + req = mock_request_for_ip_headers( + " 4.5.6.7 \t ", " 1.2.3.4 , 5.6.7.8 ", None + ) assert mapper_service.format_ip_headers(req, False) == ( - 'X-Forwarded-For: 1.2.3.4 , 5.6.7.8, Remote IP: 4.5.6.7') - assert req.headers.get.call_args_list == [(('X-Forwarded-For',), {}), (('X-Real-IP',), {})] + "X-Forwarded-For: 1.2.3.4 , 5.6.7.8, Remote IP: 4.5.6.7" + ) + assert req.headers.get.call_args_list == [ + (("X-Forwarded-For",), {}), + (("X-Real-IP",), {}), + ] def test_format_ip_headers_with_real(): - req = mock_request_for_ip_headers(' 4.5.6.7 \t ', None, ' 9.10.11.12 \t ') + req = mock_request_for_ip_headers(" 4.5.6.7 \t ", None, " 9.10.11.12 \t ") assert mapper_service.format_ip_headers(req, False) == ( - 'X-Real-IP: 9.10.11.12, Remote IP: 4.5.6.7') - assert req.headers.get.call_args_list == [(('X-Forwarded-For',), {}), (('X-Real-IP',), {})] + "X-Real-IP: 9.10.11.12, Remote IP: 4.5.6.7" + ) + assert req.headers.get.call_args_list == [ + (("X-Forwarded-For",), {}), + (("X-Real-IP",), {}), + ] + + +def test_log_formatter_with_no_test_request_context(): + f = JSONFlaskLogFormatter("service name") + format1 = f.format( + LogRecord( + "my name", 40, "path", 2, "%s foo %s", ("bar", "baz"), None, None, None + ) + ) + format2 = f.format( + LogRecord( + "my name", + 40, + "path", + 2, + "%s foo %s", + ("bar", "baz"), + (None, None, None), + None, + None, + ) + ) + contents1 = json.loads(format1) + contents2 = json.loads(format2) + time1 = 
contents1["time"] + del contents1["time"] + time2 = contents2["time"] + del contents2["time"] + + expected = { + "service": "service name", + "level": "ERROR", + "source": "my name", + "msg": "bar foo baz", + } + + assert contents1 == expected + assert contents2 == expected + + assert_ms_epoch_close_to_now(time1) + assert_ms_epoch_close_to_now(time2) def test_log_formatter_no_exception(): - app = Flask('foo') - f = JSONFlaskLogFormatter('service name') - with app.test_request_context('/'): - g.ip = '4.5.6.7' - g.method = 'POST' - g.req_id = '1564161' - format1 = f.format(LogRecord('my name', 40, 'path', 2, '%s foo %s', ('bar', 'baz'), - None, None, None)) - format2 = f.format(LogRecord('my name', 40, 'path', 2, '%s foo %s', ('bar', 'baz'), - (None, None, None), None, None)) + app = Flask("foo") + f = JSONFlaskLogFormatter("service name") + with app.test_request_context("/"): + g.ip = "4.5.6.7" + g.method = "POST" + g.req_id = "1564161" + format1 = f.format( + LogRecord( + "my name", 40, "path", 2, "%s foo %s", ("bar", "baz"), None, None, None + ) + ) + format2 = f.format( + LogRecord( + "my name", + 40, + "path", + 2, + "%s foo %s", + ("bar", "baz"), + (None, None, None), + None, + None, + ) + ) contents1 = json.loads(format1) contents2 = json.loads(format2) - time1 = contents1['time'] - del contents1['time'] - time2 = contents2['time'] - del contents2['time'] + time1 = contents1["time"] + del contents1["time"] + time2 = contents2["time"] + del contents2["time"] expected = { - 'service': 'service name', - 'level': 'ERROR', - 'source': 'my name', - 'ip': '4.5.6.7', - 'method': 'POST', - 'callid': '1564161', - 'msg': 'bar foo baz' - } + "service": "service name", + "level": "ERROR", + "source": "my name", + "ip": "4.5.6.7", + "method": "POST", + "callid": "1564161", + "msg": "bar foo baz", + } assert contents1 == expected assert contents2 == expected @@ -180,45 +280,56 @@ def test_log_formatter_no_exception(): def test_log_formatter_with_exception(): - app = 
Flask('foo') - f = JSONFlaskLogFormatter('service name') + app = Flask("foo") + f = JSONFlaskLogFormatter("service name") def exception_method(): - raise NoSuchUserError('userfoo') + raise NoSuchUserError("userfoo") try: exception_method() except Exception as err: e = err exc_info = (type(e), e, e.__traceback__) - with app.test_request_context('/'): - g.ip = '4.5.6.7' - g.method = 'POST' - g.req_id = '1564161' - format_ = f.format(LogRecord('my name', 40, 'path', 2, '%s foo %s', ('bar', 'baz'), - exc_info, None, None)) + with app.test_request_context("/"): + g.ip = "4.5.6.7" + g.method = "POST" + g.req_id = "1564161" + format_ = f.format( + LogRecord( + "my name", + 40, + "path", + 2, + "%s foo %s", + ("bar", "baz"), + exc_info, + None, + None, + ) + ) contents = json.loads(format_) - time_ = contents['time'] - del contents['time'] - excep = contents['excep'] - del contents['excep'] + time_ = contents["time"] + del contents["time"] + excep = contents["excep"] + del contents["excep"] print(contents) assert contents == { - 'service': 'service name', - 'level': 'ERROR', - 'source': 'my name', - 'ip': '4.5.6.7', - 'method': 'POST', - 'callid': '1564161', - 'msg': 'bar foo baz' - } + "service": "service name", + "level": "ERROR", + "source": "my name", + "ip": "4.5.6.7", + "method": "POST", + "callid": "1564161", + "msg": "bar foo baz", + } assert_ms_epoch_close_to_now(time_) - assert 'exception_method()' in excep - assert 'NoSuchUserError: 50000 No such user: userfoo' in excep - assert 'Traceback (most' in excep + assert "exception_method()" in excep + assert "NoSuchUserError: 50000 No such user: userfoo" in excep + assert "Traceback (most" in excep def test_root_and_logging(): @@ -226,34 +337,35 @@ def test_root_and_logging(): logstream = Mock() cli, _ = build_app(logstream=logstream) - resp = cli.get('/') + resp = cli.get("/") j = resp.get_json() - time_ = j['servertime'] - commit = j['gitcommithash'] - del j['servertime'] - del j['gitcommithash'] + time_ = 
j["servertime"] + commit = j["gitcommithash"] + del j["servertime"] + del j["gitcommithash"] - assert j == {'service': 'ID Mapping Service', 'version': VERSION} - assert re.match('[a-f\d]{40}', commit) is not None + assert j == {"service": "ID Mapping Service", "version": VERSION} + assert re.match(r"[a-f\d]{40}", commit) is not None assert_ms_epoch_close_to_now(time_) assert resp.status_code == 200 - assert len(logstream.write.call_args_list) == 2 - assert logstream.write.call_args_list[1][0][0] == '\n' + assert len(logstream.write.call_args_list) == 1 logjson = json.loads(logstream.write.call_args_list[0][0][0]) - time_ = logjson['time'] - del logjson['time'] - callid = logjson['callid'] - del logjson['callid'] - - assert logjson == {'service': 'IDMappingService', - 'level': 'INFO', - 'source': 'jgikbase.idmapping.service.mapper_service', - 'ip': '127.0.0.1', - 'method': 'GET', - 'msg': f'GET / 200 {WERKZEUG}'} + time_ = logjson["time"] + del logjson["time"] + callid = logjson["callid"] + del logjson["callid"] + + assert logjson == { + "service": "IDMappingService", + "level": "INFO", + "source": "jgikbase.idmapping.service.mapper_service", + "ip": "127.0.0.1", + "method": "GET", + "msg": f"GET / 200 {WERKZEUG}", + } assert_ms_epoch_close_to_now(time_) assert CALLID_PATTERN.match(callid) is not None @@ -262,106 +374,142 @@ def test_root_and_logging_with_xff_and_real_headers(): logstream = Mock() cli, _ = build_app(logstream=logstream) - cli.get('/', headers={'x-forwarded-for': ' 1.2.3.4, 5.6.7.8 ', - 'x-real-ip': ' 7.8.9.10 '}) # already tested response, don't care + cli.get( + "/", + headers={ + "x-forwarded-for": " 1.2.3.4, 5.6.7.8 ", + "x-real-ip": " 7.8.9.10 ", + }, + ) # already tested response, don't care - assert len(logstream.write.call_args_list) == 4 - assert logstream.write.call_args_list[1][0][0] == '\n' - assert logstream.write.call_args_list[3][0][0] == '\n' + assert len(logstream.write.call_args_list) == 2 ipjson = 
json.loads(logstream.write.call_args_list[0][0][0]) - respjson = json.loads(logstream.write.call_args_list[2][0][0]) + respjson = json.loads(logstream.write.call_args_list[1][0][0]) # don't check these again, checked above. - del ipjson['time'] - del respjson['time'] - del ipjson['callid'] - del respjson['callid'] + del ipjson["time"] + del respjson["time"] + del ipjson["callid"] + del respjson["callid"] assert ipjson == { - 'service': 'IDMappingService', - 'level': 'INFO', - 'source': 'jgikbase.idmapping.service.mapper_service', - 'ip': '1.2.3.4', - 'method': 'GET', - 'msg': 'X-Forwarded-For: 1.2.3.4, 5.6.7.8, X-Real-IP: 7.8.9.10, Remote IP: 127.0.0.1'} - - assert respjson == {'service': 'IDMappingService', - 'level': 'INFO', - 'source': 'jgikbase.idmapping.service.mapper_service', - 'ip': '1.2.3.4', - 'method': 'GET', - 'msg': f'GET / 200 {WERKZEUG}'} + "service": "IDMappingService", + "level": "INFO", + "source": "jgikbase.idmapping.service.mapper_service", + "ip": "1.2.3.4", + "method": "GET", + "msg": "X-Forwarded-For: 1.2.3.4, 5.6.7.8, X-Real-IP: 7.8.9.10, Remote IP: 127.0.0.1", + } + + assert respjson == { + "service": "IDMappingService", + "level": "INFO", + "source": "jgikbase.idmapping.service.mapper_service", + "ip": "1.2.3.4", + "method": "GET", + "msg": f"GET / 200 {WERKZEUG}", + } def test_root_and_logging_with_xff_and_real_headers_ignored(): logstream = Mock() cli, _ = build_app(logstream=logstream, ignore_ip_headers=True) - cli.get('/', headers={'x-forwarded-for': ' 1.2.3.4, 5.6.7.8 ', - 'x-real-ip': ' 7.8.9.10 '}) # already tested response, don't care + cli.get( + "/", + headers={ + "x-forwarded-for": " 1.2.3.4, 5.6.7.8 ", + "x-real-ip": " 7.8.9.10 ", + }, + ) # already tested response, don't care - assert len(logstream.write.call_args_list) == 2 - assert logstream.write.call_args_list[1][0][0] == '\n' + assert len(logstream.write.call_args_list) == 1 respjson = json.loads(logstream.write.call_args_list[0][0][0]) # don't check these again, checked 
above. - del respjson['time'] - del respjson['callid'] + del respjson["time"] + del respjson["callid"] - assert respjson == {'service': 'IDMappingService', - 'level': 'INFO', - 'source': 'jgikbase.idmapping.service.mapper_service', - 'ip': '127.0.0.1', - 'method': 'GET', - 'msg': f'GET / 200 {WERKZEUG}'} + assert respjson == { + "service": "IDMappingService", + "level": "INFO", + "source": "jgikbase.idmapping.service.mapper_service", + "ip": "127.0.0.1", + "method": "GET", + "msg": f"GET / 200 {WERKZEUG}", + } def test_get_namespace_no_auth(): cli, mapper = build_app() - mapper.get_namespace.return_value = Namespace(NamespaceID('foo'), False) + mapper.get_namespace.return_value = Namespace(NamespaceID("foo"), False) - resp = cli.get('/api/v1/namespace/foo') + resp = cli.get("/api/v1/namespace/foo") - assert resp.get_json() == {'namespace': 'foo', 'publicly_mappable': False, 'users': []} + assert resp.get_json() == { + "namespace": "foo", + "publicly_mappable": False, + "users": [], + } assert resp.status_code == 200 - assert mapper.get_namespace.call_args_list == [((NamespaceID('foo'), None, None), {})] + assert mapper.get_namespace.call_args_list == [ + ((NamespaceID("foo"), None, None), {}) + ] def test_get_namespace_with_auth(): cli, mapper = build_app() - mapper.get_namespace.return_value = Namespace(NamespaceID('foo'), True, set([ - User(AuthsourceID('bar'), Username('baz')), User(AuthsourceID('bag'), Username('bat'))])) - - resp = cli.get('/api/v1/namespace/foo', headers={'Authorization': ' \tas toketoketoke \t'}) - - assert resp.get_json() == {'namespace': 'foo', 'publicly_mappable': True, - 'users': ['bag/bat', 'bar/baz']} + mapper.get_namespace.return_value = Namespace( + NamespaceID("foo"), + True, + set( + [ + User(AuthsourceID("bar"), Username("baz")), + User(AuthsourceID("bag"), Username("bat")), + ] + ), + ) + + resp = cli.get( + "/api/v1/namespace/foo", headers={"Authorization": " \tas toketoketoke \t"} + ) + + assert resp.get_json() == { + 
"namespace": "foo", + "publicly_mappable": True, + "users": ["bag/bat", "bar/baz"], + } assert resp.status_code == 200 - assert mapper.get_namespace.call_args_list == [((NamespaceID('foo'), AuthsourceID('as'), - Token('toketoketoke')), {})] + assert mapper.get_namespace.call_args_list == [ + ((NamespaceID("foo"), AuthsourceID("as"), Token("toketoketoke")), {}) + ] def test_get_namespace_fail_munged_auth(): # general tests of the application error handler for general application errors logstream = Mock() cli, _ = build_app(logstream=logstream) - resp = cli.get('/api/v1/namespace/foo', headers={'Authorization': 'astoketoketoke'}) + resp = cli.get("/api/v1/namespace/foo", headers={"Authorization": "astoketoketoke"}) - err = '30001 Illegal input parameter: Expected authsource and token in header.' + err = "30001 Illegal input parameter: Expected authsource and token in header." assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'appcode': 30001, - 'apperror': 'Illegal input parameter', - 'message': err - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "appcode": 30001, + "apperror": "Illegal input parameter", + "message": err, + } + }, + ) assert resp.status_code == 400 - check_error_logging(logstream, 'GET', '/api/v1/namespace/foo', 400, - 'IllegalParameterError: ' + err) + check_error_logging( + logstream, "GET", "/api/v1/namespace/foo", 400, "IllegalParameterError: " + err + ) def test_get_namespace_fail_invalid_token(): @@ -370,60 +518,71 @@ def test_get_namespace_fail_invalid_token(): cli, mapper = build_app(logstream=logstream) mapper.get_namespace.side_effect = InvalidTokenError() - resp = cli.get('/api/v1/namespace/foo', headers={'Authorization': 'as toketoketoke'}) + resp = cli.get( + "/api/v1/namespace/foo", headers={"Authorization": "as toketoketoke"} + ) assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 401, - 'httpstatus': 'Unauthorized', - 'appcode': 
10020, - 'apperror': 'Invalid token', - 'message': '10020 Invalid token' - } - }) + { + "error": { + "httpcode": 401, + "httpstatus": "Unauthorized", + "appcode": 10020, + "apperror": "Invalid token", + "message": "10020 Invalid token", + } + }, + ) assert resp.status_code == 401 - check_error_logging(logstream, 'GET', '/api/v1/namespace/foo', 401, - 'InvalidTokenError: 10020 Invalid token') + check_error_logging( + logstream, + "GET", + "/api/v1/namespace/foo", + 401, + "InvalidTokenError: 10020 Invalid token", + ) def check_error_logging(logstream_mock, method, url, code, stackstring): - assert len(logstream_mock.write.call_args_list) == 4 - assert logstream_mock.write.call_args_list[1][0][0] == '\n' - assert logstream_mock.write.call_args_list[3][0][0] == '\n' + assert len(logstream_mock.write.call_args_list) == 2 errjson = json.loads(logstream_mock.write.call_args_list[0][0][0]) - respjson = json.loads(logstream_mock.write.call_args_list[2][0][0]) + respjson = json.loads(logstream_mock.write.call_args_list[1][0][0]) - errtime = errjson['time'] - del errjson['time'] - errcallid = errjson['callid'] - del errjson['callid'] + errtime = errjson["time"] + del errjson["time"] + errcallid = errjson["callid"] + del errjson["callid"] - resptime = respjson['time'] - del respjson['time'] - respcallid = respjson['callid'] - del respjson['callid'] + resptime = respjson["time"] + del respjson["time"] + respcallid = respjson["callid"] + del respjson["callid"] - stack = errjson['msg'] - del errjson['msg'] + stack = errjson["msg"] + del errjson["msg"] assert errjson == { - 'service': 'IDMappingService', - 'level': 'ERROR', - 'source': 'jgikbase.idmapping.service.mapper_service', - 'ip': '127.0.0.1', - 'method': method} - - assert stack.startswith('Logging exception:\n') - assert 'Traceback (most' in stack + "service": "IDMappingService", + "level": "ERROR", + "source": "jgikbase.idmapping.service.mapper_service", + "ip": "127.0.0.1", + "method": method, + } + + assert 
stack.startswith("Logging exception:\n") + assert "Traceback (most" in stack assert stackstring in stack - assert respjson == {'service': 'IDMappingService', - 'level': 'INFO', - 'source': 'jgikbase.idmapping.service.mapper_service', - 'ip': '127.0.0.1', - 'method': method, - 'msg': '{} {} {} {}'.format(method, url, code, WERKZEUG)} + assert respjson == { + "service": "IDMappingService", + "level": "INFO", + "source": "jgikbase.idmapping.service.mapper_service", + "ip": "127.0.0.1", + "method": method, + "msg": "{} {} {} {}".format(method, url, code, WERKZEUG), + } assert_ms_epoch_close_to_now(errtime) assert CALLID_PATTERN.match(errcallid) is not None assert_ms_epoch_close_to_now(resptime) @@ -434,114 +593,142 @@ def test_get_namespace_fail_no_namespace(): # really a general test of the no data error handler logstream = Mock() cli, mapper = build_app(logstream=logstream) - mapper.get_namespace.side_effect = NoSuchNamespaceError('foo') + mapper.get_namespace.side_effect = NoSuchNamespaceError("foo") - resp = cli.get('/api/v1/namespace/foo') + resp = cli.get("/api/v1/namespace/foo") assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 404, - 'httpstatus': 'Not Found', - 'appcode': 50010, - 'apperror': 'No such namespace', - 'message': '50010 No such namespace: foo' - } - }) + { + "error": { + "httpcode": 404, + "httpstatus": "Not Found", + "appcode": 50010, + "apperror": "No such namespace", + "message": "50010 No such namespace: foo", + } + }, + ) assert resp.status_code == 404 - check_error_logging(logstream, 'GET', '/api/v1/namespace/foo', 404, - 'NoSuchNamespaceError: 50010 No such namespace: foo') + check_error_logging( + logstream, + "GET", + "/api/v1/namespace/foo", + 404, + "NoSuchNamespaceError: 50010 No such namespace: foo", + ) def test_get_namespace_fail_valueerror(): # really a general test of the catch all error handler logstream = Mock() cli, mapper = build_app(logstream=logstream) - mapper.get_namespace.side_effect = 
ValueError('things are all messed up down here') + mapper.get_namespace.side_effect = ValueError("things are all messed up down here") - resp = cli.get('/api/v1/namespace/foo') + resp = cli.get("/api/v1/namespace/foo") assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 500, - 'httpstatus': 'Internal Server Error', - 'message': 'things are all messed up down here' - } - }) + { + "error": { + "httpcode": 500, + "httpstatus": "Internal Server Error", + "message": "things are all messed up down here", + } + }, + ) assert resp.status_code == 500 - check_error_logging(logstream, 'GET', '/api/v1/namespace/foo', 500, - 'ValueError: things are all messed up down here') + check_error_logging( + logstream, + "GET", + "/api/v1/namespace/foo", + 500, + "ValueError: things are all messed up down here", + ) def test_method_not_allowed(): logstream = Mock() cli, _ = build_app(logstream=logstream) - resp = cli.delete('/api/v1/namespace/foo') + resp = cli.delete("/api/v1/namespace/foo") assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 405, - 'httpstatus': 'Method Not Allowed', - 'message': ('405 Method Not Allowed: The method is not allowed ' + - 'for the requested URL.') - } - }) + { + "error": { + "httpcode": 405, + "httpstatus": "Method Not Allowed", + "message": ( + "405 Method Not Allowed: The method is not allowed " + + "for the requested URL." 
+ ), + } + }, + ) assert resp.status_code == 405 - check_error_logging(logstream, 'DELETE', '/api/v1/namespace/foo', 405, - 'MethodNotAllowed: 405 Method Not Allowed: The method is not allowed ' + - 'for the requested URL.') + check_error_logging( + logstream, + "DELETE", + "/api/v1/namespace/foo", + 405, + "MethodNotAllowed: 405 Method Not Allowed: The method is not allowed " + + "for the requested URL.", + ) def test_not_found(): logstream = Mock() cli, _ = build_app(logstream=logstream) - resp = cli.get('/api/v1/nothinghere') + resp = cli.get("/api/v1/nothinghere") - err = ('404 Not Found: The requested URL was not found on the server. ' + - 'If you entered the URL manually please check your spelling and try again.') + err = ( + "404 Not Found: The requested URL was not found on the server. " + + "If you entered the URL manually please check your spelling and try again." + ) assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 404, - 'httpstatus': 'Not Found', - 'message': err - } - }) + {"error": {"httpcode": 404, "httpstatus": "Not Found", "message": err}}, + ) assert resp.status_code == 404 - check_error_logging(logstream, 'GET', '/api/v1/nothinghere', 404, - 'NotFound: ' + err) + check_error_logging( + logstream, "GET", "/api/v1/nothinghere", 404, "NotFound: " + err + ) def test_create_namespace_put(): cli, mapper = build_app() - resp = cli.put('/api/v1/namespace/foo', headers={'Authorization': 'source tokey'}) + resp = cli.put("/api/v1/namespace/foo", headers={"Authorization": "source tokey"}) - assert resp.data == b'' + assert resp.data == b"" assert resp.status_code == 204 - assert mapper.create_namespace.call_args_list == [(( - AuthsourceID('source'), Token('tokey'), NamespaceID('foo')), {})] + assert mapper.create_namespace.call_args_list == [ + ((AuthsourceID("source"), Token("tokey"), NamespaceID("foo")), {}) + ] def test_create_namespace_post(): cli, mapper = build_app() - resp = cli.post('/api/v1/namespace/foo', 
headers={'Authorization': 'source tokey'}) + resp = cli.post("/api/v1/namespace/foo", headers={"Authorization": "source tokey"}) - assert resp.data == b'' + assert resp.data == b"" assert resp.status_code == 204 - assert mapper.create_namespace.call_args_list == [(( - AuthsourceID('source'), Token('tokey'), NamespaceID('foo')), {})] + assert mapper.create_namespace.call_args_list == [ + ((AuthsourceID("source"), Token("tokey"), NamespaceID("foo")), {}) + ] def test_create_namespace_fail_no_token(): - fail_no_token_put('/api/v1/namespace/foo') + fail_no_token_put("/api/v1/namespace/foo") def fail_no_token_put(url): @@ -559,86 +746,99 @@ def fail_no_token_delete(url): def fail_no_token_check(resp): assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 401, - 'appcode': 10010, - 'apperror': 'No authentication token', - 'httpstatus': 'Unauthorized', - 'message': '10010 No authentication token' - } - }) + { + "error": { + "httpcode": 401, + "appcode": 10010, + "apperror": "No authentication token", + "httpstatus": "Unauthorized", + "message": "10010 No authentication token", + } + }, + ) assert resp.status_code == 401 def test_create_namespace_fail_munged_auth(): - fail_munged_auth_post('/api/v1/namespace/foo') - fail_munged_auth_put('/api/v1/namespace/foo') + fail_munged_auth_post("/api/v1/namespace/foo") + fail_munged_auth_put("/api/v1/namespace/foo") def fail_munged_auth_put(url): cli, _ = build_app() - resp = cli.put(url, headers={'Authorization': 'astoketoketoke'}) + resp = cli.put(url, headers={"Authorization": "astoketoketoke"}) fail_munged_auth_check(resp) def fail_munged_auth_post(url): cli, _ = build_app() - resp = cli.post(url, headers={'Authorization': 'astoketoketoke'}) + resp = cli.post(url, headers={"Authorization": "astoketoketoke"}) fail_munged_auth_check(resp) def fail_munged_auth_delete(url): cli, _ = build_app() - resp = cli.delete(url, headers={'Authorization': 'astoketoketoke'}) + resp = cli.delete(url, headers={"Authorization": 
"astoketoketoke"}) fail_munged_auth_check(resp) def fail_munged_auth_check(resp): assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'appcode': 30001, - 'apperror': 'Illegal input parameter', - 'message': ('30001 Illegal input parameter: ' + - 'Expected authsource and token in header.') - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "appcode": 30001, + "apperror": "Illegal input parameter", + "message": ( + "30001 Illegal input parameter: " + + "Expected authsource and token in header." + ), + } + }, + ) assert resp.status_code == 400 def test_create_namespace_fail_illegal_ns_id(): - fail_illegal_ns_id_put('/api/v1/namespace/foo*bar') + fail_illegal_ns_id_put("/api/v1/namespace/foo*bar") def fail_illegal_ns_id_get(url, json=None): cli, _ = build_app() - resp = cli.get(url, headers={'Authorization': 'source tokey'}, json=json) + resp = cli.get(url, headers={"Authorization": "source tokey"}, json=json) fail_illegal_ns_id_check(resp) def fail_illegal_ns_id_put(url, json=None): cli, _ = build_app() - resp = cli.put(url, headers={'Authorization': 'source tokey'}, json=json) + resp = cli.put(url, headers={"Authorization": "source tokey"}, json=json) fail_illegal_ns_id_check(resp) def fail_illegal_ns_id_delete(url, json=None): cli, _ = build_app() - resp = cli.delete(url, headers={'Authorization': 'source tokey'}, json=json) + resp = cli.delete(url, headers={"Authorization": "source tokey"}, json=json) fail_illegal_ns_id_check(resp) def fail_illegal_ns_id_check(resp): assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'appcode': 30001, - 'apperror': 'Illegal input parameter', - 'message': ('30001 Illegal input parameter: ' + - 'Illegal character in namespace id foo*bar: *') - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "appcode": 30001, + "apperror": "Illegal input parameter", + "message": ( + "30001 
Illegal input parameter: " + + "Illegal character in namespace id foo*bar: *" + ), + } + }, + ) assert resp.status_code == 400 @@ -647,167 +847,219 @@ def test_create_namespace_fail_unauthorized(): logstream = Mock() cli, mapper = build_app(logstream=logstream) - mapper.create_namespace.side_effect = UnauthorizedError('YOU SHALL NOT PASS') + mapper.create_namespace.side_effect = UnauthorizedError("YOU SHALL NOT PASS") - resp = cli.put('/api/v1/namespace/foo', headers={'Authorization': 'source tokey'}) + resp = cli.put("/api/v1/namespace/foo", headers={"Authorization": "source tokey"}) assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 403, - 'httpstatus': 'Forbidden', - 'appcode': 20000, - 'apperror': 'Unauthorized', - 'message': '20000 Unauthorized: YOU SHALL NOT PASS' - } - }) + { + "error": { + "httpcode": 403, + "httpstatus": "Forbidden", + "appcode": 20000, + "apperror": "Unauthorized", + "message": "20000 Unauthorized: YOU SHALL NOT PASS", + } + }, + ) assert resp.status_code == 403 - assert mapper.create_namespace.call_args_list == [(( - AuthsourceID('source'), Token('tokey'), NamespaceID('foo')), {})] + assert mapper.create_namespace.call_args_list == [ + ((AuthsourceID("source"), Token("tokey"), NamespaceID("foo")), {}) + ] - check_error_logging(logstream, 'PUT', '/api/v1/namespace/foo', 403, - 'UnauthorizedError: 20000 Unauthorized: YOU SHALL NOT PASS') + check_error_logging( + logstream, + "PUT", + "/api/v1/namespace/foo", + 403, + "UnauthorizedError: 20000 Unauthorized: YOU SHALL NOT PASS", + ) def test_add_user_to_namespace(): cli, mapper = build_app() - resp = cli.put('/api/v1/namespace/foo/user/bar/baz', headers={'Authorization': 'source tokey'}) + resp = cli.put( + "/api/v1/namespace/foo/user/bar/baz", headers={"Authorization": "source tokey"} + ) - assert resp.data == b'' + assert resp.data == b"" assert resp.status_code == 204 - assert mapper.add_user_to_namespace.call_args_list == [(( - AuthsourceID('source'), Token('tokey'), 
NamespaceID('foo'), - User(AuthsourceID('bar'), Username('baz'))), {})] + assert mapper.add_user_to_namespace.call_args_list == [ + ( + ( + AuthsourceID("source"), + Token("tokey"), + NamespaceID("foo"), + User(AuthsourceID("bar"), Username("baz")), + ), + {}, + ) + ] def test_add_user_to_namespace_fail_no_token(): - fail_no_token_put('/api/v1/namespace/foo/user/bar/baz') + fail_no_token_put("/api/v1/namespace/foo/user/bar/baz") def test_add_user_to_namespace_fail_munged_auth(): - fail_munged_auth_put('/api/v1/namespace/foo/user/bar/baz') + fail_munged_auth_put("/api/v1/namespace/foo/user/bar/baz") def test_add_user_to_namespace_fail_illegal_ns_id(): - fail_illegal_ns_id_put('/api/v1/namespace/foo*bar/user/bar/baz') + fail_illegal_ns_id_put("/api/v1/namespace/foo*bar/user/bar/baz") def test_remove_user_from_namespace(): cli, mapper = build_app() - resp = cli.delete('/api/v1/namespace/foo/user/bar/baz', - headers={'Authorization': 'source tokey'}) + resp = cli.delete( + "/api/v1/namespace/foo/user/bar/baz", headers={"Authorization": "source tokey"} + ) - assert resp.data == b'' + assert resp.data == b"" assert resp.status_code == 204 - assert mapper.remove_user_from_namespace.call_args_list == [(( - AuthsourceID('source'), Token('tokey'), NamespaceID('foo'), - User(AuthsourceID('bar'), Username('baz'))), {})] + assert mapper.remove_user_from_namespace.call_args_list == [ + ( + ( + AuthsourceID("source"), + Token("tokey"), + NamespaceID("foo"), + User(AuthsourceID("bar"), Username("baz")), + ), + {}, + ) + ] def test_remove_user_from_namespace_fail_no_token(): - fail_no_token_delete('/api/v1/namespace/foo/user/bar/baz') + fail_no_token_delete("/api/v1/namespace/foo/user/bar/baz") def test_remove_user_from_namespace_fail_munged_auth(): - fail_munged_auth_delete('/api/v1/namespace/foo/user/bar/baz') + fail_munged_auth_delete("/api/v1/namespace/foo/user/bar/baz") def test_remove_user_from_namespace_fail_illegal_ns_id(): - 
fail_illegal_ns_id_delete('/api/v1/namespace/foo*bar/user/bar/baz') + fail_illegal_ns_id_delete("/api/v1/namespace/foo*bar/user/bar/baz") def test_set_namespace_publicly_mappable(): - check_set_namespace_publicly_mappable('true', True) - check_set_namespace_publicly_mappable('false', False) + check_set_namespace_publicly_mappable("true", True) + check_set_namespace_publicly_mappable("false", False) def check_set_namespace_publicly_mappable(arg, expected): cli, mapper = build_app() - resp = cli.put('/api/v1/namespace/foo/set?publicly_mappable=' + arg, - headers={'Authorization': 'source tokey'}) + resp = cli.put( + "/api/v1/namespace/foo/set?publicly_mappable=" + arg, + headers={"Authorization": "source tokey"}, + ) - assert resp.data == b'' + assert resp.data == b"" assert resp.status_code == 204 - assert mapper.set_namespace_publicly_mappable.call_args_list == [(( - AuthsourceID('source'), Token('tokey'), NamespaceID('foo'), expected), {})] + assert mapper.set_namespace_publicly_mappable.call_args_list == [ + ((AuthsourceID("source"), Token("tokey"), NamespaceID("foo"), expected), {}) + ] def test_set_namespace_fail_no_op(): cli, _ = build_app() - resp = cli.put('/api/v1/namespace/foo/set', headers={'Authorization': 'source tokey'}) + resp = cli.put( + "/api/v1/namespace/foo/set", headers={"Authorization": "source tokey"} + ) assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'appcode': 30000, - 'apperror': 'Missing input parameter', - 'message': '30000 Missing input parameter: No settings provided.' 
- } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "appcode": 30000, + "apperror": "Missing input parameter", + "message": "30000 Missing input parameter: No settings provided.", + } + }, + ) assert resp.status_code == 400 def test_set_namespace_publicly_mappable_illegal_input(): cli, _ = build_app() - resp = cli.put('/api/v1/namespace/foo/set?publicly_mappable=foobar', - headers={'Authorization': 'source tokey'}) + resp = cli.put( + "/api/v1/namespace/foo/set?publicly_mappable=foobar", + headers={"Authorization": "source tokey"}, + ) assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'appcode': 30001, - 'apperror': 'Illegal input parameter', - 'message': ("30001 Illegal input parameter: Expected value of 'true' or " + - "'false' for publicly_mappable") - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "appcode": 30001, + "apperror": "Illegal input parameter", + "message": ( + "30001 Illegal input parameter: Expected value of 'true' or " + + "'false' for publicly_mappable" + ), + } + }, + ) assert resp.status_code == 400 def test_set_namespace_publicly_mappable_fail_no_token(): - fail_no_token_put('/api/v1/namespace/foo/set') + fail_no_token_put("/api/v1/namespace/foo/set") def test_set_namespace_publicly_mappable_fail_munged_auth(): - fail_munged_auth_put('/api/v1/namespace/foo/set') + fail_munged_auth_put("/api/v1/namespace/foo/set") def test_set_namespace_publicly_mappable_fail_illegal_ns_id(): - fail_illegal_ns_id_put('/api/v1/namespace/foo*bar/set?publicly_mappable=true') + fail_illegal_ns_id_put("/api/v1/namespace/foo*bar/set?publicly_mappable=true") def test_get_namespaces_empty(): - check_get_namespaces((set(), set()), {'publicly_mappable': [], 'privately_mappable': []}) + check_get_namespaces( + (set(), set()), {"publicly_mappable": [], "privately_mappable": []} + ) def test_get_namespaces_public(): check_get_namespaces( - 
(set([NamespaceID('zedsdead'), NamespaceID('foo'), NamespaceID('bar')]), set()), - {'publicly_mappable': ['bar', 'foo', 'zedsdead'], 'privately_mappable': []}) + (set([NamespaceID("zedsdead"), NamespaceID("foo"), NamespaceID("bar")]), set()), + {"publicly_mappable": ["bar", "foo", "zedsdead"], "privately_mappable": []}, + ) def test_get_namespaces_private(): check_get_namespaces( - (set(), set([NamespaceID('zedsdead'), NamespaceID('foo'), NamespaceID('bar')])), - {'publicly_mappable': [], 'privately_mappable': ['bar', 'foo', 'zedsdead']}) + (set(), set([NamespaceID("zedsdead"), NamespaceID("foo"), NamespaceID("bar")])), + {"publicly_mappable": [], "privately_mappable": ["bar", "foo", "zedsdead"]}, + ) def test_get_namespaces_both(): check_get_namespaces( - (set([NamespaceID('whoo'), NamespaceID('whee'), NamespaceID('pewpewpew')]), - set([NamespaceID('zedsdead'), NamespaceID('foo'), NamespaceID('bar')])), - {'publicly_mappable': ['pewpewpew', 'whee', 'whoo'], - 'privately_mappable': ['bar', 'foo', 'zedsdead']}) + ( + set([NamespaceID("whoo"), NamespaceID("whee"), NamespaceID("pewpewpew")]), + set([NamespaceID("zedsdead"), NamespaceID("foo"), NamespaceID("bar")]), + ), + { + "publicly_mappable": ["pewpewpew", "whee", "whoo"], + "privately_mappable": ["bar", "foo", "zedsdead"], + }, + ) def check_get_namespaces(returned, expected): @@ -815,7 +1067,7 @@ def check_get_namespaces(returned, expected): mapper.get_namespaces.return_value = returned - resp = cli.get('/api/v1/namespace/') + resp = cli.get("/api/v1/namespace/") assert resp.get_json() == expected assert resp.status_code == 200 @@ -825,586 +1077,831 @@ def check_get_namespaces(returned, expected): def test_create_mapping_put(): cli, mapper = build_app() - resp = cli.put('/api/v1/mapping/ans/ns', - headers={'Authorization': 'source tokey', - 'content-type': 'x-www-form-urlencoded'}, - data='{"aid1": "id1", "id2": "id2"}') + resp = cli.put( + "/api/v1/mapping/ans/ns", + headers={ + "Authorization": "source tokey", 
+ "content-type": "x-www-form-urlencoded", + }, + data='{"aid1": "id1", "id2": "id2"}', + ) check_create_mapping(resp, mapper) def test_create_mapping_post(): cli, mapper = build_app() - resp = cli.post('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}, - json={' aid1 \t ': 'id1', 'id2': ' \t id2 '}) + resp = cli.post( + "/api/v1/mapping/ans/ns", + headers={"Authorization": "source tokey"}, + json={" aid1 \t ": "id1", "id2": " \t id2 "}, + ) check_create_mapping(resp, mapper) def check_create_mapping(resp, mapper): - assert resp.data == b'' + assert resp.data == b"" assert resp.status_code == 204 assert mapper.create_mapping.call_args_list == [ - ((AuthsourceID('source'), Token('tokey'), - ObjectID(NamespaceID('ans'), 'aid1'), - ObjectID(NamespaceID('ns'), 'id1')), {}), - ((AuthsourceID('source'), Token('tokey'), - ObjectID(NamespaceID('ans'), 'id2'), - ObjectID(NamespaceID('ns'), 'id2')), {})] + ( + ( + AuthsourceID("source"), + Token("tokey"), + ObjectID(NamespaceID("ans"), "aid1"), + ObjectID(NamespaceID("ns"), "id1"), + ), + {}, + ), + ( + ( + AuthsourceID("source"), + Token("tokey"), + ObjectID(NamespaceID("ans"), "id2"), + ObjectID(NamespaceID("ns"), "id2"), + ), + {}, + ), + ] def test_create_mapping_fail_no_token(): - fail_no_token_put('/api/v1/mapping/ans/ns') + fail_no_token_put("/api/v1/mapping/ans/ns") def test_create_mapping_fail_munged_auth(): - fail_munged_auth_put('/api/v1/mapping/ans/ns') - fail_munged_auth_post('/api/v1/mapping/ans/ns') + fail_munged_auth_put("/api/v1/mapping/ans/ns") + fail_munged_auth_post("/api/v1/mapping/ans/ns") def test_create_mapping_fail_no_body(): cli, _ = build_app() - resp = cli.put('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}) + resp = cli.put("/api/v1/mapping/ans/ns", headers={"Authorization": "source tokey"}) check_mapping_fail_no_body(resp) def check_mapping_fail_no_body(resp): assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad 
Request', - 'message': 'Input JSON decode error: Expecting value: line 1 column 1 (char 0)' - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "message": "Input JSON decode error: Expecting value: line 1 column 1 (char 0)", + } + }, + ) assert resp.status_code == 400 def test_bad_json_error(): logstream = Mock() cli, _ = build_app(logstream=logstream) - resp = cli.put('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}, - data='{"foo": ["bar", "baz"}]') + resp = cli.put( + "/api/v1/mapping/ans/ns", + headers={"Authorization": "source tokey"}, + data='{"foo": ["bar", "baz"}]', + ) err = "Expecting ',' delimiter: line 1 column 22 (char 21)" assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'message': 'Input JSON decode error: ' + err - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "message": "Input JSON decode error: " + err, + } + }, + ) assert resp.status_code == 400 - check_error_logging(logstream, 'PUT', '/api/v1/mapping/ans/ns', 400, 'JSONDecodeError: ' + err) + check_error_logging( + logstream, "PUT", "/api/v1/mapping/ans/ns", 400, "JSONDecodeError: " + err + ) def test_create_mapping_fail_bad_json(): cli, _ = build_app() - resp = cli.put('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}, - data='{"foo": ["bar", "baz"}]') + resp = cli.put( + "/api/v1/mapping/ans/ns", + headers={"Authorization": "source tokey"}, + data='{"foo": ["bar", "baz"}]', + ) check_mapping_fail_bad_json(resp) def check_mapping_fail_bad_json(resp): assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'message': ("Input JSON decode error: Expecting ',' delimiter: " + - "line 1 column 22 (char 21)") - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "message": ( + "Input JSON decode error: Expecting ',' delimiter: " + + "line 1 column 22 (char 21)" + ), + } + }, + ) 
assert resp.status_code == 400 def test_create_mapping_fail_not_dict(): cli, _ = build_app() - resp = cli.put('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}, - json=['foo', 'bar']) + resp = cli.put( + "/api/v1/mapping/ans/ns", + headers={"Authorization": "source tokey"}, + json=["foo", "bar"], + ) check_mapping_fail_not_dict(resp) def check_mapping_fail_not_dict(resp): assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'appcode': 30001, - 'apperror': 'Illegal input parameter', - 'message': ('30001 Illegal input parameter: ' + - 'Expected JSON mapping in request body') - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "appcode": 30001, + "apperror": "Illegal input parameter", + "message": ( + "30001 Illegal input parameter: " + + "Expected JSON mapping in request body" + ), + } + }, + ) assert resp.status_code == 400 def test_create_mapping_fail_no_ids(): cli, _ = build_app() - resp = cli.put('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}, - json={}) + resp = cli.put( + "/api/v1/mapping/ans/ns", headers={"Authorization": "source tokey"}, json={} + ) check_mapping_fail_no_ids(resp) def check_mapping_fail_no_ids(resp): assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'appcode': 30000, - 'apperror': 'Missing input parameter', - 'message': '30000 Missing input parameter: No ids supplied' - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "appcode": 30000, + "apperror": "Missing input parameter", + "message": "30000 Missing input parameter: No ids supplied", + } + }, + ) assert resp.status_code == 400 def test_create_mapping_fail_whitespace_key(): cli, _ = build_app() - resp = cli.put('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}, - json={' \t ': 'id1'}) + resp = cli.put( + "/api/v1/mapping/ans/ns", + headers={"Authorization": "source 
tokey"}, + json={" \t ": "id1"}, + ) check_mapping_fail_whitespace_key(resp) def check_mapping_fail_whitespace_key(resp): assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'appcode': 30000, - 'apperror': 'Missing input parameter', - 'message': '30000 Missing input parameter: whitespace only key in input JSON' - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "appcode": 30000, + "apperror": "Missing input parameter", + "message": "30000 Missing input parameter: whitespace only key in input JSON", + } + }, + ) assert resp.status_code == 400 def test_create_mapping_fail_non_string_value(): cli, _ = build_app() - resp = cli.put('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}, - json={'id': []}) + resp = cli.put( + "/api/v1/mapping/ans/ns", + headers={"Authorization": "source tokey"}, + json={"id": []}, + ) check_mapping_fail_non_string_value(resp) def check_mapping_fail_non_string_value(resp): assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'appcode': 30001, - 'apperror': 'Illegal input parameter', - 'message': ('30001 Illegal input parameter: ' + - 'value for key id in input JSON is not string: []') - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "appcode": 30001, + "apperror": "Illegal input parameter", + "message": ( + "30001 Illegal input parameter: " + + "value for key id in input JSON is not string: []" + ), + } + }, + ) assert resp.status_code == 400 def test_create_mapping_fail_whitespace_value(): cli, _ = build_app() - resp = cli.put('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}, - json={'id': ' \t '}) + resp = cli.put( + "/api/v1/mapping/ans/ns", + headers={"Authorization": "source tokey"}, + json={"id": " \t "}, + ) check_mapping_fail_whitespace_value(resp) def check_mapping_fail_whitespace_value(resp): assert_json_error_correct( resp.get_json(), 
- {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'appcode': 30000, - 'apperror': 'Missing input parameter', - 'message': ('30000 Missing input parameter: ' + - 'value for key id in input JSON is whitespace only') - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "appcode": 30000, + "apperror": "Missing input parameter", + "message": ( + "30000 Missing input parameter: " + + "value for key id in input JSON is whitespace only" + ), + } + }, + ) assert resp.status_code == 400 def test_create_mapping_fail_too_many_ids(): cli, _ = build_app() - resp = cli.put('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}, - json={str(x): str(x) for x in range(10001)}) + resp = cli.put( + "/api/v1/mapping/ans/ns", + headers={"Authorization": "source tokey"}, + json={str(x): str(x) for x in range(10001)}, + ) check_mapping_fail_too_many_ids(resp) def check_mapping_fail_too_many_ids(resp): assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'appcode': 30001, - 'apperror': 'Illegal input parameter', - 'message': ('30001 Illegal input parameter: ' + - 'A maximum of 10000 ids are allowed') - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "appcode": 30001, + "apperror": "Illegal input parameter", + "message": ( + "30001 Illegal input parameter: " + + "A maximum of 10000 ids are allowed" + ), + } + }, + ) assert resp.status_code == 400 def test_create_mapping_fail_illegal_ns_id(): - fail_illegal_ns_id_put('/api/v1/mapping/foo*bar/ns', - json={'admin_id': 'aid', 'other_id': 'id'}) + fail_illegal_ns_id_put( + "/api/v1/mapping/foo*bar/ns", json={"admin_id": "aid", "other_id": "id"} + ) def test_remove_mapping(): cli, mapper = build_app() - resp = cli.delete('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}, - json={' some id ': 'aid', 'other_id': ' \t id '}) + resp = cli.delete( + "/api/v1/mapping/ans/ns", + headers={"Authorization": 
"source tokey"}, + json={" some id ": "aid", "other_id": " \t id "}, + ) - assert resp.data == b'' + assert resp.data == b"" assert resp.status_code == 204 assert mapper.remove_mapping.call_args_list == [ - ((AuthsourceID('source'), Token('tokey'), - ObjectID(NamespaceID('ans'), 'some id'), - ObjectID(NamespaceID('ns'), 'aid')), {}), - ((AuthsourceID('source'), Token('tokey'), - ObjectID(NamespaceID('ans'), 'other_id'), - ObjectID(NamespaceID('ns'), 'id')), {}) - ] + ( + ( + AuthsourceID("source"), + Token("tokey"), + ObjectID(NamespaceID("ans"), "some id"), + ObjectID(NamespaceID("ns"), "aid"), + ), + {}, + ), + ( + ( + AuthsourceID("source"), + Token("tokey"), + ObjectID(NamespaceID("ans"), "other_id"), + ObjectID(NamespaceID("ns"), "id"), + ), + {}, + ), + ] def test_remove_mapping_fail_no_token(): - fail_no_token_delete('/api/v1/mapping/ans/ns') + fail_no_token_delete("/api/v1/mapping/ans/ns") def test_remove_mapping_fail_munged_auth(): - fail_munged_auth_delete('/api/v1/mapping/ans/ns') + fail_munged_auth_delete("/api/v1/mapping/ans/ns") def test_remove_mapping_fail_no_body(): cli, _ = build_app() - resp = cli.delete('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}) + resp = cli.delete( + "/api/v1/mapping/ans/ns", headers={"Authorization": "source tokey"} + ) check_mapping_fail_no_body(resp) def test_remove_mapping_fail_bad_json(): cli, _ = build_app() - resp = cli.delete('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}, - data='{"foo": ["bar", "baz"}]') + resp = cli.delete( + "/api/v1/mapping/ans/ns", + headers={"Authorization": "source tokey"}, + data='{"foo": ["bar", "baz"}]', + ) check_mapping_fail_bad_json(resp) def test_remove_mapping_fail_not_dict(): cli, _ = build_app() - resp = cli.delete('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}, - json=['foo', 'bar']) + resp = cli.delete( + "/api/v1/mapping/ans/ns", + headers={"Authorization": "source tokey"}, + json=["foo", "bar"], + ) 
check_mapping_fail_not_dict(resp) def test_remove_mapping_fail_no_ids(): cli, _ = build_app() - resp = cli.delete('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}, - json={}) + resp = cli.delete( + "/api/v1/mapping/ans/ns", headers={"Authorization": "source tokey"}, json={} + ) check_mapping_fail_no_ids(resp) def test_remove_mapping_fail_whitespace_key(): cli, _ = build_app() - resp = cli.delete('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}, - json={' \t ': 'id1'}) + resp = cli.delete( + "/api/v1/mapping/ans/ns", + headers={"Authorization": "source tokey"}, + json={" \t ": "id1"}, + ) check_mapping_fail_whitespace_key(resp) def test_remove_mapping_fail_non_string_value(): cli, _ = build_app() - resp = cli.delete('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}, - json={'id': []}) + resp = cli.delete( + "/api/v1/mapping/ans/ns", + headers={"Authorization": "source tokey"}, + json={"id": []}, + ) check_mapping_fail_non_string_value(resp) def test_remove_mapping_fail_whitespace_value(): cli, _ = build_app() - resp = cli.delete('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}, - json={'id': ' \t '}) + resp = cli.delete( + "/api/v1/mapping/ans/ns", + headers={"Authorization": "source tokey"}, + json={"id": " \t "}, + ) check_mapping_fail_whitespace_value(resp) def test_remove_mapping_fail_too_many_ids(): cli, _ = build_app() - resp = cli.delete('/api/v1/mapping/ans/ns', headers={'Authorization': 'source tokey'}, - json={str(x): str(x) for x in range(10001)}) + resp = cli.delete( + "/api/v1/mapping/ans/ns", + headers={"Authorization": "source tokey"}, + json={str(x): str(x) for x in range(10001)}, + ) check_mapping_fail_too_many_ids(resp) def test_remove_mapping_fail_illegal_ns_id(): - fail_illegal_ns_id_delete('/api/v1/mapping/foo*bar/ns', - json={'admin_id': 'aid', 'other_id': 'id'}) + fail_illegal_ns_id_delete( + "/api/v1/mapping/foo*bar/ns", json={"admin_id": "aid", "other_id": "id"} + ) 
def test_get_mappings_empty(): - check_get_mappings([(set(), set()), (set(), set())], - {'id1': {'mappings': []}, 'id2': {'mappings': []}}) - check_get_mappings([(set(), set()), (set(), set())], - {'id1': {'admin': [], 'other': []}, 'id2': {'admin': [], 'other': []}}, - query='?separate') + check_get_mappings( + [(set(), set()), (set(), set())], + {"id1": {"mappings": []}, "id2": {"mappings": []}}, + ) + check_get_mappings( + [(set(), set()), (set(), set())], + {"id1": {"admin": [], "other": []}, "id2": {"admin": [], "other": []}}, + query="?separate", + ) def to_oid(namespace, id_): - return(ObjectID(NamespaceID(namespace), id_)) + return ObjectID(NamespaceID(namespace), id_) def test_get_mappings_admin(): check_get_mappings( - [(set([to_oid('ns3', 'id1'), to_oid('ns1', 'id3'), to_oid('ns1', 'id1'), - to_oid('ns3', 'jd1')]), - set()), - (set([to_oid('ns', 'id')]), - set())], - {'id1': {'mappings': [{'ns': 'ns1', 'id': 'id1'}, - {'ns': 'ns1', 'id': 'id3'}, - {'ns': 'ns3', 'id': 'id1'}, - {'ns': 'ns3', 'id': 'jd1'}] - }, - 'id2': {'mappings': [{'ns': 'ns', 'id': 'id'}]} - }) + [ + ( + set( + [ + to_oid("ns3", "id1"), + to_oid("ns1", "id3"), + to_oid("ns1", "id1"), + to_oid("ns3", "jd1"), + ] + ), + set(), + ), + (set([to_oid("ns", "id")]), set()), + ], + { + "id1": { + "mappings": [ + {"ns": "ns1", "id": "id1"}, + {"ns": "ns1", "id": "id3"}, + {"ns": "ns3", "id": "id1"}, + {"ns": "ns3", "id": "jd1"}, + ] + }, + "id2": {"mappings": [{"ns": "ns", "id": "id"}]}, + }, + ) check_get_mappings( - [(set([to_oid('ns3', 'id1'), to_oid('ns1', 'id3'), to_oid('ns1', 'id1'), - to_oid('ns3', 'jd1')]), - set()), - (set([to_oid('ns', 'id')]), - set())], - {'id1': {'admin': [{'ns': 'ns1', 'id': 'id1'}, - {'ns': 'ns1', 'id': 'id3'}, - {'ns': 'ns3', 'id': 'id1'}, - {'ns': 'ns3', 'id': 'jd1'}], - 'other': [] - }, - 'id2': {'admin': [{'ns': 'ns', 'id': 'id'}], - 'other': [] - } - }, - query='?separate') + [ + ( + set( + [ + to_oid("ns3", "id1"), + to_oid("ns1", "id3"), + to_oid("ns1", 
"id1"), + to_oid("ns3", "jd1"), + ] + ), + set(), + ), + (set([to_oid("ns", "id")]), set()), + ], + { + "id1": { + "admin": [ + {"ns": "ns1", "id": "id1"}, + {"ns": "ns1", "id": "id3"}, + {"ns": "ns3", "id": "id1"}, + {"ns": "ns3", "id": "jd1"}, + ], + "other": [], + }, + "id2": {"admin": [{"ns": "ns", "id": "id"}], "other": []}, + }, + query="?separate", + ) def test_get_mappings_other(): check_get_mappings( - [(set(), - set([to_oid('ns3', 'id1'), to_oid('ns1', 'id3'), to_oid('ns1', 'id1'), - to_oid('ns3', 'jd1')])), - (set(), - set([to_oid('ns', 'id')]))], - {'id1': {'mappings': [{'ns': 'ns1', 'id': 'id1'}, - {'ns': 'ns1', 'id': 'id3'}, - {'ns': 'ns3', 'id': 'id1'}, - {'ns': 'ns3', 'id': 'jd1'}] - }, - 'id2': {'mappings': [{'ns': 'ns', 'id': 'id'}]} - }) + [ + ( + set(), + set( + [ + to_oid("ns3", "id1"), + to_oid("ns1", "id3"), + to_oid("ns1", "id1"), + to_oid("ns3", "jd1"), + ] + ), + ), + (set(), set([to_oid("ns", "id")])), + ], + { + "id1": { + "mappings": [ + {"ns": "ns1", "id": "id1"}, + {"ns": "ns1", "id": "id3"}, + {"ns": "ns3", "id": "id1"}, + {"ns": "ns3", "id": "jd1"}, + ] + }, + "id2": {"mappings": [{"ns": "ns", "id": "id"}]}, + }, + ) check_get_mappings( - [(set(), - set([to_oid('ns3', 'id1'), to_oid('ns1', 'id3'), to_oid('ns1', 'id1'), - to_oid('ns3', 'jd1')])), - (set(), - set([to_oid('ns', 'id')]))], - {'id1': {'admin': [], - 'other': [{'ns': 'ns1', 'id': 'id1'}, - {'ns': 'ns1', 'id': 'id3'}, - {'ns': 'ns3', 'id': 'id1'}, - {'ns': 'ns3', 'id': 'jd1'}] - }, - 'id2': {'admin': [], - 'other': [{'ns': 'ns', 'id': 'id'}] - } - }, - query='?separate') + [ + ( + set(), + set( + [ + to_oid("ns3", "id1"), + to_oid("ns1", "id3"), + to_oid("ns1", "id1"), + to_oid("ns3", "jd1"), + ] + ), + ), + (set(), set([to_oid("ns", "id")])), + ], + { + "id1": { + "admin": [], + "other": [ + {"ns": "ns1", "id": "id1"}, + {"ns": "ns1", "id": "id3"}, + {"ns": "ns3", "id": "id1"}, + {"ns": "ns3", "id": "jd1"}, + ], + }, + "id2": {"admin": [], "other": [{"ns": "ns", "id": 
"id"}]}, + }, + query="?separate", + ) def test_get_mappings_both(): check_get_mappings( - [(set([to_oid('whee', 'myadiders'), to_oid('whoo', 'someid'), to_oid('baz', 'someid'), - to_oid('whee', 'myadidas')]), - set([to_oid('ns3', 'id1'), to_oid('ns1', 'id3'), to_oid('ns1', 'id1'), - to_oid('ns3', 'jd1')])), - (set([to_oid('ns', 'id')]), - set())], - {'id1': {'mappings': [{'ns': 'baz', 'id': 'someid'}, - {'ns': 'ns1', 'id': 'id1'}, - {'ns': 'ns1', 'id': 'id3'}, - {'ns': 'ns3', 'id': 'id1'}, - {'ns': 'ns3', 'id': 'jd1'}, - {'ns': 'whee', 'id': 'myadidas'}, - {'ns': 'whee', 'id': 'myadiders'}, - {'ns': 'whoo', 'id': 'someid'}] - }, - 'id2': {'mappings': [{'ns': 'ns', 'id': 'id'}]} - }) + [ + ( + set( + [ + to_oid("whee", "myadiders"), + to_oid("whoo", "someid"), + to_oid("baz", "someid"), + to_oid("whee", "myadidas"), + ] + ), + set( + [ + to_oid("ns3", "id1"), + to_oid("ns1", "id3"), + to_oid("ns1", "id1"), + to_oid("ns3", "jd1"), + ] + ), + ), + (set([to_oid("ns", "id")]), set()), + ], + { + "id1": { + "mappings": [ + {"ns": "baz", "id": "someid"}, + {"ns": "ns1", "id": "id1"}, + {"ns": "ns1", "id": "id3"}, + {"ns": "ns3", "id": "id1"}, + {"ns": "ns3", "id": "jd1"}, + {"ns": "whee", "id": "myadidas"}, + {"ns": "whee", "id": "myadiders"}, + {"ns": "whoo", "id": "someid"}, + ] + }, + "id2": {"mappings": [{"ns": "ns", "id": "id"}]}, + }, + ) check_get_mappings( - [(set([to_oid('whee', 'myadiders'), to_oid('whoo', 'someid'), to_oid('baz', 'someid'), - to_oid('whee', 'myadidas')]), - set([to_oid('ns3', 'id1'), to_oid('ns1', 'id3'), to_oid('ns1', 'id1'), - to_oid('ns3', 'jd1')])), - (set([to_oid('ns', 'id')]), - set())], - {'id1': {'admin': [{'ns': 'baz', 'id': 'someid'}, - {'ns': 'whee', 'id': 'myadidas'}, - {'ns': 'whee', 'id': 'myadiders'}, - {'ns': 'whoo', 'id': 'someid'}], - 'other': [{'ns': 'ns1', 'id': 'id1'}, - {'ns': 'ns1', 'id': 'id3'}, - {'ns': 'ns3', 'id': 'id1'}, - {'ns': 'ns3', 'id': 'jd1'}] - }, - 'id2': {'admin': [{'ns': 'ns', 'id': 'id'}], - 'other': [] 
- } - }, - query='?separate') + [ + ( + set( + [ + to_oid("whee", "myadiders"), + to_oid("whoo", "someid"), + to_oid("baz", "someid"), + to_oid("whee", "myadidas"), + ] + ), + set( + [ + to_oid("ns3", "id1"), + to_oid("ns1", "id3"), + to_oid("ns1", "id1"), + to_oid("ns3", "jd1"), + ] + ), + ), + (set([to_oid("ns", "id")]), set()), + ], + { + "id1": { + "admin": [ + {"ns": "baz", "id": "someid"}, + {"ns": "whee", "id": "myadidas"}, + {"ns": "whee", "id": "myadiders"}, + {"ns": "whoo", "id": "someid"}, + ], + "other": [ + {"ns": "ns1", "id": "id1"}, + {"ns": "ns1", "id": "id3"}, + {"ns": "ns3", "id": "id1"}, + {"ns": "ns3", "id": "jd1"}, + ], + }, + "id2": {"admin": [{"ns": "ns", "id": "id"}], "other": []}, + }, + query="?separate", + ) def test_get_mappings_with_empty_filter(): check_get_mappings( - [(set([to_oid('ns3', 'id1')]), set()), (set(), set())], - {'id1': {'mappings': [{'ns': 'ns3', 'id': 'id1'}]}, 'id2': {'mappings': []}}, - query='?namespace_filter= \t ') + [(set([to_oid("ns3", "id1")]), set()), (set(), set())], + {"id1": {"mappings": [{"ns": "ns3", "id": "id1"}]}, "id2": {"mappings": []}}, + query="?namespace_filter= \t ", + ) check_get_mappings( - [(set([to_oid('ns3', 'id1')]), set()), (set(), set())], - {'id1': {'admin': [{'ns': 'ns3', 'id': 'id1'}], 'other': []}, - 'id2': {'admin': [], 'other': []}}, - query='?separate&namespace_filter= \t ') + [(set([to_oid("ns3", "id1")]), set()), (set(), set())], + { + "id1": {"admin": [{"ns": "ns3", "id": "id1"}], "other": []}, + "id2": {"admin": [], "other": []}, + }, + query="?separate&namespace_filter= \t ", + ) def test_get_mappings_with_filter(): check_get_mappings( - [(set([to_oid('ns3', 'id1')]), set()), (set(), set())], - {'id1': {'mappings': [{'ns': 'ns3', 'id': 'id1'}]}, 'id2': {'mappings': []}}, - query='?namespace_filter= \t ns3, ns1, \t ns2 ', - ns_filter_expected=[NamespaceID('ns3'), NamespaceID('ns1'), NamespaceID('ns2')]) + [(set([to_oid("ns3", "id1")]), set()), (set(), set())], + {"id1": 
{"mappings": [{"ns": "ns3", "id": "id1"}]}, "id2": {"mappings": []}}, + query="?namespace_filter= \t ns3, ns1, \t ns2 ", + ns_filter_expected=[NamespaceID("ns3"), NamespaceID("ns1"), NamespaceID("ns2")], + ) check_get_mappings( - [(set([to_oid('ns3', 'id1')]), set()), (set(), set())], - {'id1': {'admin': [{'ns': 'ns3', 'id': 'id1'}], 'other': []}, - 'id2': {'admin': [], 'other': []}}, - query='?separate&namespace_filter= \t ns3, ns1, \t ns2 ', - ns_filter_expected=[NamespaceID('ns3'), NamespaceID('ns1'), NamespaceID('ns2')]) + [(set([to_oid("ns3", "id1")]), set()), (set(), set())], + { + "id1": {"admin": [{"ns": "ns3", "id": "id1"}], "other": []}, + "id2": {"admin": [], "other": []}, + }, + query="?separate&namespace_filter= \t ns3, ns1, \t ns2 ", + ns_filter_expected=[NamespaceID("ns3"), NamespaceID("ns1"), NamespaceID("ns2")], + ) -def check_get_mappings(returned, expected, query='', ns_filter_expected=[]): +def check_get_mappings(returned, expected, query="", ns_filter_expected=[]): cli, mapper = build_app() mapper.get_mappings.side_effect = returned - resp = cli.get('/api/v1/mapping/ns' + query, json={'ids': [' id1 \t', 'id2']}) + resp = cli.get("/api/v1/mapping/ns" + query, json={"ids": [" id1 \t", "id2"]}) assert resp.get_json() == expected assert resp.status_code == 200 assert mapper.get_mappings.call_args_list == [ - ((ObjectID(NamespaceID('ns'), 'id1'), ns_filter_expected), {}), - ((ObjectID(NamespaceID('ns'), 'id2'), ns_filter_expected), {})] + ((ObjectID(NamespaceID("ns"), "id1"), ns_filter_expected), {}), + ((ObjectID(NamespaceID("ns"), "id2"), ns_filter_expected), {}), + ] def test_get_mappings_fail_no_body(): cli, _ = build_app() - resp = cli.get('/api/v1/mapping/ns') + resp = cli.get("/api/v1/mapping/ns") check_mapping_fail_no_body(resp) def test_get_mapping_fail_bad_json(): cli, _ = build_app() - resp = cli.get('/api/v1/mapping/ans', data='{"foo": ["bar", "baz"}]') + resp = cli.get("/api/v1/mapping/ans", data='{"foo": ["bar", "baz"}]') 
check_mapping_fail_bad_json(resp) def test_get_mapping_fail_not_dict(): cli, _ = build_app() - resp = cli.get('/api/v1/mapping/ans', json=['foo', 'bar']) + resp = cli.get("/api/v1/mapping/ans", json=["foo", "bar"]) check_mapping_fail_not_dict(resp) def test_get_mapping_fail_ids_not_list(): cli, _ = build_app() - resp = cli.get('/api/v1/mapping/ans', json={'ids': {'id': 'id'}}) + resp = cli.get("/api/v1/mapping/ans", json={"ids": {"id": "id"}}) assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'appcode': 30001, - 'apperror': 'Illegal input parameter', - 'message': ('30001 Illegal input parameter: ' + - 'Expected list at /ids in request body') - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "appcode": 30001, + "apperror": "Illegal input parameter", + "message": ( + "30001 Illegal input parameter: " + + "Expected list at /ids in request body" + ), + } + }, + ) assert resp.status_code == 400 def test_get_mapping_fail_ids_empty(): cli, _ = build_app() - resp = cli.get('/api/v1/mapping/ans', json={'ids': []}) + resp = cli.get("/api/v1/mapping/ans", json={"ids": []}) assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'appcode': 30000, - 'apperror': 'Missing input parameter', - 'message': '30000 Missing input parameter: No ids supplied' - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "appcode": 30000, + "apperror": "Missing input parameter", + "message": "30000 Missing input parameter: No ids supplied", + } + }, + ) assert resp.status_code == 400 def test_get_mapping_fail_bad_id(): cli, _ = build_app() - resp = cli.get('/api/v1/mapping/ans', json={'ids': ['id', None, 'id1']}) + resp = cli.get("/api/v1/mapping/ans", json={"ids": ["id", None, "id1"]}) check_get_mapping_fail_bad_id(resp) - resp = cli.get('/api/v1/mapping/ans', json={'ids': ['id', ' \t ', 'id1']}) + resp = cli.get("/api/v1/mapping/ans", 
json={"ids": ["id", " \t ", "id1"]}) check_get_mapping_fail_bad_id(resp) def check_get_mapping_fail_bad_id(resp): assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'appcode': 30000, - 'apperror': 'Missing input parameter', - 'message': '30000 Missing input parameter: null or whitespace-only id in list' - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "appcode": 30000, + "apperror": "Missing input parameter", + "message": "30000 Missing input parameter: null or whitespace-only id in list", + } + }, + ) assert resp.status_code == 400 def test_get_mapping_fail_too_many_ids(): cli, _ = build_app() - resp = cli.get('/api/v1/mapping/ans', json={'ids': [str(x) for x in range(1001)]}) + resp = cli.get("/api/v1/mapping/ans", json={"ids": [str(x) for x in range(1001)]}) assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'appcode': 30001, - 'apperror': 'Illegal input parameter', - 'message': ('30001 Illegal input parameter: ' + - 'A maximum of 1000 ids are allowed') - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "appcode": 30001, + "apperror": "Illegal input parameter", + "message": ( + "30001 Illegal input parameter: " + + "A maximum of 1000 ids are allowed" + ), + } + }, + ) assert resp.status_code == 400 def test_get_mappings_fail_whitespace_in_filter(): cli, _ = build_app() - resp = cli.get('/api/v1/mapping/ns?namespace_filter=ns1, , ns2 , ns3') + resp = cli.get("/api/v1/mapping/ns?namespace_filter=ns1, , ns2 , ns3") assert_json_error_correct( resp.get_json(), - {'error': {'httpcode': 400, - 'httpstatus': 'Bad Request', - 'appcode': 30000, - 'apperror': 'Missing input parameter', - 'message': '30000 Missing input parameter: namespace id' - } - }) + { + "error": { + "httpcode": 400, + "httpstatus": "Bad Request", + "appcode": 30000, + "apperror": "Missing input parameter", + "message": "30000 Missing 
input parameter: namespace id", + } + }, + ) assert resp.status_code == 400 def test_get_mappings_fail_illegal_ns_id(): - fail_illegal_ns_id_get('/api/v1/mapping/foo*bar', json={'ids': ['id']}) - fail_illegal_ns_id_get('/api/v1/mapping/foobar?namespace_filter=foo*bar', json={'ids': ['id']}) + fail_illegal_ns_id_get("/api/v1/mapping/foo*bar", json={"ids": ["id"]}) + fail_illegal_ns_id_get( + "/api/v1/mapping/foobar?namespace_filter=foo*bar", json={"ids": ["id"]} + ) diff --git a/src/jgikbase/test/idmapping/storage/mongo/test_id_mapping_mongo_storage.py b/src/jgikbase/test/idmapping/storage/mongo/test_id_mapping_mongo_storage.py index fac26dd..79c4d43 100644 --- a/src/jgikbase/test/idmapping/storage/mongo/test_id_mapping_mongo_storage.py +++ b/src/jgikbase/test/idmapping/storage/mongo/test_id_mapping_mongo_storage.py @@ -1,31 +1,47 @@ from pytest import raises, fixture from jgikbase.test.idmapping.mongo_controller import MongoController from jgikbase.test.idmapping import test_utils -from jgikbase.idmapping.storage.mongo.id_mapping_mongo_storage import IDMappingMongoStorage +from jgikbase.idmapping.storage.mongo.id_mapping_mongo_storage import ( + IDMappingMongoStorage, +) from jgikbase.idmapping.core.user import User, AuthsourceID, Username from jgikbase.idmapping.core.tokens import HashedToken from jgikbase.test.idmapping.test_utils import assert_exception_correct from pymongo.errors import DuplicateKeyError -from jgikbase.idmapping.core.errors import NoSuchUserError, UserExistsError, InvalidTokenError,\ - NamespaceExistsError, NoSuchNamespaceError -from jgikbase.idmapping.storage.errors import IDMappingStorageError, StorageInitException +from jgikbase.idmapping.core.errors import ( + NoSuchUserError, + UserExistsError, + InvalidTokenError, + NamespaceExistsError, + NoSuchNamespaceError, +) +from jgikbase.idmapping.storage.errors import ( + IDMappingStorageError, + StorageInitException, +) import re from jgikbase.idmapping.core.object_id import NamespaceID, 
Namespace, ObjectID -TEST_DB_NAME = 'test_id_mapping' +TEST_DB_NAME = "test_id_mapping" -@fixture(scope='module') +@fixture(scope="module") def mongo(): mongoexe = test_utils.get_mongo_exe() tempdir = test_utils.get_temp_dir() wt = test_utils.get_use_wired_tiger() mongo = MongoController(mongoexe, tempdir, wt) - print('running mongo {}{} on port {} in dir {}'.format( - mongo.db_version, ' with WiredTiger' if wt else '', mongo.port, mongo.temp_dir)) + print( + "running mongo {}{} on port {} in dir {}".format( + mongo.db_version, + " with WiredTiger" if wt else "", + mongo.port, + mongo.temp_dir, + ) + ) yield mongo del_temp = test_utils.get_delete_temp_files() - print('shutting down mongo, delete_temp_files={}'.format(del_temp)) + print("shutting down mongo, delete_temp_files={}".format(del_temp)) mongo.destroy(del_temp) @@ -38,7 +54,8 @@ def idstorage(mongo): def test_fail_startup(): with raises(Exception) as got: IDMappingMongoStorage(None) - assert_exception_correct(got.value, TypeError('db cannot be None')) + assert_exception_correct(got.value, TypeError("db cannot be None")) + # The following tests ensure that all indexes are created correctly. 
The collection names # are tested so that if a new collection is added the test will fail without altering @@ -48,83 +65,111 @@ def test_fail_startup(): def test_collection_names(idstorage, mongo): names = mongo.client[TEST_DB_NAME].list_collection_names() - expected = set(['users', 'config', 'ns', 'map']) + expected = set(["users", "config", "ns", "map"]) if mongo.includes_system_indexes: - expected.add('system.indexes') + expected.add("system.indexes") assert set(names) == expected def test_index_config(idstorage, mongo): v = mongo.index_version - indexes = mongo.client[TEST_DB_NAME]['config'].index_information() - expected = {'_id_': {'v': v, 'key': [('_id', 1)], 'ns': 'test_id_mapping.config'}, - 'schema_1': {'v': v, 'unique': True, 'key': [('schema', 1)], - 'ns': 'test_id_mapping.config'}} + indexes = mongo.client[TEST_DB_NAME]["config"].index_information() + test_utils.remove_ns_from_index_info(indexes) + expected = { + "_id_": {"v": v, "key": [("_id", 1)]}, + "schema_1": { + "v": v, + "unique": True, + "key": [("schema", 1)], + }, + } assert indexes == expected def test_index_user(idstorage, mongo): v = mongo.index_version - indexes = mongo.client[TEST_DB_NAME]['users'].index_information() - expected = {'_id_': {'v': v, 'key': [('_id', 1)], 'ns': 'test_id_mapping.users'}, - 'user_1': {'v': v, 'unique': True, 'key': [('user', 1)], - 'ns': 'test_id_mapping.users'}, - 'hshtkn_1': {'v': v, 'unique': True, 'key': [('hshtkn', 1)], - 'ns': 'test_id_mapping.users'}} + indexes = mongo.client[TEST_DB_NAME]["users"].index_information() + test_utils.remove_ns_from_index_info(indexes) + expected = { + "_id_": {"v": v, "key": [("_id", 1)]}, + "user_1": { + "v": v, + "unique": True, + "key": [("user", 1)], + }, + "hshtkn_1": { + "v": v, + "unique": True, + "key": [("hshtkn", 1)], + }, + } assert indexes == expected def test_index_namespace(idstorage, mongo): v = mongo.index_version - indexes = mongo.client[TEST_DB_NAME]['ns'].index_information() - expected = {'_id_': {'v': 
v, 'key': [('_id', 1)], 'ns': 'test_id_mapping.ns'}, - 'nsid_1': {'v': v, 'unique': True, 'key': [('nsid', 1)], - 'ns': 'test_id_mapping.ns'}} + indexes = mongo.client[TEST_DB_NAME]["ns"].index_information() + test_utils.remove_ns_from_index_info(indexes) + expected = { + "_id_": {"v": v, "key": [("_id", 1)]}, + "nsid_1": { + "v": v, + "unique": True, + "key": [("nsid", 1)], + }, + } assert indexes == expected def test_index_mappings(idstorage, mongo): v = mongo.index_version - indexes = mongo.client[TEST_DB_NAME]['map'].index_information() - expected = {'_id_': {'v': v, 'key': [('_id', 1)], 'ns': 'test_id_mapping.map'}, - 'pnsid_1_pid_1_snsid_1_sid_1': { - 'v': v, - 'unique': True, - 'key': [('pnsid', 1), ('pid', 1), ('snsid', 1), ('sid', 1)], - 'ns': 'test_id_mapping.map'}, - 'snsid_1_sid_1': { - 'v': v, - 'key': [('snsid', 1), ('sid', 1)], - 'ns': 'test_id_mapping.map'}, - } + indexes = mongo.client[TEST_DB_NAME]["map"].index_information() + test_utils.remove_ns_from_index_info(indexes) + expected = { + "_id_": {"v": v, "key": [("_id", 1)]}, + "pnsid_1_pid_1_snsid_1_sid_1": { + "v": v, + "unique": True, + "key": [("pnsid", 1), ("pid", 1), ("snsid", 1), ("sid", 1)], + }, + "snsid_1_sid_1": { + "v": v, + "key": [("snsid", 1), ("sid", 1)], + }, + } assert indexes == expected def test_startup_and_check_config_doc(idstorage, mongo): - col = mongo.client[TEST_DB_NAME]['config'] - assert col.count() == 1 # only one config doc + col = mongo.client[TEST_DB_NAME]["config"] + assert len(list(col.find({}))) == 1 # only one config doc cfgdoc = col.find_one() - assert cfgdoc['schema'] == 'schema' - assert cfgdoc['schemaver'] == 1 - assert cfgdoc['inupdate'] is False + assert cfgdoc["schema"] == "schema" + assert cfgdoc["schemaver"] == 1 + assert cfgdoc["inupdate"] is False # check startup works with cfg object in place idmap = IDMappingMongoStorage(mongo.client[TEST_DB_NAME]) - idmap.create_local_user(Username('foo'), HashedToken('t')) - assert 
idmap.get_user(HashedToken('t')) == (Username('foo'), False) + idmap.create_local_user(Username("foo"), HashedToken("t")) + assert idmap.get_user(HashedToken("t")) == (Username("foo"), False) def test_startup_with_2_config_docs(mongo): - col = mongo.client[TEST_DB_NAME]['config'] + col = mongo.client[TEST_DB_NAME]["config"] col.drop() # clear db independently of creating a idmapping mongo instance - col.insert_many([{'schema': 'schema', 'schemaver': 1, 'inupdate': False}, - {'schema': 'schema', 'schemaver': 2, 'inupdate': False}]) + col.insert_many( + [ + {"schema": "schema", "schemaver": 1, "inupdate": False}, + {"schema": "schema", "schemaver": 2, "inupdate": False}, + ] + ) # pattern matcher for the error format across python 2 & 3 p = re.compile( - 'Failed to create index: E11000 duplicate key error (index|collection): ' + - r'test_id_mapping.config( index: |\.\$)schema_1\s+dup key: ' + - r'\{ : "schema" \}') + r'Failed to create index: .*?' + r'E11000 duplicate key error collection: test_id_mapping.config index: schema_1 ' + r'dup key: (\{ : "schema" \}|\{ schema: "schema" \})' + ) with raises(StorageInitException) as got: IDMappingMongoStorage(mongo.client[TEST_DB_NAME]) @@ -132,30 +177,40 @@ def test_startup_with_2_config_docs(mongo): def test_startup_with_extra_corrupt_config_doc(mongo): - col = mongo.client[TEST_DB_NAME]['config'] + col = mongo.client[TEST_DB_NAME]["config"] col.drop() # clear db independently of creating a idmapping mongo instance - col.insert_many([{'schema': 'schema', 'schemaver': 1, 'inupdate': False}, - {'schema': 'schemabad', 'schemaver': 2, 'inupdate': False}]) + col.insert_many( + [ + {"schema": "schema", "schemaver": 1, "inupdate": False}, + {"schema": "schemabad", "schemaver": 2, "inupdate": False}, + ] + ) - fail_startup(mongo, 'Multiple config objects found in the database. ' + - 'This should not happen, something is very wrong.') + fail_startup( + mongo, + "Multiple config objects found in the database. 
" + + "This should not happen, something is very wrong.", + ) def test_startup_with_bad_schema_version(mongo): - col = mongo.client[TEST_DB_NAME]['config'] + col = mongo.client[TEST_DB_NAME]["config"] col.drop() # clear db independently of creating a idmapping mongo instance - col.insert_one({'schema': 'schema', 'schemaver': 4, 'inupdate': False}) + col.insert_one({"schema": "schema", "schemaver": 4, "inupdate": False}) - fail_startup(mongo, 'Incompatible database schema. Server is v1, DB is v4') + fail_startup(mongo, "Incompatible database schema. Server is v1, DB is v4") def test_startup_in_update(mongo): - col = mongo.client[TEST_DB_NAME]['config'] + col = mongo.client[TEST_DB_NAME]["config"] col.drop() # clear db independently of creating a idmapping mongo instance - col.insert_one({'schema': 'schema', 'schemaver': 1, 'inupdate': True}) + col.insert_one({"schema": "schema", "schemaver": 1, "inupdate": True}) - fail_startup(mongo, 'The database is in the middle of an update from v1 of the ' + - 'schema. Aborting startup.') + fail_startup( + mongo, + "The database is in the middle of an update from v1 of the " + + "schema. 
Aborting startup.", + ) def fail_startup(mongo, expected_msg): @@ -166,37 +221,41 @@ def fail_startup(mongo, expected_msg): def test_create_update_and_get_user(idstorage): # create - idstorage.create_local_user(Username('foo'), HashedToken('bar')) - assert idstorage.get_user(HashedToken('bar')) == (Username('foo'), False) + idstorage.create_local_user(Username("foo"), HashedToken("bar")) + assert idstorage.get_user(HashedToken("bar")) == (Username("foo"), False) # update - idstorage.update_local_user_token(Username('foo'), HashedToken('bat')) - assert idstorage.get_user(HashedToken('bat')) == (Username('foo'), False) + idstorage.update_local_user_token(Username("foo"), HashedToken("bat")) + assert idstorage.get_user(HashedToken("bat")) == (Username("foo"), False) - idstorage.update_local_user_token(Username('foo'), HashedToken('boo')) - assert idstorage.get_user(HashedToken('boo')) == (Username('foo'), False) + idstorage.update_local_user_token(Username("foo"), HashedToken("boo")) + assert idstorage.get_user(HashedToken("boo")) == (Username("foo"), False) # test different user - idstorage.create_local_user(Username('foo1'), HashedToken('baz')) - assert idstorage.get_user(HashedToken('baz')) == (Username('foo1'), False) + idstorage.create_local_user(Username("foo1"), HashedToken("baz")) + assert idstorage.get_user(HashedToken("baz")) == (Username("foo1"), False) def test_create_user_fail_input_None(idstorage): - t = HashedToken('t') - u = Username('u') - fail_create_user(idstorage, None, t, TypeError('username cannot be None')) - fail_create_user(idstorage, u, None, TypeError('token cannot be None')) + t = HashedToken("t") + u = Username("u") + fail_create_user(idstorage, None, t, TypeError("username cannot be None")) + fail_create_user(idstorage, u, None, TypeError("token cannot be None")) def test_create_user_fail_duplicate_user(idstorage): - idstorage.create_local_user(Username('u'), HashedToken('t')) - fail_create_user(idstorage, Username('u'), 
HashedToken('t1'), UserExistsError('u')) + idstorage.create_local_user(Username("u"), HashedToken("t")) + fail_create_user(idstorage, Username("u"), HashedToken("t1"), UserExistsError("u")) def test_create_user_fail_duplicate_token(idstorage): - idstorage.create_local_user(Username('u'), HashedToken('t')) - fail_create_user(idstorage, Username('u1'), HashedToken('t'), - ValueError('The provided token already exists in the database')) + idstorage.create_local_user(Username("u"), HashedToken("t")) + fail_create_user( + idstorage, + Username("u1"), + HashedToken("t"), + ValueError("The provided token already exists in the database"), + ) def fail_create_user(idstorage, user, token, expected): @@ -206,22 +265,28 @@ def fail_create_user(idstorage, user, token, expected): def test_update_user_token_fail_input_None(idstorage): - t = HashedToken('t') - u = Username('u') - fail_update_user_token(idstorage, None, t, TypeError('username cannot be None')) - fail_update_user_token(idstorage, u, None, TypeError('token cannot be None')) + t = HashedToken("t") + u = Username("u") + fail_update_user_token(idstorage, None, t, TypeError("username cannot be None")) + fail_update_user_token(idstorage, u, None, TypeError("token cannot be None")) def test_update_user_token_fail_duplicate_token(idstorage): - idstorage.create_local_user(Username('u'), HashedToken('t')) - idstorage.create_local_user(Username('u1'), HashedToken('t1')) - fail_update_user_token(idstorage, Username('u1'), HashedToken('t'), - ValueError('The provided token already exists in the database')) + idstorage.create_local_user(Username("u"), HashedToken("t")) + idstorage.create_local_user(Username("u1"), HashedToken("t1")) + fail_update_user_token( + idstorage, + Username("u1"), + HashedToken("t"), + ValueError("The provided token already exists in the database"), + ) def test_update_user_token_fail_no_such_user(idstorage): - idstorage.create_local_user(Username('u'), HashedToken('t')) - 
fail_update_user_token(idstorage, Username('u1'), HashedToken('t1'), NoSuchUserError('u1')) + idstorage.create_local_user(Username("u"), HashedToken("t")) + fail_update_user_token( + idstorage, Username("u1"), HashedToken("t1"), NoSuchUserError("u1") + ) def fail_update_user_token(idstorage, user, token, expected): @@ -231,12 +296,12 @@ def fail_update_user_token(idstorage, user, token, expected): def test_get_user_fail_input_None(idstorage): - fail_get_user(idstorage, None, TypeError('token cannot be None')) + fail_get_user(idstorage, None, TypeError("token cannot be None")) def test_get_user_fail_no_such_token(idstorage): - idstorage.create_local_user(Username('u'), HashedToken('t')) - fail_get_user(idstorage, HashedToken('t1'), InvalidTokenError()) + idstorage.create_local_user(Username("u"), HashedToken("t")) + fail_get_user(idstorage, HashedToken("t1"), InvalidTokenError()) def fail_get_user(idstorage, token, expected): @@ -246,34 +311,34 @@ def fail_get_user(idstorage, token, expected): def test_set_user_as_admin(idstorage): - idstorage.create_local_user(Username('foo'), HashedToken('bar')) + idstorage.create_local_user(Username("foo"), HashedToken("bar")) - assert idstorage.get_user(HashedToken('bar')) == (Username('foo'), False) + assert idstorage.get_user(HashedToken("bar")) == (Username("foo"), False) # set twice to check 2nd is no-op - idstorage.set_local_user_as_admin(Username('foo'), True) - idstorage.set_local_user_as_admin(Username('foo'), True) + idstorage.set_local_user_as_admin(Username("foo"), True) + idstorage.set_local_user_as_admin(Username("foo"), True) - assert idstorage.get_user(HashedToken('bar')) == (Username('foo'), True) + assert idstorage.get_user(HashedToken("bar")) == (Username("foo"), True) - idstorage.set_local_user_as_admin(Username('foo'), False) - idstorage.set_local_user_as_admin(Username('foo'), False) + idstorage.set_local_user_as_admin(Username("foo"), False) + idstorage.set_local_user_as_admin(Username("foo"), False) - 
assert idstorage.get_user(HashedToken('bar')) == (Username('foo'), False) + assert idstorage.get_user(HashedToken("bar")) == (Username("foo"), False) - idstorage.set_local_user_as_admin(Username('foo'), True) - idstorage.set_local_user_as_admin(Username('foo'), None) + idstorage.set_local_user_as_admin(Username("foo"), True) + idstorage.set_local_user_as_admin(Username("foo"), None) - assert idstorage.get_user(HashedToken('bar')) == (Username('foo'), False) + assert idstorage.get_user(HashedToken("bar")) == (Username("foo"), False) def test_set_local_user_as_admin_fail_None_input(idstorage): - fail_set_local_user_as_admin(idstorage, None, TypeError('username cannot be None')) + fail_set_local_user_as_admin(idstorage, None, TypeError("username cannot be None")) def test_set_local_user_as_admin_fail_no_such_user(idstorage): - idstorage.create_local_user(Username('bar'), HashedToken('whoo')) - fail_set_local_user_as_admin(idstorage, Username('foo'), NoSuchUserError('foo')) + idstorage.create_local_user(Username("bar"), HashedToken("whoo")) + fail_set_local_user_as_admin(idstorage, Username("foo"), NoSuchUserError("foo")) def fail_set_local_user_as_admin(idstorage, user, expected): @@ -285,62 +350,67 @@ def fail_set_local_user_as_admin(idstorage, user, expected): def test_unparseable_duplicate_key_exception(idstorage): # this is a very naughty test reaching into the implementation with raises(Exception) as got: - idstorage._get_duplicate_location(DuplicateKeyError('unmatchable dup key foo')) + idstorage._get_duplicate_location(DuplicateKeyError("unmatchable dup key foo")) assert_exception_correct( - got.value, IDMappingStorageError('unable to parse duplicate key error: unmatchable ')) + got.value, + IDMappingStorageError("unable to parse duplicate key error: unmatchable "), + ) def test_get_users(idstorage): assert idstorage.get_users() == {} - idstorage.create_local_user(Username('foo'), HashedToken('t1')) + idstorage.create_local_user(Username("foo"), 
HashedToken("t1")) - assert idstorage.get_users() == {Username('foo'): False} + assert idstorage.get_users() == {Username("foo"): False} - idstorage.create_local_user(Username('mrsentity'), HashedToken('t2')) - idstorage.create_local_user(Username('mrsenigma'), HashedToken('t3')) - idstorage.update_local_user_token(Username('mrsenigma'), HashedToken('t4')) + idstorage.create_local_user(Username("mrsentity"), HashedToken("t2")) + idstorage.create_local_user(Username("mrsenigma"), HashedToken("t3")) + idstorage.update_local_user_token(Username("mrsenigma"), HashedToken("t4")) - idstorage.set_local_user_as_admin(Username('foo'), True) - idstorage.set_local_user_as_admin(Username('mrsenigma'), True) + idstorage.set_local_user_as_admin(Username("foo"), True) + idstorage.set_local_user_as_admin(Username("mrsenigma"), True) - assert idstorage.get_users() == {Username('foo'): True, Username('mrsenigma'): True, - Username('mrsentity'): False} + assert idstorage.get_users() == { + Username("foo"): True, + Username("mrsenigma"): True, + Username("mrsentity"): False, + } def test_user_exists(idstorage): - idstorage.create_local_user(Username('foo'), HashedToken('t1')) + idstorage.create_local_user(Username("foo"), HashedToken("t1")) - assert idstorage.user_exists(Username('foo')) is True - assert idstorage.user_exists(Username('bar')) is False + assert idstorage.user_exists(Username("foo")) is True + assert idstorage.user_exists(Username("bar")) is False def test_user_exists_fail(idstorage): with raises(Exception) as got: idstorage.user_exists(None) - assert_exception_correct(got.value, TypeError('username cannot be None')) + assert_exception_correct(got.value, TypeError("username cannot be None")) def test_create_and_get_namespace(idstorage): - idstorage.create_namespace(NamespaceID('foo')) - expected = Namespace(NamespaceID('foo'), False, None) + idstorage.create_namespace(NamespaceID("foo")) + expected = Namespace(NamespaceID("foo"), False, None) - assert 
idstorage.get_namespace(NamespaceID('foo')) == expected + assert idstorage.get_namespace(NamespaceID("foo")) == expected - idstorage.create_namespace(NamespaceID('bar')) - expected = Namespace(NamespaceID('bar'), False, None) + idstorage.create_namespace(NamespaceID("bar")) + expected = Namespace(NamespaceID("bar"), False, None) - assert idstorage.get_namespace(NamespaceID('bar')) == expected + assert idstorage.get_namespace(NamespaceID("bar")) == expected def test_create_namespace_fail_input_None(idstorage): - fail_create_namespace(idstorage, None, TypeError('namespace_id cannot be None')) + fail_create_namespace(idstorage, None, TypeError("namespace_id cannot be None")) def test_create_namespace_fail_namespace_exists(idstorage): - idstorage.create_namespace(NamespaceID('foo')) + idstorage.create_namespace(NamespaceID("foo")) - fail_create_namespace(idstorage, NamespaceID('foo'), NamespaceExistsError('foo')) + fail_create_namespace(idstorage, NamespaceID("foo"), NamespaceExistsError("foo")) def fail_create_namespace(idstorage, namespace_id, expected): @@ -350,12 +420,12 @@ def fail_create_namespace(idstorage, namespace_id, expected): def test_get_namespace_fail_input_None(idstorage): - fail_get_namespace(idstorage, None, TypeError('namespace_id cannot be None')) + fail_get_namespace(idstorage, None, TypeError("namespace_id cannot be None")) def test_get_namespace_fail_no_such_namespace(idstorage): - idstorage.create_namespace(NamespaceID('foo')) - fail_get_namespace(idstorage, NamespaceID('bar'), NoSuchNamespaceError('bar')) + idstorage.create_namespace(NamespaceID("foo")) + fail_get_namespace(idstorage, NamespaceID("bar"), NoSuchNamespaceError("bar")) def fail_get_namespace(idstorage, namespace_id, expected): @@ -365,69 +435,99 @@ def fail_get_namespace(idstorage, namespace_id, expected): def test_add_and_remove_namespace_users(idstorage): - nsid = NamespaceID('foo') + nsid = NamespaceID("foo") idstorage.create_namespace(nsid) - assert 
idstorage.get_namespace(NamespaceID('foo')) == Namespace(NamespaceID('foo'), False) + assert idstorage.get_namespace(NamespaceID("foo")) == Namespace( + NamespaceID("foo"), False + ) - idstorage.add_user_to_namespace(nsid, User(AuthsourceID('asone'), Username('u1'))) - users = set([User(AuthsourceID('asone'), Username('u1'))]) - assert idstorage.get_namespace(nsid) == Namespace(NamespaceID('foo'), False, users) + idstorage.add_user_to_namespace(nsid, User(AuthsourceID("asone"), Username("u1"))) + users = set([User(AuthsourceID("asone"), Username("u1"))]) + assert idstorage.get_namespace(nsid) == Namespace(NamespaceID("foo"), False, users) - idstorage.add_user_to_namespace(nsid, User(AuthsourceID('astwo'), Username('u2'))) - users.add(User(AuthsourceID('astwo'), Username('u2'))) - assert idstorage.get_namespace(nsid) == Namespace(NamespaceID('foo'), False, users) + idstorage.add_user_to_namespace(nsid, User(AuthsourceID("astwo"), Username("u2"))) + users.add(User(AuthsourceID("astwo"), Username("u2"))) + assert idstorage.get_namespace(nsid) == Namespace(NamespaceID("foo"), False, users) - idstorage.remove_user_from_namespace(NamespaceID('foo'), - User(AuthsourceID('asone'), Username('u1'))) - users = set([User(AuthsourceID('astwo'), Username('u2'))]) - assert idstorage.get_namespace(nsid) == Namespace(NamespaceID('foo'), False, users) + idstorage.remove_user_from_namespace( + NamespaceID("foo"), User(AuthsourceID("asone"), Username("u1")) + ) + users = set([User(AuthsourceID("astwo"), Username("u2"))]) + assert idstorage.get_namespace(nsid) == Namespace(NamespaceID("foo"), False, users) - idstorage.remove_user_from_namespace(NamespaceID('foo'), - User(AuthsourceID('astwo'), Username('u2'))) - assert idstorage.get_namespace(nsid) == Namespace(NamespaceID('foo'), False) + idstorage.remove_user_from_namespace( + NamespaceID("foo"), User(AuthsourceID("astwo"), Username("u2")) + ) + assert idstorage.get_namespace(nsid) == Namespace(NamespaceID("foo"), False) def 
test_add_user_to_namespace_fail_inputs_None(idstorage): - u = User(AuthsourceID('as'), 'u') - n = NamespaceID('n') - fail_add_namespace_user(idstorage, None, u, TypeError('namespace_id cannot be None')) - fail_add_namespace_user(idstorage, n, None, TypeError('admin_user cannot be None')) + u = User(AuthsourceID("as"), "u") + n = NamespaceID("n") + fail_add_namespace_user( + idstorage, None, u, TypeError("namespace_id cannot be None") + ) + fail_add_namespace_user(idstorage, n, None, TypeError("admin_user cannot be None")) def test_remove_user_from_namespace_fail_inputs_None(idstorage): - u = User(AuthsourceID('as'), 'u') - n = NamespaceID('n') - fail_remove_namespace_user(idstorage, None, u, TypeError('namespace_id cannot be None')) - fail_remove_namespace_user(idstorage, n, None, TypeError('admin_user cannot be None')) + u = User(AuthsourceID("as"), "u") + n = NamespaceID("n") + fail_remove_namespace_user( + idstorage, None, u, TypeError("namespace_id cannot be None") + ) + fail_remove_namespace_user( + idstorage, n, None, TypeError("admin_user cannot be None") + ) def test_add_user_to_namespace_fail_no_such_namespace(idstorage): - idstorage.create_namespace(NamespaceID('foo')) - fail_add_namespace_user(idstorage, NamespaceID('bar'), User(AuthsourceID('as'), Username('u')), - NoSuchNamespaceError('bar')) + idstorage.create_namespace(NamespaceID("foo")) + fail_add_namespace_user( + idstorage, + NamespaceID("bar"), + User(AuthsourceID("as"), Username("u")), + NoSuchNamespaceError("bar"), + ) def test_remove_user_from_namespace_fail_no_such_namespace(idstorage): - idstorage.create_namespace(NamespaceID('foo')) - idstorage.add_user_to_namespace(NamespaceID('foo'), User(AuthsourceID('as'), Username('u'))) + idstorage.create_namespace(NamespaceID("foo")) + idstorage.add_user_to_namespace( + NamespaceID("foo"), User(AuthsourceID("as"), Username("u")) + ) fail_remove_namespace_user( - idstorage, NamespaceID('bar'), User(AuthsourceID('as'), Username('u')), - 
NoSuchNamespaceError('bar')) + idstorage, + NamespaceID("bar"), + User(AuthsourceID("as"), Username("u")), + NoSuchNamespaceError("bar"), + ) def test_add_user_to_namespace_fail_duplicate(idstorage): - idstorage.create_namespace(NamespaceID('foo')) - idstorage.add_user_to_namespace(NamespaceID('foo'), User(AuthsourceID('as'), Username('u'))) - fail_add_namespace_user(idstorage, NamespaceID('foo'), User(AuthsourceID('as'), Username('u')), - UserExistsError('User as/u already administrates namespace foo')) + idstorage.create_namespace(NamespaceID("foo")) + idstorage.add_user_to_namespace( + NamespaceID("foo"), User(AuthsourceID("as"), Username("u")) + ) + fail_add_namespace_user( + idstorage, + NamespaceID("foo"), + User(AuthsourceID("as"), Username("u")), + UserExistsError("User as/u already administrates namespace foo"), + ) def test_remove_user_from_namespace_fail_no_such_user(idstorage): - idstorage.create_namespace(NamespaceID('foo')) - idstorage.add_user_to_namespace(NamespaceID('foo'), User(AuthsourceID('as'), Username('u'))) + idstorage.create_namespace(NamespaceID("foo")) + idstorage.add_user_to_namespace( + NamespaceID("foo"), User(AuthsourceID("as"), Username("u")) + ) fail_remove_namespace_user( - idstorage, NamespaceID('foo'), User(AuthsourceID('as'), Username('u1')), - NoSuchUserError('User as/u1 does not administrate namespace foo')) + idstorage, + NamespaceID("foo"), + User(AuthsourceID("as"), Username("u1")), + NoSuchUserError("User as/u1 does not administrate namespace foo"), + ) def fail_add_namespace_user(idstorage, namespace_id, user, expected): @@ -443,30 +543,43 @@ def fail_remove_namespace_user(idstorage, namespace_id, user, expected): def test_set_namespace_publicly_mappable(idstorage): - idstorage.create_namespace(NamespaceID('foo')) - assert idstorage.get_namespace(NamespaceID('foo')) == Namespace(NamespaceID('foo'), False) + idstorage.create_namespace(NamespaceID("foo")) + assert idstorage.get_namespace(NamespaceID("foo")) == Namespace( + 
NamespaceID("foo"), False + ) - idstorage.set_namespace_publicly_mappable(NamespaceID('foo'), True) - assert idstorage.get_namespace(NamespaceID('foo')) == Namespace(NamespaceID('foo'), True) + idstorage.set_namespace_publicly_mappable(NamespaceID("foo"), True) + assert idstorage.get_namespace(NamespaceID("foo")) == Namespace( + NamespaceID("foo"), True + ) - idstorage.set_namespace_publicly_mappable(NamespaceID('foo'), False) - assert idstorage.get_namespace(NamespaceID('foo')) == Namespace(NamespaceID('foo'), False) + idstorage.set_namespace_publicly_mappable(NamespaceID("foo"), False) + assert idstorage.get_namespace(NamespaceID("foo")) == Namespace( + NamespaceID("foo"), False + ) - idstorage.set_namespace_publicly_mappable(NamespaceID('foo'), True) - assert idstorage.get_namespace(NamespaceID('foo')) == Namespace(NamespaceID('foo'), True) + idstorage.set_namespace_publicly_mappable(NamespaceID("foo"), True) + assert idstorage.get_namespace(NamespaceID("foo")) == Namespace( + NamespaceID("foo"), True + ) - idstorage.set_namespace_publicly_mappable(NamespaceID('foo'), None) - assert idstorage.get_namespace(NamespaceID('foo')) == Namespace(NamespaceID('foo'), False) + idstorage.set_namespace_publicly_mappable(NamespaceID("foo"), None) + assert idstorage.get_namespace(NamespaceID("foo")) == Namespace( + NamespaceID("foo"), False + ) def test_set_namespace_publicly_mappable_input_None(idstorage): - fail_set_namespace_publicly_mappable(idstorage, None, TypeError('namespace_id cannot be None')) + fail_set_namespace_publicly_mappable( + idstorage, None, TypeError("namespace_id cannot be None") + ) def test_set_namespace_publicly_mappable_no_such_namespace(idstorage): - idstorage.create_namespace(NamespaceID('foo')) - fail_set_namespace_publicly_mappable(idstorage, NamespaceID('bar'), - NoSuchNamespaceError('bar')) + idstorage.create_namespace(NamespaceID("foo")) + fail_set_namespace_publicly_mappable( + idstorage, NamespaceID("bar"), NoSuchNamespaceError("bar") + ) 
def fail_set_namespace_publicly_mappable(idstorage, namespace_id, expected): @@ -476,23 +589,38 @@ def fail_set_namespace_publicly_mappable(idstorage, namespace_id, expected): def set_up_data_for_get_namespaces(idstorage): - idstorage.create_namespace(NamespaceID('ns1')) - idstorage.set_namespace_publicly_mappable(NamespaceID('ns1'), True) - idstorage.add_user_to_namespace(NamespaceID('ns1'), User(AuthsourceID('as'), Username('u'))) - - idstorage.create_namespace(NamespaceID('ns2')) - - idstorage.create_namespace(NamespaceID('ns3')) - idstorage.add_user_to_namespace(NamespaceID('ns3'), User(AuthsourceID('as'), Username('u'))) - idstorage.add_user_to_namespace(NamespaceID('ns3'), - User(AuthsourceID('astwo'), Username('u3'))) - - expected = [Namespace(NamespaceID('ns1'), True, - set([User(AuthsourceID('as'), Username('u'))])), - Namespace(NamespaceID('ns2'), False), - Namespace(NamespaceID('ns3'), False, - set([User(AuthsourceID('as'), Username('u')), - User(AuthsourceID('astwo'), Username('u3'))]))] + idstorage.create_namespace(NamespaceID("ns1")) + idstorage.set_namespace_publicly_mappable(NamespaceID("ns1"), True) + idstorage.add_user_to_namespace( + NamespaceID("ns1"), User(AuthsourceID("as"), Username("u")) + ) + + idstorage.create_namespace(NamespaceID("ns2")) + + idstorage.create_namespace(NamespaceID("ns3")) + idstorage.add_user_to_namespace( + NamespaceID("ns3"), User(AuthsourceID("as"), Username("u")) + ) + idstorage.add_user_to_namespace( + NamespaceID("ns3"), User(AuthsourceID("astwo"), Username("u3")) + ) + + expected = [ + Namespace( + NamespaceID("ns1"), True, set([User(AuthsourceID("as"), Username("u"))]) + ), + Namespace(NamespaceID("ns2"), False), + Namespace( + NamespaceID("ns3"), + False, + set( + [ + User(AuthsourceID("as"), Username("u")), + User(AuthsourceID("astwo"), Username("u3")), + ] + ), + ), + ] return expected @@ -511,24 +639,36 @@ def test_get_namespaces_with_nids(idstorage): expected = set_up_data_for_get_namespaces(idstorage) - 
assert idstorage.get_namespaces([NamespaceID('ns1')]) == set([expected[0]]) - assert idstorage.get_namespaces(nids=set([NamespaceID('ns1')])) == set([expected[0]]) + assert idstorage.get_namespaces([NamespaceID("ns1")]) == set([expected[0]]) + assert idstorage.get_namespaces(nids=set([NamespaceID("ns1")])) == set( + [expected[0]] + ) - nids = {NamespaceID('ns2'), NamespaceID('ns3')} + nids = {NamespaceID("ns2"), NamespaceID("ns3")} assert idstorage.get_namespaces(nids) == set([expected[1], expected[2]]) assert idstorage.get_namespaces(nids=nids) == set([expected[1], expected[2]]) def test_get_namespaces_fail_None_input(idstorage): - fail_get_namespaces(idstorage, {NamespaceID('foo'), None}, TypeError('None item in nids')) + fail_get_namespaces( + idstorage, {NamespaceID("foo"), None}, TypeError("None item in nids") + ) def test_get_namespaces_fail_no_such_namepsace(idstorage): - idstorage.create_namespace(NamespaceID('foo')) - fail_get_namespaces(idstorage, {NamespaceID('zoo'), NamespaceID('foo'), NamespaceID('baz'), - NamespaceID('aioli_compote_drizzled_on_artisian_tater_tots')}, - NoSuchNamespaceError( - "['aioli_compote_drizzled_on_artisian_tater_tots', 'baz', 'zoo']")) + idstorage.create_namespace(NamespaceID("foo")) + fail_get_namespaces( + idstorage, + { + NamespaceID("zoo"), + NamespaceID("foo"), + NamespaceID("baz"), + NamespaceID("aioli_compote_drizzled_on_artisian_tater_tots"), + }, + NoSuchNamespaceError( + "['aioli_compote_drizzled_on_artisian_tater_tots', 'baz', 'zoo']" + ), + ) def fail_get_namespaces(idstorage, nids, expected): @@ -538,86 +678,161 @@ def fail_get_namespaces(idstorage, nids, expected): def test_add_and_get_mapping(idstorage): - idstorage.add_mapping(ObjectID(NamespaceID('foo'), 'bar'), ObjectID(NamespaceID('baz'), 'bat')) + idstorage.add_mapping( + ObjectID(NamespaceID("foo"), "bar"), ObjectID(NamespaceID("baz"), "bat") + ) # add twice to check for no errors or duplictions - idstorage.add_mapping(ObjectID(NamespaceID('foo'), 'bar'), 
ObjectID(NamespaceID('baz'), 'bat')) + idstorage.add_mapping( + ObjectID(NamespaceID("foo"), "bar"), ObjectID(NamespaceID("baz"), "bat") + ) - assert idstorage.find_mappings(ObjectID(NamespaceID('foo'), 'bar')) == \ - (set([ObjectID(NamespaceID('baz'), 'bat')]), set()) + assert idstorage.find_mappings(ObjectID(NamespaceID("foo"), "bar")) == ( + set([ObjectID(NamespaceID("baz"), "bat")]), + set(), + ) - assert idstorage.find_mappings(ObjectID(NamespaceID('baz'), 'bat')) == \ - (set(), set([ObjectID(NamespaceID('foo'), 'bar')])) + assert idstorage.find_mappings(ObjectID(NamespaceID("baz"), "bat")) == ( + set(), + set([ObjectID(NamespaceID("foo"), "bar")]), + ) def test_remove_mapping(idstorage): - idstorage.add_mapping(ObjectID(NamespaceID('foo'), 'bar'), ObjectID(NamespaceID('baz'), 'bat')) - idstorage.add_mapping(ObjectID(NamespaceID('baz'), 'bar'), ObjectID(NamespaceID('bar'), 'bat')) + idstorage.add_mapping( + ObjectID(NamespaceID("foo"), "bar"), ObjectID(NamespaceID("baz"), "bat") + ) + idstorage.add_mapping( + ObjectID(NamespaceID("baz"), "bar"), ObjectID(NamespaceID("bar"), "bat") + ) # try removing mappings that don't exist - assert idstorage.remove_mapping( - ObjectID(NamespaceID('bar'), 'bar'), ObjectID(NamespaceID('baz'), 'bat')) is False - assert idstorage.remove_mapping( - ObjectID(NamespaceID('foo'), 'baz'), ObjectID(NamespaceID('baz'), 'bat')) is False - assert idstorage.remove_mapping( - ObjectID(NamespaceID('foo'), 'bar'), ObjectID(NamespaceID('bat'), 'bat')) is False - assert idstorage.remove_mapping( - ObjectID(NamespaceID('foo'), 'bar'), ObjectID(NamespaceID('baz'), 'bag')) is False - - assert idstorage.find_mappings(ObjectID(NamespaceID('foo'), 'bar')) == \ - (set([ObjectID(NamespaceID('baz'), 'bat')]), set()) - assert idstorage.find_mappings(ObjectID(NamespaceID('baz'), 'bar')) == \ - (set([ObjectID(NamespaceID('bar'), 'bat')]), set()) + assert ( + idstorage.remove_mapping( + ObjectID(NamespaceID("bar"), "bar"), ObjectID(NamespaceID("baz"), 
"bat") + ) + is False + ) + assert ( + idstorage.remove_mapping( + ObjectID(NamespaceID("foo"), "baz"), ObjectID(NamespaceID("baz"), "bat") + ) + is False + ) + assert ( + idstorage.remove_mapping( + ObjectID(NamespaceID("foo"), "bar"), ObjectID(NamespaceID("bat"), "bat") + ) + is False + ) + assert ( + idstorage.remove_mapping( + ObjectID(NamespaceID("foo"), "bar"), ObjectID(NamespaceID("baz"), "bag") + ) + is False + ) + + assert idstorage.find_mappings(ObjectID(NamespaceID("foo"), "bar")) == ( + set([ObjectID(NamespaceID("baz"), "bat")]), + set(), + ) + assert idstorage.find_mappings(ObjectID(NamespaceID("baz"), "bar")) == ( + set([ObjectID(NamespaceID("bar"), "bat")]), + set(), + ) # remove a mapping that does exist - assert idstorage.remove_mapping(ObjectID(NamespaceID('foo'), 'bar'), - ObjectID(NamespaceID('baz'), 'bat')) is True - - assert idstorage.find_mappings(ObjectID(NamespaceID('foo'), 'bar')) == (set(), set()) - assert idstorage.find_mappings(ObjectID(NamespaceID('baz'), 'bar')) == \ - (set([ObjectID(NamespaceID('bar'), 'bat')]), set()) + assert ( + idstorage.remove_mapping( + ObjectID(NamespaceID("foo"), "bar"), ObjectID(NamespaceID("baz"), "bat") + ) + is True + ) + + assert idstorage.find_mappings(ObjectID(NamespaceID("foo"), "bar")) == ( + set(), + set(), + ) + assert idstorage.find_mappings(ObjectID(NamespaceID("baz"), "bar")) == ( + set([ObjectID(NamespaceID("bar"), "bat")]), + set(), + ) # try removing the same mapping - assert idstorage.remove_mapping(ObjectID(NamespaceID('foo'), 'bar'), - ObjectID(NamespaceID('baz'), 'bat')) is False + assert ( + idstorage.remove_mapping( + ObjectID(NamespaceID("foo"), "bar"), ObjectID(NamespaceID("baz"), "bat") + ) + is False + ) def test_find_no_mappings(idstorage): - idstorage.add_mapping(ObjectID(NamespaceID('foo'), 'bar'), ObjectID(NamespaceID('baz'), 'bat')) - idstorage.add_mapping(ObjectID(NamespaceID('baz'), 'bar'), ObjectID(NamespaceID('bar'), 'bat')) - - assert 
idstorage.find_mappings(ObjectID(NamespaceID('bat'), 'bar')) == (set(), set()) - assert idstorage.find_mappings(ObjectID(NamespaceID('baz'), 'bag')) == (set(), set()) + idstorage.add_mapping( + ObjectID(NamespaceID("foo"), "bar"), ObjectID(NamespaceID("baz"), "bat") + ) + idstorage.add_mapping( + ObjectID(NamespaceID("baz"), "bar"), ObjectID(NamespaceID("bar"), "bat") + ) + + assert idstorage.find_mappings(ObjectID(NamespaceID("bat"), "bar")) == ( + set(), + set(), + ) + assert idstorage.find_mappings(ObjectID(NamespaceID("baz"), "bag")) == ( + set(), + set(), + ) def test_find_multiple_mappings(idstorage): - idstorage.add_mapping(ObjectID(NamespaceID('foo'), 'bar'), ObjectID(NamespaceID('baz'), 'bat')) - idstorage.add_mapping(ObjectID(NamespaceID('foo'), 'bar'), ObjectID(NamespaceID('bar'), 'bag')) - - idstorage.add_mapping(ObjectID(NamespaceID('bag'), 'arg'), ObjectID(NamespaceID('foo'), 'bar')) - idstorage.add_mapping(ObjectID(NamespaceID('bla'), 'urg'), ObjectID(NamespaceID('foo'), 'bar')) - - assert idstorage.find_mappings(ObjectID(NamespaceID('foo'), 'bar'), None) == \ - (set([ObjectID(NamespaceID('baz'), 'bat'), ObjectID(NamespaceID('bar'), 'bag')]), - set([ObjectID(NamespaceID('bag'), 'arg'), ObjectID(NamespaceID('bla'), 'urg')])) + idstorage.add_mapping( + ObjectID(NamespaceID("foo"), "bar"), ObjectID(NamespaceID("baz"), "bat") + ) + idstorage.add_mapping( + ObjectID(NamespaceID("foo"), "bar"), ObjectID(NamespaceID("bar"), "bag") + ) + + idstorage.add_mapping( + ObjectID(NamespaceID("bag"), "arg"), ObjectID(NamespaceID("foo"), "bar") + ) + idstorage.add_mapping( + ObjectID(NamespaceID("bla"), "urg"), ObjectID(NamespaceID("foo"), "bar") + ) + + assert idstorage.find_mappings(ObjectID(NamespaceID("foo"), "bar"), None) == ( + set([ObjectID(NamespaceID("baz"), "bat"), ObjectID(NamespaceID("bar"), "bag")]), + set([ObjectID(NamespaceID("bag"), "arg"), ObjectID(NamespaceID("bla"), "urg")]), + ) def test_filter_mappings(idstorage): - 
idstorage.add_mapping(ObjectID(NamespaceID('foo'), 'bar'), ObjectID(NamespaceID('baz'), 'bat')) - idstorage.add_mapping(ObjectID(NamespaceID('foo'), 'bar'), ObjectID(NamespaceID('bar'), 'bag')) - - idstorage.add_mapping(ObjectID(NamespaceID('bag'), 'arg'), ObjectID(NamespaceID('foo'), 'bar')) - idstorage.add_mapping(ObjectID(NamespaceID('bla'), 'urg'), ObjectID(NamespaceID('foo'), 'bar')) + idstorage.add_mapping( + ObjectID(NamespaceID("foo"), "bar"), ObjectID(NamespaceID("baz"), "bat") + ) + idstorage.add_mapping( + ObjectID(NamespaceID("foo"), "bar"), ObjectID(NamespaceID("bar"), "bag") + ) + + idstorage.add_mapping( + ObjectID(NamespaceID("bag"), "arg"), ObjectID(NamespaceID("foo"), "bar") + ) + idstorage.add_mapping( + ObjectID(NamespaceID("bla"), "urg"), ObjectID(NamespaceID("foo"), "bar") + ) assert idstorage.find_mappings( - ObjectID(NamespaceID('foo'), 'bar'), - ns_filter=set([NamespaceID('baz'), NamespaceID('bag')])) == \ - (set([ObjectID(NamespaceID('baz'), 'bat')]), set([ObjectID(NamespaceID('bag'), 'arg')])) + ObjectID(NamespaceID("foo"), "bar"), + ns_filter=set([NamespaceID("baz"), NamespaceID("bag")]), + ) == ( + set([ObjectID(NamespaceID("baz"), "bat")]), + set([ObjectID(NamespaceID("bag"), "arg")]), + ) def test_add_mapping_fail_input_None(idstorage): - oid = ObjectID(NamespaceID('foo'), 'bar') - fail_add_mapping(idstorage, None, oid, TypeError('primary_OID cannot be None')) - fail_add_mapping(idstorage, oid, None, TypeError('secondary_OID cannot be None')) + oid = ObjectID(NamespaceID("foo"), "bar") + fail_add_mapping(idstorage, None, oid, TypeError("primary_OID cannot be None")) + fail_add_mapping(idstorage, oid, None, TypeError("secondary_OID cannot be None")) def fail_add_mapping(idstorage, pOID, sOID, expected): @@ -627,9 +842,9 @@ def fail_add_mapping(idstorage, pOID, sOID, expected): def test_remove_mapping_fail_input_None(idstorage): - oid = ObjectID(NamespaceID('foo'), 'bar') - fail_remove_mapping(idstorage, None, oid, 
TypeError('primary_OID cannot be None')) - fail_remove_mapping(idstorage, oid, None, TypeError('secondary_OID cannot be None')) + oid = ObjectID(NamespaceID("foo"), "bar") + fail_remove_mapping(idstorage, None, oid, TypeError("primary_OID cannot be None")) + fail_remove_mapping(idstorage, oid, None, TypeError("secondary_OID cannot be None")) def fail_remove_mapping(idstorage, pOID, sOID, expected): @@ -639,11 +854,15 @@ def fail_remove_mapping(idstorage, pOID, sOID, expected): def test_find_mappings_fail_input_None(idstorage): - oid = ObjectID(NamespaceID('foo'), 'bar') - f = set([NamespaceID('foo')]) - fail_find_mappings(idstorage, None, f, TypeError('oid cannot be None')) - fail_find_mappings(idstorage, oid, set([NamespaceID('foo'), None]), - TypeError('None item in ns_filter')) + oid = ObjectID(NamespaceID("foo"), "bar") + f = set([NamespaceID("foo")]) + fail_find_mappings(idstorage, None, f, TypeError("oid cannot be None")) + fail_find_mappings( + idstorage, + oid, + set([NamespaceID("foo"), None]), + TypeError("None item in ns_filter"), + ) def fail_find_mappings(idstorage, oid, ns_filter, expected): diff --git a/src/jgikbase/test/idmapping/test_utils.py b/src/jgikbase/test/idmapping/test_utils.py index 2ff2171..c13dcdb 100644 --- a/src/jgikbase/test/idmapping/test_utils.py +++ b/src/jgikbase/test/idmapping/test_utils.py @@ -9,14 +9,14 @@ import time import re -MONGO_EXE = 'test.mongo.exe' -TEST_TEMP_DIR = 'test.temp.dir' -MONGO_USE_WIRED_TIGER = 'test.mongo.wired_tiger' -KEEP_TEMP_DIR = 'test.temp.dir.keep' +MONGO_EXE = "test.mongo.exe" +TEST_TEMP_DIR = "test.temp.dir" +MONGO_USE_WIRED_TIGER = "test.mongo.wired_tiger" +KEEP_TEMP_DIR = "test.temp.dir.keep" -TEST_CONFIG_FILE_SECTION = 'idmappingservicetest' +TEST_CONFIG_FILE_SECTION = "idmappingservicetest" -TEST_FILE_LOC_ENV_KEY = 'IDMAP_TEST_FILE' +TEST_FILE_LOC_ENV_KEY = "IDMAP_TEST_FILE" _CONFIG = None @@ -30,17 +30,19 @@ def get_temp_dir() -> Path: def get_use_wired_tiger() -> bool: - return 
_get_test_property(MONGO_USE_WIRED_TIGER) == 'true' + return _get_test_property(MONGO_USE_WIRED_TIGER) == "true" def get_delete_temp_files() -> bool: - return _get_test_property(KEEP_TEMP_DIR) != 'true' + return _get_test_property(KEEP_TEMP_DIR) != "true" def _get_test_config_file_path() -> Path: p = os.environ.get(TEST_FILE_LOC_ENV_KEY) if not p: - raise TestException("Can't find key {} in environment".format(TEST_FILE_LOC_ENV_KEY)) + raise TestException( + "Can't find key {} in environment".format(TEST_FILE_LOC_ENV_KEY) + ) return Path(p) @@ -51,25 +53,36 @@ def _get_test_property(prop: str) -> str: config = configparser.ConfigParser() config.read(test_cfg) if TEST_CONFIG_FILE_SECTION not in config: - raise TestException('No section {} found in test config file {}' - .format(TEST_CONFIG_FILE_SECTION, test_cfg)) + raise TestException( + "No section {} found in test config file {}".format( + TEST_CONFIG_FILE_SECTION, test_cfg + ) + ) sec = config[TEST_CONFIG_FILE_SECTION] # a section is not a real map and is missing methods _CONFIG = {x: sec[x] for x in sec.keys()} if prop not in _CONFIG: - raise TestException('Property {} in section {} of test file {} is missing' - .format(prop, TEST_CONFIG_FILE_SECTION, test_cfg)) + raise TestException( + "Property {} in section {} of test file {} is missing".format( + prop, TEST_CONFIG_FILE_SECTION, test_cfg + ) + ) return _CONFIG[prop] def find_free_port() -> int: with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s: - s.bind(('', 0)) + s.bind(("", 0)) return s.getsockname()[1] +def remove_ns_from_index_info(index_info: dict) -> None: + for index_detail in index_info.values(): + index_detail.pop("ns", None) + + def assert_exception_correct(got: Exception, expected: Exception): - assert type(got) == type(expected) + assert type(got) is type(expected) assert got.args == expected.args @@ -79,14 +92,14 @@ def assert_ms_epoch_close_to_now(time_): assert now_ms - 1000 < time_ -CALLID_PATTERN = re.compile('^\d{16}$') 
+CALLID_PATTERN = re.compile(r"^\d{16}$") def assert_json_error_correct(got, expected): - time_ = got['error']['time'] - callid = got['error']['callid'] - del got['error']['time'] - del got['error']['callid'] + time_ = got["error"]["time"] + callid = got["error"]["callid"] + del got["error"]["time"] + del got["error"]["callid"] assert got == expected assert CALLID_PATTERN.match(callid) is not None @@ -103,7 +116,7 @@ def __init__(self): def format(self, record): self.logs.append(record) - return 'no logs here, no sir' + return "no logs here, no sir" class TestException(Exception): diff --git a/test.cfg.example b/test.cfg.example index eff7a1d..d3b6469 100644 --- a/test.cfg.example +++ b/test.cfg.example @@ -9,7 +9,7 @@ test.mongo.exe=/path/to/mongodbexecutable test.mongo.wired_tiger=false # path to a temporary directory to use for tests. -test.temp.dir=/path/to/temp/dir/make/sure/this/is/safe +test.temp.dir=temp_test_dir # true to keep the temporary directory contents after running tests. test.temp.dir.keep=false