diff --git a/.github/workflows/clear-artifacts.yml b/.github/workflows/clear-artifacts.yml
new file mode 100644
index 00000000000..c78316f3ebe
--- /dev/null
+++ b/.github/workflows/clear-artifacts.yml
@@ -0,0 +1,17 @@
+name: 'Delete old artifacts'
+on:
+ schedule:
+ - cron: '0 * * * *' # Every hour
+ workflow_dispatch:
+ inputs:
+ message:
+ description: 'Message for manually triggering'
+ required: false
+ default: 'Triggered for Updates'
+ type: string
+jobs:
+ delete-artifacts:
+ uses: mosip/kattu/.github/workflows/clear-artifacts.yml@master # shared reusable cleanup workflow from mosip/kattu
+ secrets:
+ ACCESS_TOKEN: ${{ secrets.access_token }} # token forwarded to the reusable workflow to delete artifacts — NOTE(review): confirm required scope in kattu
+ SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }} # Slack webhook for notifications — NOTE(review): presumably failure alerts; confirm in kattu workflow
diff --git a/.github/workflows/clear_artifacts.yml b/.github/workflows/clear_artifacts.yml
deleted file mode 100644
index 75b8aa96454..00000000000
--- a/.github/workflows/clear_artifacts.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-name: 'Delete old artifacts'
-on:
- schedule:
- - cron: '0 * * * *' # every hour
-
-jobs:
- delete-artifacts:
- runs-on: ubuntu-latest
- steps:
- - uses: kolpav/purge-artifacts-action@v1
- with:
- token: ${{ secrets. access_token }}
- expire-in: 2days # Setting this to 0 will delete all artifacts
diff --git a/.github/workflows/db-test.yml b/.github/workflows/db-test.yml
new file mode 100644
index 00000000000..791f99684d0
--- /dev/null
+++ b/.github/workflows/db-test.yml
@@ -0,0 +1,37 @@
+name: PostgreSQL Test
+
+on:
+ release:
+ types: [published]
+ pull_request:
+ types: [opened, reopened, synchronize]
+ workflow_dispatch:
+ inputs:
+ message:
+ description: 'Message for manually triggering'
+ required: false
+ default: 'Triggered for Updates'
+ type: string
+ push:
+ branches:
+ - release*
+ - master
+ - 1.*
+ - develop*
+ - MOSIP*
+ - '!release-branch' # negative pattern must follow the positive patterns, otherwise release* re-includes the ref
+ paths:
+ - 'db_scripts/**'
+
+jobs:
+ build-db-test:
+ strategy:
+ matrix:
+ include:
+ - DB_LOCATION: 'db_scripts/mosip_kernel'
+ DB_NAME: 'mosip_kernel'
+ fail-fast: false
+ name: ${{ matrix.DB_NAME }}
+ uses: mosip/kattu/.github/workflows/db-test.yml@master
+ with:
+ DB_LOCATION: ${{ matrix.DB_LOCATION }}
diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml
new file mode 100644
index 00000000000..57c52dd4744
--- /dev/null
+++ b/.github/workflows/push-trigger.yml
@@ -0,0 +1,97 @@
+name: Maven Package upon a push
+
+on:
+ release:
+ types: [published]
+ pull_request:
+ types: [opened, reopened, synchronize]
+ workflow_dispatch:
+ inputs:
+ message:
+ description: 'Message for manually triggering'
+ required: false
+ default: 'Triggered for Updates'
+ type: string
+ push:
+ branches:
+ - release*
+ - master
+ - 1.*
+ - develop
+ - MOSIP*
+ - '!release-branch' # negative pattern must follow the positive patterns, otherwise release* re-includes the ref
+
+jobs:
+ build-commons:
+ uses: mosip/kattu/.github/workflows/maven-build.yml@master
+ with:
+ SERVICE_LOCATION: ./
+ BUILD_ARTIFACT: commons
+ secrets:
+ OSSRH_USER: ${{ secrets.OSSRH_USER }}
+ OSSRH_SECRET: ${{ secrets.OSSRH_SECRET }}
+ OSSRH_TOKEN: ${{ secrets.OSSRH_TOKEN }}
+ GPG_SECRET: ${{ secrets.GPG_SECRET }}
+ SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }}
+
+ publish_to_nexus:
+ if: "${{ !contains(github.ref, 'master') && github.event_name != 'pull_request' }}"
+ needs: build-commons
+ uses: mosip/kattu/.github/workflows/maven-publish-to-nexus.yml@master
+ with:
+ SERVICE_LOCATION: ./
+ secrets:
+ OSSRH_URL: ${{ secrets.RELEASE_URL }}
+ OSSRH_USER: ${{ secrets.OSSRH_USER }}
+ OSSRH_SECRET: ${{ secrets.OSSRH_SECRET }}
+ OSSRH_TOKEN: ${{ secrets.OSSRH_TOKEN }}
+ GPG_SECRET: ${{ secrets.GPG_SECRET }}
+ SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }}
+
+ sonar_analysis:
+ needs: build-commons
+ if: "${{ github.event_name != 'pull_request' }}"
+ uses: mosip/kattu/.github/workflows/maven-sonar-analysis.yml@master
+ with:
+ SERVICE_LOCATION: ./
+ secrets:
+ SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
+ ORG_KEY: ${{ secrets.ORG_KEY }}
+ OSSRH_USER: ${{ secrets.OSSRH_USER }}
+ OSSRH_SECRET: ${{ secrets.OSSRH_SECRET }}
+ OSSRH_TOKEN: ${{ secrets.OSSRH_TOKEN }}
+ GPG_SECRET: ${{ secrets.GPG_SECRET }}
+ SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }}
+
+ build-dockers:
+ needs: build-commons
+ strategy:
+ matrix:
+ include:
+ - SERVICE_LOCATION: 'kernel/kernel-ridgenerator-service'
+ SERVICE_NAME: 'kernel-ridgenerator-service'
+ BUILD_ARTIFACT: 'commons'
+ - SERVICE_LOCATION: 'kernel/kernel-notification-service'
+ SERVICE_NAME: 'kernel-notification-service'
+ BUILD_ARTIFACT: 'commons'
+ - SERVICE_LOCATION: 'kernel/kernel-pridgenerator-service'
+ SERVICE_NAME: 'kernel-pridgenerator-service'
+ BUILD_ARTIFACT: 'commons'
+ - SERVICE_LOCATION: 'kernel/kernel-idgenerator-service'
+ SERVICE_NAME: 'kernel-idgenerator-service'
+ BUILD_ARTIFACT: 'commons'
+ - SERVICE_LOCATION: 'kernel/kernel-salt-generator'
+ SERVICE_NAME: 'kernel-salt-generator'
+ BUILD_ARTIFACT: 'commons'
+ fail-fast: false
+ uses: mosip/kattu/.github/workflows/docker-build.yml@master
+ name: ${{ matrix.SERVICE_NAME }}
+ with:
+ SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }}
+ SERVICE_NAME: ${{ matrix.SERVICE_NAME }}
+ BUILD_ARTIFACT: ${{ matrix.BUILD_ARTIFACT }}
+ secrets:
+ DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }}
+ ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }}
+ RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }}
+ SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }}
diff --git a/.github/workflows/push_trigger.yml b/.github/workflows/push_trigger.yml
deleted file mode 100644
index f92362392a0..00000000000
--- a/.github/workflows/push_trigger.yml
+++ /dev/null
@@ -1,540 +0,0 @@
-name: Maven Package upon a push
-
-on:
- push:
- branches:
- - master
- - 1.*
- - develop
- - release*
-
-jobs:
- build:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - name: Set up JDK 11
- uses: actions/setup-java@v1
- with:
- ref: ${{ github.ref }}
- java-version: 11
- server-id: ossrh # Value of the distributionManagement/repository/id field of the pom.xml
- settings-path: ${{ github.workspace }} # location for the settings.xml file
-
- - name: Setup branch and env
- run: |
- # Strip git ref prefix from version
- echo "BRANCH_NAME=$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')" >> $GITHUB_ENV
- echo "GPG_TTY=$(tty)" >> $GITHUB_ENV
- - name: Setup branch and GPG public key
- run: |
- # Strip git ref prefix from version
-
- echo ${{ env.BRANCH_NAME }}
-
- echo ${{ env.GPG_TTY }}
- sudo apt-get --yes install gnupg2
- gpg2 --import ./.github/keys/mosipgpgkey_pub.gpg
- gpg2 --quiet --batch --passphrase=${{secrets.gpg_secret}} --allow-secret-key-import --import ./.github/keys/mosipgpgkey_sec.gpg
-
- - uses: actions/cache@v1
- with:
- path: ~/.m2/repository
- key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
- restore-keys: |
- ${{ runner.os }}-maven-${{ env.BRANCH_NAME }}
-
- - name: Install xmllint
- run: |
- sudo apt-get update
- sudo apt-get install libxml2-utils
- - name: Setup the settings file for ossrh server
- run: echo " ossrh ${{secrets.ossrh_user}} ${{secrets.ossrh_secret}} ossrh true gpg2 ${{secrets.gpg_secret}} allow-snapshots true snapshots-repo https://oss.sonatype.org/content/repositories/snapshots false true releases-repo https://oss.sonatype.org/service/local/staging/deploy/maven2 true false sonar . https://sonarcloud.io false " > $GITHUB_WORKSPACE/settings.xml
-
- - name: Build with Maven
- run: mvn -U -B package --file pom.xml -s $GITHUB_WORKSPACE/settings.xml
-
- - name: Ready the springboot artifacts
- run: find -name '*.jar' -executable -type f -exec zip release.zip {} +
-
- - name: Upload the springboot jars
- uses: actions/upload-artifact@v1
- with:
- name: release
- path: ./release.zip
-
- # - uses: 8398a7/action-slack@v3
- # with:
- # status: ${{ job.status }}
- # fields: repo,message,commit,author,action,eventName,ref,workflow,job,took # selectable (default: repo,message)
- # env:
- # SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }} # required
- # if: failure() # Pick up events even if the job fails or is canceled.
-
- publish_to_nexus:
- if: "!contains(github.ref, 'master')"
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - name: Set up JDK 11
- uses: actions/setup-java@v1
- with:
- ref: ${{ github.ref }}
- java-version: 11
- server-id: ossrh # Value of the distributionManagement/repository/id field of the pom.xml
- settings-path: ${{ github.workspace }} # location for the settings.xml file
-
- - name: Setup branch and env
- run: |
- # Strip git ref prefix from version
- echo "BRANCH_NAME=$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')" >> $GITHUB_ENV
- echo "GPG_TTY=$(tty)" >> $GITHUB_ENV
- - name: Setup branch and GPG public key
- run: |
- # Strip git ref prefix from version
-
- echo ${{ env.BRANCH_NAME }}
-
- echo ${{ env.GPG_TTY }}
- sudo apt-get --yes install gnupg2
- gpg2 --import ./.github/keys/mosipgpgkey_pub.gpg
- gpg2 --quiet --batch --passphrase=${{secrets.gpg_secret}} --allow-secret-key-import --import ./.github/keys/mosipgpgkey_sec.gpg
-
- - uses: actions/cache@v1
- with:
- path: ~/.m2/repository
- key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
- restore-keys: |
- ${{ runner.os }}-maven-${{ env.BRANCH_NAME }}
-
- - name: Install xmllint
- run: |
- sudo apt-get update
- sudo apt-get install libxml2-utils
- - name: Setup the settings file for ossrh server
- run: echo " ossrh ossrh ${{secrets.RELEASE_USER}} ${{secrets.RELEASE_TOKEN}} ossrh true gpg2 ${{secrets.gpg_secret}} allow-snapshots true snapshots-repo https://oss.sonatype.org/content/repositories/snapshots false true releases-repo https://oss.sonatype.org/service/local/staging/deploy/maven2 true false sonar . https://sonarcloud.io false " > $GITHUB_WORKSPACE/settings.xml
-
- - name: Build with Maven
- run: mvn -B package --file pom.xml -s $GITHUB_WORKSPACE/settings.xml
-
- - name: Publish the maven package
- run: |
- mvn -B deploy -DaltDeploymentRepository=ossrh::default::${{ secrets.OSSRH_SNAPSHOT_URL }} -s $GITHUB_WORKSPACE/settings.xml -f pom.xml
- env:
- GITHUB_TOKEN: ${{secrets.RELEASE_TOKEN}}
- GPG_TTY: $(tty)
-
- # - uses: 8398a7/action-slack@v3
- # with:
- # status: ${{ job.status }}
- # fields: repo,message,commit,author,action,eventName,ref,workflow,job,took # selectable (default: repo,message)
- # env:
- # SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }} # required
- # if: failure() # Pick up events even if the job fails or is canceled.
-
- sonar_analysis:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - name: Set up JDK 11
- uses: actions/setup-java@v1
- with:
- ref: ${{ github.ref }}
- java-version: 11
- server-id: ossrh # Value of the distributionManagement/repository/id field of the pom.xml
- settings-path: ${{ github.workspace }} # location for the settings.xml file
-
- - name: Setup branch and env
- run: |
- # Strip git ref prefix from version
- echo "BRANCH_NAME=$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')" >> $GITHUB_ENV
- echo "GPG_TTY=$(tty)" >> $GITHUB_ENV
- - uses: actions/cache@v1
- with:
- path: ~/.m2/repository
- key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
- restore-keys: |
- ${{ runner.os }}-maven-${{ env.BRANCH_NAME }}
- - name: Setup the settings file for ossrh server
- run: echo " ossrh ${{secrets.ossrh_user}} ${{secrets.ossrh_secret}} ossrh true gpg2 ${{secrets.gpg_secret}} allow-snapshots true snapshots-repo https://oss.sonatype.org/content/repositories/snapshots false true releases-repo https://oss.sonatype.org/service/local/staging/deploy/maven2 true false sonar . https://sonarcloud.io false " > $GITHUB_WORKSPACE/settings.xml
-
- - name: Build with Maven
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- mvn -B package -s $GITHUB_WORKSPACE/settings.xml --file pom.xml
- - name: Analyze with SonarCloud
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- mvn -B -Dgpg.skip verify sonar:sonar -Dsonar.projectKey=mosip_${{ github.event.repository.name }} -Dsonar.organization=${{ secrets.ORG_KEY }} -Dsonar.host.url=https://sonarcloud.io -Dsonar.login=${{ secrets.SONAR_TOKEN }}
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
-
- # - uses: 8398a7/action-slack@v3
- # with:
- # status: ${{ job.status }}
- # fields: repo,message,commit,workflow,job # selectable (default: repo,message)
- # env:
- # SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEVOPS_WEBHOOK }} # required
- # if: failure() # Pick up events even if the job fails or is canceled.
-
-
- docker-kernel-ridgenerator-service:
- needs: build
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: kernel-ridgenerator-service
- SERVICE_LOCATION: kernel/kernel-ridgenerator-service
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch name
- run: |
- # Strip git ref prefix from version
- echo "BRANCH_NAME=$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')" >> $GITHUB_ENV
- echo ${{ env.BRANCH_NAME }}
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract the id-repository-vid-service
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- if [[ $BRANCH_NAME == 'master' ]]; then
- VERSION=latest
- else
- VERSION=$BRANCH_NAME
- fi
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
- - uses: 8398a7/action-slack@v3
- with:
- status: ${{ job.status }}
- fields: repo,message,commit,author,action,eventName,ref,workflow,job,took # selectable (default: repo,message)
- env:
- SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }} # required
- if: failure() # Pick up events even if the job fails or is canceled.
-
- docker-kernel-notification-service:
- needs: build
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: kernel-notification-service
- SERVICE_LOCATION: kernel/kernel-notification-service
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch name
- run: |
- # Strip git ref prefix from version
- echo "BRANCH_NAME=$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')" >> $GITHUB_ENV
- echo ${{ env.BRANCH_NAME }}
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract the id-repository-vid-service
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- if [[ $BRANCH_NAME == 'master' ]]; then
- VERSION=latest
- else
- VERSION=$BRANCH_NAME
- fi
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
- - uses: 8398a7/action-slack@v3
- with:
- status: ${{ job.status }}
- fields: repo,message,commit,author,action,eventName,ref,workflow,job,took # selectable (default: repo,message)
- env:
- SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }} # required
- if: failure() # Pick up events even if the job fails or is canceled.
-
- docker-kernel-pridgenerator-service:
- needs: build
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: kernel-pridgenerator-service
- SERVICE_LOCATION: kernel/kernel-pridgenerator-service
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch name
- run: |
- # Strip git ref prefix from version
- echo "BRANCH_NAME=$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')" >> $GITHUB_ENV
- echo ${{ env.BRANCH_NAME }}
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract the id-repository-vid-service
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- if [[ $BRANCH_NAME == 'master' ]]; then
- VERSION=latest
- else
- VERSION=$BRANCH_NAME
- fi
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
- - uses: 8398a7/action-slack@v3
- with:
- status: ${{ job.status }}
- fields: repo,message,commit,author,action,eventName,ref,workflow,job,took # selectable (default: repo,message)
- env:
- SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }} # required
- if: failure() # Pick up events even if the job fails or is canceled.
-
- docker-kernel-otpmanager-service:
- needs: build
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: kernel-otpmanager-service
- SERVICE_LOCATION: kernel/kernel-otpmanager-service
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch name
- run: |
- # Strip git ref prefix from version
- echo "BRANCH_NAME=$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')" >> $GITHUB_ENV
- echo ${{ env.BRANCH_NAME }}
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract the id-repository-vid-service
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- if [[ $BRANCH_NAME == 'master' ]]; then
- VERSION=latest
- else
- VERSION=$BRANCH_NAME
- fi
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
- - uses: 8398a7/action-slack@v3
- with:
- status: ${{ job.status }}
- fields: repo,message,commit,author,action,eventName,ref,workflow,job,took # selectable (default: repo,message)
- env:
- SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }} # required
- if: failure() # Pick up events even if the job fails or is canceled.
-
- docker-kernel-idgenerator-service:
- needs: build
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: kernel-idgenerator-service
- SERVICE_LOCATION: kernel/kernel-idgenerator-service
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch name
- run: |
- # Strip git ref prefix from version
- echo "BRANCH_NAME=$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')" >> $GITHUB_ENV
- echo ${{ env.BRANCH_NAME }}
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract the id-repository-vid-service
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- if [[ $BRANCH_NAME == 'master' ]]; then
- VERSION=latest
- else
- VERSION=$BRANCH_NAME
- fi
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
- - uses: 8398a7/action-slack@v3
- with:
- status: ${{ job.status }}
- fields: repo,message,commit,author,action,eventName,ref,workflow,job,took # selectable (default: repo,message)
- env:
- SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }} # required
- if: failure() # Pick up events even if the job fails or is canceled.
-
- docker-kernel-salt-generator:
- needs: build
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: kernel-salt-generator
- SERVICE_LOCATION: kernel/kernel-salt-generator
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch name
- run: |
- # Strip git ref prefix from version
- echo "BRANCH_NAME=$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')" >> $GITHUB_ENV
- echo ${{ env.BRANCH_NAME }}
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract the kernel-salt-generator
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- if [[ $BRANCH_NAME == 'master' ]]; then
- VERSION=latest
- else
- VERSION=$BRANCH_NAME
- fi
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
- - uses: 8398a7/action-slack@v3
- with:
- status: ${{ job.status }}
- fields: repo,message,commit,author,action,eventName,ref,workflow,job,took # selectable (default: repo,message)
- env:
- SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }} # required
- if: failure() # Pick up events even if the job fails or is canceled.
\ No newline at end of file
diff --git a/.github/workflows/release_changes.yml b/.github/workflows/release_changes.yml
deleted file mode 100644
index 0b5e369eb71..00000000000
--- a/.github/workflows/release_changes.yml
+++ /dev/null
@@ -1,55 +0,0 @@
-name: Release/pre-release Preparation.
-
-on:
- workflow_dispatch:
- inputs:
- message:
- description: 'Triggered for release or pe-release'
- required: false
- default: 'Release Preparation'
- releaseTags:
- description: 'tag to update'
- required: true
- snapshotTags:
- description: 'tag to be replaced'
- required: true
- base:
- description: 'base branch for PR'
- required: true
-jobs:
- build:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - name: Setup branch and env
- run: |
- # Strip git ref prefix from version
- echo "BRANCH_NAME=$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')" >> $GITHUB_ENV
- echo "GPG_TTY=$(tty)" >> $GITHUB_ENV
-
- - name: Mannualy changing the pom versions
- run: find . -type f -name "*pom.xml" -print0 | xargs -0 sed -i "s/${{ github.event.inputs.snapshotTags }}/${{ github.event.inputs.releaseTags }}/g"
-
- - name: Updating the Release URL in POM
- run: |
- cd .github/workflows
- sed -i 's/OSSRH_SNAPSHOT_URL/RELEASE_URL/g' push_trigger.yml
-
- - name: Updating libs-snapshot-local to libs-release local for artifactory URL's.
- run: find . -type f -name "*Dockerfile" -print0 | xargs -0 sed -i "s/libs-snapshot-local/libs-release-local/g"
-
- - name: removing -DskipTests
- run: find . -type f -name "*push_trigger.yml" -print0 | xargs -0 sed -i "s/"-DskipTests"//g"
-
-# - name: removing --Dgpg.skip
-# run: find . -type f -name "*push_trigger.yml" -print0 | xargs -0 sed -i "s/"-Dgpg.skip"//g"
-
- - name: Create Pull Request
- uses: peter-evans/create-pull-request@v3
- with:
- commit-message: Updated Pom versions for release changes
- title: Release changes
- body: Automated PR for ${{ github.event.inputs.releaseTags }} release.
- branch: release-branch
- delete-branch: true
- base: ${{ github.event.inputs.base }}
diff --git a/.github/workflows/release_trigger.yml b/.github/workflows/release_trigger.yml
deleted file mode 100644
index 16007e15cfc..00000000000
--- a/.github/workflows/release_trigger.yml
+++ /dev/null
@@ -1,748 +0,0 @@
-# This workflow will build and publish package using Maven, Docker when a release is published
-
-name: Release maven packages and docker upon a release
-
-on:
- release:
- types: [published]
-
-
-jobs:
- build:
-
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v2
- - name: Set up JDK 11
- uses: actions/setup-java@v1
- with:
- ref: ${{ github.ref }}
- java-version: 11
- server-id: ossrh # Value of the distributionManagement/repository/id field of the pom.xml
- settings-path: ${{ github.workspace }} # location for the settings.xml file
-
- - name: Setup branch and GPG public key
- run: |
- # Strip git ref prefix from version
- echo "::set-env name=BRANCH_NAME::$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')"
- echo ${{ env.BRANCH_NAME }}
- echo "::set-env name=GPG_TTY::$(tty)"
- echo ${{ env.GPG_TTY }}
- sudo apt-get --yes install gnupg2
- gpg2 --import ./.github/keys/mosipgpgkey_pub.gpg
- gpg2 --quiet --batch --passphrase=${{secrets.gpg_secret}} --allow-secret-key-import --import ./.github/keys/mosipgpgkey_sec.gpg
-
- - uses: actions/cache@v1
- with:
- path: ~/.m2/repository
- key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
- restore-keys: |
- ${{ runner.os }}-maven-${{ env.BRANCH_NAME }}
-
- - name: Install xmllint
- run: |
- sudo apt-get update
- sudo apt-get install libxml2-utils
-
- - name: Setup the settings file for ossrh server
- run: echo " ossrh ${{secrets.ossrh_user}} ${{secrets.ossrh_secret}} ossrh true gpg2 ${{secrets.gpg_secret}} allow-snapshots true snapshots-repo https://oss.sonatype.org/content/repositories/snapshots false true releases-repo https://oss.sonatype.org/service/local/staging/deploy/maven2 true false sonar . https://sonarcloud.io false " > $GITHUB_WORKSPACE/settings.xml
-
- - name: Build with Maven
- run: mvn -B package --file pom.xml -s $GITHUB_WORKSPACE/settings.xml
-
- - name: Ready the springboot artifacts
- run: find -name '*.jar' -executable -type f -exec zip release.zip {} +
-
- - name: Upload the springboot jars
- uses: actions/upload-artifact@v1
- with:
- name: release
- path: ./release.zip
-
- publish_to_nexus:
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v2
- - name: Set up JDK 11
- uses: actions/setup-java@v1
- with:
- ref: ${{ github.ref }}
- java-version: 11
- server-id: ossrh # Value of the distributionManagement/repository/id field of the pom.xml
- settings-path: ${{ github.workspace }} # location for the settings.xml file
-
- - name: Setup branch and GPG public key
- run: |
- # Strip git ref prefix from version
- echo "::set-env name=BRANCH_NAME::$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')"
- echo ${{ env.BRANCH_NAME }}
- echo "::set-env name=GPG_TTY::$(tty)"
- echo ${{ env.GPG_TTY }}
- sudo apt-get --yes install gnupg2
- gpg2 --import ./.github/keys/mosipgpgkey_pub.gpg
- gpg2 --quiet --batch --passphrase=${{secrets.gpg_secret}} --allow-secret-key-import --import ./.github/keys/mosipgpgkey_sec.gpg
-
- - uses: actions/cache@v1
- with:
- path: ~/.m2/repository
- key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
- restore-keys: |
- ${{ runner.os }}-maven-${{ env.BRANCH_NAME }}
-
- - name: Install xmllint
- run: |
- sudo apt-get update
- sudo apt-get install libxml2-utils
-
- - name: Setup the settings file for ossrh server
- run: echo " ossrh ${{secrets.ossrh_user}} ${{secrets.ossrh_secret}} ossrh true gpg2 ${{secrets.gpg_secret}} allow-snapshots true snapshots-repo https://oss.sonatype.org/content/repositories/snapshots false true releases-repo https://oss.sonatype.org/service/local/staging/deploy/maven2 true false sonar . https://sonarcloud.io false " > $GITHUB_WORKSPACE/settings.xml
-
- - name: Build with Maven
- run: mvn -B package --file pom.xml -s $GITHUB_WORKSPACE/settings.xml
-
- - name: Publish the maven package
- run: |
- chmod +x ./deploy.sh
- ./deploy.sh kernel $GITHUB_WORKSPACE/settings.xml .*
- ./deploy.sh id-repository $GITHUB_WORKSPACE/settings.xml .*
- env:
- GPG_TTY: $(tty)
- - name: Analyze with SonarCloud
- run: mvn -B verify sonar:sonar -Dsonar.projectKey=${{ secrets.PROJECT_KEY }} -Dsonar.organization=${{ secrets.ORG_KEY }} -Dsonar.host.url=https://sonarcloud.io -Dsonar.login=${{ secrets.SONAR_TOKEN }}
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
- docker-id-repository-vid-service:
- needs: build
-
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: id-repository-vid-service
- SERVICE_LOCATION: id-repository/id-repository-vid-service
-
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch and GPG public key
- run: |
- # Strip git ref prefix from version
- echo "::set-env name=BRANCH_NAME::$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')"
- echo ${{ env.BRANCH_NAME }}
-
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
-
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- VERSION=$BRANCH_NAME
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
-
- docker-id-repository-identity-service:
- needs: build
-
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: id-repository-identity-service
- SERVICE_LOCATION: id-repository/id-repository-identity-service
-
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch name
- run: |
- # Strip git ref prefix from version
- echo "::set-env name=BRANCH_NAME::$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')"
- echo ${{ env.BRANCH_NAME }}
-
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
-
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- VERSION=$BRANCH_NAME
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
-
- docker-kernel-auditmanager-service:
- needs: build
-
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: kernel-auditmanager-service
- SERVICE_LOCATION: kernel/kernel-auditmanager-service
-
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch name
- run: |
- # Strip git ref prefix from version
- echo "::set-env name=BRANCH_NAME::$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')"
- echo ${{ env.BRANCH_NAME }}
-
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract the id-repository-vid-service
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
-
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- VERSION=$BRANCH_NAME
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
-
- docker-kernel-ridgenerator-service:
- needs: build
-
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: kernel-ridgenerator-service
- SERVICE_LOCATION: kernel/kernel-ridgenerator-service
-
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch name
- run: |
- # Strip git ref prefix from version
- echo "::set-env name=BRANCH_NAME::$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')"
- echo ${{ env.BRANCH_NAME }}
-
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract the id-repository-vid-service
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
-
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- VERSION=$BRANCH_NAME
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
-
- docker-kernel-notification-service:
- needs: build
-
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: kernel-notification-service
- SERVICE_LOCATION: kernel/kernel-notification-service
-
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch name
- run: |
- # Strip git ref prefix from version
- echo "::set-env name=BRANCH_NAME::$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')"
- echo ${{ env.BRANCH_NAME }}
-
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract the id-repository-vid-service
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
-
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- VERSION=$BRANCH_NAME
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
-
- docker-kernel-auth-service:
- needs: build
-
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: kernel-auth-service
- SERVICE_LOCATION: kernel/kernel-auth-service
-
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch name
- run: |
- # Strip git ref prefix from version
- echo "::set-env name=BRANCH_NAME::$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')"
- echo ${{ env.BRANCH_NAME }}
-
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract the id-repository-vid-service
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
-
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- VERSION=$BRANCH_NAME
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
-
- docker-kernel-keymanager-service:
- needs: build
-
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: kernel-keymanager-service
- SERVICE_LOCATION: kernel/kernel-keymanager-service
-
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch name
- run: |
- # Strip git ref prefix from version
- echo "::set-env name=BRANCH_NAME::$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')"
- echo ${{ env.BRANCH_NAME }}
-
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract the id-repository-vid-service
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
-
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- VERSION=$BRANCH_NAME
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
-
- docker-kernel-pridgenerator-service:
- needs: build
-
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: kernel-pridgenerator-service
- SERVICE_LOCATION: kernel/kernel-pridgenerator-service
-
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch name
- run: |
- # Strip git ref prefix from version
- echo "::set-env name=BRANCH_NAME::$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')"
- echo ${{ env.BRANCH_NAME }}
-
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract the id-repository-vid-service
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
-
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- VERSION=$BRANCH_NAME
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
-
- docker-kernel-otpmanager-service:
- needs: build
-
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: kernel-otpmanager-service
- SERVICE_LOCATION: kernel/kernel-otpmanager-service
-
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch name
- run: |
- # Strip git ref prefix from version
- echo "::set-env name=BRANCH_NAME::$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')"
- echo ${{ env.BRANCH_NAME }}
-
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract the id-repository-vid-service
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
-
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- VERSION=$BRANCH_NAME
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
-
- docker-kernel-idgenerator-service:
- needs: build
-
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: kernel-idgenerator-service
- SERVICE_LOCATION: kernel/kernel-idgenerator-service
-
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch name
- run: |
- # Strip git ref prefix from version
- echo "::set-env name=BRANCH_NAME::$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')"
- echo ${{ env.BRANCH_NAME }}
-
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract the id-repository-vid-service
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
-
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- VERSION=$BRANCH_NAME
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
-
- docker-kernel-salt-generator:
- needs: build
-
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: kernel-salt-generator
- SERVICE_LOCATION: kernel/kernel-salt-generator
-
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch name
- run: |
- # Strip git ref prefix from version
- echo "::set-env name=BRANCH_NAME::$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')"
- echo ${{ env.BRANCH_NAME }}
-
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract the kernel-salt-generator
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
-
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- VERSION=$BRANCH_NAME
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
- docker-kernel-keys-generator:
- needs: build
-
- runs-on: ubuntu-latest
- env:
- NAMESPACE: ${{ secrets. dev_namespace_docker_hub }}
- SERVICE_NAME: keys-generator
- SERVICE_LOCATION: kernel/keys-generator
-
- steps:
- - uses: actions/checkout@v2
- - uses: actions/download-artifact@v1
- with:
- name: release
- path: ./
-
- - name: Setup branch name
- run: |
- # Strip git ref prefix from version
- echo "::set-env name=BRANCH_NAME::$(echo ${{ github.ref }} | sed -e 's,.*/\(.*\),\1,')"
- echo ${{ env.BRANCH_NAME }}
- - name: Get version info from pom
- id: getPomVersion
- uses: mavrosxristoforos/get-xml-info@1.0
- with:
- xml-file: ./${{ env.SERVICE_LOCATION }}/pom.xml
- xpath: /*[local-name()="project"]/*[local-name()="version"]
-
- - name: Unzip and extract the keys-generator
- run: unzip -uj "release.zip" "${{ env.SERVICE_LOCATION }}/target/*" -d "./${{ env.SERVICE_LOCATION }}/target"
-
- - name: Build image
- run: |
- cd "./${{env.SERVICE_LOCATION}}"
- docker build . --file Dockerfile --tag ${{ env.SERVICE_NAME }}
- - name: Log into registry
- run: echo "${{ secrets.release_docker_hub }}" | docker login -u ${{ secrets.actor_docker_hub }} --password-stdin
-
- - name: Push image
- run: |
- IMAGE_ID=$NAMESPACE/$SERVICE_NAME
-
- # Change all uppercase to lowercase
- IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
- echo "push version ${{steps.getPomVersion.outputs.info}}"
- VERSION=$BRANCH_NAME
- echo IMAGE_ID=$IMAGE_ID
- echo VERSION=$VERSION
- docker tag $SERVICE_NAME $IMAGE_ID:$VERSION
- docker push $IMAGE_ID:$VERSION
\ No newline at end of file
diff --git a/README.md b/README.md
index 9877305393d..a2a062958db 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
-[](https://github.com/mosip/commons/actions/workflows/push_trigger.yml)
-[](https://sonarcloud.io/dashboard?branch=release-1.2.0&id=mosip_commons)
+[](https://github.com/mosip/commons/actions/workflows/push-trigger.yml)
+[](https://sonarcloud.io/dashboard?branch=master&id=mosip_commons)
# Commons
@@ -27,7 +27,7 @@ The project requires JDK 1.11.
To deploy Commons services on Kubernetes cluster using Dockers refer to [Sandbox Deployment](https://docs.mosip.io/1.2.0/deployment/sandbox-deployment).
## Test
-Automated functaionl tests available in [Functional Tests repo](https://github.com/mosip/mosip-functional-tests).
+Automated functional tests available in [Functional Tests repo](https://github.com/mosip/mosip-functional-tests).
## APIs
API documentation is available [here](https://mosip.github.io/documentation/).
diff --git a/db_release_scripts/mosip_authdevice/authdevice_release_db_deploy.sh b/db_release_scripts/mosip_authdevice/authdevice_release_db_deploy.sh
deleted file mode 100644
index 397c9c1817b..00000000000
--- a/db_release_scripts/mosip_authdevice/authdevice_release_db_deploy.sh
+++ /dev/null
@@ -1,92 +0,0 @@
-### -- ---------------------------------------------------------------------------------------------------------
-### -- Script Name : Authdevice Release DB deploy
-### -- Deploy Module : MOSIP Kernel
-### -- Purpose : To deploy AuthDevice Database alter scripts for the release.
-### -- Created By : Ram Bhatt
-### -- Created Date : Jan-2021
-### --
-### -- Modified Date Modified By Comments / Remarks
-### -- -----------------------------------------------------------------------------------------------------------
-
-### -- -----------------------------------------------------------------------------------------------------------
-
-#########Properties file #############
-set -e
-properties_file="$1"
-release_version="$2"
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Properties File Name - $properties_file"
- echo `date "+%m/%d/%Y %H:%M:%S"` ": DB Deploymnet Version - $release_version"
-#properties_file="./app.properties"
-if [ -f "$properties_file" ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file \"$properties_file\" found."
- while IFS='=' read -r key value
- do
- key=$(echo $key | tr '.' '_')
- eval ${key}=\${value}
- done < "$properties_file"
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file not found, Pass property file name as argument."
-fi
-echo `date "+%m/%d/%Y %H:%M:%S"` ": ------------------ Database server and service status check for ${MOSIP_DB_NAME}------------------------"
-
-today=`date '+%d%m%Y_%H%M%S'`;
-LOG="${LOG_PATH}${MOSIP_DB_NAME}-release-${release_version}-${today}.log"
-touch $LOG
-
-SERVICE=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('sysadmin')";exit; > /dev/null)
-
-if [ "$SERVICE" -eq 0 ] || [ "$SERVICE" -eq 1 ]
-then
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server and service is up and running" | tee -a $LOG 2>&1
-else
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server or service is not running" | tee -a $LOG 2>&1
-fi
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": ----------------------------------------------------------------------------------------"
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Started sourcing the $MOSIP_DB_NAME Database Alter scripts" | tee -a $LOG 2>&1
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Database Alter scripts are sourcing from :$BASEPATH/$MOSIP_DB_NAME/" | tee -a $LOG 2>&1
-
-#========================================DB Alter Scripts deployment process begins on IDMAP DB SERVER==================================
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Alter scripts deployment on $MOSIP_DB_NAME database is started....Deployment Version...$release_version" | tee -a $LOG 2>&1
-
-ALTER_SCRIPT_FILENAME_VERSION="sql/${release_version}_${ALTER_SCRIPT_FILENAME}"
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Alter scripts file which is considered for release deployment - $ALTER_SCRIPT_FILENAME_VERSION" | tee -a $LOG 2>&1
-
-cd /$BASEPATH/$MOSIP_DB_NAME/
-
-pwd | tee -a $LOG 2>&1
-
-CONN=$(PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit; >> $LOG 2>&1)
-
-if [ ${CONN} == 0 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": No active database connections exist on ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Active connections exist on the database server and active connection will be terminated for DB deployment." | tee -a $LOG 2>&1
-fi
-
-if [ ${ALTER_SCRIPT_FLAG} == 1 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Deploying Alter scripts for ${MOSIP_DB_NAME} database" | tee -a $LOG 2>&1
- PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f $ALTER_SCRIPT_FILENAME_VERSION >> $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": There are no alter scripts available for this deployment at ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
-fi
-
-if [ $(grep -c ERROR $LOG) -ne 0 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Database Alter scripts deployment version $release_version is completed with ERRORS, Please check the logs for more information" | tee -a $LOG 2>&1
- echo `date "+%m/%d/%Y %H:%M:%S"` ": END of Alter scripts MOSIP database deployment" | tee -a $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Database Alter scripts deployment version $release_version completed successfully, Please check the logs for more information" | tee -a $LOG 2>&1
- echo `date "+%m/%d/%Y %H:%M:%S"` ": END of MOSIP \"${MOSIP_DB_NAME}\" database alter scripts deployment" | tee -a $LOG 2>&1
-fi
-
-echo "******************************************"`date "+%m/%d/%Y %H:%M:%S"` "*****************************************************" >> $LOG 2>&1
-
-
diff --git a/db_release_scripts/mosip_authdevice/authdevice_release_deploy.properties b/db_release_scripts/mosip_authdevice/authdevice_release_deploy.properties
deleted file mode 100644
index e0c9ff9c2d7..00000000000
--- a/db_release_scripts/mosip_authdevice/authdevice_release_deploy.properties
+++ /dev/null
@@ -1,12 +0,0 @@
-DB_SERVERIP=
-DB_PORT=30090
-SU_USER=postgres
-DEFAULT_DB_NAME=postgres
-MOSIP_DB_NAME=mosip_authdevice
-SYSADMIN_USER=sysadmin
-BASEPATH=/home/madmin/database_release
-LOG_PATH=/home/madmin/logs/
-ALTER_SCRIPT_FLAG=1
-ALTER_SCRIPT_FILENAME=authdevice-scripts_release.sql
-REVOKE_SCRIPT_FLAG=1
-REVOKE_SCRIPT_FILENAME=authdevice-scripts_revoke.sql
diff --git a/db_release_scripts/mosip_authdevice/authdevice_revoke_db_deploy.sh b/db_release_scripts/mosip_authdevice/authdevice_revoke_db_deploy.sh
deleted file mode 100644
index d79b96cfb72..00000000000
--- a/db_release_scripts/mosip_authdevice/authdevice_revoke_db_deploy.sh
+++ /dev/null
@@ -1,92 +0,0 @@
-### -- ---------------------------------------------------------------------------------------------------------
-### -- Script Name : Authdevice Revoke DB deploy
-### -- Deploy Module : MOSIP Authdevice
-### -- Purpose : To revoke Authdevice Database alter scripts for the release.
-### -- Create By : Ram Bhatt
-### -- Created Date : Jan-2021
-### --
-### -- Modified Date Modified By Comments / Remarks
-### -- -----------------------------------------------------------------------------------------------------------
-
-### -- -----------------------------------------------------------------------------------------------------------
-
-#########Properties file #############
-set -e
-properties_file="$1"
-revoke_version="$2"
- echo `date "+%m/%d/%Y %H:%M:%S"` ": $properties_file"
- echo `date "+%m/%d/%Y %H:%M:%S"` ": DB Revoke Version - $revoke_version"
-#properties_file="./app.properties"
-if [ -f "$properties_file" ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file \"$properties_file\" found."
- while IFS='=' read -r key value
- do
- key=$(echo $key | tr '.' '_')
- eval ${key}=\${value}
- done < "$properties_file"
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file not found, Pass property file name as argument."
-fi
-echo `date "+%m/%d/%Y %H:%M:%S"` ": ------------------ Database server and service status check for ${MOSIP_DB_NAME}------------------------"
-
-today=`date '+%d%m%Y_%H%M%S'`;
-LOG="${LOG_PATH}${MOSIP_DB_NAME}-revoke-${today}.log"
-touch $LOG
-
-SERVICE=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('sysadmin')";exit; > /dev/null)
-
-if [ "$SERVICE" -eq 0 ] || [ "$SERVICE" -eq 1 ]
-then
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server and service is up and running" | tee -a $LOG 2>&1
-else
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server or service is not running" | tee -a $LOG 2>&1
-fi
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": ----------------------------------------------------------------------------------------"
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Started sourcing the $MOSIP_DB_NAME Database Deployment Revoke scripts" | tee -a $LOG 2>&1
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Database revoke scripts are sourcing from :$BASEPATH/$MOSIP_DB_NAME/alter-scripts" | tee -a $LOG 2>&1
-
-#========================================DB Alter Scripts deployment process begins on IDMAP DB SERVER==================================
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Revoke scripts for DB deployment on $MOSIP_DB_NAME database is started....Revoke Version...$revoke_version" | tee -a $LOG 2>&1
-
-REVOKE_SCRIPT_FILENAME_VERSION="sql/${revoke_version}_${REVOKE_SCRIPT_FILENAME}"
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Alter scripts file which is considered for deployment revoke - $REVOKE_SCRIPT_FILENAME_VERSION" | tee -a $LOG 2>&1
-
-cd /$BASEPATH/$MOSIP_DB_NAME/
-
-pwd | tee -a $LOG 2>&1
-
-CONN=$(PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit; >> $LOG 2>&1)
-
-if [ ${CONN} == 0 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": No active database connections exist on ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Active connections exist on the database server and active connection will be terminated for DB deployment." | tee -a $LOG 2>&1
-fi
-
-if [ ${REVOKE_SCRIPT_FLAG} == 1 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Executing revoke scripts for ${MOSIP_DB_NAME} database" | tee -a $LOG 2>&1
- PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f $REVOKE_SCRIPT_FILENAME_VERSION >> $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": There are no revoke scripts available for this deployment at ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
-fi
-
-if [ $(grep -c ERROR $LOG) -ne 0 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Database deployment revoke version $revoke_version is completed with ERRORS, Please check the logs for more information" | tee -a $LOG 2>&1
- echo `date "+%m/%d/%Y %H:%M:%S"` ": END of Alter scripts MOSIP database deployment" | tee -a $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Database deployment revoke version $revoke_version completed successfully, Please check the logs for more information" | tee -a $LOG 2>&1
- echo `date "+%m/%d/%Y %H:%M:%S"` ": END of MOSIP \"${MOSIP_DB_NAME}\" database deployment revoke" | tee -a $LOG 2>&1
-fi
-
-echo "******************************************"`date "+%m/%d/%Y %H:%M:%S"` "*****************************************************" >> $LOG 2>&1
-
-
diff --git a/db_release_scripts/mosip_authdevice/sql/1.1.5_authdevice-scripts_release.sql b/db_release_scripts/mosip_authdevice/sql/1.1.5_authdevice-scripts_release.sql
deleted file mode 100644
index bffaa08e4f2..00000000000
--- a/db_release_scripts/mosip_authdevice/sql/1.1.5_authdevice-scripts_release.sql
+++ /dev/null
@@ -1,18 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name : mosip_authdevice
--- Release Version : 1.2.0-SNAPSHOT
--- Purpose : Database Alter scripts for the release for Authdevice DB.
--- Create By : Ram Bhatt
--- Created Date : Jan-2021
---
--- Modified Date Modified By Comments / Remarks
---
--- -------------------------------------------------------------------------------------------------
-
-\c mosip_authdevice sysadmin
-
--- -------------------------------------------------------------------------------------------------
-
-
-
-----------------------------------------------------------------------------------------------------
diff --git a/db_release_scripts/mosip_authdevice/sql/1.1.5_authdevice-scripts_revoke.sql b/db_release_scripts/mosip_authdevice/sql/1.1.5_authdevice-scripts_revoke.sql
deleted file mode 100644
index 9f34722c053..00000000000
--- a/db_release_scripts/mosip_authdevice/sql/1.1.5_authdevice-scripts_revoke.sql
+++ /dev/null
@@ -1,13 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name : mosip_authdevice
--- Release Version : 1.2.0-SNAPSHOT
--- Purpose : Revoking Database Alter deployement done for release in Authdevice DB.
--- Create By : Ram Bhatt
--- Created Date : Jan-2021
---
--- Modified Date Modified By Comments / Remarks
--- -------------------------------------------------------------------------------------------------
-
-\c mosip_authdevice sysadmin
-
------------------------------------------------------------------------------------------------------
diff --git a/db_release_scripts/mosip_authdevice/sql/1.2_authdevice-scripts_release.sql b/db_release_scripts/mosip_authdevice/sql/1.2_authdevice-scripts_release.sql
deleted file mode 100644
index 5b52b968139..00000000000
--- a/db_release_scripts/mosip_authdevice/sql/1.2_authdevice-scripts_release.sql
+++ /dev/null
@@ -1,30 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name : mosip_authdevice
--- Release Version : 1.2.0-SNAPSHOT
--- Purpose : Database Alter scripts for the release for Authdevice DB.
--- Create By : Ram Bhatt
--- Created Date : Nov-2021
---
--- Modified Date Modified By Comments / Remarks
---
--- -------------------------------------------------------------------------------------------------
-
-\c mosip_authdevice sysadmin
-
--- -------------------------------------------------------------------------------------------------
-
-ALTER TABLE authdevice.secure_biometric_interface DROP CONSTRAINT IF EXISTS fk_sbi_id CASCADE;
-
-
-ALTER TABLE authdevice.secure_biometric_interface ADD partner_org_name varchar(128);
-
-ALTER TABLE authdevice.secure_biometric_interface ADD provider_id varchar(36);
-
-ALTER TABLE authdevice.secure_biometric_interface_h ADD partner_org_name varchar(128);
-
-ALTER TABLE authdevice.secure_biometric_interface_h ADD provider_id varchar(36);
-
-ALTER TABLE authdevice.secure_biometric_interface DROP COLUMN IF EXISTS device_detail_id;
-ALTER TABLE authdevice.secure_biometric_interface_h DROP COLUMN IF EXISTS device_detail_id;
-
-----------------------------------------------------------------------------------------------------
diff --git a/db_release_scripts/mosip_iam/iam_release_db_deploy.sh b/db_release_scripts/mosip_iam/iam_release_db_deploy.sh
deleted file mode 100644
index 0becfeeb8ab..00000000000
--- a/db_release_scripts/mosip_iam/iam_release_db_deploy.sh
+++ /dev/null
@@ -1,92 +0,0 @@
-### -- ---------------------------------------------------------------------------------------------------------
-### -- Script Name : Iam Release DB deploy
-### -- Deploy Module : MOSIP Iam
-### -- Purpose : To deploy Iam Database alter scripts for the release.
-### -- Created By : Ram Bhatt
-### -- Created Date : Jan-2021
-### --
-### -- Modified Date Modified By Comments / Remarks
-### -- -----------------------------------------------------------------------------------------------------------
-
-### -- -----------------------------------------------------------------------------------------------------------
-
-#########Properties file #############
-set -e
-properties_file="$1"
-release_version="$2"
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Properties File Name - $properties_file"
- echo `date "+%m/%d/%Y %H:%M:%S"` ": DB Deploymnet Version - $release_version"
-#properties_file="./app.properties"
-if [ -f "$properties_file" ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file \"$properties_file\" found."
- while IFS='=' read -r key value
- do
- key=$(echo $key | tr '.' '_')
- eval ${key}=\${value}
- done < "$properties_file"
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file not found, Pass property file name as argument."
-fi
-echo `date "+%m/%d/%Y %H:%M:%S"` ": ------------------ Database server and service status check for ${MOSIP_DB_NAME}------------------------"
-
-today=`date '+%d%m%Y_%H%M%S'`;
-LOG="${LOG_PATH}${MOSIP_DB_NAME}-release-${release_version}-${today}.log"
-touch $LOG
-
-SERVICE=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('sysadmin')";exit; > /dev/null)
-
-if [ "$SERVICE" -eq 0 ] || [ "$SERVICE" -eq 1 ]
-then
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server and service is up and running" | tee -a $LOG 2>&1
-else
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server or service is not running" | tee -a $LOG 2>&1
-fi
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": ----------------------------------------------------------------------------------------"
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Started sourcing the $MOSIP_DB_NAME Database Alter scripts" | tee -a $LOG 2>&1
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Database Alter scripts are sourcing from :$BASEPATH/$MOSIP_DB_NAME/" | tee -a $LOG 2>&1
-
-#========================================DB Alter Scripts deployment process begins on IDMAP DB SERVER==================================
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Alter scripts deployment on $MOSIP_DB_NAME database is started....Deployment Version...$release_version" | tee -a $LOG 2>&1
-
-ALTER_SCRIPT_FILENAME_VERSION="sql/${release_version}_${ALTER_SCRIPT_FILENAME}"
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Alter scripts file which is considered for release deployment - $ALTER_SCRIPT_FILENAME_VERSION" | tee -a $LOG 2>&1
-
-cd /$BASEPATH/$MOSIP_DB_NAME/
-
-pwd | tee -a $LOG 2>&1
-
-CONN=$(PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit; >> $LOG 2>&1)
-
-if [ ${CONN} == 0 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": No active database connections exist on ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Active connections exist on the database server and active connection will be terminated for DB deployment." | tee -a $LOG 2>&1
-fi
-
-if [ ${ALTER_SCRIPT_FLAG} == 1 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Deploying Alter scripts for ${MOSIP_DB_NAME} database" | tee -a $LOG 2>&1
- PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f $ALTER_SCRIPT_FILENAME_VERSION >> $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": There are no alter scripts available for this deployment at ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
-fi
-
-if [ $(grep -c ERROR $LOG) -ne 0 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Database Alter scripts deployment version $release_version is completed with ERRORS, Please check the logs for more information" | tee -a $LOG 2>&1
- echo `date "+%m/%d/%Y %H:%M:%S"` ": END of Alter scripts MOSIP database deployment" | tee -a $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Database Alter scripts deployment version $release_version completed successfully, Please check the logs for more information" | tee -a $LOG 2>&1
- echo `date "+%m/%d/%Y %H:%M:%S"` ": END of MOSIP \"${MOSIP_DB_NAME}\" database alter scripts deployment" | tee -a $LOG 2>&1
-fi
-
-echo "******************************************"`date "+%m/%d/%Y %H:%M:%S"` "*****************************************************" >> $LOG 2>&1
-
-
diff --git a/db_release_scripts/mosip_iam/iam_release_deploy.properties b/db_release_scripts/mosip_iam/iam_release_deploy.properties
deleted file mode 100644
index 53859114efd..00000000000
--- a/db_release_scripts/mosip_iam/iam_release_deploy.properties
+++ /dev/null
@@ -1,12 +0,0 @@
-DB_SERVERIP=
-DB_PORT=30090
-SU_USER=postgres
-DEFAULT_DB_NAME=postgres
-MOSIP_DB_NAME=mosip_iam
-SYSADMIN_USER=sysadmin
-BASEPATH=/home/madmin/database_release
-LOG_PATH=/home/madmin/logs/
-ALTER_SCRIPT_FLAG=1
-ALTER_SCRIPT_FILENAME=iam-scripts_release.sql
-REVOKE_SCRIPT_FLAG=1
-REVOKE_SCRIPT_FILENAME=iam-scripts_revoke.sql
diff --git a/db_release_scripts/mosip_iam/iam_revoke_db_deploy.sh b/db_release_scripts/mosip_iam/iam_revoke_db_deploy.sh
deleted file mode 100644
index f6d9bbdc97a..00000000000
--- a/db_release_scripts/mosip_iam/iam_revoke_db_deploy.sh
+++ /dev/null
@@ -1,92 +0,0 @@
-### -- ---------------------------------------------------------------------------------------------------------
-### -- Script Name : IAM Revoke DB deploy
-### -- Deploy Module : MOSIP Iam
-### -- Purpose : To revoke Iam Database alter scripts for the release.
-### -- Create By : Ram Bhatt
-### -- Created Date : Jan-2021
-### --
-### -- Modified Date Modified By Comments / Remarks
-### -- -----------------------------------------------------------------------------------------------------------
-
-### -- -----------------------------------------------------------------------------------------------------------
-
-#########Properties file #############
-set -e
-properties_file="$1"
-revoke_version="$2"
- echo `date "+%m/%d/%Y %H:%M:%S"` ": $properties_file"
- echo `date "+%m/%d/%Y %H:%M:%S"` ": DB Revoke Version - $revoke_version"
-#properties_file="./app.properties"
-if [ -f "$properties_file" ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file \"$properties_file\" found."
- while IFS='=' read -r key value
- do
- key=$(echo $key | tr '.' '_')
- eval ${key}=\${value}
- done < "$properties_file"
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file not found, Pass property file name as argument."
-fi
-echo `date "+%m/%d/%Y %H:%M:%S"` ": ------------------ Database server and service status check for ${MOSIP_DB_NAME}------------------------"
-
-today=`date '+%d%m%Y_%H%M%S'`;
-LOG="${LOG_PATH}${MOSIP_DB_NAME}-revoke-${today}.log"
-touch $LOG
-
-SERVICE=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('sysadmin')";exit; > /dev/null)
-
-if [ "$SERVICE" -eq 0 ] || [ "$SERVICE" -eq 1 ]
-then
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server and service is up and running" | tee -a $LOG 2>&1
-else
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server or service is not running" | tee -a $LOG 2>&1
-fi
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": ----------------------------------------------------------------------------------------"
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Started sourcing the $MOSIP_DB_NAME Database Deployment Revoke scripts" | tee -a $LOG 2>&1
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Database revoke scripts are sourcing from :$BASEPATH/$MOSIP_DB_NAME/alter-scripts" | tee -a $LOG 2>&1
-
-#========================================DB Alter Scripts deployment process begins on IDMAP DB SERVER==================================
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Revoke scripts for DB deployment on $MOSIP_DB_NAME database is started....Revoke Version...$revoke_version" | tee -a $LOG 2>&1
-
-REVOKE_SCRIPT_FILENAME_VERSION="sql/${revoke_version}_${REVOKE_SCRIPT_FILENAME}"
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Alter scripts file which is considered for deployment revoke - $REVOKE_SCRIPT_FILENAME_VERSION" | tee -a $LOG 2>&1
-
-cd /$BASEPATH/$MOSIP_DB_NAME/
-
-pwd | tee -a $LOG 2>&1
-
-CONN=$(PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit; >> $LOG 2>&1)
-
-if [ ${CONN} == 0 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": No active database connections exist on ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Active connections exist on the database server and active connection will be terminated for DB deployment." | tee -a $LOG 2>&1
-fi
-
-if [ ${REVOKE_SCRIPT_FLAG} == 1 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Executing revoke scripts for ${MOSIP_DB_NAME} database" | tee -a $LOG 2>&1
- PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f $REVOKE_SCRIPT_FILENAME_VERSION >> $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": There are no revoke scripts available for this deployment at ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
-fi
-
-if [ $(grep -c ERROR $LOG) -ne 0 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Database deployment revoke version $revoke_version is completed with ERRORS, Please check the logs for more information" | tee -a $LOG 2>&1
- echo `date "+%m/%d/%Y %H:%M:%S"` ": END of Alter scripts MOSIP database deployment" | tee -a $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Database deployment revoke version $revoke_version completed successfully, Please check the logs for more information" | tee -a $LOG 2>&1
- echo `date "+%m/%d/%Y %H:%M:%S"` ": END of MOSIP \"${MOSIP_DB_NAME}\" database deployment revoke" | tee -a $LOG 2>&1
-fi
-
-echo "******************************************"`date "+%m/%d/%Y %H:%M:%S"` "*****************************************************" >> $LOG 2>&1
-
-
diff --git a/db_release_scripts/mosip_iam/sql/1.1.5_iam-scripts_release.sql b/db_release_scripts/mosip_iam/sql/1.1.5_iam-scripts_release.sql
deleted file mode 100644
index 4dafde1c8b0..00000000000
--- a/db_release_scripts/mosip_iam/sql/1.1.5_iam-scripts_release.sql
+++ /dev/null
@@ -1,18 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name : mosip_iam
--- Release Version : 1.2.0-SNAPSHOT
--- Purpose : Database Alter scripts for the release for IAM DB.
--- Create By : Ram Bhatt
--- Created Date : Jan-2021
---
--- Modified Date Modified By Comments / Remarks
---
--- -------------------------------------------------------------------------------------------------
-
-\c mosip_iam sysadmin
-
--- -------------------------------------------------------------------------------------------------
-
-
-
-----------------------------------------------------------------------------------------------------
diff --git a/db_release_scripts/mosip_iam/sql/1.1.5_iam-scripts_revoke.sql b/db_release_scripts/mosip_iam/sql/1.1.5_iam-scripts_revoke.sql
deleted file mode 100644
index 56c56a43749..00000000000
--- a/db_release_scripts/mosip_iam/sql/1.1.5_iam-scripts_revoke.sql
+++ /dev/null
@@ -1,13 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name : mosip_iam
--- Release Version : 1.2.0-SNAPSHOT
--- Purpose : Revoking Database Alter deployement done for release in Iam DB.
--- Create By : Ram Bhatt
--- Created Date : Jan-2021
---
--- Modified Date Modified By Comments / Remarks
--- -------------------------------------------------------------------------------------------------
-
-\c mosip_iam sysadmin
-
------------------------------------------------------------------------------------------------------
diff --git a/db_release_scripts/mosip_kernel/ddl/kernel-uin_assigned.sql b/db_release_scripts/mosip_kernel/ddl/kernel-uin_assigned.sql
deleted file mode 100644
index 640a2d6d363..00000000000
--- a/db_release_scripts/mosip_kernel/ddl/kernel-uin_assigned.sql
+++ /dev/null
@@ -1,47 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name: mosip_kernel
--- Table Name : kernel.uin_assigned
--- Purpose : UIN ASSIGNED: Stores pre-generated UINs that are assigned to an individual as part of registration process..
---
--- Create By : Sadanandegowda DM
--- Created Date : 20-May-2020
---
--- Modified Date Modified By Comments / Remarks
--- ------------------------------------------------------------------------------------------
---
--- ------------------------------------------------------------------------------------------
-
--- object: kernel.uin_assigned | type: TABLE --
--- DROP TABLE IF EXISTS kernel.uin_assigned CASCADE;
-CREATE TABLE kernel.uin_assigned(
- uin character varying(28) NOT NULL,
- uin_status character varying(16),
- cr_by character varying(256) NOT NULL,
- cr_dtimes timestamp NOT NULL,
- upd_by character varying(256),
- upd_dtimes timestamp,
- is_deleted boolean,
- del_dtimes timestamp,
- CONSTRAINT pk_uinass_id PRIMARY KEY (uin)
-
-);
--- ddl-end --
-COMMENT ON TABLE kernel.uin_assigned IS 'UIN: Stores pre-generated UINs that are assigned to an individual as part of registration process.';
--- ddl-end --
-COMMENT ON COLUMN kernel.uin_assigned.uin IS 'UIN: Pre-generated UINs (Unique Identification Number), which will be used to assign to an individual';
--- ddl-end --
-COMMENT ON COLUMN kernel.uin_assigned.uin_status IS 'Is Used: Status of the pre-generated UIN, whether it is assigned, unassigned or issued.';
--- ddl-end --
-COMMENT ON COLUMN kernel.uin_assigned.cr_by IS 'Created By : ID or name of the user who create / insert record';
--- ddl-end --
-COMMENT ON COLUMN kernel.uin_assigned.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
--- ddl-end --
-COMMENT ON COLUMN kernel.uin_assigned.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
--- ddl-end --
-COMMENT ON COLUMN kernel.uin_assigned.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
--- ddl-end --
-COMMENT ON COLUMN kernel.uin_assigned.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
--- ddl-end --
-COMMENT ON COLUMN kernel.uin_assigned.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
--- ddl-end --
-
diff --git a/db_release_scripts/mosip_kernel/ddl/kernel-vid_assigned.sql b/db_release_scripts/mosip_kernel/ddl/kernel-vid_assigned.sql
deleted file mode 100644
index e528e5ac4c6..00000000000
--- a/db_release_scripts/mosip_kernel/ddl/kernel-vid_assigned.sql
+++ /dev/null
@@ -1,49 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name: mosip_kernel
--- Table Name : kernel.vid_assigned
--- Purpose : VID Assigned: Stores pre-generated VIDs that are assigned /expired to an individual as part of mosip process.
---
--- Create By : Sadanandegowda DM
--- Created Date : Dec-2020
---
--- Modified Date Modified By Comments / Remarks
--- ------------------------------------------------------------------------------------------
---
--- ------------------------------------------------------------------------------------------
-
--- object: kernel.vid_assigned | type: TABLE --
--- DROP TABLE IF EXISTS kernel.vid_assigned CASCADE;
-CREATE TABLE kernel.vid_assigned(
- vid character varying(36) NOT NULL,
- expiry_dtimes timestamp,
- vid_status character varying(16) NOT NULL,
- cr_by character varying(256) NOT NULL,
- cr_dtimes timestamp NOT NULL,
- upd_by character varying(256),
- upd_dtimes timestamp,
- is_deleted boolean,
- del_dtimes timestamp,
- CONSTRAINT pk_vida_id PRIMARY KEY (vid)
-
-);
--- ddl-end --
-COMMENT ON TABLE kernel.vid_assigned IS 'VID Assigned: Stores pre-generated VIDs that are assigned /expired to an individual as part of mosip process.';
--- ddl-end --
-COMMENT ON COLUMN kernel.vid_assigned.vid IS 'VID: Pre-generated VIDs (Vertual Identification Number), which will be used to assign to an individual';
--- ddl-end --
-COMMENT ON COLUMN kernel.vid_assigned.expiry_dtimes IS 'Expiry Date and Time: Expiry Date and Time of the Vertual ID';
--- ddl-end --
-COMMENT ON COLUMN kernel.vid_assigned.vid_status IS 'VID: Status of the pre-generated VID, whether it is available, expired or assigned.';
--- ddl-end --
-COMMENT ON COLUMN kernel.vid_assigned.cr_by IS 'Created By : ID or name of the user who create / insert record';
--- ddl-end --
-COMMENT ON COLUMN kernel.vid_assigned.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
--- ddl-end --
-COMMENT ON COLUMN kernel.vid_assigned.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
--- ddl-end --
-COMMENT ON COLUMN kernel.vid_assigned.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
--- ddl-end --
-COMMENT ON COLUMN kernel.vid_assigned.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
--- ddl-end --
-COMMENT ON COLUMN kernel.vid_assigned.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
--- ddl-end --
\ No newline at end of file
diff --git a/db_release_scripts/mosip_kernel/kernel_release_db_deploy.sh b/db_release_scripts/mosip_kernel/kernel_release_db_deploy.sh
deleted file mode 100644
index 0cf94e1cdf0..00000000000
--- a/db_release_scripts/mosip_kernel/kernel_release_db_deploy.sh
+++ /dev/null
@@ -1,92 +0,0 @@
-### -- ---------------------------------------------------------------------------------------------------------
-### -- Script Name : Kernel Release DB deploy
-### -- Deploy Module : MOSIP Kernel
-### -- Purpose : To deploy Kernel Database alter scripts for the release.
-### -- Create By : Sadanandegowda
-### -- Created Date : 25-Oct-2019
-### --
-### -- Modified Date Modified By Comments / Remarks
-### -- -----------------------------------------------------------------------------------------------------------
-
-### -- -----------------------------------------------------------------------------------------------------------
-
-#########Properties file #############
-set -e
-properties_file="$1"
-release_version="$2"
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Properties File Name - $properties_file"
- echo `date "+%m/%d/%Y %H:%M:%S"` ": DB Deploymnet Version - $release_version"
-#properties_file="./app.properties"
-if [ -f "$properties_file" ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file \"$properties_file\" found."
- while IFS='=' read -r key value
- do
- key=$(echo $key | tr '.' '_')
- eval ${key}=\${value}
- done < "$properties_file"
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file not found, Pass property file name as argument."
-fi
-echo `date "+%m/%d/%Y %H:%M:%S"` ": ------------------ Database server and service status check for ${MOSIP_DB_NAME}------------------------"
-
-today=`date '+%d%m%Y_%H%M%S'`;
-LOG="${LOG_PATH}${MOSIP_DB_NAME}-release-${release_version}-${today}.log"
-touch $LOG
-
-SERVICE=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('sysadmin')";exit; > /dev/null)
-
-if [ "$SERVICE" -eq 0 ] || [ "$SERVICE" -eq 1 ]
-then
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server and service is up and running" | tee -a $LOG 2>&1
-else
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server or service is not running" | tee -a $LOG 2>&1
-fi
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": ----------------------------------------------------------------------------------------"
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Started sourcing the $MOSIP_DB_NAME Database Alter scripts" | tee -a $LOG 2>&1
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Database Alter scripts are sourcing from :$BASEPATH/$MOSIP_DB_NAME/" | tee -a $LOG 2>&1
-
-#========================================DB Alter Scripts deployment process begins on IDMAP DB SERVER==================================
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Alter scripts deployment on $MOSIP_DB_NAME database is started....Deployment Version...$release_version" | tee -a $LOG 2>&1
-
-ALTER_SCRIPT_FILENAME_VERSION="sql/${release_version}_${ALTER_SCRIPT_FILENAME}"
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Alter scripts file which is considered for release deployment - $ALTER_SCRIPT_FILENAME_VERSION" | tee -a $LOG 2>&1
-
-cd /$BASEPATH/$MOSIP_DB_NAME/
-
-pwd | tee -a $LOG 2>&1
-
-CONN=$(PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit; >> $LOG 2>&1)
-
-if [ ${CONN} == 0 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": No active database connections exist on ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Active connections exist on the database server and active connection will be terminated for DB deployment." | tee -a $LOG 2>&1
-fi
-
-if [ ${ALTER_SCRIPT_FLAG} == 1 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Deploying Alter scripts for ${MOSIP_DB_NAME} database" | tee -a $LOG 2>&1
- PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f $ALTER_SCRIPT_FILENAME_VERSION >> $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": There are no alter scripts available for this deployment at ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
-fi
-
-if [ $(grep -c ERROR $LOG) -ne 0 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Database Alter scripts deployment version $release_version is completed with ERRORS, Please check the logs for more information" | tee -a $LOG 2>&1
- echo `date "+%m/%d/%Y %H:%M:%S"` ": END of Alter scripts MOSIP database deployment" | tee -a $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Database Alter scripts deployment version $release_version completed successfully, Please check the logs for more information" | tee -a $LOG 2>&1
- echo `date "+%m/%d/%Y %H:%M:%S"` ": END of MOSIP \"${MOSIP_DB_NAME}\" database alter scripts deployment" | tee -a $LOG 2>&1
-fi
-
-echo "******************************************"`date "+%m/%d/%Y %H:%M:%S"` "*****************************************************" >> $LOG 2>&1
-
-
diff --git a/db_release_scripts/mosip_kernel/kernel_release_deploy.properties b/db_release_scripts/mosip_kernel/kernel_release_deploy.properties
deleted file mode 100644
index cab5c993c8f..00000000000
--- a/db_release_scripts/mosip_kernel/kernel_release_deploy.properties
+++ /dev/null
@@ -1,12 +0,0 @@
-DB_SERVERIP=
-DB_PORT=30090
-SU_USER=postgres
-DEFAULT_DB_NAME=postgres
-MOSIP_DB_NAME=mosip_kernel
-SYSADMIN_USER=sysadmin
-BASEPATH=/home/madmin/database_release
-LOG_PATH=/home/madmin/logs/
-ALTER_SCRIPT_FLAG=1
-ALTER_SCRIPT_FILENAME=kernel-scripts_release.sql
-REVOKE_SCRIPT_FLAG=1
-REVOKE_SCRIPT_FILENAME=kernel-scripts_revoke.sql
diff --git a/db_release_scripts/mosip_kernel/kernel_revoke_db_deploy.sh b/db_release_scripts/mosip_kernel/kernel_revoke_db_deploy.sh
deleted file mode 100644
index 7a52ef81f65..00000000000
--- a/db_release_scripts/mosip_kernel/kernel_revoke_db_deploy.sh
+++ /dev/null
@@ -1,92 +0,0 @@
-### -- ---------------------------------------------------------------------------------------------------------
-### -- Script Name : Kernel Revoke DB deploy
-### -- Deploy Module : MOSIP Kernel
-### -- Purpose : To revoke Kernel Database alter scripts for the release.
-### -- Create By : Sadanandegowda
-### -- Created Date : 25-Oct-2019
-### --
-### -- Modified Date Modified By Comments / Remarks
-### -- -----------------------------------------------------------------------------------------------------------
-
-### -- -----------------------------------------------------------------------------------------------------------
-
-#########Properties file #############
-set -e
-properties_file="$1"
-revoke_version="$2"
- echo `date "+%m/%d/%Y %H:%M:%S"` ": $properties_file"
- echo `date "+%m/%d/%Y %H:%M:%S"` ": DB Revoke Version - $revoke_version"
-#properties_file="./app.properties"
-if [ -f "$properties_file" ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file \"$properties_file\" found."
- while IFS='=' read -r key value
- do
- key=$(echo $key | tr '.' '_')
- eval ${key}=\${value}
- done < "$properties_file"
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file not found, Pass property file name as argument."
-fi
-echo `date "+%m/%d/%Y %H:%M:%S"` ": ------------------ Database server and service status check for ${MOSIP_DB_NAME}------------------------"
-
-today=`date '+%d%m%Y_%H%M%S'`;
-LOG="${LOG_PATH}${MOSIP_DB_NAME}-revoke-${today}.log"
-touch $LOG
-
-SERVICE=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('sysadmin')";exit; > /dev/null)
-
-if [ "$SERVICE" -eq 0 ] || [ "$SERVICE" -eq 1 ]
-then
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server and service is up and running" | tee -a $LOG 2>&1
-else
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server or service is not running" | tee -a $LOG 2>&1
-fi
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": ----------------------------------------------------------------------------------------"
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Started sourcing the $MOSIP_DB_NAME Database Deployment Revoke scripts" | tee -a $LOG 2>&1
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Database revoke scripts are sourcing from :$BASEPATH/$MOSIP_DB_NAME/alter-scripts" | tee -a $LOG 2>&1
-
-#========================================DB Alter Scripts deployment process begins on IDMAP DB SERVER==================================
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Revoke scripts for DB deployment on $MOSIP_DB_NAME database is started....Revoke Version...$revoke_version" | tee -a $LOG 2>&1
-
-REVOKE_SCRIPT_FILENAME_VERSION="sql/${revoke_version}_${REVOKE_SCRIPT_FILENAME}"
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Alter scripts file which is considered for deployment revoke - $REVOKE_SCRIPT_FILENAME_VERSION" | tee -a $LOG 2>&1
-
-cd /$BASEPATH/$MOSIP_DB_NAME/
-
-pwd | tee -a $LOG 2>&1
-
-CONN=$(PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit; >> $LOG 2>&1)
-
-if [ ${CONN} == 0 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": No active database connections exist on ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Active connections exist on the database server and active connection will be terminated for DB deployment." | tee -a $LOG 2>&1
-fi
-
-if [ ${REVOKE_SCRIPT_FLAG} == 1 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Executing revoke scripts for ${MOSIP_DB_NAME} database" | tee -a $LOG 2>&1
- PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f $REVOKE_SCRIPT_FILENAME_VERSION >> $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": There are no revoke scripts available for this deployment at ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
-fi
-
-if [ $(grep -c ERROR $LOG) -ne 0 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Database deployment revoke version $revoke_version is completed with ERRORS, Please check the logs for more information" | tee -a $LOG 2>&1
- echo `date "+%m/%d/%Y %H:%M:%S"` ": END of Alter scripts MOSIP database deployment" | tee -a $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Database deployment revoke version $revoke_version completed successfully, Please check the logs for more information" | tee -a $LOG 2>&1
- echo `date "+%m/%d/%Y %H:%M:%S"` ": END of MOSIP \"${MOSIP_DB_NAME}\" database deployment revoke" | tee -a $LOG 2>&1
-fi
-
-echo "******************************************"`date "+%m/%d/%Y %H:%M:%S"` "*****************************************************" >> $LOG 2>&1
-
-
diff --git a/db_release_scripts/mosip_kernel/sql/1.1.0_kernel-scripts_release.sql b/db_release_scripts/mosip_kernel/sql/1.1.0_kernel-scripts_release.sql
deleted file mode 100644
index a4f41771b68..00000000000
--- a/db_release_scripts/mosip_kernel/sql/1.1.0_kernel-scripts_release.sql
+++ /dev/null
@@ -1,15 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name : mosip_kernel
--- Release Version : 1.1.0
--- Purpose : Database Alter scripts for the release for Kernel DB.
--- Create By : Sadanandegowda DM
--- Created Date : May-2020
---
--- Modified Date Modified By Comments / Remarks
--- -------------------------------------------------------------------------------------------------
-
-\c mosip_kernel sysadmin
-
-\ir ../ddl/kernel-uin_assigned.sql
-
-----------------------------------------------------------------------------------------------------
\ No newline at end of file
diff --git a/db_release_scripts/mosip_kernel/sql/1.1.0_kernel-scripts_revoke.sql b/db_release_scripts/mosip_kernel/sql/1.1.0_kernel-scripts_revoke.sql
deleted file mode 100644
index 778c24791aa..00000000000
--- a/db_release_scripts/mosip_kernel/sql/1.1.0_kernel-scripts_revoke.sql
+++ /dev/null
@@ -1,14 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name : mosip_kernel
--- Release Version : 1.1.0
--- Purpose : Revoking Database Alter deployement done for release in Kernel DB.
--- Create By : Sadanandegowda DM
--- Created Date : May-2020
---
--- Modified Date Modified By Comments / Remarks
--- -------------------------------------------------------------------------------------------------
-
-\c mosip_kernel sysadmin
-
-DROP TABLE IF EXISTS kernel.uin_assigned;
------------------------------------------------------------------------------------------------------
\ No newline at end of file
diff --git a/db_release_scripts/mosip_kernel/sql/1.1.2_kernel-scripts_release.sql b/db_release_scripts/mosip_kernel/sql/1.1.2_kernel-scripts_release.sql
deleted file mode 100644
index 804e661c208..00000000000
--- a/db_release_scripts/mosip_kernel/sql/1.1.2_kernel-scripts_release.sql
+++ /dev/null
@@ -1,21 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name : mosip_kernel
--- Release Version : 1.2.0-SNAPSHOT
--- Purpose : Database Alter scripts for the release for Kernel DB.
--- Create By : Sadanandegowda DM
--- Created Date : Sep-2020
---
--- Modified Date Modified By Comments / Remarks
--- -------------------------------------------------------------------------------------------------
-
-\c mosip_kernel sysadmin
-
-DROP TABLE IF EXISTS kernel.key_alias CASCADE;
-DROP TABLE IF EXISTS kernel.key_policy_def CASCADE;
-DROP TABLE IF EXISTS kernel.key_policy_def_h CASCADE;
-DROP TABLE IF EXISTS kernel.key_store CASCADE;
-DROP TABLE IF EXISTS kernel.sync_control CASCADE;
-DROP TABLE IF EXISTS kernel.sync_job_def CASCADE;
-DROP TABLE IF EXISTS kernel.sync_transaction CASCADE;
-DROP TABLE IF EXISTS kernel.dao_key_store CASCADE;
-----------------------------------------------------------------------------------------------------
\ No newline at end of file
diff --git a/db_release_scripts/mosip_kernel/sql/1.1.2_kernel-scripts_revoke.sql b/db_release_scripts/mosip_kernel/sql/1.1.2_kernel-scripts_revoke.sql
deleted file mode 100644
index 60a499ccc8f..00000000000
--- a/db_release_scripts/mosip_kernel/sql/1.1.2_kernel-scripts_revoke.sql
+++ /dev/null
@@ -1,14 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name : mosip_kernel
--- Release Version : 1.2.0-SNAPSHOT
--- Purpose : Revoking Database Alter deployement done for release in Kernel DB.
--- Create By : Sadanandegowda DM
--- Created Date : Sep-2020
---
--- Modified Date Modified By Comments / Remarks
--- -------------------------------------------------------------------------------------------------
-
-\c mosip_kernel sysadmin
-
-
------------------------------------------------------------------------------------------------------
\ No newline at end of file
diff --git a/db_release_scripts/mosip_kernel/sql/1.1.4_kernel-scripts_release.sql b/db_release_scripts/mosip_kernel/sql/1.1.4_kernel-scripts_release.sql
deleted file mode 100644
index 6304e9eeb0b..00000000000
--- a/db_release_scripts/mosip_kernel/sql/1.1.4_kernel-scripts_release.sql
+++ /dev/null
@@ -1,15 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name : mosip_kernel
--- Release Version : 1.1.4
--- Purpose : Database Alter scripts for the release for Kernel DB.
--- Create By : Sadanandegowda DM
--- Created Date : May-2020
---
--- Modified Date Modified By Comments / Remarks
--- -------------------------------------------------------------------------------------------------
-
-\c mosip_kernel sysadmin
-
-\ir ../ddl/kernel-vid_assigned.sql
-
-----------------------------------------------------------------------------------------------------
\ No newline at end of file
diff --git a/db_release_scripts/mosip_kernel/sql/1.1.4_kernel-scripts_revoke.sql b/db_release_scripts/mosip_kernel/sql/1.1.4_kernel-scripts_revoke.sql
deleted file mode 100644
index 41058567f16..00000000000
--- a/db_release_scripts/mosip_kernel/sql/1.1.4_kernel-scripts_revoke.sql
+++ /dev/null
@@ -1,14 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name : mosip_kernel
--- Release Version : 1.1.4
--- Purpose : Revoking Database Alter deployement done for release in Kernel DB.
--- Create By : Sadanandegowda DM
--- Created Date : Dec-2020
---
--- Modified Date Modified By Comments / Remarks
--- -------------------------------------------------------------------------------------------------
-
-\c mosip_kernel sysadmin
-
-DROP TABLE IF EXISTS kernel.vid_assigned;
------------------------------------------------------------------------------------------------------
\ No newline at end of file
diff --git a/db_release_scripts/mosip_kernel/sql/1.1.5_kernel-scripts_release.sql b/db_release_scripts/mosip_kernel/sql/1.1.5_kernel-scripts_release.sql
deleted file mode 100644
index 42c77e63a9d..00000000000
--- a/db_release_scripts/mosip_kernel/sql/1.1.5_kernel-scripts_release.sql
+++ /dev/null
@@ -1,20 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name : mosip_kernel
--- Release Version : 1.2.0-SNAPSHOT
--- Purpose : Database Alter scripts for the release for Kernel DB.
--- Create By : Ram Bhatt
--- Created Date : Jan-2021
---
--- Modified Date Modified By Comments / Remarks
--- -------------------------------------------------------------------------------------------------
-
-\c mosip_kernel sysadmin
-
-
-
-
-
-
-
-
-----------------------------------------------------------------------------------------------------
diff --git a/db_release_scripts/mosip_kernel/sql/1.1.5_kernel-scripts_revoke.sql b/db_release_scripts/mosip_kernel/sql/1.1.5_kernel-scripts_revoke.sql
deleted file mode 100644
index 7852097cf20..00000000000
--- a/db_release_scripts/mosip_kernel/sql/1.1.5_kernel-scripts_revoke.sql
+++ /dev/null
@@ -1,13 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name : mosip_kernel
--- Release Version : 1.2.0-SNAPSHOT
--- Purpose : Revoking Database Alter deployement done for release in Kernel DB.
--- Create By : Ram Bhatt
--- Created Date : Jan-2021
---
--- Modified Date Modified By Comments / Remarks
--- -------------------------------------------------------------------------------------------------
-
-\c mosip_kernel sysadmin
-
------------------------------------------------------------------------------------------------------
diff --git a/db_release_scripts/mosip_regdevice/regdevice_release_db_deploy.sh b/db_release_scripts/mosip_regdevice/regdevice_release_db_deploy.sh
deleted file mode 100644
index d0d399d7764..00000000000
--- a/db_release_scripts/mosip_regdevice/regdevice_release_db_deploy.sh
+++ /dev/null
@@ -1,92 +0,0 @@
-### -- ---------------------------------------------------------------------------------------------------------
-### -- Script Name : Key Manager Release DB deploy
-### -- Deploy Module : MOSIP Key Manager
-### -- Purpose : To deploy Key Manager Database alter scripts for the release.
-### -- Create By : Sadanandegowda
-### -- Created Date : Dec-2020
-### --
-### -- Modified Date Modified By Comments / Remarks
-### -- -----------------------------------------------------------------------------------------------------------
-
-### -- -----------------------------------------------------------------------------------------------------------
-
-#########Properties file #############
-set -e
-properties_file="$1"
-release_version="$2"
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Properties File Name - $properties_file"
- echo `date "+%m/%d/%Y %H:%M:%S"` ": DB Deploymnet Version - $release_version"
-#properties_file="./app.properties"
-if [ -f "$properties_file" ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file \"$properties_file\" found."
- while IFS='=' read -r key value
- do
- key=$(echo $key | tr '.' '_')
- eval ${key}=\${value}
- done < "$properties_file"
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file not found, Pass property file name as argument."
-fi
-echo `date "+%m/%d/%Y %H:%M:%S"` ": ------------------ Database server and service status check for ${MOSIP_DB_NAME}------------------------"
-
-today=`date '+%d%m%Y_%H%M%S'`;
-LOG="${LOG_PATH}${MOSIP_DB_NAME}-release-${release_version}-${today}.log"
-touch $LOG
-
-SERVICE=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('sysadmin')";exit; > /dev/null)
-
-if [ "$SERVICE" -eq 0 ] || [ "$SERVICE" -eq 1 ]
-then
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server and service is up and running" | tee -a $LOG 2>&1
-else
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server or service is not running" | tee -a $LOG 2>&1
-fi
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": ----------------------------------------------------------------------------------------"
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Started sourcing the $MOSIP_DB_NAME Database Alter scripts" | tee -a $LOG 2>&1
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Database Alter scripts are sourcing from :$BASEPATH/$MOSIP_DB_NAME/" | tee -a $LOG 2>&1
-
-#========================================DB Alter Scripts deployment process begins on Key Manager DB SERVER==================================
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Alter scripts deployment on $MOSIP_DB_NAME database is started....Deployment Version...$release_version" | tee -a $LOG 2>&1
-
-ALTER_SCRIPT_FILENAME_VERSION="sql/${release_version}_${ALTER_SCRIPT_FILENAME}"
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Alter scripts file which is considered for release deployment - $ALTER_SCRIPT_FILENAME_VERSION" | tee -a $LOG 2>&1
-
-cd /$BASEPATH/$MOSIP_DB_NAME/
-
-pwd | tee -a $LOG 2>&1
-
-CONN=$(PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit; >> $LOG 2>&1)
-
-if [ ${CONN} == 0 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": No active database connections exist on ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Active connections exist on the database server and active connection will be terminated for DB deployment." | tee -a $LOG 2>&1
-fi
-
-if [ ${ALTER_SCRIPT_FLAG} == 1 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Deploying Alter scripts for ${MOSIP_DB_NAME} database" | tee -a $LOG 2>&1
- PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f $ALTER_SCRIPT_FILENAME_VERSION >> $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": There are no alter scripts available for this deployment at ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
-fi
-
-if [ $(grep -c ERROR $LOG) -ne 0 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Database Alter scripts deployment version $release_version is completed with ERRORS, Please check the logs for more information" | tee -a $LOG 2>&1
- echo `date "+%m/%d/%Y %H:%M:%S"` ": END of Alter scripts MOSIP database deployment" | tee -a $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Database Alter scripts deployment version $release_version completed successfully, Please check the logs for more information" | tee -a $LOG 2>&1
- echo `date "+%m/%d/%Y %H:%M:%S"` ": END of MOSIP \"${MOSIP_DB_NAME}\" database alter scripts deployment" | tee -a $LOG 2>&1
-fi
-
-echo "******************************************"`date "+%m/%d/%Y %H:%M:%S"` "*****************************************************" >> $LOG 2>&1
-
-
diff --git a/db_release_scripts/mosip_regdevice/regdevice_release_deploy.properties b/db_release_scripts/mosip_regdevice/regdevice_release_deploy.properties
deleted file mode 100644
index e93518bfcb1..00000000000
--- a/db_release_scripts/mosip_regdevice/regdevice_release_deploy.properties
+++ /dev/null
@@ -1,12 +0,0 @@
-DB_SERVERIP=
-DB_PORT=30090
-SU_USER=postgres
-DEFAULT_DB_NAME=postgres
-MOSIP_DB_NAME=mosip_regdevice
-SYSADMIN_USER=sysadmin
-BASEPATH=/home/madmin/database_release
-LOG_PATH=/home/madmin/logs/
-ALTER_SCRIPT_FLAG=1
-ALTER_SCRIPT_FILENAME=regdevice-scripts_release.sql
-REVOKE_SCRIPT_FLAG=1
-REVOKE_SCRIPT_FILENAME=regdevice-scripts_revoke.sql
diff --git a/db_release_scripts/mosip_regdevice/regdevice_revoke_db_deploy.sh b/db_release_scripts/mosip_regdevice/regdevice_revoke_db_deploy.sh
deleted file mode 100644
index 308d94a4254..00000000000
--- a/db_release_scripts/mosip_regdevice/regdevice_revoke_db_deploy.sh
+++ /dev/null
@@ -1,92 +0,0 @@
-### -- ---------------------------------------------------------------------------------------------------------
-### -- Script Name : Key Manager Revoke DB deploy
-### -- Deploy Module : MOSIP Key Manager
-### -- Purpose : To revoke Key Manager Database alter scripts for the release.
-### -- Create By : Sadanandegowda
-### -- Created Date : Dec-2020
-### --
-### -- Modified Date Modified By Comments / Remarks
-### -- -----------------------------------------------------------------------------------------------------------
-
-### -- -----------------------------------------------------------------------------------------------------------
-
-#########Properties file #############
-set -e
-properties_file="$1"
-revoke_version="$2"
- echo `date "+%m/%d/%Y %H:%M:%S"` ": $properties_file"
- echo `date "+%m/%d/%Y %H:%M:%S"` ": DB Revoke Version - $revoke_version"
-#properties_file="./app.properties"
-if [ -f "$properties_file" ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file \"$properties_file\" found."
- while IFS='=' read -r key value
- do
- key=$(echo $key | tr '.' '_')
- eval ${key}=\${value}
- done < "$properties_file"
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file not found, Pass property file name as argument."
-fi
-echo `date "+%m/%d/%Y %H:%M:%S"` ": ------------------ Database server and service status check for ${MOSIP_DB_NAME}------------------------"
-
-today=`date '+%d%m%Y_%H%M%S'`;
-LOG="${LOG_PATH}${MOSIP_DB_NAME}-revoke-${today}.log"
-touch $LOG
-
-SERVICE=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('sysadmin')";exit; > /dev/null)
-
-if [ "$SERVICE" -eq 0 ] || [ "$SERVICE" -eq 1 ]
-then
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server and service is up and running" | tee -a $LOG 2>&1
-else
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server or service is not running" | tee -a $LOG 2>&1
-fi
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": ----------------------------------------------------------------------------------------"
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Started sourcing the $MOSIP_DB_NAME Database Deployment Revoke scripts" | tee -a $LOG 2>&1
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Database revoke scripts are sourcing from :$BASEPATH/$MOSIP_DB_NAME/alter-scripts" | tee -a $LOG 2>&1
-
-#========================================DB Alter Scripts deployment process begins on Key Manager DB SERVER==================================
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Revoke scripts for DB deployment on $MOSIP_DB_NAME database is started....Revoke Version...$revoke_version" | tee -a $LOG 2>&1
-
-REVOKE_SCRIPT_FILENAME_VERSION="sql/${revoke_version}_${REVOKE_SCRIPT_FILENAME}"
-
-echo `date "+%m/%d/%Y %H:%M:%S"` ": Alter scripts file which is considered for deployment revoke - $REVOKE_SCRIPT_FILENAME_VERSION" | tee -a $LOG 2>&1
-
-cd /$BASEPATH/$MOSIP_DB_NAME/
-
-pwd | tee -a $LOG 2>&1
-
-CONN=$(PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit; >> $LOG 2>&1)
-
-if [ ${CONN} == 0 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": No active database connections exist on ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Active connections exist on the database server and active connection will be terminated for DB deployment." | tee -a $LOG 2>&1
-fi
-
-if [ ${REVOKE_SCRIPT_FLAG} == 1 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Executing revoke scripts for ${MOSIP_DB_NAME} database" | tee -a $LOG 2>&1
- PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f $REVOKE_SCRIPT_FILENAME_VERSION >> $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": There are no revoke scripts available for this deployment at ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
-fi
-
-if [ $(grep -c ERROR $LOG) -ne 0 ]
-then
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Database deployment revoke version $revoke_version is completed with ERRORS, Please check the logs for more information" | tee -a $LOG 2>&1
- echo `date "+%m/%d/%Y %H:%M:%S"` ": END of Alter scripts MOSIP database deployment" | tee -a $LOG 2>&1
-else
- echo `date "+%m/%d/%Y %H:%M:%S"` ": Database deployment revoke version $revoke_version completed successfully, Please check the logs for more information" | tee -a $LOG 2>&1
- echo `date "+%m/%d/%Y %H:%M:%S"` ": END of MOSIP \"${MOSIP_DB_NAME}\" database deployment revoke" | tee -a $LOG 2>&1
-fi
-
-echo "******************************************"`date "+%m/%d/%Y %H:%M:%S"` "*****************************************************" >> $LOG 2>&1
-
-
diff --git a/db_release_scripts/mosip_regdevice/sql/1.1.5_regdevice-scripts_release.sql b/db_release_scripts/mosip_regdevice/sql/1.1.5_regdevice-scripts_release.sql
deleted file mode 100644
index c8d54c8a423..00000000000
--- a/db_release_scripts/mosip_regdevice/sql/1.1.5_regdevice-scripts_release.sql
+++ /dev/null
@@ -1,14 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name : mosip_regdevice
--- Release Version : 1.2.0-SNAPSHOT
--- Purpose : Database Alter scripts for the release for Regdevice DB.
--- Create By : Ram Bhatt
--- Created Date : Jan-2021
---
--- Modified Date Modified By Comments / Remarks
--- -------------------------------------------------------------------------------------------------
-
-\c mosip_regdevice sysadmin
-
-
-----------------------------------------------------------------------------------------------------
diff --git a/db_release_scripts/mosip_regdevice/sql/1.1.5_regdevice-scripts_revoke.sql b/db_release_scripts/mosip_regdevice/sql/1.1.5_regdevice-scripts_revoke.sql
deleted file mode 100644
index 237d6416b0b..00000000000
--- a/db_release_scripts/mosip_regdevice/sql/1.1.5_regdevice-scripts_revoke.sql
+++ /dev/null
@@ -1,13 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name : mosip_keymgr
--- Release Version : 1.2.0-SNAPSHOT
--- Purpose : Revoking Database Alter deployement done for release in Key manager DB.
--- Create By : Ram Bhatt
--- Created Date : Jan-2021
---
--- Modified Date Modified By Comments / Remarks
--- -------------------------------------------------------------------------------------------------
-
-\c mosip_kernel sysadmin
-
------------------------------------------------------------------------------------------------------
diff --git a/db_release_scripts/mosip_regdevice/sql/1.2_regdevice-scripts_release.sql b/db_release_scripts/mosip_regdevice/sql/1.2_regdevice-scripts_release.sql
deleted file mode 100644
index 2dac3e0d284..00000000000
--- a/db_release_scripts/mosip_regdevice/sql/1.2_regdevice-scripts_release.sql
+++ /dev/null
@@ -1,14 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name : mosip_regdevice
--- Release Version : 1.2.0-SNAPSHOT
--- Purpose : Database Alter scripts for the release for Regdevice DB.
--- Create By : Ram Bhatt
--- Created Date : Nov-2021
---
--- Modified Date Modified By Comments / Remarks
--- -------------------------------------------------------------------------------------------------
-
-\c mosip_regdevice sysadmin
-
-ALTER TABLE regdevice.secure_biometric_interface DROP CONSTRAINT IF EXISTS fk_sbi_id CASCADE;
-----------------------------------------------------------------------------------------------------
diff --git a/db_scripts/README.md b/db_scripts/README.md
index 7b87c168a7b..0cb8ad5e441 100644
--- a/db_scripts/README.md
+++ b/db_scripts/README.md
@@ -5,6 +5,7 @@ This folder containers various SQL scripts to create database and tables in post
This folder containers various SQL scripts to create database and tables in postgres. These scripts are automatically run with as part of DB initialisation in [Sandbox Deployment](https://docs.mosip.io/1.2.0/deployment/sandbox-deployment)
+
Developers may run the SQLs using `/deploy.sh` script.
diff --git a/db_scripts/mosip_authdevice/deploy.sh b/db_scripts/mosip_authdevice/deploy.sh
index 82c0770fe6b..09c64727bec 100644
--- a/db_scripts/mosip_authdevice/deploy.sh
+++ b/db_scripts/mosip_authdevice/deploy.sh
@@ -17,31 +17,31 @@ fi
## Terminate existing connections
echo "Terminating active connections"
-CONN=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit;)
+CONN=$(PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit;)
echo "Terminated connections"
## Drop db and role
echo "Dropping DB"
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f drop_db.sql
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f drop_db.sql
echo "Dropping user"
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f drop_role.sql
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f drop_role.sql
## Create users
echo `date "+%m/%d/%Y %H:%M:%S"` ": Creating database users"
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f role_dbuser.sql -v dbuserpwd=\'$DBUSER_PWD\'
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f role_dbuser.sql -v dbuserpwd=\'$DBUSER_PWD\'
## Create DB
echo "Creating DB"
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f db.sql
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f ddl.sql
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f db.sql
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f ddl.sql
## Grants
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f grants.sql
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f grants.sql
## Populate tables
if [ ${DML_FLAG} == 1 ]
then
echo `date "+%m/%d/%Y %H:%M:%S"` ": Deploying DML for ${MOSIP_DB_NAME} database"
- PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f dml.sql
+ PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f dml.sql
fi
diff --git a/db_scripts/mosip_kernel/deploy.sh b/db_scripts/mosip_kernel/deploy.sh
index 4faa8d32a4a..fe697a12571 100644
--- a/db_scripts/mosip_kernel/deploy.sh
+++ b/db_scripts/mosip_kernel/deploy.sh
@@ -17,29 +17,29 @@ fi
## Terminate existing connections
echo "Terminating active connections"
-CONN=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit;)
+CONN=$(PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit;)
echo "Terminated connections"
## Drop db and role
echo "Dropping DB"
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f drop_db.sql
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f drop_db.sql
echo "Dropping user"
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f drop_role.sql
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f drop_role.sql
## Create DB
echo "Creating DB and tables"
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f db.sql
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f ddl.sql
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f db.sql
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f ddl.sql
## Create users
echo `date "+%m/%d/%Y %H:%M:%S"` ": Creating database users"
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f role_dbuser.sql -v dbuserpwd=\'$DBUSER_PWD\'
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f grants.sql
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f role_dbuser.sql -v dbuserpwd=\'$DBUSER_PWD\'
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f grants.sql
## Populate tables
if [ ${DML_FLAG} == 1 ]
then
echo `date "+%m/%d/%Y %H:%M:%S"` ": Deploying DML for ${MOSIP_DB_NAME} database"
- PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f dml.sql
+ PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f dml.sql
fi
diff --git a/db_scripts/mosip_regdevice/deploy.sh b/db_scripts/mosip_regdevice/deploy.sh
index 36df1a987ce..4c5631601ef 100644
--- a/db_scripts/mosip_regdevice/deploy.sh
+++ b/db_scripts/mosip_regdevice/deploy.sh
@@ -17,31 +17,31 @@ fi
## Terminate existing connections
echo "Terminating active connections"
-CONN=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit;)
+CONN=$(PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit;)
echo "Terminated connections"
## Drop db and role
echo "Dropping DB"
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f drop_db.sql
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f drop_db.sql
echo "Dropping user"
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f drop_role.sql
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f drop_role.sql
## Create users
echo `date "+%m/%d/%Y %H:%M:%S"` ": Creating database users"
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f role_dbuser.sql -v dbuserpwd=\'$DBUSER_PWD\'
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f role_dbuser.sql -v dbuserpwd=\'$DBUSER_PWD\'
## Create DB
echo "Creating DB"
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f db.sql
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f ddl.sql
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f db.sql
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f ddl.sql
## Grants
-PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f grants.sql
+PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f grants.sql
## Populate tables
if [ ${DML_FLAG} == 1 ]
then
echo `date "+%m/%d/%Y %H:%M:%S"` ": Deploying DML for ${MOSIP_DB_NAME} database"
- PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f dml.sql
+ PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f dml.sql
fi
diff --git a/db_release_scripts/README.MD b/db_upgrade_scripts/README.MD
similarity index 100%
rename from db_release_scripts/README.MD
rename to db_upgrade_scripts/README.MD
diff --git a/db_upgrade_scripts/mosip_kernel/sql/1.1.5.5_to_1.2.0.1-B1_rollback.sql b/db_upgrade_scripts/mosip_kernel/sql/1.1.5.5_to_1.2.0.1-B1_rollback.sql
new file mode 100644
index 00000000000..80b4115a3fc
--- /dev/null
+++ b/db_upgrade_scripts/mosip_kernel/sql/1.1.5.5_to_1.2.0.1-B1_rollback.sql
@@ -0,0 +1,5 @@
+\c mosip_kernel
+
+REASSIGN OWNED BY postgres TO sysadmin;
+
+GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA kernel TO sysadmin;
diff --git a/db_upgrade_scripts/mosip_kernel/sql/1.1.5.5_to_1.2.0.1-B1_upgrade.sql b/db_upgrade_scripts/mosip_kernel/sql/1.1.5.5_to_1.2.0.1-B1_upgrade.sql
new file mode 100644
index 00000000000..98d6d039609
--- /dev/null
+++ b/db_upgrade_scripts/mosip_kernel/sql/1.1.5.5_to_1.2.0.1-B1_upgrade.sql
@@ -0,0 +1,17 @@
+\c mosip_kernel
+
+REASSIGN OWNED BY sysadmin TO postgres;
+
+REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA kernel FROM kerneluser;
+
+REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA kernel FROM sysadmin;
+
+GRANT SELECT, INSERT, TRUNCATE, REFERENCES, UPDATE, DELETE ON ALL TABLES IN SCHEMA kernel TO kerneluser;
+
+GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA kernel TO postgres;
+
+
+CREATE INDEX IF NOT EXISTS idx_prid_status
+ ON kernel.prid USING btree
+ (prid_status COLLATE pg_catalog."default" ASC NULLS LAST)
+ TABLESPACE pg_default;
diff --git a/db_upgrade_scripts/mosip_kernel/sql/1.2.0.1-B1_to_1.2.0.1-B2_rollback.sql b/db_upgrade_scripts/mosip_kernel/sql/1.2.0.1-B1_to_1.2.0.1-B2_rollback.sql
new file mode 100644
index 00000000000..381e2be11c1
--- /dev/null
+++ b/db_upgrade_scripts/mosip_kernel/sql/1.2.0.1-B1_to_1.2.0.1-B2_rollback.sql
@@ -0,0 +1 @@
+\echo 'Rollback Queries not required for transition from $CURRENT_VERSION to $UPGRADE_VERSION'
\ No newline at end of file
diff --git a/db_upgrade_scripts/mosip_kernel/sql/1.2.0.1-B1_to_1.2.0.1-B2_upgrade.sql b/db_upgrade_scripts/mosip_kernel/sql/1.2.0.1-B1_to_1.2.0.1-B2_upgrade.sql
new file mode 100644
index 00000000000..381e2be11c1
--- /dev/null
+++ b/db_upgrade_scripts/mosip_kernel/sql/1.2.0.1-B1_to_1.2.0.1-B2_upgrade.sql
@@ -0,0 +1 @@
+\echo 'Upgrade Queries not required for transition from $CURRENT_VERSION to $UPGRADE_VERSION'
\ No newline at end of file
diff --git a/db_upgrade_scripts/mosip_kernel/sql/1.2.0.1-B2_to_1.2.0.1_rollback.sql b/db_upgrade_scripts/mosip_kernel/sql/1.2.0.1-B2_to_1.2.0.1_rollback.sql
new file mode 100644
index 00000000000..381e2be11c1
--- /dev/null
+++ b/db_upgrade_scripts/mosip_kernel/sql/1.2.0.1-B2_to_1.2.0.1_rollback.sql
@@ -0,0 +1 @@
+\echo 'Rollback Queries not required for transition from $CURRENT_VERSION to $UPGRADE_VERSION'
\ No newline at end of file
diff --git a/db_upgrade_scripts/mosip_kernel/sql/1.2.0.1-B2_to_1.2.0.1_upgrade.sql b/db_upgrade_scripts/mosip_kernel/sql/1.2.0.1-B2_to_1.2.0.1_upgrade.sql
new file mode 100644
index 00000000000..381e2be11c1
--- /dev/null
+++ b/db_upgrade_scripts/mosip_kernel/sql/1.2.0.1-B2_to_1.2.0.1_upgrade.sql
@@ -0,0 +1 @@
+\echo 'Upgrade Queries not required for transition from $CURRENT_VERSION to $UPGRADE_VERSION'
\ No newline at end of file
diff --git a/db_upgrade_scripts/mosip_kernel/upgrade.properties b/db_upgrade_scripts/mosip_kernel/upgrade.properties
new file mode 100644
index 00000000000..1b337f9025d
--- /dev/null
+++ b/db_upgrade_scripts/mosip_kernel/upgrade.properties
@@ -0,0 +1,12 @@
+ACTION=upgrade
+MOSIP_DB_NAME=mosip_kernel
+DB_SERVERIP=
+DB_PORT=
+SU_USER=postgres
+SU_USER_PWD=
+SYS_ADMIN_USER=
+SYS_ADMIN_PWD=
+DEFAULT_DB_NAME=postgres
+DBUSER_PWD=
+CURRENT_VERSION=
+UPGRADE_VERSION=
diff --git a/db_upgrade_scripts/mosip_kernel/upgrade.sh b/db_upgrade_scripts/mosip_kernel/upgrade.sh
new file mode 100644
index 00000000000..f5fc706f8c3
--- /dev/null
+++ b/db_upgrade_scripts/mosip_kernel/upgrade.sh
@@ -0,0 +1,51 @@
+#!/bin/bash
+
+set -e
+properties_file="$1"
+echo `date "+%m/%d/%Y %H:%M:%S"` ": $properties_file"
+if [ -f "$properties_file" ]
+then
+ echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file \"$properties_file\" found."
+ while IFS='=' read -r key value
+ do
+ key=$(echo $key | tr '.' '_')
+ eval ${key}=\${value}
+ done < "$properties_file"
+else
+ echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file not found, Pass property file name as argument."
+fi
+
+echo "Current version: "$CURRENT_VERSION
+echo "UPGRADE version: "$UPGRADE_VERSION
+echo "Action: "$ACTION
+
+# Terminate existing connections
+echo "Terminating active connections"
+CONN=$(PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit;)
+echo "Terminated connections"
+
+# Execute upgrade or rollback
+if [ $ACTION == "upgrade" ]; then
+ echo "Upgrading database from $CURRENT_VERSION to $UPGRADE_VERSION"
+ UPGRADE_SCRIPT_FILE="sql/${CURRENT_VERSION}_to_${UPGRADE_VERSION}_upgrade.sql"
+ if [ -f "$UPGRADE_SCRIPT_FILE" ]; then
+ echo "Executing upgrade script $UPGRADE_SCRIPT_FILE"
+ PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f $UPGRADE_SCRIPT_FILE
+ else
+ echo "Upgrade script not found, exiting."
+ exit 1
+ fi
+elif [ $ACTION == "rollback" ]; then
+ echo "Rolling back database from $CURRENT_VERSION to $UPGRADE_VERSION"
+ REVOKE_SCRIPT_FILE="sql/${CURRENT_VERSION}_to_${UPGRADE_VERSION}_rollback.sql"
+ if [ -f "$REVOKE_SCRIPT_FILE" ]; then
+ echo "Executing rollback script $REVOKE_SCRIPT_FILE"
+ PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f $REVOKE_SCRIPT_FILE
+ else
+ echo "Rollback script not found, exiting."
+ exit 1
+ fi
+else
+ echo "Unknown action: $ACTION, must be 'upgrade' or 'rollback'."
+ exit 1
+fi
diff --git a/kernel/kernel-applicanttype-api/pom.xml b/kernel/kernel-applicanttype-api/pom.xml
index 1ff28afa36b..346d06c8291 100644
--- a/kernel/kernel-applicanttype-api/pom.xml
+++ b/kernel/kernel-applicanttype-api/pom.xml
@@ -5,7 +5,7 @@
io.mosip.kernel
kernel-applicanttype-api
Mosip Applicant type API
- 1.2.0.1-SNAPSHOT
+ 1.2.0.1
11
11
@@ -17,7 +17,7 @@
io.mosip.kernel
kernel-core
- 1.2.0.1-SNAPSHOT
+ 1.2.0.1
org.mvel
diff --git a/kernel/kernel-authcodeflowproxy-api/pom.xml b/kernel/kernel-authcodeflowproxy-api/pom.xml
index d56d08bf2dd..64c7d749f1f 100644
--- a/kernel/kernel-authcodeflowproxy-api/pom.xml
+++ b/kernel/kernel-authcodeflowproxy-api/pom.xml
@@ -4,7 +4,7 @@
4.0.0
io.mosip.kernel
kernel-authcodeflowproxy-api
- 1.2.0.1-SNAPSHOT
+ 1.2.0.1
jar
kernel-authcodeflowproxy-api
Mosip commons project
@@ -220,7 +220,7 @@
1.8.12
1.4.2
1.4.2
- 1.2.0.1-SNAPSHOT
+ 1.2.0.1
0.8.5
diff --git a/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/constants/Errors.java b/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/constants/Errors.java
index be58c0378b2..3486aba46a1 100644
--- a/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/constants/Errors.java
+++ b/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/constants/Errors.java
@@ -35,7 +35,8 @@ public enum Errors {
EXCEPTION("KER-ACP-500", "Exception occured "),
ALLOWED_URL_EXCEPTION("KER-ACP-009", "url not found in allowed url's"),
STATE_NULL_EXCEPTION("KER-ACP-010", "state is null or empty"),
- STATE_NOT_UUID_EXCEPTION("KER-ACP-011", "state is not uuid");
+ STATE_NOT_UUID_EXCEPTION("KER-ACP-011", "state is not uuid"),
+ UNSUPPORTED_ENCODING_EXCEPTION("KER-ACP-012", "unsupported encoding exception :");
/**
* The error code
diff --git a/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/controller/LoginController.java b/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/controller/LoginController.java
index 6dfbb974cd1..972009b65bf 100644
--- a/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/controller/LoginController.java
+++ b/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/controller/LoginController.java
@@ -1,6 +1,8 @@
package io.mosip.kernel.authcodeflowproxy.api.controller;
import java.io.IOException;
+import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.UUID;
@@ -19,6 +21,10 @@
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
+import org.springframework.web.util.UriComponentsBuilder;
+
+import com.auth0.jwt.JWT;
+import com.auth0.jwt.interfaces.DecodedJWT;
import io.mosip.kernel.authcodeflowproxy.api.constants.Errors;
import io.mosip.kernel.authcodeflowproxy.api.dto.AccessTokenResponseDTO;
@@ -29,6 +35,7 @@
import io.mosip.kernel.core.authmanager.model.AuthResponseDto;
import io.mosip.kernel.core.http.ResponseFilter;
import io.mosip.kernel.core.http.ResponseWrapper;
+import io.mosip.kernel.core.util.CryptoUtil;
import io.mosip.kernel.core.util.EmptyCheckUtils;
@RestController
@@ -127,14 +134,22 @@ public ResponseWrapper validateAdminToken(HttpServletRequest reque
responseWrapper.setResponse(mosipUserDto);
return responseWrapper;
}
-
+
@ResponseFilter
- @DeleteMapping(value = "/logout/user")
- public ResponseWrapper logoutUser(
- @CookieValue(value = "Authorization", required = false) String token, HttpServletResponse res) {
- AuthResponseDto authResponseDto = loginService.logoutUser(token);
- ResponseWrapper responseWrapper = new ResponseWrapper<>();
- responseWrapper.setResponse(authResponseDto);
- return responseWrapper;
+ @GetMapping(value = "/logout/user")
+ public void logoutUser(
+ @CookieValue(value = "Authorization", required = false) String token,@RequestParam(name = "redirecturi", required = true) String redirectURI, HttpServletResponse res) throws IOException {
+ redirectURI = new String(Base64.decodeBase64(redirectURI));
+ if(redirectURI.contains("#")) {
+ redirectURI= redirectURI.split("#")[0];
+ }
+ if(!allowedUrls.contains(redirectURI)) {
+ LOGGER.error("Url {} was not part of allowed url's",redirectURI);
+ throw new ServiceException(Errors.ALLOWED_URL_EXCEPTION.getErrorCode(), Errors.ALLOWED_URL_EXCEPTION.getErrorMessage());
+ }
+ String uri = loginService.logoutUser(token,redirectURI);
+ res.setStatus(302);
+ res.sendRedirect(uri);
}
+
}
\ No newline at end of file
diff --git a/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/exception/AuthCodeProxyExceptionHandler.java b/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/exception/AuthCodeProxyExceptionHandler.java
index b6777ce6978..97a5b85209b 100644
--- a/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/exception/AuthCodeProxyExceptionHandler.java
+++ b/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/exception/AuthCodeProxyExceptionHandler.java
@@ -11,6 +11,7 @@
import org.springframework.core.annotation.Order;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
+import org.springframework.security.authentication.AuthenticationServiceException;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;
import org.springframework.web.util.ContentCachingRequestWrapper;
@@ -48,6 +49,14 @@ public ResponseEntity> servieException(
return new ResponseEntity<>(
getErrorResponse(httpServletRequest, e.getErrorCode(), e.getErrorText()), HttpStatus.OK);
}
+
+ @ExceptionHandler(AuthenticationServiceException.class)
+ public ResponseEntity> servieException(
+ HttpServletRequest httpServletRequest, final AuthenticationServiceException e) throws IOException {
+ ExceptionUtils.logRootCause(e);
+ return new ResponseEntity<>(
+ getErrorResponse(httpServletRequest,Errors.INVALID_TOKEN.getErrorCode(), e.getMessage()), HttpStatus.OK);
+ }
@ExceptionHandler(AuthRestException.class)
public ResponseEntity> authRestException(
diff --git a/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/service/LoginService.java b/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/service/LoginService.java
index 021ac39575c..1d739234238 100644
--- a/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/service/LoginService.java
+++ b/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/service/LoginService.java
@@ -6,7 +6,6 @@
import io.mosip.kernel.authcodeflowproxy.api.dto.AccessTokenResponseDTO;
import io.mosip.kernel.authcodeflowproxy.api.dto.MosipUserDto;
-import io.mosip.kernel.core.authmanager.model.AuthResponseDto;
public interface LoginService {
@@ -20,7 +19,7 @@ public interface LoginService {
AccessTokenResponseDTO loginRedirect(String state, String sessionState, String code, String stateCookie,
String redirectURI);
- AuthResponseDto logoutUser(String token);
+ String logoutUser(String token, String redirectURI);
}
diff --git a/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/service/impl/LoginServiceImpl.java b/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/service/impl/LoginServiceImpl.java
index 9120d2a9d10..2db2f7ad1ac 100644
--- a/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/service/impl/LoginServiceImpl.java
+++ b/kernel/kernel-authcodeflowproxy-api/src/main/java/io/mosip/kernel/authcodeflowproxy/api/service/impl/LoginServiceImpl.java
@@ -1,6 +1,9 @@
package io.mosip.kernel.authcodeflowproxy.api.service.impl;
import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -30,7 +33,6 @@
import io.mosip.kernel.authcodeflowproxy.api.constants.Constants;
import io.mosip.kernel.authcodeflowproxy.api.constants.Errors;
-import io.mosip.kernel.authcodeflowproxy.api.constants.IAMConstants;
import io.mosip.kernel.authcodeflowproxy.api.dto.AccessTokenResponse;
import io.mosip.kernel.authcodeflowproxy.api.dto.AccessTokenResponseDTO;
import io.mosip.kernel.authcodeflowproxy.api.dto.IAMErrorResponseDto;
@@ -39,7 +41,6 @@
import io.mosip.kernel.authcodeflowproxy.api.exception.ClientException;
import io.mosip.kernel.authcodeflowproxy.api.exception.ServiceException;
import io.mosip.kernel.authcodeflowproxy.api.service.LoginService;
-import io.mosip.kernel.core.authmanager.model.AuthResponseDto;
import io.mosip.kernel.core.exception.ExceptionUtils;
import io.mosip.kernel.core.exception.ServiceError;
import io.mosip.kernel.core.http.ResponseWrapper;
@@ -89,6 +90,14 @@ public class LoginServiceImpl implements LoginService {
@Value("${auth.server.admin.validate.url}")
private String validateUrl;
+
+
+ @Value("${mosip.iam.post-logout-uri-param-key:post_logout_redirect_uri}")
+ private String postLogoutRedirectURIParamKey;
+
+ @Value("${mosip.iam.end-session-endpoint-path:/protocol/openid-connect/logout}")
+ private String endSessionEndpointPath;
+
@Autowired
private RestTemplate restTemplate;
@@ -96,14 +105,6 @@ public class LoginServiceImpl implements LoginService {
@Autowired
private ObjectMapper objectMapper;
- private static final String LOG_OUT_FAILED = "log out failed";
-
- private static final String FAILED = "Failed";
-
- private static final String SUCCESS = "Success";
-
- private static final String SUCCESSFULLY_LOGGED_OUT = "successfully loggedout";
-
@Override
public String login(String redirectURI, String state) {
Map pathParam = new HashMap<>();
@@ -225,35 +226,21 @@ private IAMErrorResponseDto parseKeyClockErrorResponse(HttpStatusCodeException e
}
@Override
- public AuthResponseDto logoutUser(String token) {
+ public String logoutUser(String token,String redirectURI) {
if (EmptyCheckUtils.isNullEmpty(token)) {
throw new AuthenticationServiceException(Errors.INVALID_TOKEN.getErrorMessage());
}
- Map pathparams = new HashMap<>();
String issuer = getissuer(token);
- ResponseEntity response = null;
- AuthResponseDto authResponseDto = new AuthResponseDto();
- StringBuilder urlBuilder = new StringBuilder().append(issuer).append("/protocol/openid-connect/logout");
- UriComponentsBuilder uriComponentsBuilder = UriComponentsBuilder.fromUriString(urlBuilder.toString())
- .queryParam(IAMConstants.ID_TOKEN_HINT, token);
-
+ StringBuilder urlBuilder = new StringBuilder().append(issuer).append(endSessionEndpointPath);
+ UriComponentsBuilder uriComponentsBuilder;
try {
- response = restTemplate.getForEntity(uriComponentsBuilder.buildAndExpand(pathparams).toUriString(),
- String.class);
-
- } catch (HttpClientErrorException | HttpServerErrorException e) {
- throw new ServiceException(Errors.REST_EXCEPTION.getErrorCode(),
- Errors.REST_EXCEPTION.getErrorMessage() + e.getResponseBodyAsString());
- }
-
- if (response.getStatusCode().is2xxSuccessful()) {
- authResponseDto.setMessage(SUCCESSFULLY_LOGGED_OUT);
- authResponseDto.setStatus(SUCCESS);
- } else {
- authResponseDto.setMessage(LOG_OUT_FAILED);
- authResponseDto.setStatus(FAILED);
+ uriComponentsBuilder = UriComponentsBuilder.fromUriString(urlBuilder.toString())
+ .queryParam(postLogoutRedirectURIParamKey, URLEncoder.encode(redirectURI, StandardCharsets.UTF_8.toString()));
+ } catch (UnsupportedEncodingException e) {
+ throw new ServiceException(Errors.UNSUPPORTED_ENCODING_EXCEPTION.getErrorCode(),
+ Errors.UNSUPPORTED_ENCODING_EXCEPTION.getErrorMessage() + Constants.WHITESPACE + e.getMessage());
}
- return authResponseDto;
+ return uriComponentsBuilder.build().toString();
}
public String getissuer(String token) {
diff --git a/kernel/kernel-authcodeflowproxy-api/src/test/java/io/mosip/kernel/authcodeflowproxy/api/test/controller/AuthProxyControllerTests.java b/kernel/kernel-authcodeflowproxy-api/src/test/java/io/mosip/kernel/authcodeflowproxy/api/test/controller/AuthProxyControllerTests.java
index 979cec2a27d..c5df1d8cb12 100644
--- a/kernel/kernel-authcodeflowproxy-api/src/test/java/io/mosip/kernel/authcodeflowproxy/api/test/controller/AuthProxyControllerTests.java
+++ b/kernel/kernel-authcodeflowproxy-api/src/test/java/io/mosip/kernel/authcodeflowproxy/api/test/controller/AuthProxyControllerTests.java
@@ -42,16 +42,19 @@
import io.mosip.kernel.authcodeflowproxy.api.test.AuthProxyFlowTestBootApplication;
import io.mosip.kernel.core.exception.ServiceError;
import io.mosip.kernel.core.http.ResponseWrapper;
+import io.mosip.kernel.core.util.CryptoUtil;
@SpringBootTest(classes = { AuthProxyFlowTestBootApplication.class })
@RunWith(SpringRunner.class)
@AutoConfigureMockMvc
public class AuthProxyControllerTests {
-
@Value("${auth.server.admin.validate.url}")
private String validateUrl;
-
+
+ @Value("${mosip.iam.post-logout-uri-param-key}")
+ private String postLogoutRedirectURIParamKey;
+
@Autowired
private RestTemplate restTemplate;
@@ -60,7 +63,7 @@ public class AuthProxyControllerTests {
@Before
public void init() {
mockServer = MockRestServiceServer.createServer(restTemplate);
-
+
}
@Autowired
@@ -69,7 +72,6 @@ public void init() {
@Autowired
private ObjectMapper objectMapper;
-
@Test
public void validateTokenTest() throws Exception {
ResponseWrapper responseWrapper = new ResponseWrapper();
@@ -79,132 +81,89 @@ public void validateTokenTest() throws Exception {
mosipUserDto.setMobile("9999999999");
mosipUserDto.setRole("MOCK-ROLE");
responseWrapper.setResponse(mosipUserDto);
-
-
- mockServer.expect(ExpectedCount.once(),
- requestTo(new URI(validateUrl)))
- .andExpect(method(HttpMethod.GET))
- .andRespond(withStatus(HttpStatus.OK)
- .contentType(MediaType.APPLICATION_JSON)
- .body(objectMapper.writeValueAsString(responseWrapper)));
+
+ mockServer.expect(ExpectedCount.once(), requestTo(new URI(validateUrl))).andExpect(method(HttpMethod.GET))
+ .andRespond(withStatus(HttpStatus.OK).contentType(MediaType.APPLICATION_JSON)
+ .body(objectMapper.writeValueAsString(responseWrapper)));
Cookie cookie = new Cookie("Authorization", "mock_access_token");
- mockMvc.perform(get("/authorize/admin/validateToken").contentType(MediaType.APPLICATION_JSON).cookie(cookie)).andExpect(status().isOk())
- .andExpect(jsonPath("$.response.userId", is("mock-user")));
+ mockMvc.perform(get("/authorize/admin/validateToken").contentType(MediaType.APPLICATION_JSON).cookie(cookie))
+ .andExpect(status().isOk()).andExpect(jsonPath("$.response.userId", is("mock-user")));
}
-
+
@Test
public void validateTokenHttpClientExceptionTest() throws Exception {
ResponseWrapper responseWrapper = new ResponseWrapper();
- ServiceError serviceError = new ServiceError("KER-ATH-401", "un auth");
+ ServiceError serviceError = new ServiceError("KER-ATH-401", "un auth");
List serviceErrors = new ArrayList<>();
serviceErrors.add(serviceError);
responseWrapper.setErrors(serviceErrors);
- mockServer.expect(ExpectedCount.once(),
- requestTo(new URI(validateUrl)))
- .andExpect(method(HttpMethod.GET))
- .andRespond(withStatus(HttpStatus.UNAUTHORIZED)
- .contentType(MediaType.APPLICATION_JSON)
- .body(objectMapper.writeValueAsString(responseWrapper)));
+ mockServer.expect(ExpectedCount.once(), requestTo(new URI(validateUrl))).andExpect(method(HttpMethod.GET))
+ .andRespond(withStatus(HttpStatus.UNAUTHORIZED).contentType(MediaType.APPLICATION_JSON)
+ .body(objectMapper.writeValueAsString(responseWrapper)));
Cookie cookie = new Cookie("Authorization", "mock_access_token");
- mockMvc.perform(get("/authorize/admin/validateToken").contentType(MediaType.APPLICATION_JSON).cookie(cookie)).andExpect(status().isUnauthorized())
- .andExpect(jsonPath("$.errors[0].errorCode", is("KER-ATH-401")));
+ mockMvc.perform(get("/authorize/admin/validateToken").contentType(MediaType.APPLICATION_JSON).cookie(cookie))
+ .andExpect(status().isUnauthorized()).andExpect(jsonPath("$.errors[0].errorCode", is("KER-ATH-401")));
}
-
+
@Test
public void validateTokenInternalServerTest() throws Exception {
ResponseWrapper responseWrapper = new ResponseWrapper();
- ServiceError serviceError = new ServiceError("KER-ATH-401", "un auth");
+ ServiceError serviceError = new ServiceError("KER-ATH-401", "un auth");
List serviceErrors = new ArrayList<>();
serviceErrors.add(serviceError);
responseWrapper.setErrors(serviceErrors);
- mockServer.expect(ExpectedCount.once(),
- requestTo(new URI(validateUrl)))
- .andExpect(method(HttpMethod.GET))
- .andRespond(withStatus(HttpStatus.INTERNAL_SERVER_ERROR)
- .contentType(MediaType.APPLICATION_JSON)
- .body(objectMapper.writeValueAsString("internal server error")));
+ mockServer.expect(ExpectedCount.once(), requestTo(new URI(validateUrl))).andExpect(method(HttpMethod.GET))
+ .andRespond(withStatus(HttpStatus.INTERNAL_SERVER_ERROR).contentType(MediaType.APPLICATION_JSON)
+ .body(objectMapper.writeValueAsString("internal server error")));
Cookie cookie = new Cookie("Authorization", "mock_access_token");
- mockMvc.perform(get("/authorize/admin/validateToken").contentType(MediaType.APPLICATION_JSON).cookie(cookie)).andExpect(status().isOk())
+ mockMvc.perform(get("/authorize/admin/validateToken").contentType(MediaType.APPLICATION_JSON).cookie(cookie))
+ .andExpect(status().isOk())
.andExpect(jsonPath("$.errors[0].errorCode", is(Errors.REST_EXCEPTION.getErrorCode())));
}
-
+
@Test
public void validateTokenErrorResponseTest() throws Exception {
ResponseWrapper responseWrapper = new ResponseWrapper();
- List errors =new ArrayList<>();
- ServiceError error= new ServiceError("MOCKERRORCODE", "MOCKERROR");
- errors.add(error);
+ List errors = new ArrayList<>();
+ ServiceError error = new ServiceError("MOCKERRORCODE", "MOCKERROR");
+ errors.add(error);
responseWrapper.setErrors(errors);
- mockServer.expect(ExpectedCount.once(),
- requestTo(new URI(validateUrl)))
- .andExpect(method(HttpMethod.GET))
- .andRespond(withStatus(HttpStatus.OK)
- .contentType(MediaType.APPLICATION_JSON)
- .body(objectMapper.writeValueAsString(responseWrapper)));
+ mockServer.expect(ExpectedCount.once(), requestTo(new URI(validateUrl))).andExpect(method(HttpMethod.GET))
+ .andRespond(withStatus(HttpStatus.OK).contentType(MediaType.APPLICATION_JSON)
+ .body(objectMapper.writeValueAsString(responseWrapper)));
Cookie cookie = new Cookie("Authorization", "mock_access_token");
- mockMvc.perform(get("/authorize/admin/validateToken").contentType(MediaType.APPLICATION_JSON).cookie(cookie)).andExpect(status().isOk())
- .andExpect(jsonPath("$.errors[0].errorCode", is("MOCKERRORCODE")));
+ mockMvc.perform(get("/authorize/admin/validateToken").contentType(MediaType.APPLICATION_JSON).cookie(cookie))
+ .andExpect(status().isOk()).andExpect(jsonPath("$.errors[0].errorCode", is("MOCKERRORCODE")));
}
-
+
@Test
public void logoutTest() throws Exception {
- ResponseWrapper responseWrapper = new ResponseWrapper();
- MosipUserDto mosipUserDto = new MosipUserDto();
- mosipUserDto.setUserId("mock-user");
- mosipUserDto.setMail("mock-user@mosip.io");
- mosipUserDto.setMobile("9999999999");
- mosipUserDto.setRole("MOCK-ROLE");
- responseWrapper.setResponse(mosipUserDto);
-
- String mockToken="eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICJzNmYxcDYwYWVDTTBrNy1NaW9sN0Zib2FTdXlRYm95UC03S1RUTmVWLWZNIn0.eyJqdGkiOiJmYTU4Y2NjMC00ZDRiLTQ2ZjAtYjgwOC0yMWI4ZTdhNmMxNDMiLCJleHAiOjE2NDAxODc3MTksIm5iZiI6MCwiaWF0IjoxNjQwMTUxNzE5LCJpc3MiOiJodHRwczovL2Rldi5tb3NpcC5uZXQva2V5Y2xvYWsvYXV0aC9yZWFsbXMvbW9zaXAiLCJhdWQiOiJhY2NvdW50Iiwic3ViIjoiOWRiZTE0MDEtNTQ1NC00OTlhLTlhMWItNzVhZTY4M2Q0MjZhIiwidHlwIjoiQmVhcmVyIiwiYXpwIjoibW9zaXAtcmVzaWRlbnQtY2xpZW50IiwiYXV0aF90aW1lIjowLCJzZXNzaW9uX3N0YXRlIjoiY2QwYjU5NjEtOTYzMi00NmE0LWIzMzgtODc4MWEzNDVmMTZiIiwiYWNyIjoiMSIsImFsbG93ZWQtb3JpZ2lucyI6WyJodHRwczovL2Rldi5tb3NpcC5uZXQiXSwicmVhbG1fYWNjZXNzIjp7InJvbGVzIjpbIkNSRURFTlRJQUxfUkVRVUVTVCIsIlJFU0lERU5UIiwib2ZmbGluZV9hY2Nlc3MiLCJQQVJUTkVSX0FETUlOIiwidW1hX2F1dGhvcml6YXRpb24iXX0sInJlc291cmNlX2FjY2VzcyI6eyJtb3NpcC1yZXNpZGVudC1jbGllbnQiOnsicm9sZXMiOlsidW1hX3Byb3RlY3Rpb24iXX0sImFjY291bnQiOnsicm9sZXMiOlsibWFuYWdlLWFjY291bnQiLCJtYW5hZ2UtYWNjb3VudC1saW5rcyIsInZpZXctcHJvZmlsZSJdfX0sInNjb3BlIjoicHJvZmlsZSBlbWFpbCIsImNsaWVudEhvc3QiOiIxMC4yNDQuNS4xNDgiLCJlbWFpbF92ZXJpZmllZCI6ZmFsc2UsImNsaWVudElkIjoibW9zaXAtcmVzaWRlbnQtY2xpZW50IiwicHJlZmVycmVkX3VzZXJuYW1lIjoic2VydmljZS1hY2NvdW50LW1vc2lwLXJlc2lkZW50LWNsaWVudCIsImNsaWVudEFkZHJlc3MiOiIxMC4yNDQuNS4xNDgifQ.xZq1m3mBTEvFDENKFOI59QsSl3sd_TSDNbhTAOq4x_x_4voPc4hh08gIxUdsVHfXY4T0P8DdZ1xNt8xd1VWc33Hc4b_3kK7ksGY4wwqtb0-pDLQGajCGuG6vebC1rYcjsGRbJ1Gnrj_F2RNY4Ky6Nq5SAJ1Lh_NVKNKFghAXb3YrlmqlmCB1fCltC4XBqNnF5_k4uzLCu_Wr0lt_M87X97DktaRGLOD2_HY1Ire9YPsWkoO8y7X_DRCY59yQDVgYs2nAiR6Am-c55Q0fEQ0HuB4IJHlhtMHm27dXPdOEhFhR8ZPOyeO6ZIcIm0ZTDjusrruqWy2_yO5fe3XIHkCOAw";
- mockServer.expect(ExpectedCount.once(),
- requestTo(new URI("https://dev.mosip.net/keycloak/auth/realms/mosip/protocol/openid-connect/logout?id_token_hint="+mockToken)))
- .andExpect(method(HttpMethod.GET))
- .andRespond(withStatus(HttpStatus.OK));
+ String mockToken = "eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICJzNmYxcDYwYWVDTTBrNy1NaW9sN0Zib2FTdXlRYm95UC03S1RUTmVWLWZNIn0.eyJqdGkiOiJmYTU4Y2NjMC00ZDRiLTQ2ZjAtYjgwOC0yMWI4ZTdhNmMxNDMiLCJleHAiOjE2NDAxODc3MTksIm5iZiI6MCwiaWF0IjoxNjQwMTUxNzE5LCJpc3MiOiJodHRwczovL2Rldi5tb3NpcC5uZXQva2V5Y2xvYWsvYXV0aC9yZWFsbXMvbW9zaXAiLCJhdWQiOiJhY2NvdW50Iiwic3ViIjoiOWRiZTE0MDEtNTQ1NC00OTlhLTlhMWItNzVhZTY4M2Q0MjZhIiwidHlwIjoiQmVhcmVyIiwiYXpwIjoibW9zaXAtcmVzaWRlbnQtY2xpZW50IiwiYXV0aF90aW1lIjowLCJzZXNzaW9uX3N0YXRlIjoiY2QwYjU5NjEtOTYzMi00NmE0LWIzMzgtODc4MWEzNDVmMTZiIiwiYWNyIjoiMSIsImFsbG93ZWQtb3JpZ2lucyI6WyJodHRwczovL2Rldi5tb3NpcC5uZXQiXSwicmVhbG1fYWNjZXNzIjp7InJvbGVzIjpbIkNSRURFTlRJQUxfUkVRVUVTVCIsIlJFU0lERU5UIiwib2ZmbGluZV9hY2Nlc3MiLCJQQVJUTkVSX0FETUlOIiwidW1hX2F1dGhvcml6YXRpb24iXX0sInJlc291cmNlX2FjY2VzcyI6eyJtb3NpcC1yZXNpZGVudC1jbGllbnQiOnsicm9sZXMiOlsidW1hX3Byb3RlY3Rpb24iXX0sImFjY291bnQiOnsicm9sZXMiOlsibWFuYWdlLWFjY291bnQiLCJtYW5hZ2UtYWNjb3VudC1saW5rcyIsInZpZXctcHJvZmlsZSJdfX0sInNjb3BlIjoicHJvZmlsZSBlbWFpbCIsImNsaWVudEhvc3QiOiIxMC4yNDQuNS4xNDgiLCJlbWFpbF92ZXJpZmllZCI6ZmFsc2UsImNsaWVudElkIjoibW9zaXAtcmVzaWRlbnQtY2xpZW50IiwicHJlZmVycmVkX3VzZXJuYW1lIjoic2VydmljZS1hY2NvdW50LW1vc2lwLXJlc2lkZW50LWNsaWVudCIsImNsaWVudEFkZHJlc3MiOiIxMC4yNDQuNS4xNDgifQ.xZq1m3mBTEvFDENKFOI59QsSl3sd_TSDNbhTAOq4x_x_4voPc4hh08gIxUdsVHfXY4T0P8DdZ1xNt8xd1VWc33Hc4b_3kK7ksGY4wwqtb0-pDLQGajCGuG6vebC1rYcjsGRbJ1Gnrj_F2RNY4Ky6Nq5SAJ1Lh_NVKNKFghAXb3YrlmqlmCB1fCltC4XBqNnF5_k4uzLCu_Wr0lt_M87X97DktaRGLOD2_HY1Ire9YPsWkoO8y7X_DRCY59yQDVgYs2nAiR6Am-c55Q0fEQ0HuB4IJHlhtMHm27dXPdOEhFhR8ZPOyeO6ZIcIm0ZTDjusrruqWy2_yO5fe3XIHkCOAw";
Cookie cookie = new Cookie("Authorization", mockToken);
- mockMvc.perform(delete("/logout/user").contentType(MediaType.APPLICATION_JSON).cookie(cookie)).andExpect(status().isOk())
- .andExpect(jsonPath("$.response.status", is("Success")));
+ mockMvc.perform(get(
+ "/logout/user?redirecturi=" + CryptoUtil.encodeToURLSafeBase64("http://localhost:5000/".getBytes()))
+ .contentType(MediaType.APPLICATION_JSON).cookie(cookie))
+ .andExpect(status().is3xxRedirection());
}
-
-
+
+ @Test
public void logoutNullTokenTest() throws Exception {
- ResponseWrapper responseWrapper = new ResponseWrapper();
- MosipUserDto mosipUserDto = new MosipUserDto();
- mosipUserDto.setUserId("mock-user");
- mosipUserDto.setMail("mock-user@mosip.io");
- mosipUserDto.setMobile("9999999999");
- mosipUserDto.setRole("MOCK-ROLE");
- responseWrapper.setResponse(mosipUserDto);
-
- String mockToken="eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICJzNmYxcDYwYWVDTTBrNy1NaW9sN0Zib2FTdXlRYm95UC03S1RUTmVWLWZNIn0.eyJqdGkiOiJmYTU4Y2NjMC00ZDRiLTQ2ZjAtYjgwOC0yMWI4ZTdhNmMxNDMiLCJleHAiOjE2NDAxODc3MTksIm5iZiI6MCwiaWF0IjoxNjQwMTUxNzE5LCJpc3MiOiJodHRwczovL2Rldi5tb3NpcC5uZXQva2V5Y2xvYWsvYXV0aC9yZWFsbXMvbW9zaXAiLCJhdWQiOiJhY2NvdW50Iiwic3ViIjoiOWRiZTE0MDEtNTQ1NC00OTlhLTlhMWItNzVhZTY4M2Q0MjZhIiwidHlwIjoiQmVhcmVyIiwiYXpwIjoibW9zaXAtcmVzaWRlbnQtY2xpZW50IiwiYXV0aF90aW1lIjowLCJzZXNzaW9uX3N0YXRlIjoiY2QwYjU5NjEtOTYzMi00NmE0LWIzMzgtODc4MWEzNDVmMTZiIiwiYWNyIjoiMSIsImFsbG93ZWQtb3JpZ2lucyI6WyJodHRwczovL2Rldi5tb3NpcC5uZXQiXSwicmVhbG1fYWNjZXNzIjp7InJvbGVzIjpbIkNSRURFTlRJQUxfUkVRVUVTVCIsIlJFU0lERU5UIiwib2ZmbGluZV9hY2Nlc3MiLCJQQVJUTkVSX0FETUlOIiwidW1hX2F1dGhvcml6YXRpb24iXX0sInJlc291cmNlX2FjY2VzcyI6eyJtb3NpcC1yZXNpZGVudC1jbGllbnQiOnsicm9sZXMiOlsidW1hX3Byb3RlY3Rpb24iXX0sImFjY291bnQiOnsicm9sZXMiOlsibWFuYWdlLWFjY291bnQiLCJtYW5hZ2UtYWNjb3VudC1saW5rcyIsInZpZXctcHJvZmlsZSJdfX0sInNjb3BlIjoicHJvZmlsZSBlbWFpbCIsImNsaWVudEhvc3QiOiIxMC4yNDQuNS4xNDgiLCJlbWFpbF92ZXJpZmllZCI6ZmFsc2UsImNsaWVudElkIjoibW9zaXAtcmVzaWRlbnQtY2xpZW50IiwicHJlZmVycmVkX3VzZXJuYW1lIjoic2VydmljZS1hY2NvdW50LW1vc2lwLXJlc2lkZW50LWNsaWVudCIsImNsaWVudEFkZHJlc3MiOiIxMC4yNDQuNS4xNDgifQ.xZq1m3mBTEvFDENKFOI59QsSl3sd_TSDNbhTAOq4x_x_4voPc4hh08gIxUdsVHfXY4T0P8DdZ1xNt8xd1VWc33Hc4b_3kK7ksGY4wwqtb0-pDLQGajCGuG6vebC1rYcjsGRbJ1Gnrj_F2RNY4Ky6Nq5SAJ1Lh_NVKNKFghAXb3YrlmqlmCB1fCltC4XBqNnF5_k4uzLCu_Wr0lt_M87X97DktaRGLOD2_HY1Ire9YPsWkoO8y7X_DRCY59yQDVgYs2nAiR6Am-c55Q0fEQ0HuB4IJHlhtMHm27dXPdOEhFhR8ZPOyeO6ZIcIm0ZTDjusrruqWy2_yO5fe3XIHkCOAw";
- mockServer.expect(ExpectedCount.once(),
- requestTo(new URI("https://dev.mosip.net/keycloak/auth/realms/mosip/protocol/openid-connect/logout?id_token_hint="+mockToken)))
- .andExpect(method(HttpMethod.GET))
- .andRespond(withStatus(HttpStatus.OK));
- mockMvc.perform(delete("/logout/user").contentType(MediaType.APPLICATION_JSON))
- .andExpect(jsonPath("$.errors[0].errorCode", is("KER-ACP-500")));;
+ mockMvc.perform(get(
+ "/logout/user?redirecturi=" + CryptoUtil.encodeToURLSafeBase64("http://localhost:5000/".getBytes()))
+ .contentType(MediaType.APPLICATION_JSON))
+ .andExpect(jsonPath("$.errors[0].errorCode", is(Errors.INVALID_TOKEN.getErrorCode())));
}
-
+
@Test
public void logoutServerErrorTokenTest() throws Exception {
- ResponseWrapper responseWrapper = new ResponseWrapper();
- MosipUserDto mosipUserDto = new MosipUserDto();
- mosipUserDto.setUserId("mock-user");
- mosipUserDto.setMail("mock-user@mosip.io");
- mosipUserDto.setMobile("9999999999");
- mosipUserDto.setRole("MOCK-ROLE");
- responseWrapper.setResponse(mosipUserDto);
-
- String mockToken="eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICJzNmYxcDYwYWVDTTBrNy1NaW9sN0Zib2FTdXlRYm95UC03S1RUTmVWLWZNIn0.eyJqdGkiOiJmYTU4Y2NjMC00ZDRiLTQ2ZjAtYjgwOC0yMWI4ZTdhNmMxNDMiLCJleHAiOjE2NDAxODc3MTksIm5iZiI6MCwiaWF0IjoxNjQwMTUxNzE5LCJpc3MiOiJodHRwczovL2Rldi5tb3NpcC5uZXQva2V5Y2xvYWsvYXV0aC9yZWFsbXMvbW9zaXAiLCJhdWQiOiJhY2NvdW50Iiwic3ViIjoiOWRiZTE0MDEtNTQ1NC00OTlhLTlhMWItNzVhZTY4M2Q0MjZhIiwidHlwIjoiQmVhcmVyIiwiYXpwIjoibW9zaXAtcmVzaWRlbnQtY2xpZW50IiwiYXV0aF90aW1lIjowLCJzZXNzaW9uX3N0YXRlIjoiY2QwYjU5NjEtOTYzMi00NmE0LWIzMzgtODc4MWEzNDVmMTZiIiwiYWNyIjoiMSIsImFsbG93ZWQtb3JpZ2lucyI6WyJodHRwczovL2Rldi5tb3NpcC5uZXQiXSwicmVhbG1fYWNjZXNzIjp7InJvbGVzIjpbIkNSRURFTlRJQUxfUkVRVUVTVCIsIlJFU0lERU5UIiwib2ZmbGluZV9hY2Nlc3MiLCJQQVJUTkVSX0FETUlOIiwidW1hX2F1dGhvcml6YXRpb24iXX0sInJlc291cmNlX2FjY2VzcyI6eyJtb3NpcC1yZXNpZGVudC1jbGllbnQiOnsicm9sZXMiOlsidW1hX3Byb3RlY3Rpb24iXX0sImFjY291bnQiOnsicm9sZXMiOlsibWFuYWdlLWFjY291bnQiLCJtYW5hZ2UtYWNjb3VudC1saW5rcyIsInZpZXctcHJvZmlsZSJdfX0sInNjb3BlIjoicHJvZmlsZSBlbWFpbCIsImNsaWVudEhvc3QiOiIxMC4yNDQuNS4xNDgiLCJlbWFpbF92ZXJpZmllZCI6ZmFsc2UsImNsaWVudElkIjoibW9zaXAtcmVzaWRlbnQtY2xpZW50IiwicHJlZmVycmVkX3VzZXJuYW1lIjoic2VydmljZS1hY2NvdW50LW1vc2lwLXJlc2lkZW50LWNsaWVudCIsImNsaWVudEFkZHJlc3MiOiIxMC4yNDQuNS4xNDgifQ.xZq1m3mBTEvFDENKFOI59QsSl3sd_TSDNbhTAOq4x_x_4voPc4hh08gIxUdsVHfXY4T0P8DdZ1xNt8xd1VWc33Hc4b_3kK7ksGY4wwqtb0-pDLQGajCGuG6vebC1rYcjsGRbJ1Gnrj_F2RNY4Ky6Nq5SAJ1Lh_NVKNKFghAXb3YrlmqlmCB1fCltC4XBqNnF5_k4uzLCu_Wr0lt_M87X97DktaRGLOD2_HY1Ire9YPsWkoO8y7X_DRCY59yQDVgYs2nAiR6Am-c55Q0fEQ0HuB4IJHlhtMHm27dXPdOEhFhR8ZPOyeO6ZIcIm0ZTDjusrruqWy2_yO5fe3XIHkCOAw";
- mockServer.expect(ExpectedCount.once(),
- requestTo(new URI("https://dev.mosip.net/keycloak/auth/realms/mosip/protocol/openid-connect/logout?id_token_hint="+mockToken)))
- .andExpect(method(HttpMethod.GET))
- .andRespond(withStatus(HttpStatus.BAD_REQUEST));
+
+ String mockToken = "eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICJzNmYxcDYwYWVDTTBrNy1NaW9sN0Zib2FTdXlRYm95UC03S1RUTmVWLWZNIn0.eyJqdGkiOiJmYTU4Y2NjMC00ZDRiLTQ2ZjAtYjgwOC0yMWI4ZTdhNmMxNDMiLCJleHAiOjE2NDAxODc3MTksIm5iZiI6MCwiaWF0IjoxNjQwMTUxNzE5LCJpc3MiOiJodHRwczovL2Rldi5tb3NpcC5uZXQva2V5Y2xvYWsvYXV0aC9yZWFsbXMvbW9zaXAiLCJhdWQiOiJhY2NvdW50Iiwic3ViIjoiOWRiZTE0MDEtNTQ1NC00OTlhLTlhMWItNzVhZTY4M2Q0MjZhIiwidHlwIjoiQmVhcmVyIiwiYXpwIjoibW9zaXAtcmVzaWRlbnQtY2xpZW50IiwiYXV0aF90aW1lIjowLCJzZXNzaW9uX3N0YXRlIjoiY2QwYjU5NjEtOTYzMi00NmE0LWIzMzgtODc4MWEzNDVmMTZiIiwiYWNyIjoiMSIsImFsbG93ZWQtb3JpZ2lucyI6WyJodHRwczovL2Rldi5tb3NpcC5uZXQiXSwicmVhbG1fYWNjZXNzIjp7InJvbGVzIjpbIkNSRURFTlRJQUxfUkVRVUVTVCIsIlJFU0lERU5UIiwib2ZmbGluZV9hY2Nlc3MiLCJQQVJUTkVSX0FETUlOIiwidW1hX2F1dGhvcml6YXRpb24iXX0sInJlc291cmNlX2FjY2VzcyI6eyJtb3NpcC1yZXNpZGVudC1jbGllbnQiOnsicm9sZXMiOlsidW1hX3Byb3RlY3Rpb24iXX0sImFjY291bnQiOnsicm9sZXMiOlsibWFuYWdlLWFjY291bnQiLCJtYW5hZ2UtYWNjb3VudC1saW5rcyIsInZpZXctcHJvZmlsZSJdfX0sInNjb3BlIjoicHJvZmlsZSBlbWFpbCIsImNsaWVudEhvc3QiOiIxMC4yNDQuNS4xNDgiLCJlbWFpbF92ZXJpZmllZCI6ZmFsc2UsImNsaWVudElkIjoibW9zaXAtcmVzaWRlbnQtY2xpZW50IiwicHJlZmVycmVkX3VzZXJuYW1lIjoic2VydmljZS1hY2NvdW50LW1vc2lwLXJlc2lkZW50LWNsaWVudCIsImNsaWVudEFkZHJlc3MiOiIxMC4yNDQuNS4xNDgifQ.xZq1m3mBTEvFDENKFOI59QsSl3sd_TSDNbhTAOq4x_x_4voPc4hh08gIxUdsVHfXY4T0P8DdZ1xNt8xd1VWc33Hc4b_3kK7ksGY4wwqtb0-pDLQGajCGuG6vebC1rYcjsGRbJ1Gnrj_F2RNY4Ky6Nq5SAJ1Lh_NVKNKFghAXb3YrlmqlmCB1fCltC4XBqNnF5_k4uzLCu_Wr0lt_M87X97DktaRGLOD2_HY1Ire9YPsWkoO8y7X_DRCY59yQDVgYs2nAiR6Am-c55Q0fEQ0HuB4IJHlhtMHm27dXPdOEhFhR8ZPOyeO6ZIcIm0ZTDjusrruqWy2_yO5fe3XIHkCOAw";
Cookie cookie = new Cookie("Authorization", mockToken);
- mockMvc.perform(delete("/logout/user").contentType(MediaType.APPLICATION_JSON).cookie(cookie)).andExpect(status().isOk())
- .andExpect(jsonPath("$.errors[0].errorCode", isA(String.class)));
+ mockMvc.perform(get(
+ "/logout/user?redirecturi=" + CryptoUtil.encodeToURLSafeBase64("http://localhost:2000/".getBytes())).contentType(MediaType.APPLICATION_JSON).cookie(cookie))
+ .andExpect(status().isOk()).andExpect(jsonPath("$.errors[0].errorCode", is(Errors.ALLOWED_URL_EXCEPTION.getErrorCode())));
}
-
+
@Test
public void loginTest() throws Exception {
@@ -212,94 +171,112 @@ public void loginTest() throws Exception {
Cookie cookie = new Cookie("state", UUID.randomUUID().toString());
mockMvc.perform(get("/login/abc").contentType(MediaType.APPLICATION_JSON).cookie(cookie)).andExpect(status().is3xxRedirection());
}
-
+
+
@Test
- public void logoutRedirectTest() throws Exception {
+ public void loginRedirectTest() throws Exception {
AccessTokenResponse accessTokenResponse = new AccessTokenResponse();
accessTokenResponse.setAccess_token("mock-access-token");
accessTokenResponse.setExpires_in("111");
-
- mockServer.expect(ExpectedCount.once(),
- requestTo(new URI("http://localhost:8080/keycloak/auth/realms/mosip/protocol/openid-connect/token")))
- .andExpect(method(HttpMethod.POST))
- .andRespond(withStatus(HttpStatus.OK)
- .contentType(MediaType.APPLICATION_JSON)
- .body(objectMapper.writeValueAsString(accessTokenResponse)));
+
+ mockServer
+ .expect(ExpectedCount.once(),
+ requestTo(new URI(
+ "http://localhost:8080/keycloak/auth/realms/mosip/protocol/openid-connect/token")))
+ .andExpect(method(HttpMethod.POST))
+ .andRespond(withStatus(HttpStatus.OK).contentType(MediaType.APPLICATION_JSON)
+ .body(objectMapper.writeValueAsString(accessTokenResponse)));
Cookie cookie = new Cookie("state", "mockstate");
- mockMvc.perform(get("/login-redirect/aHR0cDovL2xvY2FsaG9zdDo1MDAwLw==?state=mockstate&session_state=mock-session-state&code=mockcode").contentType(MediaType.APPLICATION_JSON).cookie(cookie)).andExpect(status().is3xxRedirection());
+ mockMvc.perform(get(
+ "/login-redirect/aHR0cDovL2xvY2FsaG9zdDo1MDAwLw==?state=mockstate&session_state=mock-session-state&code=mockcode")
+ .contentType(MediaType.APPLICATION_JSON).cookie(cookie))
+ .andExpect(status().is3xxRedirection());
}
-
-
+
@Test
- public void logoutRedirectTestWithHash() throws Exception {
+ public void loginRedirectTestWithHash() throws Exception {
AccessTokenResponse accessTokenResponse = new AccessTokenResponse();
accessTokenResponse.setAccess_token("mock-access-token");
accessTokenResponse.setExpires_in("111");
-
- mockServer.expect(ExpectedCount.once(),
- requestTo(new URI("http://localhost:8080/keycloak/auth/realms/mosip/protocol/openid-connect/token")))
- .andExpect(method(HttpMethod.POST))
- .andRespond(withStatus(HttpStatus.OK)
- .contentType(MediaType.APPLICATION_JSON)
- .body(objectMapper.writeValueAsString(accessTokenResponse)));
+
+ mockServer
+ .expect(ExpectedCount.once(),
+ requestTo(new URI(
+ "http://localhost:8080/keycloak/auth/realms/mosip/protocol/openid-connect/token")))
+ .andExpect(method(HttpMethod.POST))
+ .andRespond(withStatus(HttpStatus.OK).contentType(MediaType.APPLICATION_JSON)
+ .body(objectMapper.writeValueAsString(accessTokenResponse)));
Cookie cookie = new Cookie("state", "mockstate");
- mockMvc.perform(get("/login-redirect/aHR0cDovL2xvY2FsaG9zdDo1MDAwLyMvcmFuZG9tcGF0bS9yYW5kb21wYXRo?state=mockstate&session_state=mock-session-state&code=mockcode").contentType(MediaType.APPLICATION_JSON).cookie(cookie)).andExpect(status().is3xxRedirection());
+ mockMvc.perform(get(
+ "/login-redirect/aHR0cDovL2xvY2FsaG9zdDo1MDAwLyMvcmFuZG9tcGF0bS9yYW5kb21wYXRo?state=mockstate&session_state=mock-session-state&code=mockcode")
+ .contentType(MediaType.APPLICATION_JSON).cookie(cookie))
+ .andExpect(status().is3xxRedirection());
}
-
+
@Test
- public void logoutServerExceptionRedirectTest() throws Exception {
+ public void loginServerExceptionRedirectTest() throws Exception {
IAMErrorResponseDto errorResponseDto = new IAMErrorResponseDto();
errorResponseDto.setError("seerver error");
errorResponseDto.setError_description("sending mock error");
-
- mockServer.expect(ExpectedCount.once(),
- requestTo(new URI("http://localhost:8080/keycloak/auth/realms/mosip/protocol/openid-connect/token")))
- .andExpect(method(HttpMethod.POST))
- .andRespond(withStatus(HttpStatus.INTERNAL_SERVER_ERROR)
- .contentType(MediaType.APPLICATION_JSON)
- .body(objectMapper.writeValueAsString(errorResponseDto)));
+
+ mockServer
+ .expect(ExpectedCount.once(),
+ requestTo(new URI(
+ "http://localhost:8080/keycloak/auth/realms/mosip/protocol/openid-connect/token")))
+ .andExpect(method(HttpMethod.POST))
+ .andRespond(withStatus(HttpStatus.INTERNAL_SERVER_ERROR).contentType(MediaType.APPLICATION_JSON)
+ .body(objectMapper.writeValueAsString(errorResponseDto)));
Cookie cookie = new Cookie("state", "mockstate");
- mockMvc.perform(get("/login-redirect/abc?state=mockstate&session_state=mock-session-state&code=mockcode").contentType(MediaType.APPLICATION_JSON).cookie(cookie)).andExpect(status().is2xxSuccessful()).andExpect(jsonPath("$.errors[0].message", isA(String.class)));
+ mockMvc.perform(get("/login-redirect/abc?state=mockstate&session_state=mock-session-state&code=mockcode")
+ .contentType(MediaType.APPLICATION_JSON).cookie(cookie)).andExpect(status().is2xxSuccessful())
+ .andExpect(jsonPath("$.errors[0].message", isA(String.class)));
}
-
+
@Test
public void loginUUIDEmptyTest() throws Exception {
- //http://localhost:8080/keycloak/auth/realms/mosip/protocol/openid-connect/auth?client_id=mosip-admin-client&redirect_uri=http://localhost:8082/v1/admin/login-redirect/abc&state=mock-state&response_type=code&scope=cls
+ // http://localhost:8080/keycloak/auth/realms/mosip/protocol/openid-connect/auth?client_id=mosip-admin-client&redirect_uri=http://localhost:8082/v1/admin/login-redirect/abc&state=mock-state&response_type=code&scope=cls
Cookie cookie = new Cookie("state", "");
- mockMvc.perform(get("/login/abc").contentType(MediaType.APPLICATION_JSON).cookie(cookie)).andExpect(status().isOk()).andExpect(jsonPath("$.errors[0].errorCode", is(Errors.STATE_NULL_EXCEPTION.getErrorCode())));
+ mockMvc.perform(get("/login/abc").contentType(MediaType.APPLICATION_JSON).cookie(cookie))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.errors[0].errorCode", is(Errors.STATE_NULL_EXCEPTION.getErrorCode())));
}
-
+
@Test
public void loginUUIDNullTest() throws Exception {
- //http://localhost:8080/keycloak/auth/realms/mosip/protocol/openid-connect/auth?client_id=mosip-admin-client&redirect_uri=http://localhost:8082/v1/admin/login-redirect/abc&state=mock-state&response_type=code&scope=cls
- mockMvc.perform(get("/login/abc").contentType(MediaType.APPLICATION_JSON)).andExpect(status().isOk()).andExpect(jsonPath("$.errors[0].errorCode", is(Errors.STATE_NULL_EXCEPTION.getErrorCode())));
+ // http://localhost:8080/keycloak/auth/realms/mosip/protocol/openid-connect/auth?client_id=mosip-admin-client&redirect_uri=http://localhost:8082/v1/admin/login-redirect/abc&state=mock-state&response_type=code&scope=cls
+ mockMvc.perform(get("/login/abc").contentType(MediaType.APPLICATION_JSON)).andExpect(status().isOk())
+ .andExpect(jsonPath("$.errors[0].errorCode", is(Errors.STATE_NULL_EXCEPTION.getErrorCode())));
}
-
+
@Test
public void loginInvalidUUIDTest() throws Exception {
- //http://localhost:8080/keycloak/auth/realms/mosip/protocol/openid-connect/auth?client_id=mosip-admin-client&redirect_uri=http://localhost:8082/v1/admin/login-redirect/abc&state=mock-state&response_type=code&scope=cls
+ // http://localhost:8080/keycloak/auth/realms/mosip/protocol/openid-connect/auth?client_id=mosip-admin-client&redirect_uri=http://localhost:8082/v1/admin/login-redirect/abc&state=mock-state&response_type=code&scope=cls
Cookie cookie = new Cookie("state", "abc/nabc");
- mockMvc.perform(get("/login/abc").contentType(MediaType.APPLICATION_JSON).cookie(cookie)).andExpect(status().isOk()).andExpect(jsonPath("$.errors[0].errorCode", is(Errors.STATE_NOT_UUID_EXCEPTION.getErrorCode())));
+ mockMvc.perform(get("/login/abc").contentType(MediaType.APPLICATION_JSON).cookie(cookie))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.errors[0].errorCode", is(Errors.STATE_NOT_UUID_EXCEPTION.getErrorCode())));
}
-
-
-
-
+
@Test
public void logoutRedirectHostCheckTest() throws Exception {
AccessTokenResponse accessTokenResponse = new AccessTokenResponse();
accessTokenResponse.setAccess_token("mock-access-token");
accessTokenResponse.setExpires_in("111");
-
- mockServer.expect(ExpectedCount.once(),
- requestTo(new URI("http://localhost:8080/keycloak/auth/realms/mosip/protocol/openid-connect/token")))
- .andExpect(method(HttpMethod.POST))
- .andRespond(withStatus(HttpStatus.OK)
- .contentType(MediaType.APPLICATION_JSON)
- .body(objectMapper.writeValueAsString(accessTokenResponse)));
+
+ mockServer
+ .expect(ExpectedCount.once(),
+ requestTo(new URI(
+ "http://localhost:8080/keycloak/auth/realms/mosip/protocol/openid-connect/token")))
+ .andExpect(method(HttpMethod.POST))
+ .andRespond(withStatus(HttpStatus.OK).contentType(MediaType.APPLICATION_JSON)
+ .body(objectMapper.writeValueAsString(accessTokenResponse)));
Cookie cookie = new Cookie("state", "mockstate");
- mockMvc.perform(get("/login-redirect/aHR0cDovL2FiOjUwMDAv?state=mockstate&session_state=mock-session-state&code=mockcode").contentType(MediaType.APPLICATION_JSON).cookie(cookie)).andExpect(status().isOk()).andExpect(jsonPath("$.errors[0].errorCode", is(Errors.ALLOWED_URL_EXCEPTION.getErrorCode())));;
+ mockMvc.perform(get(
+ "/login-redirect/aHR0cDovL2FiOjUwMDAv?state=mockstate&session_state=mock-session-state&code=mockcode")
+ .contentType(MediaType.APPLICATION_JSON).cookie(cookie))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.errors[0].errorCode", is(Errors.ALLOWED_URL_EXCEPTION.getErrorCode())));
+
}
-
}
diff --git a/kernel/kernel-authcodeflowproxy-api/src/test/resources/application-test.properties b/kernel/kernel-authcodeflowproxy-api/src/test/resources/application-test.properties
index 9ef680b7db7..ccdc42f4d1e 100644
--- a/kernel/kernel-authcodeflowproxy-api/src/test/resources/application-test.properties
+++ b/kernel/kernel-authcodeflowproxy-api/src/test/resources/application-test.properties
@@ -39,4 +39,6 @@ mosip.iam.base-url=http://localhost:8080/keycloak
mosip.iam.authorization_endpoint=${mosip.iam.base-url}/auth/realms/{realmId}/protocol/openid-connect/auth
mosip.iam.token_endpoint=${mosip.iam.base-url}/auth/realms/{realmId}/protocol/openid-connect/token
auth.allowed.urls=http://localhost:5000/
+mosip.iam.post-logout-uri-param-key=post_logout_redirect_uri
+mosip.iam.end-session-endpoint-path=/protocol/openid-connect/logout
diff --git a/kernel/kernel-bioapi-provider/pom.xml b/kernel/kernel-bioapi-provider/pom.xml
index f86532b5905..0867bc03a2f 100644
--- a/kernel/kernel-bioapi-provider/pom.xml
+++ b/kernel/kernel-bioapi-provider/pom.xml
@@ -5,7 +5,7 @@
io.mosip.kernel
kernel-bioapi-provider
- 1.2.0.1-SNAPSHOT
+ 1.2.0.1
kernel-bioapi-provider
bioapi provider
https://github.com/mosip/commons
@@ -221,8 +221,8 @@
1.8.12
1.4.2
1.4.2
- 1.2.0.1-SNAPSHOT
- 1.2.0.1-SNAPSHOT
+ 1.2.0.1
+ 1.2.0.1
0.8.5
diff --git a/kernel/kernel-biometrics-api/README.md b/kernel/kernel-biometrics-api/README.md
index 5151c746b74..680a9abf93e 100644
--- a/kernel/kernel-biometrics-api/README.md
+++ b/kernel/kernel-biometrics-api/README.md
@@ -1,5 +1,7 @@
# Kernel Biometrics Api
+[MOVED TO mosip/bio-utils]
+
## Overview
This library provides core funtions related to biometrics.
diff --git a/kernel/kernel-biometrics-api/pom.xml b/kernel/kernel-biometrics-api/pom.xml
index 9529bd4a362..5e4b4078095 100644
--- a/kernel/kernel-biometrics-api/pom.xml
+++ b/kernel/kernel-biometrics-api/pom.xml
@@ -4,7 +4,7 @@
4.0.0
io.mosip.kernel
kernel-biometrics-api
- 1.2.0.1-SNAPSHOT
+ 1.2.0.1
kernel-biometrics-api
biometrics api definitions
https://github.com/mosip/commons
@@ -220,7 +220,7 @@
1.8.12
1.4.2
1.4.2
- 1.2.0.1-SNAPSHOT
+ 1.2.0.1
0.8.5
diff --git a/kernel/kernel-biosdk-provider/.gitignore b/kernel/kernel-biosdk-provider/.gitignore
deleted file mode 100644
index 579a0cbf9b9..00000000000
--- a/kernel/kernel-biosdk-provider/.gitignore
+++ /dev/null
@@ -1,31 +0,0 @@
-*.class
-.mtj.tmp/
-*.war
-*.ear
-hs_err_pid*
-target/
-.springBeans
-.metadata
-.factorypath
-.classpath
-.project
-.settings/
-.vertx/
-bin/
-tmp/
-logs/
-*.tmp
-*.bak
-*.swp
-*~.nib
-local.properties
-.loadpath
-.DS_Store
-test.txt
-.idea/
-.settings/
-.sonarlint/
-.recommenders/
-/.recommenders/
-**/*.iml
-.vscode
diff --git a/kernel/kernel-biosdk-provider/README.md b/kernel/kernel-biosdk-provider/README.md
deleted file mode 100644
index f05d123b031..00000000000
--- a/kernel/kernel-biosdk-provider/README.md
+++ /dev/null
@@ -1,7 +0,0 @@
-# Kernel Biosdk Provider
-
-## Overview
-This library provide factory funtion to get different bio providers for different sdks.
-
-## Technical features
-- Provides functions related to biometric modalities such as verify, template extraction, quality calculation.
diff --git a/kernel/kernel-biosdk-provider/pom.xml b/kernel/kernel-biosdk-provider/pom.xml
deleted file mode 100644
index 52a68a33d4c..00000000000
--- a/kernel/kernel-biosdk-provider/pom.xml
+++ /dev/null
@@ -1,364 +0,0 @@
-
- 4.0.0
-
- kernel-biosdk-provider
- io.mosip.kernel
- 1.2.0.1-SNAPSHOT
- kernel-biosdk-provider
- Implementation of biosdk provider
- https://github.com/mosip/commons
-
- jar
-
-
- UTF-8
-
-
- 11
- 11
- 3.8.0
-
-
- 3.0.2
- 3.1.0
-
-
- 3.2.0
- 2.3
-
-
- 2.0.2.RELEASE
- 2.0.7.RELEASE
- 5.0.5.RELEASE
- 2.0.4.RELEASE
-
-
- 2.0.7
- 1.5.21
- 2.9.2
-
-
- 3.6.2
- 3.7.0
-
-
-
- 1.2
- 3.0.0
- 1.3
- 2.2
- 2.0.1.Final
- 2.2.6
-
-
- 1.4.197
- 5.1.46
- 42.2.2
- 2.5.0
- 5.2.17.Final
- 6.0.12.Final
-
-
- 4.12
- 2.23.4
- 1.7.4
- 2.0.7
-
-
- 1.2.3
- 1.7.19
- 1.7.7
- 1.7.25
-
-
- 2.9.5
- 2.9.8
- 2.9.6
- 20180130
- 2.2.10
- 20180813
- 1.1.1
-
-
- 3.6.1
- 3.7
- 2.6
- 1.11
- 4.3
- 1.9.2
- 2.2
- 4.5.6
- 19.0
- 1.18.8
- 0.1.54
- 1.4.0
- 7.1.0
- 2.0.0
- 5.5.13
- 2.3.23
- 1.7
- 2.0
- 1.5.2
- 2.1.1
- 1.66
- 63.1
- 1.0.0
- 3.3.3
- 3.1.0
- 4.1.0-incubating
- 1.11.368
- 0.2.4
- 2.3.0
- 3.0.1
- 1.9.12
- 0.6.0
- 2.0.0.AM2
- 1.8.12
- 1.4.2
- 1.4.2
- 1.2.0.1-SNAPSHOT
- 1.2.0.1-SNAPSHOT
- 1.2.0.1-SNAPSHOT
- 0.8.5
-
-
-
-
- org.springframework.boot
- spring-boot-starter
- ${spring.boot.version}
-
-
- org.springframework.boot
- spring-boot-starter-web
- ${spring.boot.version}
-
-
- com.fasterxml.jackson.core
- jackson-databind
-
-
-
-
- org.springframework.boot
- spring-boot-starter-test
- ${spring.boot.version}
-
-
- io.mosip.kernel
- kernel-biometrics-api
- ${kernel.biometricsdk.version}
-
-
- com.fasterxml.jackson.core
- jackson-databind
-
-
-
-
- io.mosip.kernel
- kernel-core
- ${kernel.core.version}
-
-
-
- io.mosip.kernel
- kernel-logger-logback
- ${kernel.logger.logback.version}
-
-
- com.fasterxml.jackson.core
- jackson-databind
-
-
-
-
-
- org.projectlombok
- lombok
- ${lombok.version}
- compile
-
-
-
-
-
- ossrh
- https://oss.sonatype.org/content/repositories/snapshots
-
-
- ossrh
- https://oss.sonatype.org/service/local/staging/deploy/maven2/
-
-
-
-
-
- maven-deploy-plugin
- 2.8.1
-
-
- default-deploy
- deploy
-
- deploy
-
-
-
-
-
- org.sonatype.plugins
- nexus-staging-maven-plugin
- 1.6.7
- true
-
-
- default-deploy
- deploy
-
- deploy
-
-
-
-
- ossrh
- https://oss.sonatype.org/
- false
-
-
-
-
- org.apache.maven.plugins
- maven-source-plugin
- true
- 2.2.1
-
-
- attach-sources
-
- jar-no-fork
-
-
-
-
-
-
- org.apache.maven.plugins
- maven-javadoc-plugin
- 3.2.0
-
-
- attach-javadocs
-
- jar
-
-
-
-
- none
-
-
-
- org.apache.maven.plugins
- maven-gpg-plugin
- 1.5
-
-
- sign-artifacts
- verify
-
- sign
-
-
-
- --pinentry-mode
- loopback
-
-
-
-
-
-
- pl.project13.maven
- git-commit-id-plugin
- 3.0.1
-
-
- get-the-git-infos
-
- revision
-
- validate
-
-
-
- true
- ${project.build.outputDirectory}/git.properties
-
- ^git.build.(time|version)$
- ^git.commit.id.(abbrev|full)$
-
- full
- ${project.basedir}/.git
-
-
-
-
- org.apache.maven.plugins
- maven-surefire-plugin
- 2.22.0
-
- false
- false
-
- ${argLine} --add-opens
- java.xml/jdk.xml.internal=ALL-UNNAMED
- --illegal-access=permit
-
-
-
-
- org.jacoco
- jacoco-maven-plugin
- ${jacoco.maven.plugin.version}
-
-
-
- prepare-agent
-
-
-
- report
- prepare-package
-
- report
-
-
-
-
-
-
-
- scm:git:git://github.com/mosip/commons.git
- scm:git:ssh://github.com:mosip/commons.git
- https://github.com/mosip/commons
- HEAD
-
-
-
- MPL 2.0
- https://www.mozilla.org/en-US/MPL/2.0/
-
-
-
-
- Mosip
- mosip.emailnotifier@gmail.com
- io.mosip
- https://github.com/mosip/commons
-
-
-
-
diff --git a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/factory/BioAPIFactory.java b/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/factory/BioAPIFactory.java
deleted file mode 100644
index 16dc676a3b2..00000000000
--- a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/factory/BioAPIFactory.java
+++ /dev/null
@@ -1,151 +0,0 @@
-package io.mosip.kernel.biosdk.provider.factory;
-
-import java.util.*;
-
-import javax.annotation.PostConstruct;
-
-import lombok.Getter;
-import lombok.Setter;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.context.properties.ConfigurationProperties;
-import org.springframework.core.env.Environment;
-import org.springframework.stereotype.Component;
-
-import io.mosip.kernel.biometrics.constant.BiometricFunction;
-import io.mosip.kernel.biometrics.constant.BiometricType;
-import io.mosip.kernel.biosdk.provider.spi.iBioProviderApi;
-import io.mosip.kernel.biosdk.provider.util.BioSDKProviderLoggerFactory;
-import io.mosip.kernel.biosdk.provider.util.ErrorCode;
-import io.mosip.kernel.core.bioapi.exception.BiometricException;
-import io.mosip.kernel.core.logger.spi.Logger;
-
-@ConfigurationProperties(prefix = "mosip.biometric.sdk.providers")
-@Component
-public class BioAPIFactory {
-
- private static final Logger LOGGER = BioSDKProviderLoggerFactory.getLogger(BioAPIFactory.class);
-
- @Getter
- @Setter
- private Map> finger;
-
- @Getter
- @Setter
- private Map> iris;
-
- @Getter
- @Setter
- private Map> face;
-
- @Autowired
- private List providerApis;
-
- private Map> providerRegistry = new HashMap<>();
-
- /**
- *
- * @throws BiometricException
- */
- @PostConstruct
- public void initializeBioAPIProviders() throws BiometricException {
- if(providerApis == null || providerApis.isEmpty()) {
- throw new BiometricException(ErrorCode.NO_PROVIDERS.getErrorCode(), ErrorCode.NO_PROVIDERS.getErrorMessage());
- }
-
- List vendorIds = new ArrayList<>();
- vendorIds.addAll(this.finger == null ? Collections.EMPTY_LIST : this.finger.keySet());
- vendorIds.addAll(this.iris == null ? Collections.EMPTY_LIST : this.iris.keySet());
- vendorIds.addAll(this.face == null ? Collections.EMPTY_LIST : this.face.keySet());
-
- for (String vendorId : new HashSet<>(vendorIds)) {
-
- if(isProviderRegistryFilled()) {
- LOGGER.info("Provider registry is already filled : {}", providerRegistry.keySet());
- break;
- }
-
- Map> params = new HashMap<>();
- params.put(BiometricType.FINGER, getFingerEntry(vendorId));
- params.put(BiometricType.IRIS, getIrisEntry(vendorId));
- params.put(BiometricType.FACE, getFaceEntry(vendorId));
-
- LOGGER.info("Starting initialization for vendor {} with params >> {}", vendorId, params);
-
- if(params.isEmpty())
- throw new BiometricException(ErrorCode.NO_SDK_CONFIG.getErrorCode(), ErrorCode.NO_SDK_CONFIG.getErrorMessage());
-
- //pass params per modality to each provider, each providers will initialize supported SDK's
- for(iBioProviderApi provider : providerApis) {
- try {
- Map> supportedModalities = provider.init(params);
- if(supportedModalities != null && !supportedModalities.isEmpty()) {
- supportedModalities.forEach((modality, functions) -> {
- functions.forEach(function -> {
- addToRegistry(modality, function, provider);
- });
- });
- }
- } catch (BiometricException ex) {
- LOGGER.error("Failed to initialize SDK instance", ex);
- }
- }
- }
-
- if(!isProviderRegistryFilled())
- throw new BiometricException(ErrorCode.SDK_REGISTRY_EMPTY.getErrorCode(),
- ErrorCode.SDK_REGISTRY_EMPTY.getErrorMessage());
- }
-
- /**
- * Returns BioAPIProvider for provided modality and Function
- * @param modality
- * @param biometricFunction
- * @return
- * @throws BiometricException
- */
- public iBioProviderApi getBioProvider(BiometricType modality, BiometricFunction biometricFunction) throws BiometricException {
- if(providerRegistry.get(modality) != null && providerRegistry.get(modality).get(biometricFunction) != null)
- return providerRegistry.get(modality).get(biometricFunction);
-
- throw new BiometricException(ErrorCode.NO_PROVIDERS.getErrorCode(), ErrorCode.NO_PROVIDERS.getErrorMessage());
- }
-
- private void addToRegistry(BiometricType modality, BiometricFunction function, iBioProviderApi provider) {
- if(providerRegistry.get(modality) == null)
- providerRegistry.put(modality, new HashMap<>());
-
- providerRegistry.get(modality).put(function, provider);
- }
-
- private boolean isProviderRegistryFilled() {
- if(isModalityConfigured(BiometricType.FINGER) && !providerRegistry.containsKey(BiometricType.FINGER))
- return false;
-
- if(isModalityConfigured(BiometricType.IRIS) && !providerRegistry.containsKey(BiometricType.IRIS))
- return false;
-
- if(isModalityConfigured(BiometricType.FACE) && !providerRegistry.containsKey(BiometricType.FACE))
- return false;
-
- return true;
- }
-
- private boolean isModalityConfigured(BiometricType modality) {
- switch (modality) {
- case FINGER: return this.finger != null && !this.finger.isEmpty();
- case IRIS: return this.iris != null && !this.iris.isEmpty();
- case FACE: return this.face != null && !this.face.isEmpty();
- }
- return false;
- }
-
- private Map getFingerEntry(String key) {
- return this.finger == null ? Collections.EMPTY_MAP : this.finger.getOrDefault(key, Collections.EMPTY_MAP);
- }
- private Map getIrisEntry(String key) {
- return this.iris == null ? Collections.EMPTY_MAP : this.iris.getOrDefault(key, Collections.EMPTY_MAP);
- }
- private Map getFaceEntry(String key) {
- return this.face == null ? Collections.EMPTY_MAP : this.face.getOrDefault(key, Collections.EMPTY_MAP);
- }
-}
diff --git a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/impl/BioProviderImpl_V_0_7.java b/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/impl/BioProviderImpl_V_0_7.java
deleted file mode 100644
index 946d6b799a8..00000000000
--- a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/impl/BioProviderImpl_V_0_7.java
+++ /dev/null
@@ -1,321 +0,0 @@
-package io.mosip.kernel.biosdk.provider.impl;
-
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Objects;
-import java.util.Optional;
-import java.util.OptionalDouble;
-import java.util.stream.Collectors;
-import java.util.stream.LongStream;
-
-import org.springframework.data.util.ReflectionUtils;
-import org.springframework.stereotype.Component;
-
-import io.mosip.kernel.biometrics.constant.BiometricFunction;
-import io.mosip.kernel.biometrics.constant.BiometricType;
-import io.mosip.kernel.biometrics.entities.BIR;
-import io.mosip.kernel.biosdk.provider.spi.iBioProviderApi;
-import io.mosip.kernel.biosdk.provider.util.BioProviderUtil;
-import io.mosip.kernel.biosdk.provider.util.BioSDKProviderLoggerFactory;
-import io.mosip.kernel.biosdk.provider.util.ProviderConstants;
-import io.mosip.kernel.core.bioapi.exception.BiometricException;
-import io.mosip.kernel.core.bioapi.model.CompositeScore;
-import io.mosip.kernel.core.bioapi.model.KeyValuePair;
-import io.mosip.kernel.core.bioapi.model.QualityScore;
-import io.mosip.kernel.core.bioapi.model.Score;
-import io.mosip.kernel.core.exception.ExceptionUtils;
-import io.mosip.kernel.core.logger.spi.Logger;
-
-@Component
-public class BioProviderImpl_V_0_7 implements iBioProviderApi {
-
- private static final Logger LOGGER = BioSDKProviderLoggerFactory.getLogger(BioProviderImpl_V_0_7.class);
-
- private static final String METHOD_NAME_KEY = "_METHOD_NAME";
- private static final String THRESHOLD_KEY = "_THRESHOLD";
- private static final String API_VERSION = "0.7";
-
- private Map sdkRegistry = new HashMap<>();
- private Map thresholds = new HashMap<>();
-
-
- @Override
- public Map> init(Map> params)
- throws BiometricException {
- for(BiometricType modality : params.keySet()) {
- Map modalityParams = params.get(modality);
-
- //check if version matches supported API version of this provider
- if(modalityParams != null && !modalityParams.isEmpty()
- && API_VERSION.equals(modalityParams.get(ProviderConstants.VERSION))) {
- Object instance = BioProviderUtil.getSDKInstance(modalityParams);
- addToRegistry(instance, modality);
- thresholds.put(modality, modalityParams.getOrDefault(ProviderConstants.THRESHOLD, "60"));
- }
- }
- return getSupportedModalities();
- }
-
- /*
- * compositeMatch --> is intended to be used for match on multiple modalities
- * NOte: compositeMatch should not be used on multiple segments of same modality
- */
- @Override
- public boolean verify(List sample, List record, BiometricType modality, Map flags) {
- LOGGER.info(ProviderConstants.LOGGER_SESSIONID, ProviderConstants.LOGGER_IDTYPE, "verify invoked",
- "modality >>> " + modality);
-
- if(Objects.isNull(flags)) { flags = new HashMap<>(); }
-
- String methodName = flags.getOrDefault(METHOD_NAME_KEY, "match");
- String threshold = flags.getOrDefault(THRESHOLD_KEY, thresholds.getOrDefault(modality, "60"));
-
- sample = sample.stream().filter(obj -> modality == BiometricType.fromValue(obj.getBdbInfo().getType().get(0).value()))
- .collect(Collectors.toList());
-
- record = record.stream().filter(obj -> modality == BiometricType.fromValue(obj.getBdbInfo().getType().get(0).value()))
- .collect(Collectors.toList());
-
- switch (methodName) {
- case "match":
- return getSDKMatchResult(sample, record.toArray(new BIR[record.size()]), modality, flags, threshold);
-
- case "compositeMatch":
- return getSDKCompositeMatchResult(sample, record.toArray(new BIR[record.size()]), modality, flags, threshold);
- }
-
- return false;
- }
-
- @Override
- public Map identify(List sample, Map> gallery, BiometricType modality,
- Map flags) {
- LOGGER.info(ProviderConstants.LOGGER_SESSIONID, ProviderConstants.LOGGER_IDTYPE, "identify invoked",
- "modality >>> " + modality);
-
- if(Objects.isNull(flags)) { flags = new HashMap<>(); }
-
- String methodName = flags.getOrDefault(METHOD_NAME_KEY, "compositeMatch");
- String threshold = flags.getOrDefault(THRESHOLD_KEY, thresholds.getOrDefault(modality, "60"));
-
- sample = sample.stream().filter(obj -> modality == BiometricType.fromValue(obj.getBdbInfo().getType().get(0).value()))
- .collect(Collectors.toList());
-
- Map result = new HashMap<>();
- for(Entry> entry : gallery.entrySet()) {
-
- if(Objects.nonNull(entry.getValue())) {
-
- List record = entry.getValue().stream().filter(obj -> modality == BiometricType.fromValue(obj.getBdbInfo().getType().get(0).value()))
- .collect(Collectors.toList());
-
- switch (methodName) {
- case "match":
- result.put(entry.getKey(), getSDKMatchResult(sample, record.toArray(new BIR[record.size()]),
- modality, flags, threshold));
- break;
-
- case "compositeMatch":
- result.put(entry.getKey(), getSDKCompositeMatchResult(sample, record.toArray(new BIR[record.size()]),
- modality, flags, threshold));
- break;
- }
- }
- }
- return result;
- }
-
- //QualityScore checkQuality(BIR sample, KeyValuePair[] flags)
- @Override
- public float[] getSegmentQuality(BIR[] sample, Map flags) {
- float[] scores = new float[sample.length];
- for(int i =0; i< sample.length; i++) {
- BiometricType modality = BiometricType.fromValue(sample[i].getBdbInfo().getType().get(0).value());
- Method method = ReflectionUtils.findRequiredMethod(this.sdkRegistry.get(modality).getClass(),
- "checkQuality", BIR.class, KeyValuePair[].class);
- method.setAccessible(true);
-
- if(Objects.nonNull(method)) {
- try {
- Object response = method.invoke(this.sdkRegistry.get(modality), sample[i], getKeyValuePairs(flags));
- if(Objects.nonNull(response)) {
- QualityScore qualityScore = (QualityScore) response;
- scores[i] = qualityScore.getInternalScore();
- }
- } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
- LOGGER.error(ProviderConstants.LOGGER_SESSIONID, ProviderConstants.LOGGER_IDTYPE,
- "getSegmentQuality invoked", ExceptionUtils.getStackTrace(e));
- }
- }
- else
- scores[i] = 0;
- }
-
- return scores;
- }
-
- //QualityScore checkQuality(BIR sample, KeyValuePair[] flags)
- @Override
- public Map getModalityQuality(BIR[] sample, Map flags) {
- Map result = new HashMap<>();
- for(BIR bir : sample) {
- BiometricType modality = BiometricType.fromValue(bir.getBdbInfo().getType().get(0).value());
- Method method = ReflectionUtils.findRequiredMethod(this.sdkRegistry.get(modality).getClass(),
- "checkQuality", BIR.class, KeyValuePair[].class);
- method.setAccessible(true);
-
- if(Objects.nonNull(method)) {
- try {
- Object response = method.invoke(this.sdkRegistry.get(modality), bir, getKeyValuePairs(flags));
- if(Objects.nonNull(response)) {
- QualityScore qualityScore = (QualityScore) response;
- result.computeIfAbsent(modality, k -> LongStream.builder()).add(qualityScore.getInternalScore());
- }
- } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
- LOGGER.error(ProviderConstants.LOGGER_SESSIONID, ProviderConstants.LOGGER_IDTYPE,
- "getModalityQuality invoked", ExceptionUtils.getStackTrace(e));
- }
- }
- }
-
- Map finalResult = new HashMap();
- result.forEach((k , v) -> {
- OptionalDouble avg = v.build().average();
- if(avg.isPresent())
- finalResult.put(k, (float) avg.getAsDouble());
- });
- return finalResult;
- }
-
- //BIR extractTemplate(BIR paramBIR, KeyValuePair[] paramArrayOfKeyValuePair)
- @Override
- public List extractTemplate(List sample, Map flags) {
- List extracts = new ArrayList<>();
- for(BIR bir : sample) {
- BiometricType modality = BiometricType.fromValue(bir.getBdbInfo().getType().get(0).value());
- Method method = ReflectionUtils.findRequiredMethod(this.sdkRegistry.get(modality).getClass(),
- "extractTemplate", BIR.class, KeyValuePair[].class);
- method.setAccessible(true);
-
- if(Objects.nonNull(method)) {
- try {
- Object response = method.invoke(this.sdkRegistry.get(modality), bir, getKeyValuePairs(flags));
- extracts.add(Objects.nonNull(response) ? (BIR) response : null);
-
- } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
- LOGGER.error(ProviderConstants.LOGGER_SESSIONID, ProviderConstants.LOGGER_IDTYPE,
- "extractTemplate invoked", ExceptionUtils.getStackTrace(e));
- }
- }
- }
- return extracts;
- }
-
- //Score[] match(BIR sample, BIR[] gallery, KeyValuePair[] flags)
- private boolean getSDKMatchResult(List sample, BIR[] record, BiometricType modality, Map flags,
- String threshold) {
-
- Method method = ReflectionUtils.findRequiredMethod(this.sdkRegistry.get(modality).getClass(), "match",
- BIR.class, BIR[].class, KeyValuePair[].class);
- method.setAccessible(true);
-
- boolean isMatched = false;
- //TODO check for duplicate segment in sample. will SDK handle it or should this be handled in provider ?
-
- if(Objects.nonNull(method)) {
- LOGGER.debug(ProviderConstants.LOGGER_SESSIONID, ProviderConstants.LOGGER_IDTYPE, "verify invoked", "Match method found");
-
- LongStream.Builder scaleScores = LongStream.builder();
- for(int i=0;i result = Arrays.stream(scores)
- .max((s1, s2) -> (int) (s1.getScaleScore() - s2.getScaleScore()));
- scaleScores.add(result.isPresent() ? (long) result.get().getScaleScore() : 0L);
- }
-
- } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
- LOGGER.error(ProviderConstants.LOGGER_SESSIONID, ProviderConstants.LOGGER_IDTYPE,
- "getSDKMatchResult invoked", ExceptionUtils.getStackTrace(e));
- }
- }
- OptionalDouble result = scaleScores.build().average();
- isMatched = ( result.isPresent() && result.getAsDouble() >= Float.valueOf(threshold) ) ? true : false;
- }
- return isMatched;
- }
-
-
- //CompositeScore compositeMatch(BIR[] sampleList, BIR[] recordList, KeyValuePair[] flags)
- private boolean getSDKCompositeMatchResult(List sample, BIR[] record, BiometricType modality, Map flags,
- String threshold) {
- Method method = ReflectionUtils.findRequiredMethod(this.sdkRegistry.get(modality).getClass(), "compositeMatch",
- BIR[].class, BIR[].class, KeyValuePair[].class);
- method.setAccessible(true);
-
- boolean isMatched = false;
- if(Objects.nonNull(method)) {
- LOGGER.debug(ProviderConstants.LOGGER_SESSIONID, ProviderConstants.LOGGER_IDTYPE, "verify invoked", "CompositeMatch method found");
-
- try {
- Object response = method.invoke(this.sdkRegistry.get(modality), sample.toArray(new BIR[sample.size()]),
- record, getKeyValuePairs(flags));
-
- if( Objects.nonNull(response) ) {
- CompositeScore compositeScore = (CompositeScore) response;
- if(compositeScore.getScaledScore() >= Float.valueOf(threshold))
- isMatched = true;
- }
-
- } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
- LOGGER.error(ProviderConstants.LOGGER_SESSIONID, ProviderConstants.LOGGER_IDTYPE,
- "getSDKCompositeMatchResult invoked", ExceptionUtils.getStackTrace(e));
- }
- }
- return isMatched;
- }
-
-
-
- private Map> getSupportedModalities() {
- Map> result = new HashMap<>();
- sdkRegistry.forEach((modality, map) -> {
- result.put(modality, Arrays.asList(BiometricFunction.values()));
- });
- return result;
- }
-
- private void addToRegistry(Object sdkInstance, BiometricType modality) {
- sdkRegistry.put(modality, sdkInstance);
- }
-
-
- private KeyValuePair[] getKeyValuePairs(Map flags) {
- if(flags == null)
- return null;
-
- flags.remove(METHOD_NAME_KEY);
- flags.remove(THRESHOLD_KEY);
-
- int i=0;
- KeyValuePair kvp[] = new KeyValuePair[flags.size()];
- for(String k : flags.keySet()){
- KeyValuePair keyValuePair = new KeyValuePair();
- keyValuePair.setKey(k);
- keyValuePair.setValue(flags.get(k));
- kvp[i++] = keyValuePair;
- }
- return kvp;
- }
-
-}
diff --git a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/impl/BioProviderImpl_V_0_8.java b/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/impl/BioProviderImpl_V_0_8.java
deleted file mode 100644
index 81b833fecc6..00000000000
--- a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/impl/BioProviderImpl_V_0_8.java
+++ /dev/null
@@ -1,199 +0,0 @@
-package io.mosip.kernel.biosdk.provider.impl;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-
-import org.springframework.stereotype.Component;
-
-import io.mosip.kernel.biometrics.constant.BiometricFunction;
-import io.mosip.kernel.biometrics.constant.BiometricType;
-import io.mosip.kernel.biometrics.entities.BIR;
-import io.mosip.kernel.biosdk.provider.spi.iBioProviderApi;
-import io.mosip.kernel.biosdk.provider.util.BIRConverter;
-import io.mosip.kernel.biosdk.provider.util.BioProviderUtil;
-import io.mosip.kernel.biosdk.provider.util.ProviderConstants;
-import io.mosip.kernel.core.bioapi.exception.BiometricException;
-import io.mosip.kernel.core.bioapi.model.KeyValuePair;
-import io.mosip.kernel.core.bioapi.model.MatchDecision;
-import io.mosip.kernel.core.bioapi.model.QualityScore;
-import io.mosip.kernel.core.bioapi.model.Response;
-import io.mosip.kernel.core.bioapi.spi.IBioApi;
-
-
-@Component
-public class BioProviderImpl_V_0_8 implements iBioProviderApi {
-
- private static final String API_VERSION = "0.8";
- private Map> sdkRegistry = new HashMap<>();
- //TODO - as sdk instance is heavy (around 2GB), rethink on the way of reusing the instances
-
- @Override
- public Map> init(Map> params)
- throws BiometricException {
- for(BiometricType modality : params.keySet()) {
- Map modalityParams = params.get(modality);
-
- //check if version matches supported API version of this provider
- if(modalityParams != null && !modalityParams.isEmpty()
- && API_VERSION.equals(modalityParams.get(ProviderConstants.VERSION))) {
- IBioApi iBioApi = (IBioApi) BioProviderUtil.getSDKInstance(modalityParams);
- addToRegistry(iBioApi, modality);
- }
- }
- return getSupportedModalities();
- }
-
- @Override
- public boolean verify(List sample, List record, BiometricType modality, Map flags) {
- sample = sample.stream().filter(obj -> modality == BiometricType.fromValue(obj.getBdbInfo().getType().get(0).value()))
- .collect(Collectors.toList());
- return match("AUTH", sample, record.toArray(new BIR[record.size()]), modality, flags);
- }
-
- @Override
- public Map identify(List sample, Map> gallery, BiometricType modality,
- Map flags) {
- Map result = new HashMap<>();
-
- sample = sample.stream().filter(obj -> modality == BiometricType.fromValue(obj.getBdbInfo().getType().get(0).value()))
- .collect(Collectors.toList());
-
- for(String key : gallery.keySet()) {
- result.put(key, match("DEDUPE", sample, gallery.get(key).toArray(new BIR[gallery.get(key).size()]), modality, flags));
- }
- return result;
- }
-
- @Override
- public float[] getSegmentQuality(BIR[] sample, Map flags) {
- float score[] = new float[sample.length];
- for(int i=0; i< sample.length; i++) {
- Response response = sdkRegistry.get(BiometricType.fromValue(sample[i].getBdbInfo().getType().get(0).value())).
- get(BiometricFunction.QUALITY_CHECK).checkQuality(BIRConverter.convertToBIR(sample[i]), getKeyValuePairs(flags));
-
- score[i] = isSuccessResponse(response) ? response.getResponse().getScore() : 0;
- //TODO log the analytics info
- }
- return score;
- }
-
- @Override
- public Map getModalityQuality(BIR[] sample, Map flags) {
- Map> scoresByModality = new HashMap<>();
- for(int i=0; i< sample.length; i++) {
- BiometricType modality = BiometricType.fromValue(sample[i].getBdbInfo().getType().get(0).value());
- Response response = sdkRegistry.get(modality).get(BiometricFunction.QUALITY_CHECK)
- .checkQuality(BIRConverter.convertToBIR(sample[i]), getKeyValuePairs(flags));
-
- if(!scoresByModality.containsKey(modality))
- scoresByModality.put(modality, new ArrayList<>());
-
- scoresByModality.get(modality).add(isSuccessResponse(response) ? response.getResponse().getScore() : 0);
- //TODO log the analytics info
- }
-
- Map result = new HashMap<>();
- scoresByModality.forEach((modality, scores) -> {
- result.put(modality, (float) scores.stream().mapToDouble(s -> s).average().getAsDouble());
- });
- return result;
- }
-
- @Override
- public List extractTemplate(List sample, Map flags) {
- List templates = new LinkedList<>();
- for(BIR bir : sample) {
- Response response = sdkRegistry.get(BiometricType.fromValue(bir.getBdbInfo().getType().get(0).value())).
- get(BiometricFunction.EXTRACT).extractTemplate(BIRConverter.convertToBIR(bir), getKeyValuePairs(flags));
- templates.add(isSuccessResponse(response) ? BIRConverter.convertToBiometricRecordBIR(response.getResponse()) : null);
- }
- return templates;
- }
-
-
- private boolean match(String operation, List sample, BIR[] record, BiometricType modality, Map flags) {
- List result = new LinkedList<>();
- io.mosip.kernel.core.cbeffutil.entity.BIR[] recordBIR= new io.mosip.kernel.core.cbeffutil.entity.BIR[record.length];
- for (int i = 0; i < record.length; i++) {
- recordBIR[i] = BIRConverter.convertToBIR(record[i]);
-
- }
- for(int i=0; i< sample.size(); i++) {
- Response response = sdkRegistry.get(modality).
- get(BiometricFunction.MATCH).match(BIRConverter.convertToBIR(sample.get(i)), recordBIR, getKeyValuePairs(flags));
-
- result.add(isSuccessResponse(response) ? response.getResponse() : null);
- }
-
- return evaluateMatchDecision(operation, sample, result);
- }
-
- private void addToRegistry(IBioApi iBioApi, BiometricType modality) {
- sdkRegistry.computeIfAbsent(modality, k -> new HashMap<>()).put(BiometricFunction.EXTRACT, iBioApi);
- sdkRegistry.computeIfAbsent(modality, k -> new HashMap<>()).put(BiometricFunction.QUALITY_CHECK, iBioApi);
- sdkRegistry.computeIfAbsent(modality, k -> new HashMap<>()).put(BiometricFunction.MATCH, iBioApi);
- sdkRegistry.computeIfAbsent(modality, k -> new HashMap<>()).put(BiometricFunction.SEGMENT, iBioApi);
- }
-
- private Map> getSupportedModalities() {
- Map> result = new HashMap<>();
- sdkRegistry.forEach((modality, map) -> {
- if(result.get(modality) == null)
- result.put(modality, new ArrayList());
-
- result.get(modality).addAll(map.keySet());
- });
- return result;
- }
-
- private boolean isSuccessResponse(Response> response) {
- if(response != null && response.getStatusCode() >= 200
- && response.getStatusCode() <= 299 && response.getResponse() != null)
- return true;
- return false;
- }
-
- //TODO matching strategy based on caller (auth / dedupe)
- private boolean evaluateMatchDecision(String operation, List sample, List result) {
- int segmentCount = sample.size();
- result = result.stream().filter(decision -> decision != null).collect(Collectors.toList());
-
- switch (operation) {
- case "AUTH":
- if(result.size() < segmentCount)
- return false;
-
- return result.stream().allMatch(decision -> Arrays.stream(decision).anyMatch(d -> d.isMatch()));
-
- case "DEDUPE":
-
- return result.stream().anyMatch(decision -> Arrays.stream(decision).anyMatch(d -> d.isMatch()));
-
- }
- return false;
- }
-
- private KeyValuePair[] getKeyValuePairs(Map flags) {
- if(flags == null)
- return null;
-
- int i=0;
- KeyValuePair kvp[] = new KeyValuePair[flags.size()];
- for(String k : flags.keySet()){
- KeyValuePair keyValuePair = new KeyValuePair();
- keyValuePair.setKey(k);
- keyValuePair.setValue(flags.get(k));
- kvp[i++] = keyValuePair;
- }
- return kvp;
- }
-
-
-
-
-}
\ No newline at end of file
diff --git a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/impl/BioProviderImpl_V_0_9.java b/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/impl/BioProviderImpl_V_0_9.java
deleted file mode 100644
index ab10b2504ae..00000000000
--- a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/impl/BioProviderImpl_V_0_9.java
+++ /dev/null
@@ -1,258 +0,0 @@
-package io.mosip.kernel.biosdk.provider.impl;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
-import io.micrometer.core.annotation.Counted;
-import io.micrometer.core.annotation.Timed;
-import io.mosip.kernel.biosdk.provider.util.BioSDKProviderLoggerFactory;
-import io.mosip.kernel.core.logger.spi.Logger;
-import org.springframework.stereotype.Component;
-
-import io.mosip.kernel.biometrics.constant.BiometricFunction;
-import io.mosip.kernel.biometrics.constant.BiometricType;
-import io.mosip.kernel.biometrics.constant.Match;
-import io.mosip.kernel.biometrics.entities.BIR;
-import io.mosip.kernel.biometrics.entities.BiometricRecord;
-import io.mosip.kernel.biometrics.model.Decision;
-import io.mosip.kernel.biometrics.model.MatchDecision;
-import io.mosip.kernel.biometrics.model.QualityCheck;
-import io.mosip.kernel.biometrics.model.Response;
-import io.mosip.kernel.biometrics.model.SDKInfo;
-import io.mosip.kernel.biometrics.spi.IBioApi;
-import io.mosip.kernel.biosdk.provider.spi.iBioProviderApi;
-import io.mosip.kernel.biosdk.provider.util.BioProviderUtil;
-import io.mosip.kernel.biosdk.provider.util.ErrorCode;
-import io.mosip.kernel.biosdk.provider.util.ProviderConstants;
-import io.mosip.kernel.core.bioapi.exception.BiometricException;
-
-@Component
-public class BioProviderImpl_V_0_9 implements iBioProviderApi {
-
- private static final Logger logger = BioSDKProviderLoggerFactory.getLogger(BioProviderImpl_V_0_9.class);
-
- private static final String API_VERSION = "0.9";
- private final Map> sdkRegistry = new HashMap<>();
-
- @Counted(value = "sdk.count", extraTags = {"api_version", API_VERSION})
- @Timed(value = "sdk.time", extraTags = {"api_version", API_VERSION})
- @Override
- public Map> init(Map> params)
- throws BiometricException {
- for (BiometricType modality : params.keySet()) {
- Map modalityParams = params.get(modality);
-
- // check if version matches supported API version of this provider
- if (modalityParams != null && !modalityParams.isEmpty()
- && API_VERSION.equals(modalityParams.get(ProviderConstants.VERSION))) {
-
- IBioApi iBioApi = (IBioApi) BioProviderUtil.getSDKInstance(modalityParams);
- SDKInfo sdkInfo = iBioApi.init(modalityParams);
-
- // cross check loaded SDK version and configured SDK version
- if (!API_VERSION.equals(sdkInfo.getApiVersion()))
- throw new BiometricException(ErrorCode.INVALID_SDK_VERSION.getErrorCode(),
- ErrorCode.INVALID_SDK_VERSION.getErrorCode());
-
- addToRegistry(sdkInfo, iBioApi, modality);
- }
- }
- return getSupportedModalities();
- }
-
- @Counted(value = "sdk.count", extraTags = {"api_version", API_VERSION})
- @Timed(value = "sdk.time", extraTags = {"api_version", API_VERSION})
- @Override
- public boolean verify(List sample, List record, BiometricType modality, Map flags) {
- BiometricRecord galleryRecord = getBiometricRecord(record.toArray(new BIR[record.size()]));
- Response response = sdkRegistry.get(modality).get(BiometricFunction.MATCH).match(
- getBiometricRecord(sample.toArray(new BIR[sample.size()])), new BiometricRecord[] { galleryRecord },
- Arrays.asList(modality), flags);
-
- if (isSuccessResponse(response)) {
- Map decisions = response.getResponse()[0].getDecisions();
- if (decisions.containsKey(modality)) {
- Match matchResult = decisions.get(modality).getMatch();
- logger.info("AnalyticsInfo : {}, errors : {}", decisions.get(modality).getAnalyticsInfo(),
- decisions.get(modality).getErrors());
- return Match.MATCHED.equals(matchResult);
- }
- }
-
- return false;
- }
-
- @Counted(value = "sdk.count", extraTags = {"api_version", API_VERSION})
- @Timed(value = "sdk.time", extraTags = {"api_version", API_VERSION})
- @Override
- public Map identify(List sample, Map> gallery, BiometricType modality,
- Map flags) {
- Map keyIndexMapping = new HashMap<>();
- BiometricRecord galleryRecords[] = new BiometricRecord[gallery.size()];
- int i = 0;
- for (String key : gallery.keySet()) {
- keyIndexMapping.put(key, i);
- galleryRecords[i++] = getBiometricRecord(gallery.get(key).toArray(new BIR[gallery.get(key).size()]));
- }
-
- Response response = sdkRegistry.get(modality).get(BiometricFunction.MATCH).match(
- getBiometricRecord(sample.toArray(new BIR[sample.size()])), galleryRecords, Arrays.asList(modality),
- flags);
-
- Map result = new HashMap<>();
- if (isSuccessResponse(response)) {
- keyIndexMapping.forEach((key, index) -> {
- if (response.getResponse()[index].getDecisions().containsKey(modality)) {
- result.put(key, Match.MATCHED
- .equals(response.getResponse()[index].getDecisions().get(modality).getMatch()));
- logger.info("AnalyticsInfo : {}, errors : {}",
- response.getResponse()[index].getDecisions().get(modality).getAnalyticsInfo(),
- response.getResponse()[index].getDecisions().get(modality).getErrors());
- } else
- result.put(key, false);
- });
- }
- return result;
- }
-
- @Counted(value = "sdk.count", extraTags = {"api_version", API_VERSION})
- @Timed(value = "sdk.time", extraTags = {"api_version", API_VERSION})
- @Override
- public float[] getSegmentQuality(BIR[] sample, Map flags) {
- float scores[] = new float[sample.length];
- for (int i = 0; i < sample.length; i++) {
- BiometricType modality = BiometricType.fromValue(sample[i].getBdbInfo().getType().get(0).value());
- Response response = sdkRegistry.get(modality).get(BiometricFunction.QUALITY_CHECK)
- .checkQuality(getBiometricRecord(sample[i]), Arrays.asList(modality), flags);
-
- if (isSuccessResponse(response) && response.getResponse().getScores() != null &&
- response.getResponse().getScores().containsKey(modality)) {
- scores[i] = response.getResponse().getScores().get(modality).getScore();
- logger.info("AnalyticsInfo : {}, errors : {}",
- response.getResponse().getScores().get(modality).getAnalyticsInfo(),
- response.getResponse().getScores().get(modality).getErrors());
- } else
- scores[i] = 0;
- }
- return scores;
- }
-
- @Counted(value = "sdk.count", extraTags = {"api_version", API_VERSION})
- @Timed(value = "sdk.time", extraTags = {"api_version", API_VERSION})
- @Override
- public Map getModalityQuality(BIR[] sample, Map flags) {
- Set modalitites = new HashSet<>();
- for (int i = 0; i < sample.length; i++) {
- modalitites.add(BiometricType.fromValue(sample[i].getBdbInfo().getType().get(0).value()));
- }
-
- Map scoreMap = new HashMap<>();
- for (BiometricType modality : modalitites) {
- Response response = sdkRegistry.get(modality).get(BiometricFunction.QUALITY_CHECK)
- .checkQuality(getBiometricRecord(sample), Arrays.asList(modality), flags);
-
- if (isSuccessResponse(response) && response.getResponse().getScores() != null &&
- response.getResponse().getScores().containsKey(modality)) {
- scoreMap.put(modality,response.getResponse().getScores().get(modality).getScore());
- logger.info("AnalyticsInfo : {}, errors : {}",
- response.getResponse().getScores().get(modality).getAnalyticsInfo(),
- response.getResponse().getScores().get(modality).getErrors());
- }
- else { scoreMap.put(modality, 0f); }
- }
-
- float scores[] = new float[sample.length];
- for (int i = 0; i < sample.length; i++) {
- BiometricType modality = BiometricType.fromValue(sample[i].getBdbInfo().getType().get(0).value());
- if (scoreMap.containsKey(modality))
- scores[i] = scoreMap.get(modality);
- else
- scores[i] = 0;
- }
- return scoreMap;
- }
-
- @Counted(value = "sdk.count", extraTags = {"api_version", API_VERSION})
- @Timed(value = "sdk.time", extraTags = {"api_version", API_VERSION})
- @Override
- public List extractTemplate(List sample, Map flags) {
- Map> birsByModality = sample.stream().collect(Collectors.groupingBy(bir -> BiometricType.fromValue(bir
- .getBdbInfo()
- .getType()
- .get(0).value())));
-
- List templates = birsByModality.entrySet().stream()
- .flatMap(entry -> {
- BiometricType modality = entry.getKey();
- List birsForModality = entry.getValue();
-
- BiometricRecord sampleRecord = getBiometricRecord(birsForModality.toArray(new BIR[birsForModality.size()]));
-
- Response response = sdkRegistry
- .get(modality)
- .get(BiometricFunction.EXTRACT).extractTemplate(sampleRecord, List.of(modality), flags);
-
- if(isSuccessResponse(response)) {
- return response.getResponse().getSegments().stream();
- }
-
- return Stream.empty();
- })
- .collect(Collectors.toList());
-
- return templates;
- }
-
- private boolean isSuccessResponse(Response> response) {
- if (response != null && response.getStatusCode() >= 200 && response.getStatusCode() <= 299
- && response.getResponse() != null)
- return true;
- return false;
- }
-
- private void addToRegistry(SDKInfo sdkInfo, IBioApi iBioApi, BiometricType modality) {
- for (BiometricFunction biometricFunction : sdkInfo.getSupportedMethods().keySet()) {
- if (sdkInfo.getSupportedMethods().get(biometricFunction).contains(modality)) {
- if (sdkRegistry.get(modality) == null)
- sdkRegistry.put(modality, new HashMap<>());
-
- sdkRegistry.get(modality).put(biometricFunction, iBioApi);
- }
- logger.info("Successfully registered SDK : {}, BiometricFunction: {}",
- sdkInfo.getProductOwner().getOrganization(), biometricFunction);
- }
- }
-
- private Map> getSupportedModalities() {
- Map> result = new HashMap<>();
- sdkRegistry.forEach((modality, map) -> {
- if (result.get(modality) == null)
- result.put(modality, new ArrayList());
-
- result.get(modality).addAll(map.keySet());
- });
- return result;
- }
-
- // TODO - set cebffversion and version in biometricRecord
- private BiometricRecord getBiometricRecord(BIR[] birs) {
- BiometricRecord biometricRecord = new BiometricRecord();
- biometricRecord.setSegments(Arrays.asList(birs));
- return biometricRecord;
- }
-
- // TODO - set cebffversion and version in biometricRecord
- private BiometricRecord getBiometricRecord(BIR bir) {
- BiometricRecord biometricRecord = new BiometricRecord();
- biometricRecord.getSegments().add(bir);
- return biometricRecord;
- }
-
-}
\ No newline at end of file
diff --git a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/spi/iBioProviderApi.java b/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/spi/iBioProviderApi.java
deleted file mode 100644
index c0059fa3c9f..00000000000
--- a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/spi/iBioProviderApi.java
+++ /dev/null
@@ -1,79 +0,0 @@
-package io.mosip.kernel.biosdk.provider.spi;
-
-import java.util.List;
-import java.util.Map;
-
-import io.mosip.kernel.biometrics.constant.BiometricFunction;
-import io.mosip.kernel.biometrics.constant.BiometricType;
-import io.mosip.kernel.biometrics.entities.BIR;
-import io.mosip.kernel.core.bioapi.exception.BiometricException;
-import io.mosip.kernel.logger.logback.util.MetricTag;
-
-
-public interface iBioProviderApi {
-
- /**
- * loading of SDK based on the provided params
- * and other initialization stuff
- * @param params
- */
- Map> init(Map> params) throws BiometricException;
-
- /**
- * 1:1 match
- *
- * @param sample
- * @param record
- * @param modality
- * @return
- */
- boolean verify(List sample, List record,
- @MetricTag(value = "modality", extractor = "arg.value") BiometricType modality,
- Map flags);
-
- /**
- * 1:n match
- *
- * @param sample
- * @param gallery
- * @param modality
- * @return
- */
- Map identify(List sample, Map> gallery,
- @MetricTag(value = "modality", extractor = "arg.value") BiometricType modality,
- Map flags);
-
-
- /**
- * Score provided by SDK, later should be added in BIR "others" attribute
- * @param sample
- * @return
- */
- float[] getSegmentQuality(@MetricTag(value = "modality",
- extractor = "int size = arg.length; String[] names = new String[size];for(int i=0;i flags);
-
- /**
- *
- * @param sample
- * @return
- */
- Map getModalityQuality(@MetricTag(value = "modality",
- extractor = "int size = arg.length; String[] names = new String[size];for(int i=0;i flags);
-
-
- /**
- *
- * @param sample
- * @return
- */
- List extractTemplate(@MetricTag(value = "modality",
- extractor = "int size = arg.size(); String[] names = new String[size];for(int i=0;i sample, Map flags);
-
-}
diff --git a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/util/BIRConverter.java b/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/util/BIRConverter.java
deleted file mode 100644
index 376740e11f5..00000000000
--- a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/util/BIRConverter.java
+++ /dev/null
@@ -1,126 +0,0 @@
-package io.mosip.kernel.biosdk.provider.util;
-
-import io.mosip.kernel.biometrics.constant.BiometricType;
-import io.mosip.kernel.core.cbeffutil.entity.BDBInfo;
-import io.mosip.kernel.core.cbeffutil.entity.BIR;
-import io.mosip.kernel.core.cbeffutil.entity.BIRInfo;
-import io.mosip.kernel.core.cbeffutil.entity.BIRVersion;
-import io.mosip.kernel.core.cbeffutil.jaxbclasses.*;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Objects;
-
-public class BIRConverter {
-
- public static io.mosip.kernel.biometrics.entities.BIR convertToBiometricRecordBIR(BIR bir) {
- List bioTypes = new ArrayList<>();
- for(SingleType type : bir.getBdbInfo().getType()) {
- bioTypes.add(BiometricType.fromValue(type.value()));
- }
-
- io.mosip.kernel.biometrics.entities.RegistryIDType format = new io.mosip.kernel.biometrics.entities.RegistryIDType(bir.getBdbInfo().getFormat().getOrganization(),
- bir.getBdbInfo().getFormat().getType());
-
- io.mosip.kernel.biometrics.constant.QualityType qualityType;
-
- if(Objects.nonNull(bir.getBdbInfo().getQuality())) {
- io.mosip.kernel.biometrics.entities.RegistryIDType birAlgorithm = new io.mosip.kernel.biometrics.entities.RegistryIDType(
- bir.getBdbInfo().getQuality().getAlgorithm().getOrganization(),
- bir.getBdbInfo().getQuality().getAlgorithm().getType());
-
- qualityType = new io.mosip.kernel.biometrics.constant.QualityType();
- qualityType.setAlgorithm(birAlgorithm);
- qualityType.setQualityCalculationFailed(bir.getBdbInfo().getQuality().getQualityCalculationFailed());
- qualityType.setScore(bir.getBdbInfo().getQuality().getScore());
-
- } else {
- qualityType = null;
- }
-
- io.mosip.kernel.biometrics.entities.VersionType version;
- if(Objects.nonNull(bir.getVersion())) {
- version = new io.mosip.kernel.biometrics.entities.VersionType(bir.getVersion().getMajor(),
- bir.getVersion().getMinor());
- } else {
- version = null;
- }
-
- io.mosip.kernel.biometrics.entities.VersionType cbeffversion;
- if(Objects.nonNull(bir.getCbeffversion())) {
- cbeffversion = new io.mosip.kernel.biometrics.entities.VersionType(bir.getCbeffversion().getMajor(),
- bir.getCbeffversion().getMinor());
- } else {
- cbeffversion = null;
- }
-
- io.mosip.kernel.biometrics.constant.PurposeType purposeType;
- if(Objects.nonNull(bir.getBdbInfo().getPurpose())) {
- purposeType = io.mosip.kernel.biometrics.constant.PurposeType.fromValue(bir.getBdbInfo().getPurpose().name());
- } else {
- purposeType = null;
- }
-
- io.mosip.kernel.biometrics.constant.ProcessedLevelType processedLevelType;
- if(Objects.nonNull(bir.getBdbInfo().getLevel())) {
- processedLevelType = io.mosip.kernel.biometrics.constant.ProcessedLevelType.fromValue(
- bir.getBdbInfo().getLevel().name());
- } else{
- processedLevelType = null;
- }
-
- return new io.mosip.kernel.biometrics.entities.BIR.BIRBuilder()
- .withBdb(bir.getBdb())
- .withVersion(version)
- .withCbeffversion(cbeffversion)
- .withBirInfo(new io.mosip.kernel.biometrics.entities.BIRInfo.BIRInfoBuilder().withIntegrity(true).build())
- .withBdbInfo(new io.mosip.kernel.biometrics.entities.BDBInfo.BDBInfoBuilder()
- .withFormat(format)
- .withType(bioTypes)
- .withQuality(qualityType)
- .withCreationDate(bir.getBdbInfo().getCreationDate())
- .withIndex(bir.getBdbInfo().getIndex())
- .withPurpose(purposeType)
- .withLevel(processedLevelType)
- .withSubtype(bir.getBdbInfo().getSubtype()).build()).build();
- }
-
- public static BIR convertToBIR(io.mosip.kernel.biometrics.entities.BIR bir) {
- List bioTypes = new ArrayList<>();
- for(BiometricType type : bir.getBdbInfo().getType()) {
- bioTypes.add(SingleType.fromValue(type.value()));
- }
-
- RegistryIDType format = new RegistryIDType();
- format.setOrganization(bir.getBdbInfo().getFormat().getOrganization());
- format.setType(bir.getBdbInfo().getFormat().getType());
-
- RegistryIDType birAlgorithm = new RegistryIDType();
- birAlgorithm.setOrganization(bir.getBdbInfo().getQuality().getAlgorithm().getOrganization());
- birAlgorithm.setType(bir.getBdbInfo().getQuality().getAlgorithm().getType());
-
- QualityType qualityType = new QualityType();
- qualityType.setAlgorithm(birAlgorithm);
- qualityType.setQualityCalculationFailed(bir.getBdbInfo().getQuality().getQualityCalculationFailed());
- qualityType.setScore(bir.getBdbInfo().getQuality().getScore());
-
- return new BIR.BIRBuilder()
- .withBdb(bir.getBdb())
- .withVersion(new BIRVersion.BIRVersionBuilder()
- .withMinor(bir.getVersion().getMinor())
- .withMajor(bir.getVersion().getMajor()).build())
- .withCbeffversion(new BIRVersion.BIRVersionBuilder()
- .withMinor(bir.getCbeffversion().getMinor())
- .withMajor(bir.getCbeffversion().getMajor()).build())
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(true).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder()
- .withFormat(format)
- .withType(bioTypes)
- .withQuality(qualityType)
- .withCreationDate(bir.getBdbInfo().getCreationDate())
- .withIndex(bir.getBdbInfo().getIndex())
- .withPurpose(PurposeType.fromValue(bir.getBdbInfo().getPurpose().name()))
- .withLevel(ProcessedLevelType.fromValue(bir.getBdbInfo().getLevel().name()))
- .withSubtype(bir.getBdbInfo().getSubtype()).build()).build();
- }
-}
diff --git a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/util/BioProviderUtil.java b/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/util/BioProviderUtil.java
deleted file mode 100644
index 40e87f6c9cb..00000000000
--- a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/util/BioProviderUtil.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package io.mosip.kernel.biosdk.provider.util;
-
-import java.lang.reflect.Constructor;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Optional;
-import java.util.stream.Collectors;
-
-import org.springframework.data.util.ReflectionUtils;
-
-import io.mosip.kernel.core.bioapi.exception.BiometricException;
-import io.mosip.kernel.core.exception.ExceptionUtils;
-import io.mosip.kernel.core.logger.spi.Logger;
-
-public class BioProviderUtil {
-
- private static Map sdkInstances = new HashMap<>();
-
- private static final Logger LOGGER = BioSDKProviderLoggerFactory.getLogger(BioProviderUtil.class);
-
- public static Object getSDKInstance(Map modalityParams) throws BiometricException {
- try {
- String instanceKey = modalityParams.entrySet().stream().sorted(Map.Entry.comparingByKey())
- .map(entry -> entry.getKey() + "=" + entry.getValue()).collect(Collectors.joining("-"));
- if (sdkInstances.containsKey(instanceKey)) {
- LOGGER.debug("SDK instance reused for modality class >>> {}", modalityParams.get(ProviderConstants.CLASSNAME));
- return sdkInstances.get(instanceKey);
- }
- Class> object = Class.forName(modalityParams.get(ProviderConstants.CLASSNAME));
- Object[] args = new Object[0];
- if (modalityParams.get(ProviderConstants.ARGUMENTS) != null
- && !modalityParams.get(ProviderConstants.ARGUMENTS).isEmpty())
- args = modalityParams.get(ProviderConstants.ARGUMENTS).split(",");
-
- Optional> result = ReflectionUtils.findConstructor(object, args);
- if (result.isPresent()) {
- Constructor> constructor = result.get();
- constructor.setAccessible(true);
- LOGGER.debug("SDK instance created with params >>> {}", modalityParams);
- Object newInstance = constructor.newInstance(args);
- sdkInstances.put(instanceKey, newInstance);
- return newInstance;
- } else {
- throw new BiometricException(ErrorCode.NO_CONSTRUCTOR_FOUND.getErrorCode(),
- String.format(ErrorCode.NO_CONSTRUCTOR_FOUND.getErrorMessage(),
- modalityParams.get(ProviderConstants.CLASSNAME),
- modalityParams.get(ProviderConstants.ARGUMENTS)));
- }
- } catch (Exception e) {
- throw new BiometricException(ErrorCode.SDK_INITIALIZATION_FAILED.getErrorCode(),
- String.format(ErrorCode.SDK_INITIALIZATION_FAILED.getErrorMessage(),
- modalityParams.get(ProviderConstants.CLASSNAME), ExceptionUtils.getStackTrace(e)));
- }
- }
-
-}
diff --git a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/util/BioSDKProviderLoggerFactory.java b/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/util/BioSDKProviderLoggerFactory.java
deleted file mode 100644
index d04fbd18c7a..00000000000
--- a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/util/BioSDKProviderLoggerFactory.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package io.mosip.kernel.biosdk.provider.util;
-
-import io.mosip.kernel.core.logger.spi.Logger;
-import io.mosip.kernel.logger.logback.factory.Logfactory;
-
-
-public class BioSDKProviderLoggerFactory {
-
- /**
- * Instantiates a new bio sdk logger.
- */
- private BioSDKProviderLoggerFactory() {
- }
-
- /**
- * Gets the logger.
- *
- * @param clazz the clazz
- * @return the logger
- */
- public static Logger getLogger(Class> clazz) {
- return Logfactory.getSlf4jLogger(clazz);
- }
-
-}
diff --git a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/util/ErrorCode.java b/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/util/ErrorCode.java
deleted file mode 100644
index 64f3635e872..00000000000
--- a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/util/ErrorCode.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package io.mosip.kernel.biosdk.provider.util;
-
-public enum ErrorCode {
-
- NO_PROVIDERS("BIO_SDK_001", "No Biometric provider API implementations found"),
- SDK_INITIALIZATION_FAILED("BIO_SDK_002", "Failed to initialize %s due to %s"),
- NO_CONSTRUCTOR_FOUND("BIO_SDK_003", "Constructor not found for %s with args %s"),
- NO_SDK_CONFIG("BIO_SDK_004", "SDK Configurations not found"),
- INVALID_SDK_VERSION("BIO_SDK_005", "Configured SDK version is different"),
- UNSUPPORTED_OPERATION("BIO_SDK_006", "Unsupported Operation"),
- SDK_REGISTRY_EMPTY("BIO_SDK_007", "SDK provider registry is empty!");
-
-
- private String errorCode;
- private String errorMessage;
-
- ErrorCode(String errorCode, String errorMessage) {
- this.errorCode = errorCode;
- this.errorMessage = errorMessage;
- }
-
- public String getErrorCode() {
- return errorCode;
- }
-
- public String getErrorMessage() {
- return errorMessage;
- }
-
-}
diff --git a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/util/ProviderConstants.java b/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/util/ProviderConstants.java
deleted file mode 100644
index 9ebc36eb49b..00000000000
--- a/kernel/kernel-biosdk-provider/src/main/java/io/mosip/kernel/biosdk/provider/util/ProviderConstants.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package io.mosip.kernel.biosdk.provider.util;
-
-public class ProviderConstants {
-
- public static final String CLASSNAME = "classname";
- public static final String VERSION = "version";
- public static final String ARGUMENTS = "args";
- public static final String THRESHOLD = "threshold";
-
- public static final String LOGGER_SESSIONID = "BIO-SDK-PROVIDER";
- public static final String LOGGER_IDTYPE = "REGISTRATION / AUTH";
-
-}
diff --git a/kernel/kernel-biosdk-provider/src/test/java/io/mosip/kernel/biosdk/provider/test/BioProviderImpl_V_0_7Test.java b/kernel/kernel-biosdk-provider/src/test/java/io/mosip/kernel/biosdk/provider/test/BioProviderImpl_V_0_7Test.java
deleted file mode 100644
index 9973626dd81..00000000000
--- a/kernel/kernel-biosdk-provider/src/test/java/io/mosip/kernel/biosdk/provider/test/BioProviderImpl_V_0_7Test.java
+++ /dev/null
@@ -1,388 +0,0 @@
-package io.mosip.kernel.biosdk.provider.test;
-
-import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
-
-import java.time.LocalDateTime;
-import java.time.ZoneId;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import io.mosip.kernel.biometrics.constant.BiometricFunction;
-import io.mosip.kernel.biometrics.constant.BiometricType;
-import io.mosip.kernel.biometrics.constant.ProcessedLevelType;
-import io.mosip.kernel.biometrics.constant.PurposeType;
-import io.mosip.kernel.biometrics.constant.QualityType;
-import io.mosip.kernel.biometrics.entities.BDBInfo;
-import io.mosip.kernel.biometrics.entities.BIR;
-import io.mosip.kernel.biometrics.entities.BIRInfo;
-import io.mosip.kernel.biometrics.entities.RegistryIDType;
-import io.mosip.kernel.biometrics.entities.VersionType;
-import io.mosip.kernel.biometrics.model.SDKInfo;
-import io.mosip.kernel.biosdk.provider.impl.BioProviderImpl_V_0_7;
-import io.mosip.kernel.biosdk.provider.spi.iBioProviderApi;
-import io.mosip.kernel.biosdk.provider.util.ProviderConstants;
-import io.mosip.kernel.core.bioapi.exception.BiometricException;
-import io.mosip.kernel.core.cbeffutil.common.CbeffISOReader;
-import io.mosip.kernel.core.cbeffutil.exception.CbeffException;
-
-
-public class BioProviderImpl_V_0_7Test {
-
- private List record;
- private List updateList;
- private List sample;
- private String localpath = "./src/test/resources";
- byte[] rindexFinger = null;
- byte[] rmiddleFinger = null;
- byte[] rringFinger = null;
- byte[] rlittleFinger = null;
- byte[] rightthumb = null;
- byte[] lindexFinger = null;
- byte[] lmiddleFinger = null;
- byte[] lringFinger = null;
- byte[] llittleFinger = null;
- byte[] leftthumb = null;
-
-
-
- @Before
- public void setUp() throws Exception {
-
- rindexFinger = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintRight_Index.iso", "Finger");
- rmiddleFinger = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintRight_Middle.iso",
- "Finger");
- rringFinger = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintRight_Ring.iso",
- "Finger");
- rlittleFinger = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintRight_Little.iso",
- "Finger");
- rightthumb = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintRight_Thumb.iso",
- "Finger");
- lindexFinger = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintLeft_Index.iso",
- "Finger");
- lmiddleFinger = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintLeft_Middle.iso",
- "Finger");
- lringFinger = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintLeft_Ring.iso", "Finger");
- llittleFinger = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintLeft_Little.iso",
- "Finger");
- leftthumb = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintLeft_Thumb.iso", "Finger");
- // byte[] irisImg1 = CbeffISOReader.readISOImage(localpath + "/images/" +
- // "IrisImageRight.iso", "Iris");
- // byte[] irisImg2 = CbeffISOReader.readISOImage(localpath + "/images/" +
- // "IrisImageLeft.iso", "Iris");
- // byte[] faceImg = CbeffISOReader.readISOImage(localpath + "/images/" +
- // "faceImage.iso", "Face");
- RegistryIDType format = new RegistryIDType();
- format.setOrganization("257");
- format.setType("7");
- QualityType Qtype = new QualityType();
- Qtype.setScore(new Long(100));
- RegistryIDType algorithm = new RegistryIDType();
- algorithm.setOrganization("HMAC");
- algorithm.setType("SHA-256");
- Qtype.setAlgorithm(algorithm);
- record = new ArrayList<>();
- sample = new ArrayList<>();
- BIR rIndexFinger = new BIR.BIRBuilder().withBdb(rindexFinger).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Right IndexFinger"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(rIndexFinger);
-
- BIR rMiddleFinger = new BIR.BIRBuilder().withBdb(rmiddleFinger).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("MiddleFinger"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(rMiddleFinger);
-
- BIR rRingFinger = new BIR.BIRBuilder().withBdb(rringFinger).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Right RingFinger"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(rRingFinger);
-
- BIR rLittleFinger = new BIR.BIRBuilder().withBdb(rlittleFinger).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Right LittleFinger"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(rLittleFinger);
-
- BIR lIndexFinger = new BIR.BIRBuilder().withBdb(lindexFinger).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Left IndexFinger"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(lIndexFinger);
-
- BIR lMiddleFinger = new BIR.BIRBuilder().withBdb(lmiddleFinger).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Left MiddleFinger"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(lMiddleFinger);
-
- BIR lRightFinger = new BIR.BIRBuilder().withBdb(lringFinger).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Left RingFinger"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(lRightFinger);
-
- BIR lLittleFinger = new BIR.BIRBuilder().withBdb(llittleFinger).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Left LittleFinger"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(lLittleFinger);
-
- BIR rightThumb = new BIR.BIRBuilder().withBdb(rightthumb).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Right Thumb"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(rightThumb);
-
- BIR leftThumb = new BIR.BIRBuilder().withBdb(leftthumb).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Left Thumb"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(leftThumb);
- sample.addAll(record);
-
- SDKInfo sdkInfo = new SDKInfo("0.7", "1", "MOCKVendor2", "test2");
- sdkInfo.withSupportedMethod(BiometricFunction.MATCH, BiometricType.IRIS);
- sdkInfo.withSupportedMethod(BiometricFunction.EXTRACT, BiometricType.IRIS);
- sdkInfo.withSupportedMethod(BiometricFunction.QUALITY_CHECK, BiometricType.IRIS);
-
- sdkInfo.withSupportedMethod(BiometricFunction.MATCH, BiometricType.FACE);
- sdkInfo.withSupportedMethod(BiometricFunction.EXTRACT, BiometricType.FACE);
- sdkInfo.withSupportedMethod(BiometricFunction.QUALITY_CHECK, BiometricType.FACE);
- }
-
- @Test
- public void initTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_7 = new BioProviderImpl_V_0_7();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.7");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceOne0_7");
- input.put(BiometricType.FINGER, modalityParams);
- Map> map=bioProviderImpl_V_0_7.init(input);
- assertNotNull(map.get(BiometricType.FINGER));
- List biometricFunctions = map.get(BiometricType.FINGER);
- assertTrue(biometricFunctions.contains(BiometricFunction.MATCH));
- assertTrue(biometricFunctions.contains(BiometricFunction.EXTRACT));
- assertTrue(biometricFunctions.contains(BiometricFunction.QUALITY_CHECK));
- }
-
- @Test(expected = BiometricException.class)
- public void initBiometricExceptionTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_7 = new BioProviderImpl_V_0_7();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.7");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.SDKInstanceException");
- input.put(BiometricType.FINGER, modalityParams);
- Map> map=bioProviderImpl_V_0_7.init(input);
- assertNotNull(map.get(BiometricType.FINGER));
- List biometricFunctions = map.get(BiometricType.FINGER);
- assertTrue(biometricFunctions.contains(BiometricFunction.MATCH));
- assertTrue(biometricFunctions.contains(BiometricFunction.EXTRACT));
- assertTrue(biometricFunctions.contains(BiometricFunction.QUALITY_CHECK));
- }
-
-
- @Test
- public void verifyTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_7 = new BioProviderImpl_V_0_7();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.7");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceOne0_7");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_7.init(input);
- assertTrue(bioProviderImpl_V_0_7.verify(sample, record, BiometricType.FINGER, modalityParams));
- }
-
- @Test
- public void verifyIdentifyTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_7 = new BioProviderImpl_V_0_7();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.7");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceOne0_7");
- modalityParams.put("_METHOD_NAME","match");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_7.init(input);
- Map> gallery= new HashMap>();
- gallery.put("check", record);
- Mapresult= bioProviderImpl_V_0_7.identify(sample, gallery, BiometricType.FINGER, modalityParams);
- assertTrue(result.get("check"));
- }
-
- @Test
- public void verifyFalseTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_7 = new BioProviderImpl_V_0_7();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.7");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceTwo0_7");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_7.init(input);
- assertFalse(bioProviderImpl_V_0_7.verify(sample, record, BiometricType.FINGER, modalityParams));
- }
-
- @Test
- public void getSegmentQualityTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_7 = new BioProviderImpl_V_0_7();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.7");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceOne0_7");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_7.init(input);
- Map> gallery= new HashMap>();
- BIR[] smp = new BIR[sample.size()];
- smp=sample.toArray(smp);
- gallery.put("check", record);
- float[] result= bioProviderImpl_V_0_7.getSegmentQuality(smp,modalityParams);
- assertThat(result[0],is(90.0F));
- }
-
- @Test
- public void getModalityQualityTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_7 = new BioProviderImpl_V_0_7();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.7");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceOne0_7");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_7.init(input);
- Map> gallery= new HashMap>();
- BIR[] smp = new BIR[sample.size()];
- smp=sample.toArray(smp);
- gallery.put("check", record);
- Map result= bioProviderImpl_V_0_7.getModalityQuality(smp,modalityParams);
- assertThat(result.get(BiometricType.FINGER),is(90.0F));
- }
-
- @Test
- public void getSegmentQualityFalseTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_7 = new BioProviderImpl_V_0_7();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.7");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceTwo0_7");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_7.init(input);
- Map> gallery= new HashMap>();
- BIR[] smp = new BIR[sample.size()];
- smp=sample.toArray(smp);
- gallery.put("check", record);
- float[] result= bioProviderImpl_V_0_7.getSegmentQuality(smp,modalityParams);
- assertThat(result[0],is(0F));
- }
-
- @Test
- public void getModalityQualityFalseTest() throws Exception {
- iBioProviderApi bioProviderImpl_V_0_7 = new BioProviderImpl_V_0_7();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.7");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceTwo0_7");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_7.init(input);
- Map> gallery= new HashMap>();
- BIR[] smp = new BIR[sample.size()];
- smp=sample.toArray(smp);
- gallery.put("check", record);
- Map result= bioProviderImpl_V_0_7.getModalityQuality(smp,modalityParams);
- assertThat(result.get(BiometricType.FINGER),is(0F));
- }
-
- @Test
- public void extractTemplateTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_7 = new BioProviderImpl_V_0_7();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.7");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceOne0_7");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_7.init(input);
- Map> gallery= new HashMap>();
- gallery.put("check", record);
- List result= bioProviderImpl_V_0_7.extractTemplate(sample,modalityParams);
- assertThat(result.size(),is(sample.size()));
- }
-
- @Test
- public void extractTemplateFalseTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_7 = new BioProviderImpl_V_0_7();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.7");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceTwo0_7");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_7.init(input);
- Map> gallery= new HashMap>();
- gallery.put("check", record);
- List result= bioProviderImpl_V_0_7.extractTemplate(sample,modalityParams);
- assertTrue(result.stream().filter(x -> x!=null).count()==0);
- }
-
-
-}
\ No newline at end of file
diff --git a/kernel/kernel-biosdk-provider/src/test/java/io/mosip/kernel/biosdk/provider/test/BioProviderImpl_V_0_8Test.java b/kernel/kernel-biosdk-provider/src/test/java/io/mosip/kernel/biosdk/provider/test/BioProviderImpl_V_0_8Test.java
deleted file mode 100644
index 3d89ec41cff..00000000000
--- a/kernel/kernel-biosdk-provider/src/test/java/io/mosip/kernel/biosdk/provider/test/BioProviderImpl_V_0_8Test.java
+++ /dev/null
@@ -1,387 +0,0 @@
-package io.mosip.kernel.biosdk.provider.test;
-
-import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
-
-import java.time.LocalDateTime;
-import java.time.ZoneId;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import io.mosip.kernel.biometrics.constant.BiometricFunction;
-import io.mosip.kernel.biometrics.constant.BiometricType;
-import io.mosip.kernel.biometrics.constant.ProcessedLevelType;
-import io.mosip.kernel.biometrics.constant.PurposeType;
-import io.mosip.kernel.biometrics.constant.QualityType;
-import io.mosip.kernel.biometrics.entities.BDBInfo;
-import io.mosip.kernel.biometrics.entities.BIR;
-import io.mosip.kernel.biometrics.entities.BIRInfo;
-import io.mosip.kernel.biometrics.entities.RegistryIDType;
-import io.mosip.kernel.biometrics.entities.VersionType;
-import io.mosip.kernel.biometrics.model.SDKInfo;
-import io.mosip.kernel.biosdk.provider.impl.BioProviderImpl_V_0_8;
-import io.mosip.kernel.biosdk.provider.spi.iBioProviderApi;
-import io.mosip.kernel.biosdk.provider.util.ProviderConstants;
-import io.mosip.kernel.core.bioapi.exception.BiometricException;
-import io.mosip.kernel.core.cbeffutil.common.CbeffISOReader;
-import io.mosip.kernel.core.cbeffutil.exception.CbeffException;
-
-
-public class BioProviderImpl_V_0_8Test {
-
- private List record;
- private List updateList;
- private List sample;
- private String localpath = "./src/test/resources";
- byte[] rindexFinger = null;
- byte[] rmiddleFinger = null;
- byte[] rringFinger = null;
- byte[] rlittleFinger = null;
- byte[] rightthumb = null;
- byte[] lindexFinger = null;
- byte[] lmiddleFinger = null;
- byte[] lringFinger = null;
- byte[] llittleFinger = null;
- byte[] leftthumb = null;
-
-
-
- @Before
- public void setUp() throws Exception {
-
- rindexFinger = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintRight_Index.iso", "Finger");
- rmiddleFinger = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintRight_Middle.iso",
- "Finger");
- rringFinger = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintRight_Ring.iso",
- "Finger");
- rlittleFinger = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintRight_Little.iso",
- "Finger");
- rightthumb = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintRight_Thumb.iso",
- "Finger");
- lindexFinger = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintLeft_Index.iso",
- "Finger");
- lmiddleFinger = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintLeft_Middle.iso",
- "Finger");
- lringFinger = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintLeft_Ring.iso", "Finger");
- llittleFinger = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintLeft_Little.iso",
- "Finger");
- leftthumb = CbeffISOReader.readISOImage(localpath + "/images/" + "FingerPrintLeft_Thumb.iso", "Finger");
- // byte[] irisImg1 = CbeffISOReader.readISOImage(localpath + "/images/" +
- // "IrisImageRight.iso", "Iris");
- // byte[] irisImg2 = CbeffISOReader.readISOImage(localpath + "/images/" +
- // "IrisImageLeft.iso", "Iris");
- // byte[] faceImg = CbeffISOReader.readISOImage(localpath + "/images/" +
- // "faceImage.iso", "Face");
- RegistryIDType format = new RegistryIDType();
- format.setOrganization("257");
- format.setType("7");
- QualityType Qtype = new QualityType();
- Qtype.setScore(new Long(100));
- RegistryIDType algorithm = new RegistryIDType();
- algorithm.setOrganization("HMAC");
- algorithm.setType("SHA-256");
- Qtype.setAlgorithm(algorithm);
- record = new ArrayList<>();
- sample = new ArrayList<>();
- BIR rIndexFinger = new BIR.BIRBuilder().withBdb(rindexFinger).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Right IndexFinger"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(rIndexFinger);
-
- BIR rMiddleFinger = new BIR.BIRBuilder().withBdb(rmiddleFinger).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("MiddleFinger"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(rMiddleFinger);
-
- BIR rRingFinger = new BIR.BIRBuilder().withBdb(rringFinger).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Right RingFinger"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(rRingFinger);
-
- BIR rLittleFinger = new BIR.BIRBuilder().withBdb(rlittleFinger).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Right LittleFinger"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(rLittleFinger);
-
- BIR lIndexFinger = new BIR.BIRBuilder().withBdb(lindexFinger).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Left IndexFinger"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(lIndexFinger);
-
- BIR lMiddleFinger = new BIR.BIRBuilder().withBdb(lmiddleFinger).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Left MiddleFinger"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(lMiddleFinger);
-
- BIR lRightFinger = new BIR.BIRBuilder().withBdb(lringFinger).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Left RingFinger"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(lRightFinger);
-
- BIR lLittleFinger = new BIR.BIRBuilder().withBdb(llittleFinger).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Left LittleFinger"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(lLittleFinger);
-
- BIR rightThumb = new BIR.BIRBuilder().withBdb(rightthumb).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Right Thumb"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(rightThumb);
-
- BIR leftThumb = new BIR.BIRBuilder().withBdb(leftthumb).withVersion(new VersionType(1, 1))
- .withCbeffversion(new VersionType(1, 1))
- .withBirInfo(new BIRInfo.BIRInfoBuilder().withIntegrity(false).build())
- .withBdbInfo(new BDBInfo.BDBInfoBuilder().withFormat(format).withQuality(Qtype)
- .withType(Arrays.asList(BiometricType.FINGER)).withSubtype(Arrays.asList("Left Thumb"))
- .withPurpose(PurposeType.ENROLL).withLevel(ProcessedLevelType.RAW)
- .withCreationDate(LocalDateTime.now(ZoneId.of("UTC"))).build())
- .build();
-
- record.add(leftThumb);
- sample.addAll(record);
-
- SDKInfo sdkInfo = new SDKInfo("0.8", "1", "MOCKVendor2", "test2");
- sdkInfo.withSupportedMethod(BiometricFunction.MATCH, BiometricType.IRIS);
- sdkInfo.withSupportedMethod(BiometricFunction.EXTRACT, BiometricType.IRIS);
- sdkInfo.withSupportedMethod(BiometricFunction.QUALITY_CHECK, BiometricType.IRIS);
-
- sdkInfo.withSupportedMethod(BiometricFunction.MATCH, BiometricType.FACE);
- sdkInfo.withSupportedMethod(BiometricFunction.EXTRACT, BiometricType.FACE);
- sdkInfo.withSupportedMethod(BiometricFunction.QUALITY_CHECK, BiometricType.FACE);
- }
-
- @Test
- public void initTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_8 = new BioProviderImpl_V_0_8();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.8");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceOne0_8");
- input.put(BiometricType.FINGER, modalityParams);
- Map> map=bioProviderImpl_V_0_8.init(input);
- assertNotNull(map.get(BiometricType.FINGER));
- List biometricFunctions = map.get(BiometricType.FINGER);
- assertTrue(biometricFunctions.contains(BiometricFunction.MATCH));
- assertTrue(biometricFunctions.contains(BiometricFunction.EXTRACT));
- assertTrue(biometricFunctions.contains(BiometricFunction.QUALITY_CHECK));
- }
-
- @Test(expected = BiometricException.class)
- public void initBiometricExceptionTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_8 = new BioProviderImpl_V_0_8();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.8");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.SDKInstanceException");
- input.put(BiometricType.FINGER, modalityParams);
- Map> map=bioProviderImpl_V_0_8.init(input);
- assertNotNull(map.get(BiometricType.FINGER));
- List biometricFunctions = map.get(BiometricType.FINGER);
- assertTrue(biometricFunctions.contains(BiometricFunction.MATCH));
- assertTrue(biometricFunctions.contains(BiometricFunction.EXTRACT));
- assertTrue(biometricFunctions.contains(BiometricFunction.QUALITY_CHECK));
- }
-
-
- @Test
- public void verifyTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_8 = new BioProviderImpl_V_0_8();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.8");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceOne0_8");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_8.init(input);
- assertTrue(bioProviderImpl_V_0_8.verify(sample, record, BiometricType.FINGER, modalityParams));
- }
-
- @Test
- public void verifyIdentifyTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_8 = new BioProviderImpl_V_0_8();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.8");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceOne0_8");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_8.init(input);
- Map> gallery= new HashMap>();
- gallery.put("check", record);
- Mapresult= bioProviderImpl_V_0_8.identify(sample, gallery, BiometricType.FINGER, modalityParams);
- assertTrue(result.get("check"));
- }
-
- @Test
- public void verifyFalseTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_8 = new BioProviderImpl_V_0_8();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.8");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceTwo0_8");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_8.init(input);
- assertFalse(bioProviderImpl_V_0_8.verify(sample, record, BiometricType.FINGER, modalityParams));
- }
-
- @Test
- public void getSegmentQualityTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_8 = new BioProviderImpl_V_0_8();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.8");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceOne0_8");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_8.init(input);
- Map> gallery= new HashMap>();
- BIR[] smp = new BIR[sample.size()];
- smp=sample.toArray(smp);
- gallery.put("check", record);
- float[] result= bioProviderImpl_V_0_8.getSegmentQuality(smp,modalityParams);
- assertThat(result[0],is(90.0F));
- }
-
- @Test
- public void getModalityQualityTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_8 = new BioProviderImpl_V_0_8();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.8");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceOne0_8");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_8.init(input);
- Map> gallery= new HashMap>();
- BIR[] smp = new BIR[sample.size()];
- smp=sample.toArray(smp);
- gallery.put("check", record);
- Map result= bioProviderImpl_V_0_8.getModalityQuality(smp,modalityParams);
- assertThat(result.get(BiometricType.FINGER),is(90.0F));
- }
-
- @Test
- public void getSegmentQualityFalseTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_8 = new BioProviderImpl_V_0_8();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.8");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceTwo0_8");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_8.init(input);
- Map> gallery= new HashMap>();
- BIR[] smp = new BIR[sample.size()];
- smp=sample.toArray(smp);
- gallery.put("check", record);
- float[] result= bioProviderImpl_V_0_8.getSegmentQuality(smp,modalityParams);
- assertThat(result[0],is(0F));
- }
-
- @Test
- public void getModalityQualityFalseTest() throws Exception {
- iBioProviderApi bioProviderImpl_V_0_8 = new BioProviderImpl_V_0_8();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.8");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceTwo0_8");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_8.init(input);
- Map> gallery= new HashMap>();
- BIR[] smp = new BIR[sample.size()];
- smp=sample.toArray(smp);
- gallery.put("check", record);
- Map result= bioProviderImpl_V_0_8.getModalityQuality(smp,modalityParams);
- assertThat(result.get(BiometricType.FINGER),is(0F));
- }
-
- @Test
- public void extractTemplateTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_8 = new BioProviderImpl_V_0_8();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.8");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceOne0_8");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_8.init(input);
- Map> gallery= new HashMap>();
- gallery.put("check", record);
- List result= bioProviderImpl_V_0_8.extractTemplate(sample,modalityParams);
- assertThat(result.size(),is(sample.size()));
- }
-
- @Test
- public void extractTemplateFalseTest() throws CbeffException, BiometricException {
- iBioProviderApi bioProviderImpl_V_0_8 = new BioProviderImpl_V_0_8();
- Map> input = new HashMap<>();
- Map modalityParams = new HashMap<>();
- modalityParams.put(ProviderConstants.VERSION,"0.8");
- modalityParams.put(ProviderConstants.CLASSNAME,"io.mosip.kernel.biosdk.provider.test.dto.SDKInstanceTwo0_8");
- input.put(BiometricType.FINGER, modalityParams);
- bioProviderImpl_V_0_8.init(input);
- Map> gallery= new HashMap>();
- gallery.put("check", record);
- List