diff --git a/.env.sample b/.env.sample index e022ee24..02754bd6 100644 --- a/.env.sample +++ b/.env.sample @@ -1 +1,2 @@ ALGOLIA_API_KEY=213 +ALGOLIA_APP_ID=XYZ diff --git a/.github/workflows/deployment_new_aws_account.yml b/.github/workflows/deployment_new_aws_account.yml index 9b8fa4c3..1ca578ab 100644 --- a/.github/workflows/deployment_new_aws_account.yml +++ b/.github/workflows/deployment_new_aws_account.yml @@ -1,4 +1,4 @@ -name: Polygon ID Docs Deployment +name: Privado ID Docs Deployment on: push: @@ -8,6 +8,7 @@ on: env: AWS_REGION: ${{ secrets.AWS_DEFAULT_REGION }} + AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }} ECR_REPOSITORY: devs-ecr ECS_SERVICE: devs-ecs-service @@ -26,8 +27,6 @@ jobs: - name: Configure AWS credentials uses: aws-actions/configure-aws-credentials@v4 - env: - AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }} with: aws-region: ${{ env.AWS_REGION }} role-to-assume: arn:aws:iam::${{ env.AWS_ACCOUNT_ID }}:role/PolygonIDActionsRole @@ -36,26 +35,31 @@ jobs: - name: Login to Amazon ECR id: login-ecr uses: aws-actions/amazon-ecr-login@v1 - with: - mask-password: 'true' - name: Install dependencies run: rm -rf node_modules && npm ci - name: Create .env file - run: echo 'ALGOLIA_API_KEY=${{ secrets.ALGOLIA_API_KEY }}' >> .env + run: | + echo -e "ALGOLIA_API_KEY=${{ secrets.ALGOLIA_API_KEY }}" >> .env + echo -e "ALGOLIA_APP_ID=${{ secrets.ALGOLIA_APP_ID }}" >> .env + cat .env - # Runs a single command using the runners shell - name: Build run: npm run build + - name: Set ECR registry + run: echo "ECR_REGISTRY=${{ steps.login-ecr.outputs.registry }}" >> $GITHUB_ENV + - name: Build, tag, and push image to Amazon ECR id: build-image env: - ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} + ECR_REGISTRY: ${{ env.ECR_REGISTRY }} IMAGE_TAG: ${{ github.sha }} run: | - docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . + echo "Using ECR_REGISTRY=$ECR_REGISTRY" + echo "Using IMAGE_TAG=$IMAGE_TAG" + docker build --cache-from $ECR_REGISTRY/$ECR_REPOSITORY:latest -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . 
docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG echo "image=$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG" >> $GITHUB_OUTPUT diff --git a/.github/workflows/prod_deployment.yml b/.github/workflows/prod_deployment.yml deleted file mode 100644 index d8f79f91..00000000 --- a/.github/workflows/prod_deployment.yml +++ /dev/null @@ -1,82 +0,0 @@ -name: Polygon ID Docs Prod Deployment - -on: - push: - branches: - - main - -env: - AWS_REGION: eu-west-1 - ECR_REPOSITORY: devs-ecr - ECS_SERVICE: devs-ecs-service - ECS_CLUSTER: frontend-prod-ecs-cluster - ECS_TASK_DEFINITION: prod-taskdef.json - CONTAINER_NAME: devs - -jobs: - deploy_prod: - name: Prod Deployment - permissions: - id-token: write - contents: write - environment: prod - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-region: ${{ env.AWS_REGION }} - role-to-assume: arn:aws:iam::399679353009:role/devs-GithubActionsRole - role-session-name: GithubActionsSession - - - name: Login to Amazon ECR - id: login-ecr - uses: aws-actions/amazon-ecr-login@v1 - with: - mask-password: 'true' - - - name: Install dependencies - run: rm -rf node_modules && npm ci - - - name: Create .env file - run: echo 'ALGOLIA_API_KEY=${{ secrets.ALGOLIA_API_KEY }}' >> .env - - # Runs a single command using the runners shell - - name: Build - run: npm run build - - - name: Build, tag, and push image to Amazon ECR - id: build-image - env: - ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} - IMAGE_TAG: ${{ github.sha }} - run: | - docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . - docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG - echo "image=$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG" >> $GITHUB_OUTPUT - - - name: Fill in the new image ID in the Amazon ECS task definition - id: task-def - uses: aws-actions/amazon-ecs-render-task-definition@v1 - with: - task-definition: ${{ env.ECS_TASK_DEFINITION }} - container-name: ${{ env.CONTAINER_NAME }} - image: ${{ steps.build-image.outputs.image }} - - - name: Deploy Amazon ECS task definition - uses: aws-actions/amazon-ecs-deploy-task-definition@v1 - with: - task-definition: ${{ steps.task-def.outputs.task-definition }} - service: ${{ env.ECS_SERVICE }} - cluster: ${{ env.ECS_CLUSTER }} - wait-for-service-stability: true - - - name: Cloudflare Cache Purge - uses: nathanvaughn/actions-cloudflare-purge@master - with: - cf_zone: ${{ secrets.CLOUDFLARE_ZONE }} - cf_auth: ${{ secrets.CLOUDFLARE_AUTH_KEY }} - hosts: devs.polygonid.com diff --git a/.github/workflows/staging_deployment.yml b/.github/workflows/staging_deployment.yml deleted file mode 100644 index 8f1462d1..00000000 --- a/.github/workflows/staging_deployment.yml +++ /dev/null @@ -1,82 +0,0 @@ -name: Polygon ID Docs Staging Deployment - -on: - push: - branches: - - develop - -env: - AWS_REGION: eu-west-1 - ECR_REPOSITORY: devs-staging-ecr - ECS_SERVICE: devs-staging-ecs-service - ECS_CLUSTER: frontend-staging-ecs-cluster - ECS_TASK_DEFINITION: staging-taskdef.json - CONTAINER_NAME: devs-staging - -jobs: - deploy_staging: - name: Staging Deployment - permissions: - id-token: write - contents: write - environment: staging - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-region: ${{ env.AWS_REGION }} - role-to-assume: arn:aws:iam::021594655844:role/devs-staging-GithubActionsRole - role-session-name: 
GithubActionsSession - - - name: Login to Amazon ECR - id: login-ecr - uses: aws-actions/amazon-ecr-login@v1 - with: - mask-password: 'true' - - - name: Install dependencies - run: rm -rf node_modules && npm ci - - - name: Create .env file - run: echo 'ALGOLIA_API_KEY=${{ secrets.ALGOLIA_API_KEY }}' >> .env - - # Runs a single command using the runners shell - - name: Build - run: npm run build - - - name: Build, tag, and push image to Amazon ECR - id: build-image - env: - ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} - IMAGE_TAG: ${{ github.sha }} - run: | - docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . - docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG - echo "image=$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG" >> $GITHUB_OUTPUT - - - name: Fill in the new image ID in the Amazon ECS task definition - id: task-def - uses: aws-actions/amazon-ecs-render-task-definition@v1 - with: - task-definition: ${{ env.ECS_TASK_DEFINITION }} - container-name: ${{ env.CONTAINER_NAME }} - image: ${{ steps.build-image.outputs.image }} - - - name: Deploy Amazon ECS task definition - uses: aws-actions/amazon-ecs-deploy-task-definition@v1 - with: - task-definition: ${{ steps.task-def.outputs.task-definition }} - service: ${{ env.ECS_SERVICE }} - cluster: ${{ env.ECS_CLUSTER }} - wait-for-service-stability: true - - - name: Cloudflare Cache Purge - uses: nathanvaughn/actions-cloudflare-purge@master - with: - cf_zone: ${{ secrets.CLOUDFLARE_ZONE }} - cf_auth: ${{ secrets.CLOUDFLARE_AUTH_KEY }} - hosts: devs-staging.polygonid.com diff --git a/Dockerfile b/Dockerfile index 4fcf17eb..20e766c9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,4 +11,4 @@ COPY nginx.conf /etc/nginx/conf.d/default.conf EXPOSE 80 # Start NGINX -CMD ["nginx", "-g", "daemon off;"] +CMD ["nginx", "-g", "daemon off;"] \ No newline at end of file diff --git a/docs/quick-start-demo.md b/docs/quick-start-demo.md index 5b94d097..f43491b9 100644 --- a/docs/quick-start-demo.md +++ b/docs/quick-start-demo.md @@ -23,57 +23,41 @@ This guide will briefly touch on the 3 roles of the [Triangle of Trust](introduc These are the steps we will cover in this article: -1. [Set up a Polygon ID wallet](#set-up-a-polygon-id-wallet) +1. [Set up a Privado ID wallet](#set-up-a-privado-id-wallet) 2. [Issue a new credential to attest to the ID Holder's attendance to the event](#issue-a-new-credential-to-attest-to-the-id-holders-event-attendance) 3. [Fetch the newly created credential](#fetch-the-newly-created-credential) -4. [Verify the credential validity](#verify-the-id-holder-credential) +4. [Verify the ID holder credential](#verify-the-id-holder-credential) -## Set up a Polygon ID wallet - -As an Identity Holder, the individual who wants to have a credential to prove his age, for example, will need an application that can hold their credentials. In our case, we will be using the Polygon ID Wallet. - -:::note - -You can also use any Polygon ID compatible wallet. Please, check our [Ecosystem page](https://marketplace.polygonid.me/ecosystem) for other options. - -::: - -To get started with the Polygon ID Wallet, download the Polygon ID Wallet App and create an Identity: - -- For Android: Polygon ID on Google Play -- For iOS: Polygon ID on the App Store +## Set up a Privado ID wallet +To store verifiable credentials, the Identity Holder (the individual receiving the credential) needs a compatible wallet. Here, we’ll use the Privado ID Wallet, which supports both mobile and web-based access. 
:::note -Polygon ID wallet is an implementation of the Wallet SDK, as a way of showcasing its possibilities. Head to [the Wallet SDK documentation](./wallet/wallet-sdk/polygonid-sdk/polygonid-sdk-overview.md) to know more about how it works. +You can also use any Privado ID compatible wallet. Please, check our [Ecosystem page](https://marketplace.privado.id/ecosystem) for other options. ::: -The process from downloading to creating an identity on the Polygon ID Wallet is just as it is shown below. You need to download the app, create a wallet, set up a PIN number and the wallet is ready to be used. +To get started with the Privado ID Wallet, you can either visit the [Privado ID Web Wallet](https://wallet.privado.id/) in your browser or download the mobile app and create an Identity. -
- -
+- Web Browser: Privado ID Web Wallet +- For Android: Privado ID Wallet App on Google Play +- For iOS: Privado ID Wallet App on the App Store -:::caution - -This demo is using Polygon Amoy testnet. Go to the gear icon at the top right and ensure "Polygon Amoy network" is selected instead of "Polygon Main network". - -
- -
+:::note +- The Privado ID Web Wallet is a web based identity wallet. It is a reference implementation built using our [JS SDK](/docs/js-sdk/js-sdk-overview.md). Learn more about the Web Wallet [here](/docs/wallet/web-wallet.md). +- Privado ID Wallet App is an implementation of the [Wallet SDK](/docs/category/wallet-sdk), as a way of showcasing its possibilities. Learn more about the Wallet App [here](/docs/wallet/wallet-app/privadoid-app.md). ::: ## Issue a new credential to attest to the ID Holder's event attendance A trusted entity, for instance, a private institution will now play the role of an issuer. It will be responsible for creating the credential and sending it to the ID Holder. -We are using the Issuer Node UI testing environment to manage credentials. This is the place where the trusted entity can create credentials, manage schemas and generate connections. +We are using the Issuer Node UI Demo environment to manage credentials. This is the place where you as an issuer can can create and manage identities and credentials, generate connections and manage schemas. However, if you are using a new credential type, you actually need to create a schema for that credential, which basically is the set of JSON files that gather all the attributes of that specific credential. -To facilitate this issuance process, we have already created the credential schema whose URLs are the following: +To facilitate this issuance process, we have already created the credential schema with schema type POAP01 whose URLs are the following: - JSON schema URL `ipfs://QmTSwnuCB9grYMB2z5EKXDagfChurK5MiMCS6efrRbsyVX` @@ -88,15 +72,15 @@ To learn how to set up your own issuer environment by deploying an issuer node, :::info -The schema used in this demo was built using the Privado ID Schema Builder and is available on [the Privado ID Schema Explorer](https://schema-builder.polygonid.me/schemas/1fa99457-b2ae-4884-ae12-d658bd6abf69). Learn more about creating new schemas on [the Schema Builder UI guide](https://devs.polygonid.com/docs/issuer/schema-builder/). +The schema used in this demo was built using the Privado ID Schema Builder and is available on [the Privado ID Schema Explorer](https://tools.privado.id/schemas/1fa99457-b2ae-4884-ae12-d658bd6abf69). Learn more about creating new schemas on [the Schema Builder UI guide](/docs/issuer/schema-builder/). ::: -### Issue the credential +### Issue a new credential to attest to the ID Holder's attendance to the event With the new schema in hand, the issuer should now be able to generate a credential. -1. First, go to the the Issuer Node UI testing environment. +1. First, go to the the Issuer Node UI testing environment. :::warning @@ -118,88 +102,96 @@ With the new schema in hand, the issuer should now be able to generate a credent -4. After you click on **Create Credential Link**, you can also click on **View Link** on the next screen to check the generated QR code. +4. After you click on **Create Credential Link**, you'll be presented with a Universal Link and a Deep Link to the credential offer, along with a QR code configured for these links. The QR code can be scanned directly with the Privado ID Wallet app. -
+ :::info + + When the user interacts with the [Universal Link](/docs/wallet/universal-links.md), it will launch the Privado ID Web Wallet in the browser, or the Privado ID Wallet app on a mobile phone, and display the credential offer so that the user can claim the credential. Note that the deep link can only be handled by the mobile wallet app. + + ::: + +
-
+
## Fetch the newly created credential -Now we are back to the ID Holder role. They will use their mobile application to authenticate themselves by scanning the QR code generated by the issuer in the last step. +Now we are back to the ID Holder role. You can either use the Web Wallet on the browser or the Wallet App to accept the credential via the link or the QR code generated by the issuer in the last step. -
- -
+:::note +To sync the identity and its associated credentials between the Privado ID Web Wallet and the Privado ID Wallet App, the user must log in with the same crypto wallet account on both platforms. Once you claim a credential on one platform, it will be visible on the other. +::: -Connect to the issuer: +#### Web Wallet -
- -
+After clicking the Universal Link, you will be taken to the Web Wallet. Click **Sign In** and connect a crypto wallet such as MetaMask. Click **Add to my wallet**. This should add the credential to your wallet. -This will instantly trigger a notification on the mobile which will look like this:
- +
-Accept the credential: +Click on **Manage your credentials** to view and manage the credential in the Web Wallet.
- +
-The ID Holder successfully retrieved the credential and it is visible on the app: +#### Mobile Wallet App + +Alternatively, scan the QR code from the Wallet App and click **Sign In**. This should authenticate you and add the credential to the wallet.
- +
+ ## Verify the ID holder credential -Here comes the third role in this tutorial: the verifier. This could be represented by an organization that needs to check the accuracy of someone's credentials. In our use case, this organization wants to verify whether the ID holder actually attended our made-up Paris event. +Here comes the third role in this tutorial: the verifier. This could be represented by an organization that needs to verify some details of someone's credentials. In our use case, this organization wants to verify whether the ID holder actually attended our made-up Paris event. Here are the steps to verify the credential: -1. Visit the [Query builder website](https://schema-builder.polygonid.me/query-builder/). The [Query Builder](/docs/verifier/query-builder/) is an awesome tool designed to simplify the creation of verification queries. +1. Visit the [Query builder website](https://tools.privado.id/query-builder/). The [Query Builder](/docs/verifier/query-builder/) is a tool designed to simplify the creation of verification queries.
-2. You now need to define the query. You will again make use of the JSON-LD URL which we have also provided: `ipfs://QmdH1Vu79p2NcZLFbHxzJnLuUHJiMZnBeT7SNpLaqK7k9X`. Here is how the query should look like: +2. You now need to define the query. You will now make use of the JSON-LD URL which we have also provided: `ipfs://QmdH1Vu79p2NcZLFbHxzJnLuUHJiMZnBeT7SNpLaqK7k9X`. Here is how the query should look like:
-
-3. Click **Create Query**. Now select the Network as Polygon Amoy (testnet) +3. Click **Create Query**. Now click the **Test query** button, which will take you to the Privado ID Web Wallet.
- +
-4. After clicking on **Test query**, you should scan the resulting QR code and follow the instructions on the mobile app. +4. Click **Sign in** and connect your crypto wallet. As you have already claimed the credential, it shows as 'claimed'. Click **Verify**, after which the process of generating the proof starts.
- +
-5. Click on Approve. After which, the process of generating the proof starts: - +5. Finally, the proof is generated and sent to the verifier. The verifier will then check the revocation status and any additional information to validate the proof. You will receive the following response on the Query Builder website:
- +
-6. And finally, the proof is generated. The verifier will check the revocation status and some additional information. The proof is then sent and validated by the verifier. You will receive the following response on the Query builder website: +Alternatively, you can also use the Privado ID Wallet App to verify. +After Step 3, once you are redirected to the Web Wallet, click on **Continue via app**; +this should present a QR code. Open the Privado ID Wallet App and scan the QR code. +Click on **Verify**.
- +
-:::info -This was a quick demonstration of Privado ID's basic functionalities. However, Privado ID is far more complex than this. It offers a range of SSI-focused tools that allow for decentralized identity and verifiable credentials management. +

+ +This quick-start guide demonstrates Privado ID’s basic functionalities through a POAP use case, covering wallet setup, credential issuance, retrieval, and verification. While this is a simplified example, Privado ID provides a comprehensive suite of SSI tools for managing decentralized identity and verifiable credentials. + -::: diff --git a/docs/verifier/on-chain-verification/cross-chain.md b/docs/verifier/on-chain-verification/cross-chain.md new file mode 100644 index 00000000..f6e757d2 --- /dev/null +++ b/docs/verifier/on-chain-verification/cross-chain.md @@ -0,0 +1,212 @@ +--- +id: cross-chain +title: Cross Chain Verification +sidebar_label: Cross Chain Verification +description: Cross-chain verification tutorial. +keywords: + - docs + - privado id + - ID holder + - cross-chain + - verifier + - on-chain +--- + +## ZK Airdrop: A Secure Cross-Chain Token Distribution Tutorial + +In decentralized finance and other blockchain-based applications, airdrops are a popular method to distribute tokens. However, ensuring that only eligible users participate in the airdrop is crucial for maintaining the integrity and security of the process. Cross-chain verification using zero-knowledge (ZK) proofs offers a scalable and secure solution. This tutorial will guide you through implementing a secure airdrop using cross-chain verification, ensuring only users who meet specific criteria can claim tokens. + +### Challenges with Traditional On-Chain Verification: + +- **Limited Scope**: Traditional on-chain verification methods typically require that all involved components such as user credentials, verification logic, and identity data exist on the same blockchain. This limits interoperability and restricts user participation in a single blockchain ecosystem. +- **Fragmented Identity Management**: With the rise of multiple blockchain networks (e.g., Ethereum, Polygon, Binance Smart Chain), users often have identities and credentials spread across different chains. Managing and verifying these identities on separate chains becomes complex and inefficient. +- **Scalability and Flexibility**: As decentralized applications (dApps) scale and operate across multiple chains, they require a verification mechanism that can handle credentials and verification requests from various sources without compromising security or user privacy. + +### Benefits of Cross-Chain Verification: + +- **Interoperability**: Cross-chain verification allows users to prove their credentials across different blockchains seamlessly. This means a user with credentials on one chain can participate in activities (like airdrops) on another chain without the need to transfer or duplicate their identity data. +- **Scalability**: By enabling verification across multiple chains, applications can scale more efficiently, accommodating users and issuers from diverse blockchain environments. This opens up broader participation and adoption of decentralized services. +- **Streamlined User Experience**: Users can participate in various blockchain-based activities without having to manage multiple identities or credentials. Cross-chain verification provides a unified approach, simplifying the interaction with decentralized services. + +## Step-by-Step Implementation + +Imagine a scenario where a new "XYZ Protocol" is launching and wants to distribute tokens to users via an airdrop. 
However, to ensure the quality and compliance of the distribution, XYZ Protocol wants to restrict participation to users who have Proof of Humanity (PoH) Credential to restrict bots from participating in the airdrop. This airdrop will be conducted using cross-chain verification, allowing users from different blockchain networks to participate securely. + +### Key Requirements for the Airdrop: + +- **PoH Credential Verification**: Users must possess Proof Of Human(PoH) credentials. +- **Cross-Chain Participation**: Users can prove their eligibility from different chains, such as Ethereum, Polygon, etc. +- **Privacy Preservation**: Use zero-knowledge proofs to maintain user privacy while verifying credentials. + +### Components Involved: + +- **User Chain**: The chain where users hold their identity and credentials. +- **Issuer Chain**: The chain where issuers manage and verify credential validity. +- **Verification Chain**: The chain where the airdrop verification occurs. +- **Universal Verifier**: A smart contract on verification chain that handles the verification of ZK proofs and the issuance of airdrop tokens. + +### General Flow of Cross-Chain Verification: + +To implement on-chain verification user should collect User and Issuer state data from corresponding chains and submit it to the Universal Verifier smart contract on the verification chain. However, the verifier contract can't check the integrity of the states from other chains. So we need a trusted service to sign the States, which is centralised Universal Resolver at the moment but will be substituted by decentralised signer services in the future. + +![Crosschain Verification Flow](../../../static/img/onchain-verifier/crosschain.png) + +The general flow of cross-chain verification involves the following steps: + +1. **Obtain PoH Verifiable Credential (VC)**: The user obtains a Verifiable Credential proving their Proof of Humanity. +2. **Get Signed User and Issuer State**: The user gets a signed User State (Global State / GIST Root) and Issuer State from a trusted Resolver. +3. **Generate ZK Proof**: The user generates a Zero-Knowledge proof based on their VC and signed User State. +4. **Submit ZK Proof**: The user submits the ZK proof and signed data to the Universal Verifier in the verification chain. The new `submitZKPResponseV2` method is used to submit the proof along with the cross-chain data. +5. **Trigger User Action**: When the user action triggers (e.g., minting tokens), the Logic Smart Contract (here, Airdrop Smart Contract) checks the Universal Verifier Smart Contract for the proof status and issues tokens if the criteria are met. + +### Step 1: Setting Up the Query Request + +The first step in using XYZ Protocol is to set up an airdrop request that specifies the verification criteria. This criteria can be customized to fit your needs, but for this example, we'll demonstrate how to check if a user has a Proof Of Humanity (PoH) credential and verify their isHuman value. + +**NOTE:** Schema URLs used for this tutorial: +- `"jsonLdContext": "https://ilvcs.github.io/JsonHosting/poh-context.json"` +- `"jsonSchema": "https://ilvcs.github.io/JsonHosting/poh-json-schema.json"` + +For setting up the ZKP request, please visit Set ZKP Request section: +[Set ZKP Request](/docs/verifier/on-chain-verification/set-zkp-request.md) +:::note +This request can be reused for all users for a specific query. +::: + +### Step 2: Creating Secure Airdrop System + +#### A. 
Implementing Airdrop Smart Contract: + +Now we need to create an Airdrop smart contract that can check if the user has already presented proofs to the Universal Verifier (and has been verified). If so, mint tokens for the user. + +Create a Hardhat project and ensure you add the Amoy testnet JSON RPC URL and a private key with Amoy tokens. Update the `hardhat.config.js` file accordingly. Then, create the `ZKAirdropVerifier.sol` smart contract in the `contracts` folder using the code provided below. + +```solidity +// SPDX-License-Identifier: MIT +pragma solidity 0.8.20; + +import {ERC20} from '@openzeppelin/contracts/token/ERC20/ERC20.sol'; +import {PrimitiveTypeUtils} from '@iden3/contracts/lib/PrimitiveTypeUtils.sol'; +import {ICircuitValidator} from '@iden3/contracts/interfaces/ICircuitValidator.sol'; +import {UniversalVerifier} from '@iden3/contracts/verifiers/UniversalVerifier.sol'; + +contract ZKAirdropVerifier is ERC20 { + uint64 public constant REQUEST_ID = 12345; // replace with your own requestID + + UniversalVerifier public verifier; + + uint256 public TOKEN_AMOUNT_FOR_AIRDROP_PER_ID = 5 * 10 ** uint256(decimals()); + + mapping(address => bool) public isClaimed; + + modifier beforeTokenTransfer(address to) { + // only one airdrop per address is allowed + require( + !isClaimed[to], + 'only one airdrop per address is allowed' + ); + + require( + verifier.getProofStatus(to, REQUEST_ID).isVerified , + 'only identities who provided sig or mtp proof for transfer requests are allowed to receive tokens' + ); + _; + } + + constructor( + UniversalVerifier verifier_, + string memory name_, + string memory symbol_ + ) ERC20(name_, symbol_) { + verifier = verifier_; + } + + function mint() public { + + require(msg.sender == tx.origin, 'only EOA can mint'); + require(msg.sender != address(0), 'invalid address'); + _mint(msg.sender, TOKEN_AMOUNT_FOR_AIRDROP_PER_ID); + // mark the address as claimed + isClaimed[msg.sender] = true; + } + + function _update( + address from, + address to, + uint256 value + ) internal override beforeTokenTransfer(to) { + super._update(from, to, value); + } +} +``` + +You can see that the smart contract's construction requires the address of the deployed Universal Verifier smart contract. + +Make sure to add the request ID used to set the `ZKPRequest` for the `uint64 public constant REQUEST_ID`. + +The `beforeTokenTransfer` modifier checks the Universal Verifier smart contract to see if the user has been verified and if they have already claimed the airdrop. + +We override the `_update` function to use the `beforeTokenTransfer` modifier, ensuring that every time a user calls the `mint` function, it will trigger this check. The contract verifies that the user has submitted the necessary proofs and is verified before allowing token minting. Once the tokens are minted, the `mint` function sets the `isClaimed` status to `true` to prevent users from reclaiming the airdrop. + +In summary, when a user calls the `mint` function, the smart contract checks if the user has been verified (submitted proofs and got verified by Universal Verifier). If verified, it mints 5 tokens for the user. + +**Note:** You can observe that you don't need to implement the Universal Verifier smart contract yourself. Instead, you only need the address of the deployed Universal Verifier smart contract to use in your custom smart contract implementation. + +#### B. 
Deploy the Smart Contract: + +```javascript +const { ethers } = require("hardhat"); + +async function main() { + const universalVerifierAddress = "0xfcc86A79fCb057A8e55C6B853dff9479C3cf607c"; + const verifierName = "ZKAirdropVerifier"; + const verifierSymbol = "zkERC20"; + + const verifier = await ethers.deployContract(verifierName, [ + universalVerifierAddress, + verifierName, + verifierSymbol, + ]); + await verifier.waitForDeployment(); + console.log(verifierName, " contract address:", await verifier.getAddress()); +} + +main() + .then(() => process.exit(0)) + .catch((error) => { + console.error(error); + process.exit(1); + }); +``` +deploy the smart contracts using the following command: + +`npx hardhat run scripts/deploy.js --network amoy` + + Ensure that the smart contract address is saved to interact with the DApp. +### Step 3: User Claiming The Airdrop + +#### A. Obtaining and Signing the Verifiable Credential + +Users need to obtain a `POH Credential`, which proves their Proof of Humanity (PoH). This credential can be issued by a trusted identity provider on the Issuer Chain. + +On the Privado ID issuer node, credentials will be issued using the Privado chain, which acts like a base layer for issuing identities. + +**Note:** Follow this tutorial to learn how to get a Proof of Humanity (PoH) credential from the Privado ID demo issuer. + +#### B. User Submitting Proof to Verification Chain + +The user submits the ZK proof and signed data to the Universal Verifier smart contract on the verification chain. For this example, we are using Polygon Amoy to submit proof. + +The Universal Verifier smart contract verifies the ZK proof and checks the signed GIST Root and issuer state data. + +Once the user submits the proof and gets verified by the Universal Verifier, they can claim tokens by calling the `mint` function in the `ZKAirdropVerifier` smart contract. + + +#### C. Claiming Airdrop + +Once the user submits the proof and gets verified by the Universal Verifier, they can claim tokens by calling the `mint` function in the `ZKAirdropVerifier` smart contract. + +:::note + Use the same Ethereum wallet account that was used to submit proofs for claiming tokens. Ideally, protocol developers should create a frontend that connects with the `ZKAirdropVerifier` smart contract. +::: + diff --git a/docs/verifier/on-chain-verification/overview.md b/docs/verifier/on-chain-verification/overview.md index bdaa7b64..9ecb68d9 100644 --- a/docs/verifier/on-chain-verification/overview.md +++ b/docs/verifier/on-chain-verification/overview.md @@ -1,8 +1,8 @@ --- id: overview title: On-chain verification -sidebar_label: On-chain verification -description: On-chain verification tutorials. +sidebar_label: Overview +description: On-chain verification overview. keywords: - docs - privado id @@ -46,7 +46,7 @@ On the other hand, `UniversalVerifier` is deployed as a standalone contract and 1. The Request is generated at verifier backend and delivered to a user within a QR code (or via deep-linking, depending on the implementation). 1. The user scans the QR code using his/her mobile ID wallet and parses the request. 1. A ZK proof is generated on mobile or web wallet according to the request of the website and based on the credentials held in his/her wallet. -1. The user sends the ZK proof to the Verifier Smart Contract via `submitZKPResponse` method. +1. The user sends the ZK proof to the Verifier Smart Contract via `submitZKPResponse` or `submitZKPResponseV2` method. 1. 
The Verifier Smart Contract verifies the ZK Proof. 1. The Verifier Smart Contract checks that the State of the Issuer of the credential and the State of the user are still valid and have not been revoked. 1. If the verification is successful, the proof status is recorded on-chain. @@ -56,7 +56,7 @@ Note that the Verifier only sets the Request at step 1. All the rest of the inte ### Universal ZKP Verifier -The beginning of the flow up to submitting Proof Response is similar to that of Embedded ZKP Verifier. The difference is that you should call `setZKPRequest` and `submitZKPResponse` in `UniversalVerifier` but not in client custom contract. +The beginning of the flow up to submitting Proof Response is similar to that of Embedded ZKP Verifier. The difference is that you should call `setZKPRequest` and `submitZKPResponse` or `submitZKPResponseV2` in `UniversalVerifier` but not in client custom contract. Once proof response is submitted, any client custom logic should be executed via a separate transaction invoked on client contract directly. The custom logic may refer to `UniversalVerifier` contract to check for user verification. @@ -64,654 +64,3 @@ Once proof response is submitted, any client custom logic should be executed via -## Implement an ERC20 ZK Airdrop - -In this tutorial, we will create an ERC20 ZK Airdrop Contract. The chosen query criterium is to be born before `01/01/2002`. Users that can prove that they were born before that date will be able to get the airdrop. Otherwise, they will not. The proof submitted to the Smart Contract will not reveal any information about the specific date of birth of the user as we are using zero knowledge. - -:::note - -To set up a different query check out the [ZK Query Language section](/docs/verifier/verification-library/zk-query-language.md). - -::: - -This tutorial is based on the verification of a Credential of Type `KYCAgeCredential` with an attribute `birthday` with a Schema URL `https://raw.githubusercontent.com/iden3/claim-schema-vocab/main/schemas/json-ld/kyc-v3.json-ld`. - -The prerequisite is that users have the [Polygon ID Wallet app](/docs/wallet/wallet-overview.md) installed and self-issued a Credential of type `KYC Age Credential Merklized` using our [Demo Issuer](https://issuer-demo.polygonid.me/) - -:::note - -Some executable code related to this tutorial is in this repository. - -::: - - -## Design the ERC20 zk Airdrop Contract with ZK-proof verification - -### Embedded ZKPVerifier Smart Contract -This is an abstract smart contract, which implements the logic of verifying ZK Proofs and saving the verification result. It is designed to be inherited by another smart contract with own business logic, which may consume proof verification functionality. - -The contract is designed to work with different ZK Validator contracts and different proof requests, both or which are set by the contract owner. - -### Universal Verifier Smart Contract -This smart contract implements the same functionality as `EmbeddedZKPVerifier` Smart Contract, however it is not an abstract but a standalone contract. - -The `UniversalVerifier` is designed to be used by multiple external contracts. Not only a `UniversalVerifier` owner but actually any address can set a `ZKPRequest` in `UniversalVerifier`. The only restriction for the proof request at the moment is that it should use a ZK Validator, which is whitelisted. The whitelisting is managed by the contract owner. 
- -### Let us jump into the code by writing the ERC20Verifier contract in each of the two ways. - -#### Inheriting EmbeddedZKPVerifier abstract smart contract - -We'll create a `ERC20Verifier`, which is an ERC20 standard contract. The extra functionality is given by the zero-knowledge proof verification. All the functions dedicated to the ZK verification are contained inside the `EmbeddedZKPVerifier` Contract and inherited within the `ERC20Verifier`. For example, users will submit their proof to claim the airdrop by calling `submitZKPResponse`. - -The `ERC20Verifier` contract must define at least a single `TRANSFER_REQUEST_ID`. This is the Identifier of the request that the contract is making to the user. - -The `EmbeddedZKPVerifier` Contract provides 2 hooks: `_beforeProofSubmit` and `_afterProofSubmit`. - -These hooks are called before and after any proof gets submitted and can be used to create personalized logic inside your Smart Contract. In this specific case, it must be checked that the sender of the proof matches the address contained in the proof challenge. This requirement is necessary to prevent proof front-running. This condition is added inside `_beforeProofSubmit`. - -In this specific example, the airdrop token minting is inside `_afterProofSubmit`, which is executed if the proof is correctly verified. Of course, the airdrop logic can be personalized according to the needs of the project. As another option, you may mint tokens to a user via a separate `mint` function call if the user address was verified before. - -Finally, we will add another element of security inside the Smart Contract: prevent any type of token transfer unless there is a proof verification from a destination address. This last condition is added by overriding the ERC20 `_update` function and checking that the receiver address `to` of the transfer is included inside the `proofs` mapping. 
- -```solidity -// SPDX-License-Identifier: MIT -pragma solidity 0.8.20; - -import {ERC20Upgradeable} from '@openzeppelin/contracts-upgradeable/token/ERC20/ERC20Upgradeable.sol'; -import {PrimitiveTypeUtils} from '@iden3/contracts/lib/PrimitiveTypeUtils.sol'; -import {ICircuitValidator} from '@iden3/contracts/interfaces/ICircuitValidator.sol'; -import {EmbeddedZKPVerifier} from '@iden3/contracts/verifiers/EmbeddedZKPVerifier.sol'; - -contract ERC20Verifier is ERC20Upgradeable, EmbeddedZKPVerifier { - uint64 public constant TRANSFER_REQUEST_ID_SIG_VALIDATOR = 1; - uint64 public constant TRANSFER_REQUEST_ID_MTP_VALIDATOR = 2; - - /// @custom:storage-location erc7201:polygonid.storage.ERC20Verifier - struct ERC20VerifierStorage { - mapping(uint256 => address) idToAddress; - mapping(address => uint256) addressToId; - uint256 TOKEN_AMOUNT_FOR_AIRDROP_PER_ID; - } - - // keccak256(abi.encode(uint256(keccak256("polygonid.storage.ERC20Verifier")) - 1)) & ~bytes32(uint256(0xff)) - bytes32 private constant ERC20VerifierStorageLocation = - 0x3b1c3bd751d9cd42a3739426a271cdc235017946663d56eeaf827d70f8b77000; - - function _getERC20VerifierStorage() private pure returns (ERC20VerifierStorage storage $) { - assembly { - $.slot := ERC20VerifierStorageLocation - } - } - - modifier beforeTransfer(address to) { - require( - isProofVerified(to, TRANSFER_REQUEST_ID_SIG_VALIDATOR) || - isProofVerified(to, TRANSFER_REQUEST_ID_MTP_VALIDATOR), - 'only identities who provided sig or mtp proof for transfer requests are allowed to receive tokens' - ); - _; - } - - function initialize(string memory name, string memory symbol) public initializer { - ERC20VerifierStorage storage $ = _getERC20VerifierStorage(); - super.__ERC20_init(name, symbol); - super.__EmbeddedZKPVerifier_init(_msgSender()); - $.TOKEN_AMOUNT_FOR_AIRDROP_PER_ID = 5 * 10 ** uint256(decimals()); - } - - function _beforeProofSubmit( - uint64 /* requestId */, - uint256[] memory inputs, - ICircuitValidator validator - ) internal view override { - // check that challenge input is address of sender - address addr = PrimitiveTypeUtils.uint256LEToAddress( - inputs[validator.inputIndexOf('challenge')] - ); - // this is linking between msg.sender and - require(_msgSender() == addr, 'address in proof is not a sender address'); - } - - function _afterProofSubmit( - uint64 requestId, - uint256[] memory inputs, - ICircuitValidator validator - ) internal override { - ERC20VerifierStorage storage $ = _getERC20VerifierStorage(); - if ( - requestId == TRANSFER_REQUEST_ID_SIG_VALIDATOR || - requestId == TRANSFER_REQUEST_ID_MTP_VALIDATOR - ) { - // if proof is given for transfer request id ( mtp or sig ) and it's a first time we mint tokens to sender - uint256 id = inputs[1]; - if ($.idToAddress[id] == address(0) && $.addressToId[_msgSender()] == 0) { - super._mint(_msgSender(), $.TOKEN_AMOUNT_FOR_AIRDROP_PER_ID); - $.addressToId[_msgSender()] = id; - $.idToAddress[id] = _msgSender(); - } - } - } - - function _update( - address from /* from */, - address to, - uint256 amount /* amount */ - ) internal override beforeTransfer(to) { - super._update(from, to, amount); - } - - function getIdByAddress(address addr) public view returns (uint256) { - return _getERC20VerifierStorage().addressToId[addr]; - } - - function getAddressById(uint256 id) public view returns (address) { - return _getERC20VerifierStorage().idToAddress[id]; - } - - function getTokenAmountForAirdropPerId() public view returns (uint256) { - return _getERC20VerifierStorage().TOKEN_AMOUNT_FOR_AIRDROP_PER_ID; - } 
-} -``` - -#### Using Universal Verifier Smart Contract - -Unlike, the previous example, the `ERC20LinkedUniversalVerifier` contract does not inherit the `EmbeddedZKPVerifier` contract. Instead, it uses the `UniversalVerifier` contract to check the proof result. - -Unlike `ERC20Verifier` the `ERC20LinkedUniversalVerifier` does not need to implement the `_beforeProofSubmit` and `_afterProofSubmit` hooks as proof verification is assumed to be done directly to the `UniversalVerifier` contract by some other transaction. - -In the same way the `ERC20LinkedUniversalVerifier` contract must define at least one `TRANSFER_REQUEST_ID` to get proof statuses for this request id from the `UniversalVerifier`. - -In this example, you may mint tokens to a user via the `mint` function call. - -Any token transfers are prevented inside `beforeTokenTransfer` modifier (which is invoked via `mint -> _mint -> update` call chain) unless there is already verification proof in UniversalVerifier, which corresponds to `msg.sender` address. - -```solidity -// SPDX-License-Identifier: MIT -pragma solidity 0.8.20; - -import {ERC20} from '@openzeppelin/contracts/token/ERC20/ERC20.sol'; -import {PrimitiveTypeUtils} from '@iden3/contracts/lib/PrimitiveTypeUtils.sol'; -import {ICircuitValidator} from '@iden3/contracts/interfaces/ICircuitValidator.sol'; -import {EmbeddedZKPVerifier} from '@iden3/contracts/verifiers/EmbeddedZKPVerifier.sol'; -import {UniversalVerifier} from '@iden3/contracts/verifiers/UniversalVerifier.sol'; - -contract ERC20LinkedUniversalVerifier is ERC20 { - uint64 public constant TRANSFER_REQUEST_ID_SIG_VALIDATOR = 0; - uint64 public constant TRANSFER_REQUEST_ID_MTP_VALIDATOR = 1; - - UniversalVerifier public verifier; - - uint256 public TOKEN_AMOUNT_FOR_AIRDROP_PER_ID = 5 * 10 ** uint256(decimals()); - - modifier beforeTokenTransfer(address to) { - require( - verifier.getProofStatus(to, TRANSFER_REQUEST_ID_SIG_VALIDATOR).isVerified || - verifier.getProofStatus(to, TRANSFER_REQUEST_ID_MTP_VALIDATOR).isVerified, - 'only identities who provided sig or mtp proof for transfer requests are allowed to receive tokens' - ); - _; - } - - constructor( - UniversalVerifier verifier_, - string memory name_, - string memory symbol_ - ) ERC20(name_, symbol_) { - verifier = verifier_; - } - - function mint(address to) public { - _mint(to, TOKEN_AMOUNT_FOR_AIRDROP_PER_ID); - } - - function _update( - address from, - address to, - uint256 value - ) internal override beforeTokenTransfer(to) { - super._update(from, to, value); - } -} -``` - -### Deploy the Contract - -:::note "Hardhat" - -For this tutorial, we are using the Hardhat development environment to facilitate the contract deployment. You can learn how to get started with this tool by checking [their documentation](https://hardhat.org/hardhat-runner/docs/getting-started). - -::: - -#### Deploy your custom contract inherited from EmbeddedZKPVerifier - -Execute this Hardhat script to deploy either `ERC20Verifier`. Change the `verifierContract` variable to the desired contract name. 
- - -```js -import { ethers } from "hardhat"; -import { upgrades } from "hardhat"; - - -async function main() { - const verifierContract = "ERC20Verifier"; - const verifierName = "ERC20zkAirdrop"; - const verifierSymbol = "zkERC20"; - - const ERC20Verifier = await ethers.getContractFactory(verifierContract); - const erc20Verifier = await upgrades.deployProxy( - ERC20Verifier, - [verifierName, verifierSymbol] - ); - - await erc20Verifier.waitForDeployment(); - console.log(verifierName, " contract address:", await erc20Verifier.getAddress()); -} - -main() - .then(() => process.exit(0)) - .catch((error) => { - console.error(error); - process.exit(1); - }); -``` - -#### Deploy your custom contract linked to Universal Verifier smart contract - -```js -import { ethers } from 'hardhat'; - -async function main() { - const universalVerifierAddress = ''; - const verifierName = 'ERC20LinkedUniversalVerifier'; - const verifierSymbol = 'zkERC20'; - - const verifier = await ethers.deployContract( - verifierName, - [ universalVerifierAddress, verifierName, verifierSymbol ] - ); - await verifier.waitForDeployment(); - console.log(verifierName, ' contract address:', await verifier.getAddress()); -} - -main() - .then(() => process.exit(0)) - .catch((error) => { - console.error(error); - process.exit(1); - }); -``` - -:::note - -The contract ERC20Verifier preferably to be deployed on the Amoi test network as there is a set of supporting validator contracts. - -::: - -### Set the ZKP Request -Note: the following works the same way for both `ERC20Verifier` and `ERC20LinkedUniversalVerifier` contracts with some differences mentioned below. - -The actual ZKP request "to be born before 01/01/2002" hasn't been added to the Smart Contract yet. To do so it is necessary to call either `setZKPRequest` function inherited inside the ERC20Verifier or `setZKPRequest` function of the Universal Verifier contract. - -The request takes the following parameters: - -1. `requestId`: the ID associated with the request. -2. `request`: A struct with the following fields: - 1. `metadata`: contract invoke request. - 2. `validator` the address of the Validators Smart Contract already deployed. This is the contract that executes the verification on the ZK proof submitted by the user. It can be of type [CredentialAtomicQuerySigValidator](/docs/smart-contracts.md#credentialatomicquerysigvalidator) or [CredentialAtomicQueryMTPValidator](/docs/smart-contracts.md#credentialatomicquerymtpvalidator). - 3. `data` encoded bytes of CredentialAtomicQuery struct. - -CredentialAtomicQuery struct contains 10 fields: - -1. `schema` namely the bigInt representation of the schema of the requested credential. This can be obtained by passing your schema to this [Go Sandbox](https://go.dev/play/p/oB_oOW7kBEw). In order to use the sandbox, the constants `jsonLDContext`, `typ`, `fieldName` and `schemaJSONLD` need to be modified according to your request. -2. `claimPathKey` represents the path to the queries key inside the merklized credential. In this case, it is the path to the `birthday` key. This can be obtained by passing your schema to this [Go Sandbox](https://go.dev/play/p/oB_oOW7kBEw). In order to use the sandbox, the constants `jsonLDContext`, `typ`, `fieldName` and `schemaJSONLD` need to be modified according to your request. -3. `operator` is either 1,2,3,4,5,6. To understand more about the operator you can check the [zk query language](/docs/verifier/verification-library/zk-query-language.md). -4. 
`slotIndex` represents specific position for data in claim. -5. `value` represents the threshold value you are querying. In this case, it is the date 01/01/2002. -6. `queryHash` is the poseidon hash of `schemaHash`, `slotIndex`, `operator`, `claimPathKey`, `claimPathNotExists`, `valueHash`. Used for gas consumption optimization. -7. `allowedIssuers` represents the allowed issuers of the credential. -8. `circuitIds` is an array of circuit IDs (['credentialAtomicQuerySigV2OnChain'] or ['credentialAtomicQueryMTPV2OnChain']). -9. `skipClaimRevocationCheck` checks whether the credential revocation will be checked during the proof generation. -10. `claimPathNotExists`: 0 or 1; 0 for inclusion in merklized credentials, 1 for non-inclusion and for non-merklized credentials. - -To encode these fields to structure, use this function: - -```js -const { Web3 } = require("web3"); - -function packValidatorParams(query, allowedIssuers = []) { - let web3 = new Web3(Web3.givenProvider || "ws://localhost:8545"); - return web3.eth.abi.encodeParameter( - { - CredentialAtomicQuery: { - schema: "uint256", - claimPathKey: "uint256", - operator: "uint256", - slotIndex: "uint256", - value: "uint256[]", - queryHash: "uint256", - allowedIssuers: "uint256[]", - circuitIds: "string[]", - skipClaimRevocationCheck: "bool", - claimPathNotExists: "uint256", - }, - }, - { - schema: query.schema, - claimPathKey: query.claimPathKey, - operator: query.operator, - slotIndex: query.slotIndex, - value: query.value, - queryHash: query.queryHash, - allowedIssuers: allowedIssuers, - circuitIds: query.circuitIds, - skipClaimRevocationCheck: query.skipClaimRevocationCheck, - claimPathNotExists: query.claimPathNotExists, - } - ); -} -``` - -Calculate query hash: - -```js -const { poseidon } = require("@iden3/js-crypto"); -const { SchemaHash } = require("@iden3/js-iden3-core"); -const { prepareCircuitArrayValues } = require("@0xpolygonid/js-sdk"); - -function calculateQueryHash(values, schema, slotIndex, operator, claimPathKey, claimPathNotExists) { - const expValue = prepareCircuitArrayValues(values, 64); - const valueHash = poseidon.spongeHashX(expValue, 6); - const schemaHash = coreSchemaFromStr(schema); - const quaryHash = poseidon.hash([ - schemaHash.bigInt(), - BigInt(slotIndex), - BigInt(operator), - BigInt(claimPathKey), - BigInt(claimPathNotExists), - valueHash, - ]); - return quaryHash; -} - -function coreSchemaFromStr(schemaIntString) { - const schemaInt = BigInt(schemaIntString); - return SchemaHash.newSchemaHashFromInt(schemaInt); -} -``` - -:::info - -Check out our [Smart Contract section](/docs/smart-contracts.md) to learn more about the set of verifications executed on the zk proof. 
- -::: - -Execute this Hardhat script to set the ZK request to the Smart Contract: - -```js -import hre from "hardhat"; -import Web3 from "web3"; -import { poseidon } from "@iden3/js-crypto"; -import { SchemaHash } from "@iden3/js-iden3-core"; -import { prepareCircuitArrayValues } from "@0xpolygonid/js-sdk"; - -// Put your values here -const ERC20_VERIFIER_ADDRESS = "0x610178dA211FEF7D417bC0e6FeD39F05609AD788"; -const VALIDATOR_ADDRESS = "0x0165878A594ca255338adfa4d48449f69242Eb8F"; -const UNIVERSAL_VERIFIER_ADDRESS = "0x59b670e9fA9D0A427751Af201D676719a970857b"; - -const Operators = { - NOOP: 0, // No operation, skip query verification in circuit - EQ: 1, // equal - LT: 2, // less than - GT: 3, // greater than - IN: 4, // in - NIN: 5, // not in - NE: 6, // not equal -}; - -function packValidatorParams(query, allowedIssuers = []) { - let web3 = new Web3(Web3.givenProvider || "ws://localhost:8545"); - return web3.eth.abi.encodeParameter( - { - CredentialAtomicQuery: { - schema: "uint256", - claimPathKey: "uint256", - operator: "uint256", - slotIndex: "uint256", - value: "uint256[]", - queryHash: "uint256", - allowedIssuers: "uint256[]", - circuitIds: "string[]", - skipClaimRevocationCheck: "bool", - claimPathNotExists: "uint256", - }, - }, - { - schema: query.schema, - claimPathKey: query.claimPathKey, - operator: query.operator, - slotIndex: query.slotIndex, - value: query.value, - queryHash: query.queryHash, - allowedIssuers: allowedIssuers, - circuitIds: query.circuitIds, - skipClaimRevocationCheck: query.skipClaimRevocationCheck, - claimPathNotExists: query.claimPathNotExists, - } - ); -} - -function coreSchemaFromStr(schemaIntString) { - const schemaInt = BigInt(schemaIntString); - return SchemaHash.newSchemaHashFromInt(schemaInt); -} - -function calculateQueryHashV2(values, schema, slotIndex, operator, claimPathKey, claimPathNotExists) { - const expValue = prepareCircuitArrayValues(values, 64); - const valueHash = poseidon.spongeHashX(expValue, 6); - const schemaHash = coreSchemaFromStr(schema); - const quaryHash = poseidon.hash([ - schemaHash.bigInt(), - BigInt(slotIndex), - BigInt(operator), - BigInt(claimPathKey), - BigInt(claimPathNotExists), - valueHash, - ]); - return quaryHash; -} - -async function main() { - // you can run https://go.dev/play/p/oB_oOW7kBEw to get schema hash and claimPathKey using YOUR schema - const schemaBigInt = "74977327600848231385663280181476307657"; - - const type = "KYCAgeCredential"; - const schemaUrl = - "https://raw.githubusercontent.com/iden3/claim-schema-vocab/main/schemas/json-ld/kyc-v3.json-ld"; - // merklized path to field in the W3C credential according to JSONLD schema e.g. 
birthday in the KYCAgeCredential under the url "https://raw.githubusercontent.com/iden3/claim-schema-vocab/main/schemas/json-ld/kyc-v3.json-ld" - const schemaClaimPathKey = - "20376033832371109177683048456014525905119173674985843915445634726167450989630"; - - const requestId = 1; - - const query: any = { - requestId, - schema: schemaBigInt, - claimPathKey: schemaClaimPathKey, - operator: Operators.LT, - slotIndex: 0, - value: [20020101, ...new Array(63).fill(0)], // for operators 1-3 only first value matters - circuitIds: ["credentialAtomicQuerySigV2OnChain"], - skipClaimRevocationCheck: false, - claimPathNotExists: 0, - }; - - query.queryHash = calculateQueryHashV2( - query.value, - query.schema, - query.slotIndex, - query.operator, - query.claimPathKey, - query.claimPathNotExists - ).toString(); - - let erc20Verifier = await hre.ethers.getContractAt("ERC20Verifier", ERC20_VERIFIER_ADDRESS); - - const invokeRequestMetadata = { - id: "7f38a193-0918-4a48-9fac-36adfdb8b542", - typ: "application/iden3comm-plain-json", - type: "https://iden3-communication.io/proofs/1.0/contract-invoke-request", - thid: "7f38a193-0918-4a48-9fac-36adfdb8b542", - body: { - reason: "airdrop participation", - transaction_data: { - contract_address: ERC20_VERIFIER_ADDRESS, - method_id: "b68967e2", - chain_id: 80002, - network: "polygon-amoy", - }, - scope: [ - { - id: query.requestId, - circuitId: query.circuitIds[0], - query: { - allowedIssuers: ["*"], - context: schemaUrl, - credentialSubject: { - birthday: { - $lt: query.value[0], - }, - }, - type, - }, - }, - ], - }, - }; - - try { - - // ############ Use this code to set request in ERC20Verifier ############ - - await erc20Verifier.setZKPRequest(requestId, { - metadata: JSON.stringify(invokeRequestMetadata), - validator: VALIDATOR_ADDRESS, - data: packValidatorParams(query), - }); - - // ############### Use this code to set request in Universal Verifier ############ - - // const universalVerifier = await hre.ethers.getContractAt('UniversalVerifier', UNIVERSAL_VERIFIER_ADDRESS); - // - // await universalVerifier.addValidatorToWhitelist(VALIDATOR_ADDRESS); - // - //// You can call this method on behalf of any signer which is supposed to be request controller - // await universalVerifier.setZKPRequest(requestId, { - // metadata: JSON.stringify(invokeRequestMetadata), - // validator: VALIDATOR_ADDRESS, - // data: packValidatorParams(query), - // }); - - console.log("Request set"); - } catch (e) { - console.log("error: ", e); - } -} - -main() - .then(() => process.exit(0)) - .catch((error) => { - console.error(error); - process.exit(1); - }); -``` - -The contract is now correctly deployed on Polygon Amoy Testnet and the query has been set up, congratulations! Now it is time to launch the airdrop! - -### Add the Proof Request Inside a QR Code - -The last step is to design the proof request to be embedded inside a QR code. 
In this particular case this is how the request should look like (remember to modify it by adding the address of your ERC20Verifier Contract): - -```json -{ - "id": "7f38a193-0918-4a48-9fac-36adfdb8b542", - "typ": "application/iden3comm-plain-json", - "type": "https://iden3-communication.io/proofs/1.0/contract-invoke-request", - "thid": "7f38a193-0918-4a48-9fac-36adfdb8b542", - "body": { - "reason": "airdrop participation", - "transaction_data": { - "contract_address": "", - "method_id": "b68967e2", - "chain_id": 80002, - "network": "polygon-amoy" - }, - "scope": [ - { - "id": 1, - "circuitId": "credentialAtomicQuerySigV2OnChain", - "query": { - "allowedIssuers": ["*"], - "context": "https://raw.githubusercontent.com/iden3/claim-schema-vocab/main/schemas/json-ld/kyc-v3.json-ld", - "credentialSubject": { - "birthday": { - "$lt": 20020101 - } - }, - "type": "KYCAgeCredential" - } - } - ] - } -} -``` - -> The scope section inside the JSON file must match the query previously set when calling the `setZKPRequest` function. - -Note that the request resembles, in most of its parts, the one designed for [off-chain verification](/docs/verifier/verification-library/request-api-guide.md). The extra part that has been added here is the `transcation_data` that includes: - -- `contract_address`, namely the address of the Verifier contract, in this case, ERC20Verifier. -- `method_id`, namely the [Function Selector](https://solidity-by-example.org/function-selector/) of the `submitZKPResponse` function. -- `chain_id`, the ID of the chain where the Smart Contract has been deployed. -- `network`, the name of the network where the Smart contract has been deployed. - -> To display the QR code inside your frontend, you can use the `express.static` built-in middleware function together with this Static Folder or this [Code Sandbox](https://codesandbox.io/s/yp1pmpjo4z?file=/index.js). - -Scanning the QR with their Polygon ID Wallet, users will be able to generate proofs and send transactions to the Smart Contract in order to request credentials for their airdrops. - -The same proof generation request can also be delivered to users via Deep Linking. In order to do so, it is necessary to [encode](https://www.base64encode.org/) the JSON file to Base64 Format. The related deep link would be `iden3comm://?i_m={{base64EncodedJsonHere}}`. 
For example, in this specific case the deep link would be: `iden3comm://?i_m=ewogICAgImlkIjogIjdmMzhhMTkzLTA5MTgtNGE0OC05ZmFjLTM2YWRmZGI4YjU0MiIsCiAgICAidHlwIjogImFwcGxpY2F0aW9uL2lkZW4zY29tbS1wbGFpbi1qc29uIiwKICAgICJ0eXBlIjogImh0dHBzOi8vaWRlbjMtY29tbXVuaWNhdGlvbi5pby9wcm9vZnMvMS4wL2NvbnRyYWN0LWludm9rZS1yZXF1ZXN0IiwKICAgICJ0aGlkIjogIjdmMzhhMTkzLTA5MTgtNGE0OC05ZmFjLTM2YWRmZGI4YjU0MiIsCiAgICAiYm9keSI6IHsKICAgICAgICAicmVhc29uIjogImFpcmRyb3AgcGFydGljaXBhdGlvbiIsCiAgICAgICAgInRyYW5zYWN0aW9uX2RhdGEiOiB7CiAgICAgICAgICAgICJjb250cmFjdF9hZGRyZXNzIjogIjxFUkMyMFZlcmlmaWVyQWRkcmVzcz4iLAogICAgICAgICAgICAibWV0aG9kX2lkIjogImI2ODk2N2UyIiwKICAgICAgICAgICAgImNoYWluX2lkIjogODAwMDEsCiAgICAgICAgICAgICJuZXR3b3JrIjogInBvbHlnb24tbXVtYmFpIgogICAgICAgIH0sCiAgICAgICAgInNjb3BlIjogWwogICAgICAgICAgICB7CiAgICAgICAgICAgICAgICAiaWQiOiAxLAogICAgICAgICAgICAgICAgImNpcmN1aXRJZCI6ICJjcmVkZW50aWFsQXRvbWljUXVlcnlTaWdWMk9uQ2hhaW4iLAogICAgICAgICAgICAgICAgInF1ZXJ5IjogewogICAgICAgICAgICAgICAgICAgICJhbGxvd2VkSXNzdWVycyI6IFsKICAgICAgICAgICAgICAgICAgICAgICAgIioiCiAgICAgICAgICAgICAgICAgICAgXSwKICAgICAgICAgICAgICAgICAgICAiY29udGV4dCI6ICJodHRwczovL3Jhdy5naXRodWJ1c2VyY29udGVudC5jb20vaWRlbjMvY2xhaW0tc2NoZW1hLXZvY2FiL21haW4vc2NoZW1hcy9qc29uLWxkL2t5Yy12My5qc29uLWxkIiwKICAgICAgICAgICAgICAgICAgICAiY3JlZGVudGlhbFN1YmplY3QiOiB7CiAgICAgICAgICAgICAgICAgICAgICAgICJiaXJ0aGRheSI6IHsKICAgICAgICAgICAgICAgICAgICAgICAgICAgICIkbHQiOiAyMDAyMDEwMQogICAgICAgICAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgICAgICAgICAgfSwKICAgICAgICAgICAgICAgICAgICAidHlwZSI6ICJLWUNBZ2VDcmVkZW50aWFsIgogICAgICAgICAgICAgICAgfQogICAgICAgICAgICB9CiAgICAgICAgXQogICAgfQp9` - -## Claim the Airdrop - -You can directly test it by scanning the QR Code below using your Polygon ID App: - -

- -### How is the proof submission executed? - -A wallet needs to call the `submitZKPResponse` function before it can submit the proof for the requirements set in the Airdrop Participation process. -This function is defined by `IZKPVerifier` interface and therefore implemented in both `EmbeddedZKPVerifier` and `UniversalVerifier` contracts. - - -```solidity - function submitZKPResponse( - uint64 requestId, - uint256[] memory inputs, - uint256[2] memory a, - uint256[2][2] memory b, - uint256[2] memory c - ) external; -``` - -## Extend it to Your Own Logic - -Now that you have been able to create your first on-chain ZK-based application, you can extend it to accommodate any type of imaginable logic. The target Smart Contract doesn't have to be an ERC20 but it can be an ERC721, a DeFi pool, a voting Smart Contract or whatever contract you can think of. Equally, the query can be extended to any type of existing Credential and based on the different operators available inside the ZK Query Language. - -Another possibility to customize your Smart Contract involves setting different ZK requests. First of all, multiple `REQUEST_ID` must be defined inside the main Smart Contract. Therefore, the contract deployer can set a different query for each request ID and create different outcomes inside `_afterProofSubmit` according to the type of proof received. For example, an airdrop contract can verify the role of a user inside a DAO and distribute a different amount of tokens based on the role. - -## Estimated Gas Costs for On-Chain Verifier -The general gas cost depends on the code you put into `_beforeProofSubmit` and `_afterProofSubmit` hooks. The value we observed with our tests for `submitZKPResponce` functions calls was aroung 700K gas and zk proof verification function specifically costs approximately 520k gas as of January 2024. diff --git a/docs/verifier/on-chain-verification/set-zkp-request.md b/docs/verifier/on-chain-verification/set-zkp-request.md new file mode 100644 index 00000000..7666c9e6 --- /dev/null +++ b/docs/verifier/on-chain-verification/set-zkp-request.md @@ -0,0 +1,86 @@ +--- +id: set-zkp-request +title: Setting Up the ZKP Query Request +sidebar_label: Set ZKP Request +description: Setting ZKP Request for on-chain verification +keywords: + - docs + - privado id + - ID holder + - zkp + - zkp request + - verifier + - on-chain +--- +import useBaseUrl from '@docusaurus/useBaseUrl'; + +### Approach - A Setting Request using Query Builder: + +1. Visit the [PrivadoID Query Builder](https://tools.privado.id/query-builder) and paste the JSON-LD Context URL (mentioned above) in the input box. +2. Select the schema type to `pohcheck` and select the `human` in the Attribute field. +
+ +3. Select the Proof type "Signature-based (SIG)" and Circuit ID Credential Atomic Query v3 On Chain. + +4. Set the query type as Condition and Operator as "Is equal to," and select `true` in the Attribute value. + +5. Set the Issuer DID field to "*" to accept credentials from any issuer or, if you want, you can use any specific DID to restrict allowed issuers for the query. +
+ +6. Next, click on the **“Create query”**. + +7. Add the Universal Verifier Smart contract address for the selected network (e.g., for Polygon Amoy: `0xfcc86A79fCb057A8e55C6B853dff9479C3cf607c`) in the Smart Contract Address input. +
+ +8. Click on the **Set request** to submit the on-chain request. + +9. Click on **Confirm** in the Metamask to accept and submit the request. +
+ +:::note +Make sure you have sufficient gas tokens (e.g., POL in the case of Polygon Amoy) in your wallet to submit the transaction. +::: + +10. Once the transaction is successful, you can click the **Test Query** button to generate the query request: you will be presented with a QR code that users can scan to generate the proof, or you can copy the URL and use it for verification with the web wallet (a sketch of building such a link manually follows below). + +
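If you want to build the link yourself rather than rely on the **Test Query** button, the contract-invoke request JSON can be base64-encoded into an `iden3comm://` deep link, the same format described in the tutorial. A minimal sketch, assuming Node.js and a placeholder request object:

```ts
// Minimal sketch: turn a contract-invoke request into an iden3comm deep link.
// The `request` object is a placeholder; reuse the JSON produced for the query you set on-chain.
const request = {
  id: "7f38a193-0918-4a48-9fac-36adfdb8b542",
  typ: "application/iden3comm-plain-json",
  type: "https://iden3-communication.io/proofs/1.0/contract-invoke-request",
  thid: "7f38a193-0918-4a48-9fac-36adfdb8b542",
  body: {
    // reason, transaction_data and scope exactly as set in your ZKP request
  },
};

// Base64-encode the JSON payload (Node.js Buffer; in a browser, use btoa instead).
const encoded = Buffer.from(JSON.stringify(request)).toString("base64");
const deepLink = `iden3comm://?i_m=${encoded}`;

console.log(deepLink); // this string can also be rendered as a QR code for mobile wallets
```

The resulting link is what the mobile wallet opens; the same payload can be rendered as a QR code for desktop flows.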
+ +:::note +You only need to set the request once (per query), and it can then be used by all users to scan, generate, and submit the proof to the smart contract via the Privado ID Mobile Wallet app or the Privado ID Web Wallet. +::: +:::note + Make sure you note down the **Request ID**, as it is used in the logic smart contract. +::: +### Approach - B Setting Request Programmatically: + +We provide a deployment script that you can use as a starting point to set up your own query. To get started, follow these steps: + +1. Clone the iden3 contracts repository using Git: + `git clone https://github.com/iden3/contracts.git` + +2. Navigate into the cloned repository: + `cd contracts` + +3. Add your private key and the JSON-RPC URL for the desired blockchain network (e.g., Polygon Amoy) to the project configuration. This will allow you to set the query on your chosen chain. + +4. Configure the `hardhat.config` file according to your preferences. + +5. Next, replace the placeholders in the script with your own query values (this example uses a PoH query) and run it: + `npx hardhat run scripts/maintenance/setProofRequest.ts --network amoy` + +This command will set up the on-chain request and print out the query used, for convenience. You can then create a QR code from this query, which users can scan with the Privado ID mobile wallet to verify their credentials. + diff --git a/docs/verifier/on-chain-verification/tutorial.md b/docs/verifier/on-chain-verification/tutorial.md new file mode 100644 index 00000000..75609828 --- /dev/null +++ b/docs/verifier/on-chain-verification/tutorial.md @@ -0,0 +1,324 @@ +--- +id: tutorial +title: On-chain verification Tutorial +sidebar_label: Tutorial +description: On-chain verification tutorial. +keywords: + - docs + - privado id + - ID holder + - issuer + - verifier + - on-chain +--- + + + +## Implementing an ERC20 ZK Airdrop + +In this tutorial, we will create an ERC20 ZK Airdrop Contract. The chosen query criterion is to be born before `01/01/2002`. Users who can prove that they were born before that date will be able to claim the airdrop; otherwise, they will not. The proof submitted to the Smart Contract will not reveal any information about the user's specific date of birth, as we are using zero knowledge. + +:::note + +To set up a different query, check out the [ZK Query Language section](/docs/verifier/verification-library/zk-query-language.md). + +::: + +This tutorial is based on the verification of a Credential of type `KYCAgeCredential` with an attribute `birthday`, defined by the schema URL `https://raw.githubusercontent.com/iden3/claim-schema-vocab/main/schemas/json-ld/kyc-v3.json-ld`. + +The prerequisite is that users have the [Polygon ID Wallet app](/docs/wallet/wallet-overview.md) installed and have self-issued a Credential of type `KYC Age Credential Merklized` using our [Demo Issuer](https://issuer-demo.polygonid.me/). + +:::note + +Some executable code related to this tutorial is in this repository. + +::: + + +## Design the ERC20 zk Airdrop Contract with ZK-proof verification + +### Embedded ZKPVerifier Smart Contract +This is an abstract smart contract that implements the logic of verifying ZK proofs and saving the verification result. It is designed to be inherited by another smart contract with its own business logic, which may consume the proof verification functionality. + +The contract is designed to work with different ZK Validator contracts and different proof requests, both of which are set by the contract owner (a sketch of such a setup call follows below).
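To make that concrete, the snippet below sketches the kind of owner-side Hardhat script that registers such a proof request. It mirrors the `setZKPRequest` flow of the setup script referenced in the "Set ZKP Request" section; the addresses, the schema hash, and the `Operators`, `calculateQueryHashV2` and `packValidatorParams` helpers (and their commented import paths) are assumptions taken from that script, so this is not directly runnable as-is:

```ts
// Sketch only: registering a proof request on ERC20Verifier from a Hardhat script.
// The commented imports, addresses and schema hash below are assumptions; in practice
// they come from the contracts repository referenced in the "Set ZKP Request" section.
import hre from "hardhat";
// import { calculateQueryHashV2, Operators } from "@0xpolygonid/js-sdk";    // assumed path
// import { packValidatorParams } from "../test/utils/validator-pack-utils"; // assumed path

const ERC20_VERIFIER_ADDRESS = "<your ERC20Verifier address>";
const VALIDATOR_ADDRESS = "<whitelisted SigV2 on-chain validator address>";
const schemaBigInt = "<KYCAgeCredential schema hash>";
// Path key of the `birthday` field in the merklized KYC credential:
const schemaClaimPathKey =
  "20376033832371109177683048456014525905119173674985843915445634726167450989630";
const invokeRequestMetadata = { /* the contract-invoke request JSON presented to users */ };

async function main() {
  const requestId = 1;

  const query: any = {
    requestId,
    schema: schemaBigInt,
    claimPathKey: schemaClaimPathKey,
    operator: Operators.LT,                      // "less than"
    slotIndex: 0,
    value: [20020101, ...new Array(63).fill(0)], // born before 2002-01-01
    circuitIds: ["credentialAtomicQuerySigV2OnChain"],
    skipClaimRevocationCheck: false,
    claimPathNotExists: 0,
  };
  query.queryHash = calculateQueryHashV2(
    query.value,
    query.schema,
    query.slotIndex,
    query.operator,
    query.claimPathKey,
    query.claimPathNotExists
  ).toString();

  const erc20Verifier = await hre.ethers.getContractAt("ERC20Verifier", ERC20_VERIFIER_ADDRESS);
  await erc20Verifier.setZKPRequest(requestId, {
    metadata: JSON.stringify(invokeRequestMetadata), // request shown to users when they scan the QR
    validator: VALIDATOR_ADDRESS,
    data: packValidatorParams(query),
  });
  console.log("Request set");
}

main().catch((e) => {
  console.error(e);
  process.exit(1);
});
```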
+ +### Universal Verifier Smart Contract +This smart contract implements the same functionality as the `EmbeddedZKPVerifier` contract; however, it is not abstract but a standalone contract. + +The `UniversalVerifier` is designed to be used by multiple external contracts. Not only the `UniversalVerifier` owner but any address can set a `ZKPRequest` in the `UniversalVerifier`. The only restriction at the moment is that the proof request must use a whitelisted ZK Validator; the whitelisting is managed by the contract owner. + +### Let us jump into the code by writing the ERC20Verifier contract in each of the two ways. + +#### Inheriting the EmbeddedZKPVerifier abstract smart contract + +We'll create an `ERC20Verifier` contract, which is a standard ERC20 contract. The extra functionality comes from the zero-knowledge proof verification. All the functions dedicated to ZK verification are contained inside the `EmbeddedZKPVerifier` contract and inherited by `ERC20Verifier`. For example, users will submit their proof to claim the airdrop by calling `submitZKPResponse`. + +The `ERC20Verifier` contract must define at least one `TRANSFER_REQUEST_ID`. This is the identifier of the request that the contract makes to the user. + +The `EmbeddedZKPVerifier` contract provides two hooks: `_beforeProofSubmit` and `_afterProofSubmit`. + +These hooks are called before and after any proof gets submitted and can be used to add personalized logic inside your Smart Contract. In this specific case, it must be checked that the sender of the proof matches the address contained in the proof challenge. This requirement is necessary to prevent proof front-running. This condition is added inside `_beforeProofSubmit`. + +In this specific example, the airdrop token minting happens inside `_afterProofSubmit`, which is executed if the proof is correctly verified. Of course, the airdrop logic can be personalized according to the needs of the project. As another option, you may mint tokens to a user via a separate `mint` function call if the user's address was verified before. + +Finally, we will add another element of security inside the Smart Contract: prevent any token transfer unless the destination address has passed proof verification. This last condition is added by overriding the ERC20 `_update` function and checking that the receiver address `to` of the transfer is included inside the `proofs` mapping.
+ +```solidity +// SPDX-License-Identifier: MIT +pragma solidity 0.8.20; +import {ERC20Upgradeable} from '@openzeppelin/contracts-upgradeable/token/ERC20/ERC20Upgradeable.sol'; +import {PrimitiveTypeUtils} from '@iden3/contracts/lib/PrimitiveTypeUtils.sol'; +import {ICircuitValidator} from '@iden3/contracts/interfaces/ICircuitValidator.sol'; +import {EmbeddedZKPVerifier} from '@iden3/contracts/verifiers/EmbeddedZKPVerifier.sol'; +contract ERC20Verifier is ERC20Upgradeable, EmbeddedZKPVerifier { + uint64 public constant TRANSFER_REQUEST_ID_SIG_VALIDATOR = 1; + uint64 public constant TRANSFER_REQUEST_ID_MTP_VALIDATOR = 2; + /// @custom:storage-location erc7201:polygonid.storage.ERC20Verifier + struct ERC20VerifierStorage { + mapping(uint256 => address) idToAddress; + mapping(address => uint256) addressToId; + uint256 TOKEN_AMOUNT_FOR_AIRDROP_PER_ID; + } + // keccak256(abi.encode(uint256(keccak256("polygonid.storage.ERC20Verifier")) - 1)) & ~bytes32(uint256(0xff)) + bytes32 private constant ERC20VerifierStorageLocation = + 0x3b1c3bd751d9cd42a3739426a271cdc235017946663d56eeaf827d70f8b77000; + function _getERC20VerifierStorage() private pure returns (ERC20VerifierStorage storage $) { + assembly { + $.slot := ERC20VerifierStorageLocation + } + } + modifier beforeTransfer(address to) { + require( + isProofVerified(to, TRANSFER_REQUEST_ID_SIG_VALIDATOR) || + isProofVerified(to, TRANSFER_REQUEST_ID_MTP_VALIDATOR), + 'only identities who provided sig or mtp proof for transfer requests are allowed to receive tokens' + ); + _; + } + function initialize(string memory name, string memory symbol) public initializer { + ERC20VerifierStorage storage $ = _getERC20VerifierStorage(); + super.__ERC20_init(name, symbol); + super.__EmbeddedZKPVerifier_init(_msgSender()); + $.TOKEN_AMOUNT_FOR_AIRDROP_PER_ID = 5 * 10 ** uint256(decimals()); + } + function _beforeProofSubmit( + uint64 /* requestId */, + uint256[] memory inputs, + ICircuitValidator validator + ) internal view override { + // check that challenge input is address of sender + address addr = PrimitiveTypeUtils.uint256LEToAddress( + inputs[validator.inputIndexOf('challenge')] + ); + // this is linking between msg.sender and + require(_msgSender() == addr, 'address in proof is not a sender address'); + } + function _afterProofSubmit( + uint64 requestId, + uint256[] memory inputs, + ICircuitValidator validator + ) internal override { + ERC20VerifierStorage storage $ = _getERC20VerifierStorage(); + if ( + requestId == TRANSFER_REQUEST_ID_SIG_VALIDATOR || + requestId == TRANSFER_REQUEST_ID_MTP_VALIDATOR + ) { + // if proof is given for transfer request id ( mtp or sig ) and it's a first time we mint tokens to sender + uint256 id = inputs[1]; + if ($.idToAddress[id] == address(0) && $.addressToId[_msgSender()] == 0) { + super._mint(_msgSender(), $.TOKEN_AMOUNT_FOR_AIRDROP_PER_ID); + $.addressToId[_msgSender()] = id; + $.idToAddress[id] = _msgSender(); + } + } + } + function _update( + address from /* from */, + address to, + uint256 amount /* amount */ + ) internal override beforeTransfer(to) { + super._update(from, to, amount); + } + function getIdByAddress(address addr) public view returns (uint256) { + return _getERC20VerifierStorage().addressToId[addr]; + } + function getAddressById(uint256 id) public view returns (address) { + return _getERC20VerifierStorage().idToAddress[id]; + } + function getTokenAmountForAirdropPerId() public view returns (uint256) { + return _getERC20VerifierStorage().TOKEN_AMOUNT_FOR_AIRDROP_PER_ID; + } +} +``` + +#### Using 
Universal Verifier Smart Contract + +Unlike, the previous example, the `ERC20LinkedUniversalVerifier` contract does not inherit the `EmbeddedZKPVerifier` contract. Instead, it uses the `UniversalVerifier` contract to check the proof result. + +Unlike `ERC20Verifier` the `ERC20LinkedUniversalVerifier` does not need to implement the `_beforeProofSubmit` and `_afterProofSubmit` hooks as proof verification is assumed to be done directly to the `UniversalVerifier` contract by some other transaction. + +In the same way the `ERC20LinkedUniversalVerifier` contract must define at least one `TRANSFER_REQUEST_ID` to get proof statuses for this request id from the `UniversalVerifier`. + +In this example, you may mint tokens to a user via the `mint` function call. + +Any token transfers are prevented inside `beforeTokenTransfer` modifier (which is invoked via `mint -> _mint -> update` call chain) unless there is already verification proof in UniversalVerifier, which corresponds to `msg.sender` address. + +```solidity +// SPDX-License-Identifier: MIT +pragma solidity 0.8.20; +import {ERC20} from '@openzeppelin/contracts/token/ERC20/ERC20.sol'; +import {PrimitiveTypeUtils} from '@iden3/contracts/lib/PrimitiveTypeUtils.sol'; +import {ICircuitValidator} from '@iden3/contracts/interfaces/ICircuitValidator.sol'; +import {EmbeddedZKPVerifier} from '@iden3/contracts/verifiers/EmbeddedZKPVerifier.sol'; +import {UniversalVerifier} from '@iden3/contracts/verifiers/UniversalVerifier.sol'; +contract ERC20LinkedUniversalVerifier is ERC20 { + uint64 public constant TRANSFER_REQUEST_ID_SIG_VALIDATOR = 0; + uint64 public constant TRANSFER_REQUEST_ID_MTP_VALIDATOR = 1; + UniversalVerifier public verifier; + uint256 public TOKEN_AMOUNT_FOR_AIRDROP_PER_ID = 5 * 10 ** uint256(decimals()); + modifier beforeTokenTransfer(address to) { + require( + verifier.getProofStatus(to, TRANSFER_REQUEST_ID_SIG_VALIDATOR).isVerified || + verifier.getProofStatus(to, TRANSFER_REQUEST_ID_MTP_VALIDATOR).isVerified, + 'only identities who provided sig or mtp proof for transfer requests are allowed to receive tokens' + ); + _; + } + constructor( + UniversalVerifier verifier_, + string memory name_, + string memory symbol_ + ) ERC20(name_, symbol_) { + verifier = verifier_; + } + function mint(address to) public { + _mint(to, TOKEN_AMOUNT_FOR_AIRDROP_PER_ID); + } + function _update( + address from, + address to, + uint256 value + ) internal override beforeTokenTransfer(to) { + super._update(from, to, value); + } +} +``` + +### Deploy the Contract + +:::note "Hardhat" + +For this tutorial, we are using the Hardhat development environment to facilitate the contract deployment. You can learn how to get started with this tool by checking [their documentation](https://hardhat.org/hardhat-runner/docs/getting-started). + +::: + +#### Deploy your custom contract inherited from EmbeddedZKPVerifier + +Execute this Hardhat script to deploy either `ERC20Verifier`. Change the `verifierContract` variable to the desired contract name. 
+ +```js +import { ethers } from "hardhat"; +import { upgrades } from "hardhat"; + +async function main() { + const verifierContract = "ERC20Verifier"; + const verifierName = "ERC20zkAirdrop"; + const verifierSymbol = "zkERC20"; + + const ERC20Verifier = await ethers.getContractFactory(verifierContract); + const erc20Verifier = await upgrades.deployProxy( + ERC20Verifier, + [verifierName, verifierSymbol] + ); + + await erc20Verifier.waitForDeployment(); + console.log(verifierName, " contract address:", await erc20Verifier.getAddress()); +} + +main() + .then(() => process.exit(0)) + .catch((error) => { + console.error(error); + process.exit(1); + }); +``` + +#### Deploy your custom contract linked to the Universal Verifier smart contract + +```js +import { ethers } from 'hardhat'; + +async function main() { + const universalVerifierAddress = ''; + const verifierName = 'ERC20LinkedUniversalVerifier'; + const verifierSymbol = 'zkERC20'; + + const verifier = await ethers.deployContract( + verifierName, + [ universalVerifierAddress, verifierName, verifierSymbol ] + ); + await verifier.waitForDeployment(); + console.log(verifierName, ' contract address:', await verifier.getAddress()); +} + +main() + .then(() => process.exit(0)) + .catch((error) => { + console.error(error); + process.exit(1); + }); +``` + +:::note + +The ERC20Verifier contract should preferably be deployed on the Amoy test network, as it already has a set of supporting validator contracts. + +::: + +### Set the ZKP Request & Add the Proof Request Inside a QR Code +To set up the ZKP request and easily generate a request to present to users, please visit the "Set ZKP Request" section. + +[Set ZKP Request](/docs/verifier/on-chain-verification/set-zkp-request.md) + +### Things to Note + +> The scope section inside the JSON file must match the query previously set when calling the `setZKPRequest` function. + +Note that the request resembles, in most of its parts, the one designed for [off-chain verification](/docs/verifier/verification-library/request-api-guide.md). The extra part that has been added here is the `transaction_data` field, which includes: + +- `contract_address`, namely the address of the Verifier contract, in this case, ERC20Verifier. +- `method_id`, namely the [Function Selector](https://solidity-by-example.org/function-selector/) of the `submitZKPResponse` function. +- `chain_id`, the ID of the chain where the Smart Contract has been deployed. +- `network`, the name of the network where the Smart Contract has been deployed. + +> To display the QR code inside your frontend, you can use the `express.static` built-in middleware function together with this Static Folder or this [Code Sandbox](https://codesandbox.io/s/yp1pmpjo4z?file=/index.js). + +Scanning the QR with their Polygon ID Wallet, users will be able to generate proofs and send transactions to the Smart Contract in order to claim their airdrops. + +The same proof generation request can also be delivered to users via Deep Linking. In order to do so, it is necessary to [encode](https://www.base64encode.org/) the JSON file to Base64 Format. The related deep link would be `iden3comm://?i_m={{base64EncodedJsonHere}}`.
For example, in this specific case the deep link would be: `iden3comm://?i_m=ewogICAgImlkIjogIjdmMzhhMTkzLTA5MTgtNGE0OC05ZmFjLTM2YWRmZGI4YjU0MiIsCiAgICAidHlwIjogImFwcGxpY2F0aW9uL2lkZW4zY29tbS1wbGFpbi1qc29uIiwKICAgICJ0eXBlIjogImh0dHBzOi8vaWRlbjMtY29tbXVuaWNhdGlvbi5pby9wcm9vZnMvMS4wL2NvbnRyYWN0LWludm9rZS1yZXF1ZXN0IiwKICAgICJ0aGlkIjogIjdmMzhhMTkzLTA5MTgtNGE0OC05ZmFjLTM2YWRmZGI4YjU0MiIsCiAgICAiYm9keSI6IHsKICAgICAgICAicmVhc29uIjogImFpcmRyb3AgcGFydGljaXBhdGlvbiIsCiAgICAgICAgInRyYW5zYWN0aW9uX2RhdGEiOiB7CiAgICAgICAgICAgICJjb250cmFjdF9hZGRyZXNzIjogIjxFUkMyMFZlcmlmaWVyQWRkcmVzcz4iLAogICAgICAgICAgICAibWV0aG9kX2lkIjogImI2ODk2N2UyIiwKICAgICAgICAgICAgImNoYWluX2lkIjogODAwMDEsCiAgICAgICAgICAgICJuZXR3b3JrIjogInBvbHlnb24tbXVtYmFpIgogICAgICAgIH0sCiAgICAgICAgInNjb3BlIjogWwogICAgICAgICAgICB7CiAgICAgICAgICAgICAgICAiaWQiOiAxLAogICAgICAgICAgICAgICAgImNpcmN1aXRJZCI6ICJjcmVkZW50aWFsQXRvbWljUXVlcnlTaWdWMk9uQ2hhaW4iLAogICAgICAgICAgICAgICAgInF1ZXJ5IjogewogICAgICAgICAgICAgICAgICAgICJhbGxvd2VkSXNzdWVycyI6IFsKICAgICAgICAgICAgICAgICAgICAgICAgIioiCiAgICAgICAgICAgICAgICAgICAgXSwKICAgICAgICAgICAgICAgICAgICAiY29udGV4dCI6ICJodHRwczovL3Jhdy5naXRodWJ1c2VyY29udGVudC5jb20vaWRlbjMvY2xhaW0tc2NoZW1hLXZvY2FiL21haW4vc2NoZW1hcy9qc29uLWxkL2t5Yy12My5qc29uLWxkIiwKICAgICAgICAgICAgICAgICAgICAiY3JlZGVudGlhbFN1YmplY3QiOiB7CiAgICAgICAgICAgICAgICAgICAgICAgICJiaXJ0aGRheSI6IHsKICAgICAgICAgICAgICAgICAgICAgICAgICAgICIkbHQiOiAyMDAyMDEwMQogICAgICAgICAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgICAgICAgICAgfSwKICAgICAgICAgICAgICAgICAgICAidHlwZSI6ICJLWUNBZ2VDcmVkZW50aWFsIgogICAgICAgICAgICAgICAgfQogICAgICAgICAgICB9CiAgICAgICAgXQogICAgfQp9` + + +### How is the proof submission executed? + +A wallet needs to call the `submitZKPResponse` function before it can submit the proof for the requirements set in the Airdrop Participation process. +This function is defined by `IZKPVerifier` interface and therefore implemented in both `EmbeddedZKPVerifier` and `UniversalVerifier` contracts. + + +```solidity + function submitZKPResponse( + uint64 requestId, + uint256[] memory inputs, + uint256[2] memory a, + uint256[2][2] memory b, + uint256[2] memory c + ) external; +``` + +## Extend it to Your Own Logic + +Now that you have been able to create your first on-chain ZK-based application, you can extend it to accommodate any type of imaginable logic. The target Smart Contract doesn't have to be an ERC20 but it can be an ERC721, a DeFi pool, a voting Smart Contract or whatever contract you can think of. Equally, the query can be extended to any type of existing Credential and based on the different operators available inside the ZK Query Language. + +Another possibility to customize your Smart Contract involves setting different ZK requests. First of all, multiple `REQUEST_ID` must be defined inside the main Smart Contract. Therefore, the contract deployer can set a different query for each request ID and create different outcomes inside `_afterProofSubmit` according to the type of proof received. For example, an airdrop contract can verify the role of a user inside a DAO and distribute a different amount of tokens based on the role. + +## Estimated Gas Costs for On-Chain Verifier +The general gas cost depends on the code you put into `_beforeProofSubmit` and `_afterProofSubmit` hooks. The value we observed with our tests for `submitZKPResponce` functions calls was aroung 700K gas and zk proof verification function specifically costs approximately 520k gas as of January 2024. 
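To get a rough sense of the fee implied by the ~700K gas figure above, multiply it by a gas price; the 30 gwei value below is only an illustrative assumption, so check the live network value before relying on it:

```ts
import { formatUnits, parseUnits } from "ethers";

// Rough fee estimate for one submitZKPResponse call (~700k gas, per the figure above).
const gasUsed = 700_000n;
const gasPriceWei = parseUnits("30", "gwei"); // assumed gas price; check the current network value

const feeWei = gasUsed * gasPriceWei;
console.log(`~${formatUnits(feeWei, "ether")} POL per submission at 30 gwei`); // ~0.021 POL
```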
\ No newline at end of file diff --git a/docusaurus.config.js b/docusaurus.config.js index b28af74b..bb9adcc5 100755 --- a/docusaurus.config.js +++ b/docusaurus.config.js @@ -63,8 +63,8 @@ module.exports = { }, algolia: { // need to update this - indexName: "devs-polygonid", - appId: "VHJ21ILMOQ", + indexName: "privado", + appId: process.env.ALGOLIA_APP_ID, apiKey: process.env.ALGOLIA_API_KEY, contextualSearch: true, algoliaOptions: { diff --git a/package-lock.json b/package-lock.json index e7a6c1c2..5cc0145a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -29,31 +29,31 @@ } }, "node_modules/@algolia/autocomplete-core": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/@algolia/autocomplete-core/-/autocomplete-core-1.9.3.tgz", - "integrity": "sha512-009HdfugtGCdC4JdXUbVJClA0q0zh24yyePn+KUGk3rP7j8FEe/m5Yo/z65gn6nP/cM39PxpzqKrL7A6fP6PPw==", + "version": "1.17.7", + "resolved": "https://registry.npmjs.org/@algolia/autocomplete-core/-/autocomplete-core-1.17.7.tgz", + "integrity": "sha512-BjiPOW6ks90UKl7TwMv7oNQMnzU+t/wk9mgIDi6b1tXpUek7MW0lbNOUHpvam9pe3lVCf4xPFT+lK7s+e+fs7Q==", "dependencies": { - "@algolia/autocomplete-plugin-algolia-insights": "1.9.3", - "@algolia/autocomplete-shared": "1.9.3" + "@algolia/autocomplete-plugin-algolia-insights": "1.17.7", + "@algolia/autocomplete-shared": "1.17.7" } }, "node_modules/@algolia/autocomplete-plugin-algolia-insights": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/@algolia/autocomplete-plugin-algolia-insights/-/autocomplete-plugin-algolia-insights-1.9.3.tgz", - "integrity": "sha512-a/yTUkcO/Vyy+JffmAnTWbr4/90cLzw+CC3bRbhnULr/EM0fGNvM13oQQ14f2moLMcVDyAx/leczLlAOovhSZg==", + "version": "1.17.7", + "resolved": "https://registry.npmjs.org/@algolia/autocomplete-plugin-algolia-insights/-/autocomplete-plugin-algolia-insights-1.17.7.tgz", + "integrity": "sha512-Jca5Ude6yUOuyzjnz57og7Et3aXjbwCSDf/8onLHSQgw1qW3ALl9mrMWaXb5FmPVkV3EtkD2F/+NkT6VHyPu9A==", "dependencies": { - "@algolia/autocomplete-shared": "1.9.3" + "@algolia/autocomplete-shared": "1.17.7" }, "peerDependencies": { "search-insights": ">= 1 < 3" } }, "node_modules/@algolia/autocomplete-preset-algolia": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.9.3.tgz", - "integrity": "sha512-d4qlt6YmrLMYy95n5TB52wtNDr6EgAIPH81dvvvW8UmuWRgxEtY0NJiPwl/h95JtG2vmRM804M0DSwMCNZlzRA==", + "version": "1.17.7", + "resolved": "https://registry.npmjs.org/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.17.7.tgz", + "integrity": "sha512-ggOQ950+nwbWROq2MOCIL71RE0DdQZsceqrg32UqnhDz8FlO9rL8ONHNsI2R1MH0tkgVIDKI/D0sMiUchsFdWA==", "dependencies": { - "@algolia/autocomplete-shared": "1.9.3" + "@algolia/autocomplete-shared": "1.17.7" }, "peerDependencies": { "@algolia/client-search": ">= 4.9.1 < 6", @@ -61,9 +61,9 @@ } }, "node_modules/@algolia/autocomplete-shared": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/@algolia/autocomplete-shared/-/autocomplete-shared-1.9.3.tgz", - "integrity": "sha512-Wnm9E4Ye6Rl6sTTqjoymD+l8DjSTHsHboVRYrKgEt8Q7UHm9nYbqhN/i0fhUYA3OAEH7WA8x3jfpnmJm3rKvaQ==", + "version": "1.17.7", + "resolved": "https://registry.npmjs.org/@algolia/autocomplete-shared/-/autocomplete-shared-1.17.7.tgz", + "integrity": "sha512-o/1Vurr42U/qskRSuhBH+VKxMvkkUVTLU6WZQr+L5lGZZLYWyhdzWjW0iGXY7EkwRTjBqvN2EsR81yCTGV/kmg==", "peerDependencies": { "@algolia/client-search": ">= 4.9.1 < 6", "algoliasearch": ">= 4.9.1 < 6" @@ -90,6 +90,50 @@ 
"@algolia/cache-common": "4.22.0" } }, + "node_modules/@algolia/client-abtesting": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/client-abtesting/-/client-abtesting-5.15.0.tgz", + "integrity": "sha512-FaEM40iuiv1mAipYyiptP4EyxkJ8qHfowCpEeusdHUC4C7spATJYArD2rX3AxkVeREkDIgYEOuXcwKUbDCr7Nw==", + "dependencies": { + "@algolia/client-common": "5.15.0", + "@algolia/requester-browser-xhr": "5.15.0", + "@algolia/requester-fetch": "5.15.0", + "@algolia/requester-node-http": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/client-abtesting/node_modules/@algolia/client-common": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.15.0.tgz", + "integrity": "sha512-IofrVh213VLsDkPoSKMeM9Dshrv28jhDlBDLRcVJQvlL8pzue7PEB1EZ4UoJFYS3NSn7JOcJ/V+olRQzXlJj1w==", + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/client-abtesting/node_modules/@algolia/requester-browser-xhr": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.15.0.tgz", + "integrity": "sha512-Po/GNib6QKruC3XE+WKP1HwVSfCDaZcXu48kD+gwmtDlqHWKc7Bq9lrS0sNZ456rfCKhXksOmMfUs4wRM/Y96w==", + "dependencies": { + "@algolia/client-common": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/client-abtesting/node_modules/@algolia/requester-node-http": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.15.0.tgz", + "integrity": "sha512-b1jTpbFf9LnQHEJP5ddDJKE2sAlhYd7EVSOWgzo/27n/SfCoHfqD0VWntnWYD83PnOKvfe8auZ2+xCb0TXotrQ==", + "dependencies": { + "@algolia/client-common": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, "node_modules/@algolia/client-account": { "version": "4.22.0", "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.22.0.tgz", @@ -120,6 +164,50 @@ "@algolia/transporter": "4.22.0" } }, + "node_modules/@algolia/client-insights": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/client-insights/-/client-insights-5.15.0.tgz", + "integrity": "sha512-bDDEQGfFidDi0UQUCbxXOCdphbVAgbVmxvaV75cypBTQkJ+ABx/Npw7LkFGw1FsoVrttlrrQbwjvUB6mLVKs/w==", + "dependencies": { + "@algolia/client-common": "5.15.0", + "@algolia/requester-browser-xhr": "5.15.0", + "@algolia/requester-fetch": "5.15.0", + "@algolia/requester-node-http": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/client-insights/node_modules/@algolia/client-common": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.15.0.tgz", + "integrity": "sha512-IofrVh213VLsDkPoSKMeM9Dshrv28jhDlBDLRcVJQvlL8pzue7PEB1EZ4UoJFYS3NSn7JOcJ/V+olRQzXlJj1w==", + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/client-insights/node_modules/@algolia/requester-browser-xhr": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.15.0.tgz", + "integrity": "sha512-Po/GNib6QKruC3XE+WKP1HwVSfCDaZcXu48kD+gwmtDlqHWKc7Bq9lrS0sNZ456rfCKhXksOmMfUs4wRM/Y96w==", + "dependencies": { + "@algolia/client-common": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/client-insights/node_modules/@algolia/requester-node-http": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.15.0.tgz", 
+ "integrity": "sha512-b1jTpbFf9LnQHEJP5ddDJKE2sAlhYd7EVSOWgzo/27n/SfCoHfqD0VWntnWYD83PnOKvfe8auZ2+xCb0TXotrQ==", + "dependencies": { + "@algolia/client-common": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, "node_modules/@algolia/client-personalization": { "version": "4.22.0", "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.22.0.tgz", @@ -130,6 +218,50 @@ "@algolia/transporter": "4.22.0" } }, + "node_modules/@algolia/client-query-suggestions": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/client-query-suggestions/-/client-query-suggestions-5.15.0.tgz", + "integrity": "sha512-wu8GVluiZ5+il8WIRsGKu8VxMK9dAlr225h878GGtpTL6VBvwyJvAyLdZsfFIpY0iN++jiNb31q2C1PlPL+n/A==", + "dependencies": { + "@algolia/client-common": "5.15.0", + "@algolia/requester-browser-xhr": "5.15.0", + "@algolia/requester-fetch": "5.15.0", + "@algolia/requester-node-http": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/client-query-suggestions/node_modules/@algolia/client-common": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.15.0.tgz", + "integrity": "sha512-IofrVh213VLsDkPoSKMeM9Dshrv28jhDlBDLRcVJQvlL8pzue7PEB1EZ4UoJFYS3NSn7JOcJ/V+olRQzXlJj1w==", + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/client-query-suggestions/node_modules/@algolia/requester-browser-xhr": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.15.0.tgz", + "integrity": "sha512-Po/GNib6QKruC3XE+WKP1HwVSfCDaZcXu48kD+gwmtDlqHWKc7Bq9lrS0sNZ456rfCKhXksOmMfUs4wRM/Y96w==", + "dependencies": { + "@algolia/client-common": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/client-query-suggestions/node_modules/@algolia/requester-node-http": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.15.0.tgz", + "integrity": "sha512-b1jTpbFf9LnQHEJP5ddDJKE2sAlhYd7EVSOWgzo/27n/SfCoHfqD0VWntnWYD83PnOKvfe8auZ2+xCb0TXotrQ==", + "dependencies": { + "@algolia/client-common": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, "node_modules/@algolia/client-search": { "version": "4.22.0", "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.22.0.tgz", @@ -145,6 +277,50 @@ "resolved": "https://registry.npmjs.org/@algolia/events/-/events-4.0.1.tgz", "integrity": "sha512-FQzvOCgoFXAbf5Y6mYozw2aj5KCJoA3m4heImceldzPSMbdyS4atVjJzXKMsfX3wnZTFYwkkt8/z8UesLHlSBQ==" }, + "node_modules/@algolia/ingestion": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/@algolia/ingestion/-/ingestion-1.15.0.tgz", + "integrity": "sha512-MkqkAxBQxtQ5if/EX2IPqFA7LothghVyvPoRNA/meS2AW2qkHwcxjuiBxv4H6mnAVEPfJlhu9rkdVz9LgCBgJg==", + "dependencies": { + "@algolia/client-common": "5.15.0", + "@algolia/requester-browser-xhr": "5.15.0", + "@algolia/requester-fetch": "5.15.0", + "@algolia/requester-node-http": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/ingestion/node_modules/@algolia/client-common": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.15.0.tgz", + "integrity": "sha512-IofrVh213VLsDkPoSKMeM9Dshrv28jhDlBDLRcVJQvlL8pzue7PEB1EZ4UoJFYS3NSn7JOcJ/V+olRQzXlJj1w==", + "engines": { + "node": ">= 14.0.0" + } + }, + 
"node_modules/@algolia/ingestion/node_modules/@algolia/requester-browser-xhr": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.15.0.tgz", + "integrity": "sha512-Po/GNib6QKruC3XE+WKP1HwVSfCDaZcXu48kD+gwmtDlqHWKc7Bq9lrS0sNZ456rfCKhXksOmMfUs4wRM/Y96w==", + "dependencies": { + "@algolia/client-common": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/ingestion/node_modules/@algolia/requester-node-http": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.15.0.tgz", + "integrity": "sha512-b1jTpbFf9LnQHEJP5ddDJKE2sAlhYd7EVSOWgzo/27n/SfCoHfqD0VWntnWYD83PnOKvfe8auZ2+xCb0TXotrQ==", + "dependencies": { + "@algolia/client-common": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, "node_modules/@algolia/logger-common": { "version": "4.22.0", "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.22.0.tgz", @@ -158,6 +334,94 @@ "@algolia/logger-common": "4.22.0" } }, + "node_modules/@algolia/monitoring": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/@algolia/monitoring/-/monitoring-1.15.0.tgz", + "integrity": "sha512-QPrFnnGLMMdRa8t/4bs7XilPYnoUXDY8PMQJ1sf9ZFwhUysYYhQNX34/enoO0LBjpoOY6rLpha39YQEFbzgKyQ==", + "dependencies": { + "@algolia/client-common": "5.15.0", + "@algolia/requester-browser-xhr": "5.15.0", + "@algolia/requester-fetch": "5.15.0", + "@algolia/requester-node-http": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/monitoring/node_modules/@algolia/client-common": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.15.0.tgz", + "integrity": "sha512-IofrVh213VLsDkPoSKMeM9Dshrv28jhDlBDLRcVJQvlL8pzue7PEB1EZ4UoJFYS3NSn7JOcJ/V+olRQzXlJj1w==", + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/monitoring/node_modules/@algolia/requester-browser-xhr": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.15.0.tgz", + "integrity": "sha512-Po/GNib6QKruC3XE+WKP1HwVSfCDaZcXu48kD+gwmtDlqHWKc7Bq9lrS0sNZ456rfCKhXksOmMfUs4wRM/Y96w==", + "dependencies": { + "@algolia/client-common": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/monitoring/node_modules/@algolia/requester-node-http": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.15.0.tgz", + "integrity": "sha512-b1jTpbFf9LnQHEJP5ddDJKE2sAlhYd7EVSOWgzo/27n/SfCoHfqD0VWntnWYD83PnOKvfe8auZ2+xCb0TXotrQ==", + "dependencies": { + "@algolia/client-common": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/recommend": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/recommend/-/recommend-5.15.0.tgz", + "integrity": "sha512-5eupMwSqMLDObgSMF0XG958zR6GJP3f7jHDQ3/WlzCM9/YIJiWIUoJFGsko9GYsA5xbLDHE/PhWtq4chcCdaGQ==", + "dependencies": { + "@algolia/client-common": "5.15.0", + "@algolia/requester-browser-xhr": "5.15.0", + "@algolia/requester-fetch": "5.15.0", + "@algolia/requester-node-http": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/recommend/node_modules/@algolia/client-common": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.15.0.tgz", + "integrity": 
"sha512-IofrVh213VLsDkPoSKMeM9Dshrv28jhDlBDLRcVJQvlL8pzue7PEB1EZ4UoJFYS3NSn7JOcJ/V+olRQzXlJj1w==", + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/recommend/node_modules/@algolia/requester-browser-xhr": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.15.0.tgz", + "integrity": "sha512-Po/GNib6QKruC3XE+WKP1HwVSfCDaZcXu48kD+gwmtDlqHWKc7Bq9lrS0sNZ456rfCKhXksOmMfUs4wRM/Y96w==", + "dependencies": { + "@algolia/client-common": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/recommend/node_modules/@algolia/requester-node-http": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.15.0.tgz", + "integrity": "sha512-b1jTpbFf9LnQHEJP5ddDJKE2sAlhYd7EVSOWgzo/27n/SfCoHfqD0VWntnWYD83PnOKvfe8auZ2+xCb0TXotrQ==", + "dependencies": { + "@algolia/client-common": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, "node_modules/@algolia/requester-browser-xhr": { "version": "4.22.0", "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.22.0.tgz", @@ -171,6 +435,25 @@ "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.22.0.tgz", "integrity": "sha512-Y9cEH/cKjIIZgzvI1aI0ARdtR/xRrOR13g5psCxkdhpgRN0Vcorx+zePhmAa4jdQNqexpxtkUdcKYugBzMZJgQ==" }, + "node_modules/@algolia/requester-fetch": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-fetch/-/requester-fetch-5.15.0.tgz", + "integrity": "sha512-rOZ+c0P7ajmccAvpeeNrUmEKoliYFL8aOR5qGW5pFq3oj3Iept7Y5mEtEsOBYsRt6qLnaXn4zUKf+N8nvJpcIw==", + "dependencies": { + "@algolia/client-common": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@algolia/requester-fetch/node_modules/@algolia/client-common": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.15.0.tgz", + "integrity": "sha512-IofrVh213VLsDkPoSKMeM9Dshrv28jhDlBDLRcVJQvlL8pzue7PEB1EZ4UoJFYS3NSn7JOcJ/V+olRQzXlJj1w==", + "engines": { + "node": ">= 14.0.0" + } + }, "node_modules/@algolia/requester-node-http": { "version": "4.22.0", "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.22.0.tgz", @@ -2306,19 +2589,28 @@ } }, "node_modules/@docsearch/css": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.5.2.tgz", - "integrity": "sha512-SPiDHaWKQZpwR2siD0KQUwlStvIAnEyK6tAE2h2Wuoq8ue9skzhlyVQ1ddzOxX6khULnAALDiR/isSF3bnuciA==" + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.8.0.tgz", + "integrity": "sha512-pieeipSOW4sQ0+bE5UFC51AOZp9NGxg89wAlZ1BAQFaiRAGK1IKUaPQ0UGZeNctJXyqZ1UvBtOQh2HH+U5GtmA==" + }, + "node_modules/@docsearch/js": { + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/@docsearch/js/-/js-3.8.0.tgz", + "integrity": "sha512-PVuV629f5UcYRtBWqK7ID6vNL5647+2ADJypwTjfeBIrJfwPuHtzLy39hMGMfFK+0xgRyhTR0FZ83EkdEraBlg==", + "dependencies": { + "@docsearch/react": "3.8.0", + "preact": "^10.0.0" + } }, "node_modules/@docsearch/react": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.5.2.tgz", - "integrity": "sha512-9Ahcrs5z2jq/DcAvYtvlqEBHImbm4YJI8M9y0x6Tqg598P40HTEkX7hsMcIuThI+hTFxRGZ9hll0Wygm2yEjng==", + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.8.0.tgz", + "integrity": 
"sha512-WnFK720+iwTVt94CxY3u+FgX6exb3BfN5kE9xUY6uuAH/9W/UFboBZFLlrw/zxFRHoHZCOXRtOylsXF+6LHI+Q==", "dependencies": { - "@algolia/autocomplete-core": "1.9.3", - "@algolia/autocomplete-preset-algolia": "1.9.3", - "@docsearch/css": "3.5.2", - "algoliasearch": "^4.19.1" + "@algolia/autocomplete-core": "1.17.7", + "@algolia/autocomplete-preset-algolia": "1.17.7", + "@docsearch/css": "3.8.0", + "algoliasearch": "^5.12.0" }, "peerDependencies": { "@types/react": ">= 16.8.0 < 19.0.0", @@ -2341,6 +2633,101 @@ } } }, + "node_modules/@docsearch/react/node_modules/@algolia/client-analytics": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-5.15.0.tgz", + "integrity": "sha512-lho0gTFsQDIdCwyUKTtMuf9nCLwq9jOGlLGIeQGKDxXF7HbiAysFIu5QW/iQr1LzMgDyM9NH7K98KY+BiIFriQ==", + "dependencies": { + "@algolia/client-common": "5.15.0", + "@algolia/requester-browser-xhr": "5.15.0", + "@algolia/requester-fetch": "5.15.0", + "@algolia/requester-node-http": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@docsearch/react/node_modules/@algolia/client-common": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.15.0.tgz", + "integrity": "sha512-IofrVh213VLsDkPoSKMeM9Dshrv28jhDlBDLRcVJQvlL8pzue7PEB1EZ4UoJFYS3NSn7JOcJ/V+olRQzXlJj1w==", + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@docsearch/react/node_modules/@algolia/client-personalization": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-5.15.0.tgz", + "integrity": "sha512-LfaZqLUWxdYFq44QrasCDED5bSYOswpQjSiIL7Q5fYlefAAUO95PzBPKCfUhSwhb4rKxigHfDkd81AvEicIEoA==", + "dependencies": { + "@algolia/client-common": "5.15.0", + "@algolia/requester-browser-xhr": "5.15.0", + "@algolia/requester-fetch": "5.15.0", + "@algolia/requester-node-http": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@docsearch/react/node_modules/@algolia/client-search": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-5.15.0.tgz", + "integrity": "sha512-Z32gEMrRRpEta5UqVQA612sLdoqY3AovvUPClDfMxYrbdDAebmGDVPtSogUba1FZ4pP5dx20D3OV3reogLKsRA==", + "dependencies": { + "@algolia/client-common": "5.15.0", + "@algolia/requester-browser-xhr": "5.15.0", + "@algolia/requester-fetch": "5.15.0", + "@algolia/requester-node-http": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@docsearch/react/node_modules/@algolia/requester-browser-xhr": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.15.0.tgz", + "integrity": "sha512-Po/GNib6QKruC3XE+WKP1HwVSfCDaZcXu48kD+gwmtDlqHWKc7Bq9lrS0sNZ456rfCKhXksOmMfUs4wRM/Y96w==", + "dependencies": { + "@algolia/client-common": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@docsearch/react/node_modules/@algolia/requester-node-http": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.15.0.tgz", + "integrity": "sha512-b1jTpbFf9LnQHEJP5ddDJKE2sAlhYd7EVSOWgzo/27n/SfCoHfqD0VWntnWYD83PnOKvfe8auZ2+xCb0TXotrQ==", + "dependencies": { + "@algolia/client-common": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@docsearch/react/node_modules/algoliasearch": { + "version": "5.15.0", + "resolved": 
"https://registry.npmjs.org/algoliasearch/-/algoliasearch-5.15.0.tgz", + "integrity": "sha512-Yf3Swz1s63hjvBVZ/9f2P1Uu48GjmjCN+Esxb6MAONMGtZB1fRX8/S1AhUTtsuTlcGovbYLxpHgc7wEzstDZBw==", + "dependencies": { + "@algolia/client-abtesting": "5.15.0", + "@algolia/client-analytics": "5.15.0", + "@algolia/client-common": "5.15.0", + "@algolia/client-insights": "5.15.0", + "@algolia/client-personalization": "5.15.0", + "@algolia/client-query-suggestions": "5.15.0", + "@algolia/client-search": "5.15.0", + "@algolia/ingestion": "1.15.0", + "@algolia/monitoring": "1.15.0", + "@algolia/recommend": "5.15.0", + "@algolia/requester-browser-xhr": "5.15.0", + "@algolia/requester-fetch": "5.15.0", + "@algolia/requester-node-http": "5.15.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, "node_modules/@docusaurus/core": { "version": "2.4.3", "resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-2.4.3.tgz", @@ -16383,6 +16770,15 @@ "postcss": "^8.2.15" } }, + "node_modules/preact": { + "version": "10.24.3", + "resolved": "https://registry.npmjs.org/preact/-/preact-10.24.3.tgz", + "integrity": "sha512-Z2dPnBnMUfyQfSQ+GBdsGa16hz35YmLmtTLhM169uW944hYL6xzTYkJjC07j+Wosz733pMWx0fgON3JNw1jJQA==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/preact" + } + }, "node_modules/prepend-http": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-1.0.4.tgz", @@ -18231,9 +18627,9 @@ } }, "node_modules/search-insights": { - "version": "2.13.0", - "resolved": "https://registry.npmjs.org/search-insights/-/search-insights-2.13.0.tgz", - "integrity": "sha512-Orrsjf9trHHxFRuo9/rzm0KIWmgzE8RMlZMzuhZOJ01Rnz3D0YBAe+V6473t6/H6c7irs6Lt48brULAiRWb3Vw==", + "version": "2.17.3", + "resolved": "https://registry.npmjs.org/search-insights/-/search-insights-2.17.3.tgz", + "integrity": "sha512-RQPdCYTa8A68uM2jwxoY842xDhvx3E5LFL1LxvxCNMev4o5mLuokczhzjAgGwUZBAmOKZknArSxLKmXtIi2AxQ==", "peer": true }, "node_modules/section-matter": { diff --git a/sidebars.js b/sidebars.js index fec776af..ab5f8317 100644 --- a/sidebars.js +++ b/sidebars.js @@ -137,7 +137,20 @@ module.exports = { "verifier/verification-library/verifier-setup", ], }, + { + type: "category", + label: "On-chain Verification", + link: { + type: "generated-index", + }, + collapsed: true, + items: [ "verifier/on-chain-verification/overview", + "verifier/on-chain-verification/tutorial", + "verifier/on-chain-verification/cross-chain", + "verifier/on-chain-verification/set-zkp-request" + ], + }, "verifier/query-builder", "verifier/verification-library/zk-query-language", "verifier/v3-circuit", diff --git a/static/img/onchain-verifier/crosschain.png b/static/img/onchain-verifier/crosschain.png new file mode 100644 index 00000000..0173f6e2 Binary files /dev/null and b/static/img/onchain-verifier/crosschain.png differ diff --git a/static/img/onchain-verifier/querybuilder-1.png b/static/img/onchain-verifier/querybuilder-1.png new file mode 100644 index 00000000..ba56d822 Binary files /dev/null and b/static/img/onchain-verifier/querybuilder-1.png differ diff --git a/static/img/onchain-verifier/querybuilder-2.png b/static/img/onchain-verifier/querybuilder-2.png new file mode 100644 index 00000000..dba0d7e9 Binary files /dev/null and b/static/img/onchain-verifier/querybuilder-2.png differ diff --git a/static/img/onchain-verifier/querybuilder-3.png b/static/img/onchain-verifier/querybuilder-3.png new file mode 100644 index 00000000..28285732 Binary files /dev/null and b/static/img/onchain-verifier/querybuilder-3.png 
differ diff --git a/static/img/onchain-verifier/querybuilder-4.png b/static/img/onchain-verifier/querybuilder-4.png new file mode 100644 index 00000000..e82fa7d3 Binary files /dev/null and b/static/img/onchain-verifier/querybuilder-4.png differ diff --git a/static/img/onchain-verifier/querybuilder-5.png b/static/img/onchain-verifier/querybuilder-5.png new file mode 100644 index 00000000..ccc55792 Binary files /dev/null and b/static/img/onchain-verifier/querybuilder-5.png differ diff --git a/static/img/onchain-verifier/querybuilder-6.png b/static/img/onchain-verifier/querybuilder-6.png new file mode 100644 index 00000000..bf81ee6f Binary files /dev/null and b/static/img/onchain-verifier/querybuilder-6.png differ diff --git a/static/img/onchain-verifier/querybuilder-7.png b/static/img/onchain-verifier/querybuilder-7.png new file mode 100644 index 00000000..f44f0576 Binary files /dev/null and b/static/img/onchain-verifier/querybuilder-7.png differ diff --git a/static/img/quick-start-demo/create-credential.png b/static/img/quick-start-demo/create-credential.png index ce9db75d..55e9eb35 100644 Binary files a/static/img/quick-start-demo/create-credential.png and b/static/img/quick-start-demo/create-credential.png differ diff --git a/static/img/quick-start-demo/import-schema.png b/static/img/quick-start-demo/import-schema.png index 29c8f035..2126c6c9 100644 Binary files a/static/img/quick-start-demo/import-schema.png and b/static/img/quick-start-demo/import-schema.png differ diff --git a/static/img/quick-start-demo/qr.png b/static/img/quick-start-demo/qr.png index c849cea3..e8030e99 100644 Binary files a/static/img/quick-start-demo/qr.png and b/static/img/quick-start-demo/qr.png differ diff --git a/static/img/quick-start-demo/verifier-home.png b/static/img/quick-start-demo/verifier-home.png index 6d272563..267c644e 100644 Binary files a/static/img/quick-start-demo/verifier-home.png and b/static/img/quick-start-demo/verifier-home.png differ diff --git a/static/img/quick-start-demo/verifier-query-1.png b/static/img/quick-start-demo/verifier-query-1.png index 93b8a6a3..e01845c8 100644 Binary files a/static/img/quick-start-demo/verifier-query-1.png and b/static/img/quick-start-demo/verifier-query-1.png differ diff --git a/static/img/quick-start-demo/verifier-query-2.png b/static/img/quick-start-demo/verifier-query-2.png index 50e2ea3d..828fc8aa 100644 Binary files a/static/img/quick-start-demo/verifier-query-2.png and b/static/img/quick-start-demo/verifier-query-2.png differ diff --git a/static/img/quick-start-demo/verifier-query-3.png b/static/img/quick-start-demo/verifier-query-3.png deleted file mode 100644 index 8ba67bcb..00000000 Binary files a/static/img/quick-start-demo/verifier-query-3.png and /dev/null differ diff --git a/static/img/quick-start-demo/verifier-validation.png b/static/img/quick-start-demo/verifier-validation.png index 95fab454..d18c5559 100644 Binary files a/static/img/quick-start-demo/verifier-validation.png and b/static/img/quick-start-demo/verifier-validation.png differ diff --git a/static/img/quick-start-demo/wallet-app-verification.png b/static/img/quick-start-demo/wallet-app-verification.png new file mode 100644 index 00000000..56f136fc Binary files /dev/null and b/static/img/quick-start-demo/wallet-app-verification.png differ diff --git a/static/img/quick-start-demo/wallet-app.png b/static/img/quick-start-demo/wallet-app.png new file mode 100644 index 00000000..8ca3e112 Binary files /dev/null and b/static/img/quick-start-demo/wallet-app.png differ diff --git 
a/static/img/quick-start-demo/web-wallet-2.png b/static/img/quick-start-demo/web-wallet-2.png new file mode 100644 index 00000000..d4b9c550 Binary files /dev/null and b/static/img/quick-start-demo/web-wallet-2.png differ diff --git a/static/img/quick-start-demo/web-wallet-verification.png b/static/img/quick-start-demo/web-wallet-verification.png new file mode 100644 index 00000000..a1dc45bb Binary files /dev/null and b/static/img/quick-start-demo/web-wallet-verification.png differ diff --git a/static/img/quick-start-demo/web-wallet.png b/static/img/quick-start-demo/web-wallet.png new file mode 100644 index 00000000..8bdbf4c6 Binary files /dev/null and b/static/img/quick-start-demo/web-wallet.png differ diff --git a/static/img/web-wallet-1.png b/static/img/web-wallet-1.png index d356e7d0..e6f9f712 100644 Binary files a/static/img/web-wallet-1.png and b/static/img/web-wallet-1.png differ