diff --git a/ci/sawtooth-meta b/ci/sawtooth-meta index baf72c6aa5..55599bbfbb 100644 --- a/ci/sawtooth-meta +++ b/ci/sawtooth-meta @@ -37,8 +37,9 @@ FROM ubuntu:xenial COPY --from=sawtooth-meta-builder /project/ci/sawtooth*.deb /tmp -RUN echo "deb http://repo.sawtooth.me/ubuntu/ci xenial universe" >> /etc/apt/sources.list \ - && apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 8AA7AF1F1091A5FD \ +RUN echo "deb http://repo.sawtooth.me/ubuntu/1.0/stable xenial universe" >> /etc/apt/sources.list \ + && (apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 8AA7AF1F1091A5FD \ + || apt-key adv --keyserver hkp://p80.pool.sks-keyservers.net:80 --recv-keys 8AA7AF1F1091A5FD) \ && apt-get update \ && dpkg -i /tmp/sawtooth*.deb || true \ && apt-get -f -y install \ diff --git a/docker-compose-installed.yaml b/docker-compose-installed.yaml index ec60d5d256..760d685e43 100644 --- a/docker-compose-installed.yaml +++ b/docker-compose-installed.yaml @@ -292,3 +292,10 @@ services: - no_proxy image: sawtooth-devmode-rust:${ISOLATION_ID} container_name: sawtooth-devmode-rust + + sawtooth-meta: + build: + context: . 
+ dockerfile: ci/sawtooth-meta + image: sawtooth-meta:${ISOLATION_ID} + container_name: sawtooth-meta diff --git a/docker/compose/copy-debs.yaml b/docker/compose/copy-debs.yaml index dd5f865983..26ca34d6fb 100644 --- a/docker/compose/copy-debs.yaml +++ b/docker/compose/copy-debs.yaml @@ -213,3 +213,12 @@ services: bash -c " cp /tmp/*.deb /build/debs " + + sawtooth-meta: + image: sawtooth-meta:${ISOLATION_ID} + volumes: + - ../../build/debs:/build/debs + command: | + bash -c " + cp /tmp/*.deb /build/debs + " diff --git a/docker/kubernetes/sawtooth-kubernetes-default-poet.yaml b/docker/kubernetes/sawtooth-kubernetes-default-poet.yaml new file mode 100644 index 0000000000..462d682a29 --- /dev/null +++ b/docker/kubernetes/sawtooth-kubernetes-default-poet.yaml @@ -0,0 +1,588 @@ +--- +apiVersion: v1 +kind: List + +items: + +- apiVersion: extensions/v1beta1 + kind: Deployment + metadata: + name: sawtooth-0 + spec: + replicas: 1 + template: + metadata: + labels: + name: sawtooth-0 + spec: + containers: + - name: sawtooth-intkey-tp-python + image: hyperledger/sawtooth-intkey-tp-python:1.0 + command: + - bash + args: + - -c + - "intkey-tp-python -vv -C tcp://$HOSTNAME:4004" + + - name: sawtooth-poet-validator-registry-tp + image: hyperledger/sawtooth-poet-validator-registry-tp:1.0 + command: + - bash + args: + - -c + - "poet-validator-registry-tp -vv -C tcp://$HOSTNAME:4004" + + - name: sawtooth-rest-api + image: hyperledger/sawtooth-rest-api:1.0 + ports: + - name: api + containerPort: 8008 + command: + - bash + args: + - -c + - "sawtooth-rest-api -C tcp://$HOSTNAME:4004" + + - name: sawtooth-settings-tp + image: hyperledger/sawtooth-settings-tp:1.0 + command: + - bash + args: + - -c + - "settings-tp -vv -C tcp://$HOSTNAME:4004" + + - name: sawtooth-shell + image: hyperledger/sawtooth-all:1.0 + command: + - bash + args: + - -c + - "sawtooth keygen && tail -f /dev/null" + + - name: sawtooth-validator + image: hyperledger/sawtooth-validator:1.0 + ports: + - name: tp + 
containerPort: 4004 + - name: validators + containerPort: 8800 + command: + - bash + args: + - -c + - "if [ ! -e /etc/sawtooth/keys/validator.priv ]; then \ + sawadm keygen; \ + fi \ + && if [ ! -e config-genesis.batch ]; then \ + sawset genesis -k /etc/sawtooth/keys/validator.priv -o config-genesis.batch; \ + fi \ + && if [ ! -e config.batch ]; then \ + sawset proposal create \ + -k /etc/sawtooth/keys/validator.priv \ + sawtooth.consensus.algorithm=poet \ + sawtooth.poet.report_public_key_pem=\"$(cat /etc/sawtooth/simulator_rk_pub.pem)\" \ + sawtooth.poet.valid_enclave_measurements=$(poet enclave measurement) \ + sawtooth.poet.valid_enclave_basenames=$(poet enclave basename) \ + sawtooth.poet.initial_wait_time=15 \ + sawtooth.poet.target_wait_time=15 \ + sawtooth.publisher.max_batches_per_block=200 \ + sawtooth.poet.key_block_claim_limit=100000 \ + sawtooth.poet.ztest_minimum_win_count=100000 \ + -o config.batch; \ + fi \ + && if [ ! -e poet_genesis.batch ]; then \ + poet registration create -k /etc/sawtooth/keys/validator.priv -o poet_genesis.batch; \ + fi \ + && if [ ! -e /var/lib/sawtooth/genesis.batch ]; then \ + sawadm genesis config-genesis.batch config.batch poet_genesis.batch; \ + fi \ + && if [ ! 
-e /root/.sawtooth/keys/my_key.priv ]; then \ + sawtooth keygen my_key; \ + fi \ + && sawtooth-validator -vv \ + --endpoint tcp://$SAWTOOTH_0_SERVICE_HOST:8800 \ + --bind component:tcp://eth0:4004 \ + --bind network:tcp://eth0:8800 \ + --peers tcp://$SAWTOOTH_1_SERVICE_HOST:8800 \ + --peers tcp://$SAWTOOTH_2_SERVICE_HOST:8800 \ + --peers tcp://$SAWTOOTH_3_SERVICE_HOST:8800 \ + --peers tcp://$SAWTOOTH_4_SERVICE_HOST:8800" + + - name: sawtooth-xo-tp-python + image: hyperledger/sawtooth-xo-tp-python:1.0 + command: + - bash + args: + - -c + - "xo-tp-python -vv -C tcp://$HOSTNAME:4004" + +- apiVersion: v1 + kind: Service + metadata: + name: sawtooth-0 + spec: + type: ClusterIP + selector: + name: sawtooth-0 + ports: + - name: "4004" + protocol: TCP + port: 4004 + targetPort: 4004 + - name: "8008" + protocol: TCP + port: 8008 + targetPort: 8008 + - name: "8080" + protocol: TCP + port: 8080 + targetPort: 8080 + - name: "8800" + protocol: TCP + port: 8800 + targetPort: 8800 + +- apiVersion: extensions/v1beta1 + kind: Deployment + metadata: + name: sawtooth-1 + spec: + replicas: 1 + template: + metadata: + labels: + name: sawtooth-1 + spec: + containers: + - name: sawtooth-intkey-tp-python + image: hyperledger/sawtooth-intkey-tp-python:1.0 + command: + - bash + args: + - -c + - "intkey-tp-python -vv -C tcp://$HOSTNAME:4004" + + - name: sawtooth-poet-validator-registry-tp + image: hyperledger/sawtooth-poet-validator-registry-tp:1.0 + command: + - bash + args: + - -c + - "poet-validator-registry-tp -vv -C tcp://$HOSTNAME:4004" + + - name: sawtooth-rest-api + image: hyperledger/sawtooth-rest-api:1.0 + ports: + - name: api + containerPort: 8008 + command: + - bash + args: + - -c + - "sawtooth-rest-api -C tcp://$HOSTNAME:4004" + + - name: sawtooth-settings-tp-1 + image: hyperledger/sawtooth-settings-tp:1.0 + command: + - bash + args: + - -c + - "settings-tp -vv -C tcp://$HOSTNAME:4004" + + - name: sawtooth-shell + image: hyperledger/sawtooth-all:1.0 + command: + - bash + args: + 
- -c + - "sawtooth keygen && tail -f /dev/null" + + - name: sawtooth-validator + image: hyperledger/sawtooth-validator:1.0 + ports: + - name: tp + containerPort: 4004 + - name: validators + containerPort: 8800 + command: + - bash + args: + - -c + - "sawadm keygen \ + && sawtooth keygen my_key \ + && sawtooth-validator -vv \ + --endpoint tcp://$SAWTOOTH_1_SERVICE_HOST:8800 \ + --bind component:tcp://eth0:4004 \ + --bind network:tcp://eth0:8800 \ + --peers tcp://$SAWTOOTH_0_SERVICE_HOST:8800 \ + --peers tcp://$SAWTOOTH_2_SERVICE_HOST:8800 \ + --peers tcp://$SAWTOOTH_3_SERVICE_HOST:8800 \ + --peers tcp://$SAWTOOTH_4_SERVICE_HOST:8800" + + - name: sawtooth-xo-tp-python + image: hyperledger/sawtooth-xo-tp-python:1.0 + command: + - bash + args: + - -c + - "xo-tp-python -vv -C tcp://$HOSTNAME:4004" + +- apiVersion: v1 + kind: Service + metadata: + name: sawtooth-1 + spec: + type: ClusterIP + selector: + name: sawtooth-1 + ports: + - name: "4004" + protocol: TCP + port: 4004 + targetPort: 4004 + - name: "8008" + protocol: TCP + port: 8008 + targetPort: 8008 + - name: "8080" + protocol: TCP + port: 8080 + targetPort: 8080 + - name: "8800" + protocol: TCP + port: 8800 + targetPort: 8800 + +- apiVersion: extensions/v1beta1 + kind: Deployment + metadata: + name: sawtooth-2 + spec: + replicas: 1 + template: + metadata: + labels: + name: sawtooth-2 + spec: + containers: + - name: sawtooth-intkey-tp-python + image: hyperledger/sawtooth-intkey-tp-python:1.0 + command: + - bash + args: + - -c + - "intkey-tp-python -vv -C tcp://$HOSTNAME:4004" + + - name: sawtooth-poet-validator-registry-tp + image: hyperledger/sawtooth-poet-validator-registry-tp:1.0 + command: + - bash + args: + - -c + - "poet-validator-registry-tp -vv -C tcp://$HOSTNAME:4004" + + - name: sawtooth-rest-api + image: hyperledger/sawtooth-rest-api:1.0 + ports: + - name: api + containerPort: 8080 + command: + - bash + args: + - -c + - "sawtooth-rest-api -C tcp://$HOSTNAME:4004" + + - name: sawtooth-settings-tp + image: 
hyperledger/sawtooth-settings-tp:1.0 + command: + - bash + args: + - -c + - "settings-tp -vv -C tcp://$HOSTNAME:4004" + + - name: sawtooth-shell + image: hyperledger/sawtooth-all:1.0 + command: + - bash + args: + - -c + - "sawtooth keygen && tail -f /dev/null" + + - name: sawtooth-validator + image: hyperledger/sawtooth-validator:1.0 + ports: + - name: tp + containerPort: 4004 + - name: validators + containerPort: 8800 + command: + - bash + args: + - -c + - "sawadm keygen \ + && sawtooth keygen my_key \ + && sawtooth-validator -vv \ + --endpoint tcp://$SAWTOOTH_2_SERVICE_HOST:8800 \ + --bind component:tcp://eth0:4004 \ + --bind network:tcp://eth0:8800 \ + --peers tcp://$SAWTOOTH_0_SERVICE_HOST:8800 \ + --peers tcp://$SAWTOOTH_1_SERVICE_HOST:8800 \ + --peers tcp://$SAWTOOTH_3_SERVICE_HOST:8800 \ + --peers tcp://$SAWTOOTH_4_SERVICE_HOST:8800" + + - name: sawtooth-xo-tp-python + image: hyperledger/sawtooth-xo-tp-python:1.0 + command: + - bash + args: + - -c + - "xo-tp-python -vv -C tcp://$HOSTNAME:4004" + +- apiVersion: v1 + kind: Service + metadata: + name: sawtooth-2 + spec: + type: ClusterIP + selector: + name: sawtooth-2 + ports: + - name: "4004" + protocol: TCP + port: 4004 + targetPort: 4004 + - name: "8008" + protocol: TCP + port: 8008 + targetPort: 8008 + - name: "8080" + protocol: TCP + port: 8080 + targetPort: 8080 + - name: "8800" + protocol: TCP + port: 8800 + targetPort: 8800 + +- apiVersion: extensions/v1beta1 + kind: Deployment + metadata: + name: sawtooth-3 + spec: + replicas: 1 + template: + metadata: + labels: + name: sawtooth-3 + spec: + containers: + - name: sawtooth-intkey-tp-python + image: hyperledger/sawtooth-intkey-tp-python:1.0 + command: + - bash + args: + - -c + - "intkey-tp-python -vv -C tcp://$HOSTNAME:4004" + + - name: sawtooth-poet-validator-registry-tp + image: hyperledger/sawtooth-poet-validator-registry-tp:1.0 + command: + - bash + args: + - -c + - "poet-validator-registry-tp -vv -C tcp://$HOSTNAME:4004" + + - name: sawtooth-rest-api 
+ image: hyperledger/sawtooth-rest-api:1.0 + ports: + - name: api + containerPort: 8080 + command: + - bash + args: + - -c + - "sawtooth-rest-api -C tcp://$HOSTNAME:4004" + + - name: sawtooth-settings-tp + image: hyperledger/sawtooth-settings-tp:1.0 + command: + - bash + args: + - -c + - "settings-tp -vv -C tcp://$HOSTNAME:4004" + + - name: sawtooth-shell + image: hyperledger/sawtooth-all:1.0 + command: + - bash + args: + - -c + - "sawtooth keygen && tail -f /dev/null" + + - name: sawtooth-validator + image: hyperledger/sawtooth-validator:1.0 + ports: + - name: tp + containerPort: 4004 + - name: validators + containerPort: 8800 + command: + - bash + args: + - -c + - "sawadm keygen \ + && sawtooth keygen my_key \ + && sawtooth-validator -vv \ + --endpoint tcp://$SAWTOOTH_3_SERVICE_HOST:8800 \ + --bind component:tcp://eth0:4004 \ + --bind network:tcp://eth0:8800 \ + --peers tcp://$SAWTOOTH_0_SERVICE_HOST:8800 \ + --peers tcp://$SAWTOOTH_1_SERVICE_HOST:8800 \ + --peers tcp://$SAWTOOTH_2_SERVICE_HOST:8800 \ + --peers tcp://$SAWTOOTH_4_SERVICE_HOST:8800" + + - name: sawtooth-xo-tp-python + image: hyperledger/sawtooth-xo-tp-python:1.0 + command: + - bash + args: + - -c + - "xo-tp-python -vv -C tcp://$HOSTNAME:4004" + +- apiVersion: v1 + kind: Service + metadata: + name: sawtooth-3 + spec: + type: ClusterIP + selector: + name: sawtooth-3 + ports: + - name: "4004" + protocol: TCP + port: 4004 + targetPort: 4004 + - name: "8008" + protocol: TCP + port: 8008 + targetPort: 8008 + - name: "8080" + protocol: TCP + port: 8080 + targetPort: 8080 + - name: "8800" + protocol: TCP + port: 8800 + targetPort: 8800 + +- apiVersion: extensions/v1beta1 + kind: Deployment + metadata: + name: sawtooth-4 + spec: + replicas: 1 + template: + metadata: + labels: + name: sawtooth-4 + spec: + containers: + - name: sawtooth-intkey-tp-python + image: hyperledger/sawtooth-intkey-tp-python:1.0 + command: + - bash + args: + - -c + - "intkey-tp-python -vv -C tcp://$HOSTNAME:4004" + + - name: 
sawtooth-poet-validator-registry-tp + image: hyperledger/sawtooth-poet-validator-registry-tp:1.0 + command: + - bash + args: + - -c + - "poet-validator-registry-tp -vv -C tcp://$HOSTNAME:4004" + + - name: sawtooth-settings-tp + image: hyperledger/sawtooth-settings-tp:1.0 + command: + - bash + args: + - -c + - "settings-tp -vv -C tcp://$HOSTNAME:4004" + + - name: sawtooth-shell + image: hyperledger/sawtooth-all:1.0 + command: + - bash + args: + - -c + - "sawtooth keygen && tail -f /dev/null" + + - name: sawtooth-rest-api + image: hyperledger/sawtooth-rest-api:1.0 + ports: + - name: api + containerPort: 8080 + command: + - bash + args: + - -c + - "sawtooth-rest-api -C tcp://$HOSTNAME:4004" + + - name: sawtooth-validator + image: hyperledger/sawtooth-validator:1.0 + ports: + - name: tp + containerPort: 4004 + - name: validators + containerPort: 8800 + command: + - bash + args: + - -c + - "sawadm keygen \ + && sawtooth keygen my_key \ + && sawtooth-validator -vv \ + --endpoint tcp://$SAWTOOTH_4_SERVICE_HOST:8800 \ + --bind component:tcp://eth0:4004 \ + --bind network:tcp://eth0:8800 \ + --peers tcp://$SAWTOOTH_0_SERVICE_HOST:8800 \ + --peers tcp://$SAWTOOTH_1_SERVICE_HOST:8800 \ + --peers tcp://$SAWTOOTH_2_SERVICE_HOST:8800 \ + --peers tcp://$SAWTOOTH_3_SERVICE_HOST:8800" + + - name: sawtooth-xo-tp-python + image: hyperledger/sawtooth-xo-tp-python:1.0 + command: + - bash + args: + - -c + - "xo-tp-python -vv -C tcp://$HOSTNAME:4004" + +- apiVersion: v1 + kind: Service + metadata: + name: sawtooth-4 + spec: + type: ClusterIP + selector: + name: sawtooth-4 + ports: + - name: "4004" + protocol: TCP + port: 4004 + targetPort: 4004 + - name: "8008" + protocol: TCP + port: 8008 + targetPort: 8008 + - name: "8080" + protocol: TCP + port: 8080 + targetPort: 8080 + - name: "8800" + protocol: TCP + port: 8800 + targetPort: 8800 + diff --git a/docker/kubernetes/sawtooth-kubernetes-default.yaml b/docker/kubernetes/sawtooth-kubernetes-default.yaml new file mode 100644 index 
0000000000..73fa009f74 --- /dev/null +++ b/docker/kubernetes/sawtooth-kubernetes-default.yaml @@ -0,0 +1,103 @@ +--- +apiVersion: v1 +kind: List + +items: + +- apiVersion: extensions/v1beta1 + kind: Deployment + metadata: + name: sawtooth-0 + spec: + replicas: 1 + template: + metadata: + labels: + name: sawtooth-0 + spec: + containers: + - name: sawtooth-settings-tp + image: hyperledger/sawtooth-settings-tp:1.0 + command: + - bash + args: + - -c + - "settings-tp -vv -C tcp://$HOSTNAME:4004" + + - name: sawtooth-intkey-tp-python + image: hyperledger/sawtooth-intkey-tp-python:1.0 + command: + - bash + args: + - -c + - "intkey-tp-python -vv -C tcp://$HOSTNAME:4004" + + - name: sawtooth-xo-tp-python + image: hyperledger/sawtooth-xo-tp-python:1.0 + command: + - bash + args: + - -c + - "xo-tp-python -vv -C tcp://$HOSTNAME:4004" + + - name: sawtooth-validator + image: hyperledger/sawtooth-validator:1.0 + ports: + - name: tp + containerPort: 4004 + - name: validators + containerPort: 8800 + command: + - bash + args: + - -c + - "sawadm keygen \ + && sawtooth keygen my_key \ + && sawset genesis -k /root/.sawtooth/keys/my_key.priv \ + && sawadm genesis config-genesis.batch \ + && sawtooth-validator -vv \ + --endpoint tcp://$SAWTOOTH_0_SERVICE_HOST:8800 \ + --bind component:tcp://eth0:4004 \ + --bind network:tcp://eth0:8800" + + - name: sawtooth-rest-api + image: hyperledger/sawtooth-rest-api:1.0 + ports: + - name: api + containerPort: 8008 + command: + - bash + args: + - -c + - "sawtooth-rest-api -C tcp://$HOSTNAME:4004" + + - name: sawtooth-shell + image: hyperledger/sawtooth-all:1.0 + command: + - bash + args: + - -c + - "sawtooth keygen && tail -f /dev/null" + +- apiVersion: v1 + kind: Service + metadata: + name: sawtooth-0 + spec: + type: ClusterIP + selector: + name: sawtooth-0 + ports: + - name: "4004" + protocol: TCP + port: 4004 + targetPort: 4004 + - name: "8008" + protocol: TCP + port: 8008 + targetPort: 8008 + - name: "8800" + protocol: TCP + port: 8800 + 
targetPort: 8800 + diff --git a/docs/Makefile b/docs/Makefile index 48016b4058..73ea9e6bc3 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -89,6 +89,8 @@ html: templates cli python go rust @echo "Build finished. The HTML pages are in $(HTMLDIR)." @cp $(SAWTOOTH)/docker/compose/sawtooth-default.yaml $(HTMLDIR)/app_developers_guide/sawtooth-default.yaml @cp $(SAWTOOTH)/docker/compose/sawtooth-default-poet.yaml $(HTMLDIR)/app_developers_guide/sawtooth-default-poet.yaml + @cp $(SAWTOOTH)/docker/kubernetes/sawtooth-kubernetes-default.yaml $(HTMLDIR)/app_developers_guide/sawtooth-kubernetes-default.yaml + @cp $(SAWTOOTH)/docker/kubernetes/sawtooth-kubernetes-default-poet.yaml $(HTMLDIR)/app_developers_guide/sawtooth-kubernetes-default-poet.yaml dirhtml: templates cli $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml diff --git a/dummy b/dummy new file mode 100644 index 0000000000..a688bb0102 --- /dev/null +++ b/dummy @@ -0,0 +1,13 @@ +1. Whether we need to write tests in python to test rust code ? If so, we need to check interface for python to rust + +2. To write test code in rust we require to ramp up on the same + +3. WRT LMDB, we wanted to test how validator behaves for the below scenarios, +- Start time of validator, if LMDB data is too large (may be 50k transactiosn) +- Query response time of the validator, if LMDB data is too large (may be 50k transactiosn) +- Delete LMDB and reinstall sawtooth +- Change group and permission of LMDB +- No LMDB files +- In multivalidaor also, we can check response time for too big LMDB, update of LMDB after brining node up, permission, reinstall sawtooth etc + +4. 
Apart from rest api, Other exposed interfaces we need to investigate and also unit test we need to investigate diff --git a/perf/sawtooth_perf/src/batch_gen.rs b/perf/sawtooth_perf/src/batch_gen.rs index c513bd3024..933b8692b4 100644 --- a/perf/sawtooth_perf/src/batch_gen.rs +++ b/perf/sawtooth_perf/src/batch_gen.rs @@ -129,7 +129,7 @@ impl<'a> SignedBatchProducer<'a> { SignedBatchProducer { transaction_source, max_batch_size, - signer: signer, + signer, } } } @@ -200,8 +200,8 @@ impl<'a> SignedBatchIterator<'a> { ) -> Self { SignedBatchIterator { transaction_iterator: iterator, - max_batch_size: max_batch_size, - signer: signer, + max_batch_size, + signer, } } } diff --git a/perf/sawtooth_perf/src/batch_map.rs b/perf/sawtooth_perf/src/batch_map.rs index dc0fb9b803..ebb72f7c1e 100644 --- a/perf/sawtooth_perf/src/batch_map.rs +++ b/perf/sawtooth_perf/src/batch_map.rs @@ -45,15 +45,9 @@ impl BatchMap { // Idempotent method for adding a BatchList pub fn add(&mut self, batchlist: BatchList) { - batchlist - .batches - .last() - .map(|b| b.header_signature.clone()) - .map(|batch_id| { - if !self.batches_by_id.contains_key(batch_id.as_str()) { - self.batches_by_id.insert(batch_id, batchlist); - } - }); + if let Some(batch_id) = batchlist.batches.last().map(|b| b.header_signature.clone()) { + self.batches_by_id.entry(batch_id).or_insert(batchlist); + } } } diff --git a/perf/sawtooth_perf/src/batch_submit.rs b/perf/sawtooth_perf/src/batch_submit.rs index dd3750f9b3..4828ee9a96 100644 --- a/perf/sawtooth_perf/src/batch_submit.rs +++ b/perf/sawtooth_perf/src/batch_submit.rs @@ -313,7 +313,7 @@ pub struct InfiniteBatchListIterator<'a> { impl<'a> InfiniteBatchListIterator<'a> { pub fn new(batches: &'a mut Iterator) -> Self { - InfiniteBatchListIterator { batches: batches } + InfiniteBatchListIterator { batches } } } diff --git a/rest_api/tests/api_test/base.py b/rest_api/tests/api_test/base.py new file mode 100644 index 0000000000..6eb41a667e --- /dev/null +++ 
b/rest_api/tests/api_test/base.py @@ -0,0 +1,321 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ +import aiohttp +from base64 import b64decode + +CONSENSUS_ALGO = b'Devmode' +FAMILY_NAME = 'intkey' +FAMILY_VERSION = '1.0' +DEFAULT_LIMIT = 100 +TRACE = False +NONCE = '' + + +class RestApiBaseTest(object): + """Base class for Rest Api tests that simplifies making assertions + for the test cases + """ + def assert_status(self, response, status): + for data in response['data']: + assert data['status'] == status + + def assert_equal(self, response, data): + assert response == data + + def assert_check_nonce(self, response): + """Asserts response has nonce parameter + """ + assert 'nonce' in response['header'] + assert response['header']['nonce'] == NONCE + + def assert_check_family(self, response): + """Asserts family name and versions in response + """ + assert 'family_name' in response['header'] + assert 'family_version' in response['header'] + assert response['header']['family_name'] == FAMILY_NAME + assert response['header']['family_version'] == FAMILY_VERSION + + def assert_check_dependency(self, response): + """Asserts transaction dependencies in response + """ + assert 'dependencies' in response['header'] + + def assert_content(self, response): + """Asserts response has inputs and outputs parameter + """ + assert 'inputs' in response['header'] + 
assert 'outputs' in response['header'] + + def assert_payload_algo(self, response): + """Asserts payload has been created with + proper algorithm + """ + assert 'payload_sha512' in response['header'] + + def assert_payload(self, txn, payload): + """Asserts payload is constructed properly + """ + assert 'payload' in txn + assert payload == txn['payload'] + self.assert_payload_algo(txn) + + def assert_batcher_public_key(self, response, public_key): + """Asserts batcher public key in response + """ + assert 'signer_public_key' in response['header'] + assert public_key == response['header']['signer_public_key'] + + def assert_signer_public_key(self, response, public_key): + """Asserts that signer public key is proper + """ + assert 'signer_public_key' in response['header'] + assert public_key == response['header']['signer_public_key'] + + def assert_trace(self, response): + """Asserts whether the response has trace parameter + """ + assert 'trace' in response +# assert bool(response['trace']) + assert response['trace'] == TRACE + + def assert_check_consensus(self, response): + """Asserts response has consensus as parameter + """ + assert 'consensus' in response + assert response['consensus'] == CONSENSUS_ALGO + + def assert_state_root_hash(self, response): + """Asserts the response has state root hash + """ + assert 'state_root_hash' in response + + def assert_previous_block_id(self, response): + """Asserts that response has previous block id + """ + assert 'previous_block_id' in response + + def assert_block_num(self, response): + """Asserts that response has proper block number + """ + assert 'block_num' in response + + def assert_items(self, items, cls): + """Asserts that all items in a collection are instances of a class + """ + for item in items: + assert isinstance(item, cls) + + def assert_valid_head(self, response, expected): + """Asserts a response has a head string with an + expected value + """ + assert 'head' in response + head = response['head'] + assert 
isinstance(head, str) + assert head == expected + + def assert_valid_link(self, response, expected_link): + """Asserts a response has a link url string with an + expected ending + """ + assert 'link' in response + assert response['link'] == expected_link + self.assert_valid_url(response['link'], expected_link) + + def assert_valid_url(self, url, expected_link): + """Asserts a url is valid, and ends with the expected value + """ + assert isinstance(url, str) + assert url.startswith('http') + assert url.endswith(expected_link) + + def assert_transaction_ids(self, response, expected): + """Asserts a response has a link url string with an + expected ending + """ + assert 'transaction_ids' in response['header'] + assert response['header']['transaction_ids'][0] == expected + + def assert_valid_paging(self, response, expected_link): + """Asserts a response has a paging dict with the + expected values. + """ + assert 'paging' in response + paging = response['paging'] + + if 'next' in paging and expected_link is not None: + assert 'next' in paging + assert 'next_position' in paging + self.assert_valid_url(response['link'], expected_link) + else: + assert 'next' not in paging + assert paging['start'] == None + assert paging['limit'] == None + + def assert_valid_error(self, response, expected_code): + """Asserts a response has only an error dict with an + expected code + """ + assert 'error' in response + assert len(response) == 1 + + error = response['error'] + assert 'code' in error + assert error['code'] == expected_code + assert 'title' in error + assert isinstance(error['title'], str) + assert 'message' in error + assert isinstance(error['message'], str) + + def assert_valid_data(self, response): + """Asserts a response has a data list of dicts + """ + assert 'data' in response + data = response['data'] + assert isinstance(data, list) + self.assert_items(data, dict) + + def assert_valid_data_list(self, response, expected_length): + """Asserts a response has a data list 
of dicts of an + expected length. + """ + assert len(response) == expected_length + + def assert_check_block_seq(self, blocks, expected_batches, expected_txns): + """Asserts block is constructed properly after submitting batches + """ + if not isinstance(blocks, list): + blocks = [blocks] + + consensus_algo = CONSENSUS_ALGO + + ep = list(zip(blocks, expected_batches, expected_txns)) + + for block, expected_batch, expected_txn in ep: + assert isinstance(block, dict) + assert isinstance(block['header'], dict) + assert consensus_algo == b64decode(block['header']['consensus']) + batches = block['batches'] + assert isinstance(batches, list) + assert len(batches) == 1 + assert isinstance(batches, dict) + self.assert_check_batch_seq(batches, expected_batch, expected_txn) + + def assert_check_batch_seq(self, batches, expected_batches, expected_txns, + payload, signer_key): + """Asserts batch is constructed properly + """ + + if not isinstance(batches, list): + batches = [batches] + + if not isinstance(expected_batches, list): + expected_batches = [expected_batches] + + if not isinstance(expected_txns, list): + expected_txns = [expected_txns] + + for batch, expected_batch , expected_txn in zip(batches, expected_batches , expected_txns): + assert expected_batch == batch['header_signature'] + assert isinstance(batch['header'], dict) + txns = batch['transactions'] + assert isinstance(txns, list) + assert len(txns) == 1 + self.assert_items(txns, dict) + self.assert_transaction_ids(batch, expected_txn) + self.assert_signer_public_key(batch, signer_key) + self.assert_trace(batch) + self.assert_check_transaction_seq(txns, expected_txn, + payload[0], signer_key) + + + def assert_check_transaction_seq(self, txns, expected_ids, + payload, signer_key): + """Asserts transactions are constructed properly + """ + if not isinstance(txns, list): + txns = [txns] + + if not isinstance(expected_ids, list): + expected_ids = [expected_ids] + + for txn, expected_id in zip(txns, expected_ids): + 
assert expected_id == txn['header_signature'] + assert isinstance(txn['header'], dict) + self.assert_payload(txn, payload) + self.assert_check_family(txn) + self.assert_check_nonce(txn) + self.assert_check_dependency(txn) + self.assert_content(txn) + self.assert_signer_public_key(txn, signer_key) + self.assert_batcher_public_key(txn, signer_key) + + def assert_check_state_seq(self, state, expected): + """Asserts state is updated properly + """ + pass + + def wait_until_status(url, status_code=200, tries=5): + """Pause the program until the given url returns the required status. + + Args: + url (str): The url to query. + status_code (int, optional): The required status code. Defaults to 200. + tries (int, optional): The number of attempts to request the url for + the given status. Defaults to 5. + Raises: + AssertionError: If the status is not recieved in the given number of + tries. + """ + attempts = tries + while attempts > 0: + try: + response = urlopen(url) + if response.getcode() == status_code: + return + + except HTTPError as err: + if err.code == status_code: + return + + LOGGER.debug('failed to read url: %s', str(err)) + except URLError as err: + LOGGER.debug('failed to read url: %s', str(err)) + + sleep_time = (tries - attempts + 1) * 2 + LOGGER.debug('Retrying in %s secs', sleep_time) + time.sleep(sleep_time) + + attempts -= 1 + + raise AssertionError( + "{} is not available within {} attempts".format(url, tries)) + + def wait_for_rest_apis(endpoints, tries=5): + """Pause the program until all the given REST API endpoints are available. + + Args: + endpoints (list of str): A list of host:port strings. + tries (int, optional): The number of attempts to request the url for + availability. 
+ """ + for endpoint in endpoints: + http = 'http://' + url = endpoint if endpoint.startswith(http) else http + endpoint + wait_until_status( + '{}/blocks'.format(url), + status_code=200, + tries=tries) diff --git a/rest_api/tests/api_test/conftest.py b/rest_api/tests/api_test/conftest.py new file mode 100644 index 0000000000..4a68272cf6 --- /dev/null +++ b/rest_api/tests/api_test/conftest.py @@ -0,0 +1,239 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ + +import pytest +import sys +import platform +import inspect +import logging +import urllib +import json +import os + +from sawtooth_signing import create_context +from sawtooth_signing import CryptoFactory +from sawtooth_signing import ParseError +from sawtooth_signing.secp256k1 import Secp256k1PrivateKey + +from sawtooth_rest_api.protobuf.validator_pb2 import Message +from sawtooth_rest_api.protobuf import client_batch_submit_pb2 +from sawtooth_rest_api.protobuf import client_batch_pb2 +from sawtooth_rest_api.protobuf import client_list_control_pb2 + +from sawtooth_rest_api.protobuf.batch_pb2 import Batch +from sawtooth_rest_api.protobuf.batch_pb2 import BatchList +from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction + +from google.protobuf.json_format import MessageToDict + +from utils import get_batches, get_transactions, get_state_address, post_batch, get_blocks,\ + get_state_list , _delete_genesis , _start_validator, \ + _stop_validator , _create_genesis , _get_client_address, \ + _stop_settings_tp, _start_settings_tp, _get_client_address, batch_count, transaction_count,\ + get_batch_statuses + +from payload import get_signer, create_intkey_transaction , create_batch + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + + +LIMIT = 100 + + +def pytest_addoption(parser): + """Contains parsers for pytest cli commands + """ + parser.addoption( + "--get", action="store_true", default=False, help="run get tests" + ) + + parser.addoption( + "--post", action="store_true", default=False, help="run post tests" + ) + + parser.addoption( + "--sn", action="store_true", default=False, help="run scenario based tests" + ) + + parser.addoption("--batch", action="store", metavar="NAME", + help="only run batch tests." 
 + ) + + parser.addoption("--transaction", action="store", metavar="NAME", + help="only run transaction tests." + ) + + parser.addoption("--state", action="store", metavar="NAME", + help="only run state tests." + ) + + parser.addoption("--block", action="store", metavar="NAME", + help="only run block tests." + ) + + parser.addoption("-E", action="store", metavar="NAME", + help="only run tests matching the environment NAME." + ) + + parser.addoption("-N", action="store", metavar="NAME", + help="only run tests matching the Number." + ) + + parser.addoption("-O", action="store", metavar="NAME", + help="only run tests matching the OS release version." + ) + + +def pytest_collection_modifyitems(config, items): + """Filters tests based on markers when parameters are passed + through the cli + """ + try: + num = int(config.getoption("-N")) + except: + num = None + + selected_items = [] + deselected_items = [] + if config.getoption("--get"): + for item in items: + for marker in list(item.iter_markers()): + if marker.name == 'get': + selected_items.append(item) + else: + deselected_items.append(item) + + items[:] = selected_items[:num] + return items + elif config.getoption("--post"): + for item in items: + for marker in item.iter_markers(): + if marker.name == 'post': + selected_items.append(item) + else: + deselected_items.append(item) + + items[:] = selected_items[:num] + return items + elif config.getoption("--sn"): + for item in items: + for marker in item.iter_markers(): + if marker.name == 'scenario': + selected_items.append(item) + else: + deselected_items.append(item) + + items[:] = selected_items[:num] + return items + else: + selected_items = items[:num] + items[:] = selected_items + return items + +@pytest.fixture(scope="session", autouse=True) +def setup(request): + """Setup method for posting batches and returning the + response + """ + data = {} + signer = get_signer() + expected_trxns = {} + expected_batches = [] + transaction_list = [] + initial_state_length = 
len(get_state_list()) + initial_batch_length = batch_count() + initial_transaction_length = transaction_count() + address = _get_client_address() + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction("set", [] , 50 , signer), + create_intkey_transaction("set", [] , 50 , signer), + ] + + for txn in txns: + dict = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + expected_trxns['trxn_id'] = [dict['header_signature']] + expected_trxns['payload'] = [dict['payload']] + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + dict = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = dict['header_signature'] + expected_batches.append(batch_id) + + length_batches = len(expected_batches) + length_transactions = len(expected_trxns) + + data['expected_batch_length'] = initial_batch_length + length_batches + data['expected_trn_length'] = initial_transaction_length + length_transactions + data['expected_txns'] = expected_trxns['trxn_id'][::-1] + data['payload'] = expected_trxns['payload'][::-1] + data['expected_batches'] = expected_batches[::-1] + data['signer_key'] = signer.get_public_key().as_hex() + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + for batch in post_batch_list: + try: + response = post_batch(batch) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + + block_list = get_blocks() + data['block_list'] = block_list + batch_list = get_batches() + data['batch_list'] = batch_list + transaction_list = get_transactions() + 
data['transaction_list'] = transaction_list + transaction_ids = [trans['header_signature'] for trans in transaction_list['data']] + data['transaction_ids'] = transaction_ids + block_ids = [block['header_signature'] for block in block_list['data']] + data['block_ids'] = block_ids[:-1] + batch_ids = [block['header']['batch_ids'][0] for block in block_list['data']] + data['batch_ids'] = batch_ids + expected_head = block_ids[0] + data['expected_head'] = expected_head + state_addresses = [state['address'] for state in get_state_list()['data']] + data['state_address'] = state_addresses + state_head_list = [get_state_address(address)['head'] for address in state_addresses] + data['state_head'] = state_head_list + data['address'] = address + data['limit'] = LIMIT + data['start'] = expected_batches[::-1][0] + data['family_name']=[block['batches'][0]['transactions'][0]['header']['family_name'] for block in block_list['data']] + return data \ No newline at end of file diff --git a/rest_api/tests/api_test/exceptions.py b/rest_api/tests/api_test/exceptions.py new file mode 100644 index 0000000000..af131f05c1 --- /dev/null +++ b/rest_api/tests/api_test/exceptions.py @@ -0,0 +1,17 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ + +class RestApiError(Exception): + pass \ No newline at end of file diff --git a/rest_api/tests/api_test/fixtures.py b/rest_api/tests/api_test/fixtures.py new file mode 100644 index 0000000000..d2d214ec49 --- /dev/null +++ b/rest_api/tests/api_test/fixtures.py @@ -0,0 +1,214 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ + +import pytest +import logging +import urllib +import json +import os +import random +import hashlib + +from sawtooth_signing import create_context +from sawtooth_signing import CryptoFactory +from sawtooth_signing import ParseError +from sawtooth_signing.secp256k1 import Secp256k1PrivateKey + +from sawtooth_rest_api.protobuf.validator_pb2 import Message +from sawtooth_rest_api.protobuf import client_batch_submit_pb2 +from sawtooth_rest_api.protobuf import client_batch_pb2 +from sawtooth_rest_api.protobuf import client_list_control_pb2 + +from sawtooth_rest_api.protobuf.batch_pb2 import Batch +from sawtooth_rest_api.protobuf.batch_pb2 import BatchList +from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction + +from google.protobuf.json_format import MessageToDict + + +from utils import get_batches, 
get_transactions, get_state_address, post_batch, get_blocks, \ + get_state_list , _delete_genesis , _start_validator, \ + _stop_validator , _create_genesis , _get_client_address, \ + _stop_settings_tp, _start_settings_tp, batch_count, transaction_count, get_batch_statuses + +from payload import get_signer, create_intkey_transaction , create_batch,\ + create_invalid_intkey_transaction, create_intkey_same_transaction, random_word_list, IntKeyPayload, \ + make_intkey_address, Transactions + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +LIMIT = 100 + + +data = {} + +@pytest.fixture(scope="function") +def break_genesis(request): + """Setup Function for deleting the genesis data + and restarting the validator with no genesis + + Waits for services to start again before + sending the request again + """ + _stop_validator() + LOGGER.info("Deleting the genesis data") + _delete_genesis() + _start_validator() + + +@pytest.fixture(scope="function") +def setup_settings_tp(request): + _stop_settings_tp() + print("settings tp is connected") + + def teardown(): + print("Connecting settings tp") + _start_settings_tp() + + request.addfinalizer(teardown) + +@pytest.fixture(scope="function") +def invalid_batch(): + """Setup method for creating invalid batches + """ + signer = get_signer() + data = {} + expected_trxns = {} + expected_batches = [] + address = _get_client_address() + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_invalid_intkey_transaction("set", [] , 50 , signer), + ] + + for txn in txns: + dict = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + expected_trxns['trxn_id'] = [dict['header_signature']] + + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + dict = MessageToDict( + batch, + including_default_value_fields=True, + 
preserving_proto_field_name=True) + + batch_id = dict['header_signature'] + expected_batches.append(batch_id) + + data['expected_txns'] = expected_trxns['trxn_id'][::-1] + data['expected_batches'] = expected_batches[::-1] + data['address'] = address + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + for batch in post_batch_list: + try: + response = post_batch(batch) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + return data + +@pytest.fixture(scope="function") +def setup_valinv_txns(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="addr") + data = Txns.get_batch_valinv_txns() + return data + +@pytest.fixture(scope="function") +def setup_invval_txns(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="addr") + data = Txns.get_batch_invval_txns() + return data + +@pytest.fixture(scope="function") +def setup_invalid_txns(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="addr") + data = Txns.get_batch_invalid_txns() + return data + + + +@pytest.fixture(scope="function") +def setup_batch_multiple_transaction(): + data = {} + signer = get_signer() + transactions= [] + expected_trxns = [] + expected_batches = [] + initial_state_length = len(get_state_list()) + + LOGGER.info("Creating intkey transactions with set operations") + for val in range(15): + txns = create_intkey_transaction("set", [] , 50 , signer) + transactions.append(txns) + + + for txn in transactions: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxns.append(trxn_id) + + + batch_s= 
create_batch(transactions, signer) + post_batch_list = BatchList(batches=[batch_s]).SerializeToString() + + LOGGER.info("Submitting batches to the handlers") + + try: + response = post_batch(post_batch_list) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + return expected_trxns + + + + + + + \ No newline at end of file diff --git a/rest_api/tests/api_test/get/test_rest_api_get_batch.py b/rest_api/tests/api_test/get/test_rest_api_get_batch.py new file mode 100644 index 0000000000..a5a02403de --- /dev/null +++ b/rest_api/tests/api_test/get/test_rest_api_get_batch.py @@ -0,0 +1,611 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error + + +from fixtures import break_genesis, invalid_batch +from utils import get_batches, get_batch_id, post_batch,\ + get_batch_statuses, post_batch_statuses,\ + _create_expected_link, _get_batch_list + +from base import RestApiBaseTest + +pytestmark = [pytest.mark.get , pytest.mark.batch] + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + + +START = 1 +LIMIT = 1 +COUNT = 0 +BAD_HEAD = 'f' +BAD_ID = 'f' +INVALID_START = -1 +INVALID_LIMIT = 0 +INVALID_RESOURCE_ID = 60 +INVALID_PAGING_QUERY = 54 +INVALID_COUNT_QUERY = 53 +VALIDATOR_NOT_READY = 15 +BATCH_NOT_FOUND = 71 +STATUS_ID_QUERY_INVALID = 66 +STATUS_BODY_INVALID = 43 +STATUS_WRONG_CONTENT_TYPE = 46 +WAIT = 10 + + +class TestBatchList(RestApiBaseTest): + """This class tests the batch list with different parameters + """ + def test_api_get_batch_list(self, setup): + """Tests the batch list by submitting intkey batches + """ + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_batch_length'] + payload = setup['payload'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + paging_link = '{}/batches?head={}&start={}'.format(address,\ + expected_head, start) + + try: + response = get_batches() + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + + batches = _get_batch_list(response) + + self.assert_valid_data(response) + self.assert_valid_head(response, expected_head) + self.assert_valid_data_list(batches, expected_length) + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + 
signer_key) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) + + def test_api_get_batch_list_head(self, setup): + """Tests that GET /batches is reachable with head parameter + """ + LOGGER.info("Starting test for batch with head parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + expected_head = setup['expected_head'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + try: + response = get_batches(head_id=expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + + batches = response['data'][:-1] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) + + def test_api_get_batch_list_bad_head(self, setup): + """Tests that GET /batches is unreachable with bad head parameter + """ + LOGGER.info("Starting test for batch with bad head parameter") + + try: + batch_list = get_batches(head_id=BAD_HEAD) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_RESOURCE_ID) + + + def test_api_get_batch_list_id(self, setup): + """Tests that GET /batches is reachable with id as parameter + """ + LOGGER.info("Starting test for batch with id parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + 
batch_ids = setup['batch_ids'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + + expected_id = batch_ids[0] + expected_length = len([expected_id]) + + expected_link = '{}/batches?head={}&start={}&limit={}&id={}'.format(address,\ + expected_head, start, limit, expected_id) + + try: + response = get_batches(id=expected_id) + except: + LOGGER.info("Rest Api is not reachable") + + + batches = response['data'][:-1] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + + def test_api_get_batch_list_bad_id(self, setup): + """Tests that GET /batches is unreachable with bad id parameter + """ + LOGGER.info("Starting test for batch with bad id parameter") + + try: + batch_list = get_batches(head_id=BAD_ID) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_RESOURCE_ID) + + def test_api_get_batch_list_head_and_id(self, setup): + """Tests GET /batches is reachable with head and id as parameters + """ + LOGGER.info("Starting test for batch with head and id parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + batch_ids = setup['batch_ids'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + + expected_id = batch_ids[0] + expected_length = len([expected_id]) + + expected_link = '{}/batches?head={}&start={}&limit={}&id={}'.format(address,\ + expected_head, start, limit, expected_id) + + try: + response = get_batches(head_id=expected_head , id=expected_id) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + + + batches = 
response['data'][:-1] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + + def test_api_get_paginated_batch_list(self, setup): + """Tests GET /batches is reachable using paging parameters + """ + LOGGER.info("Starting test for batch with paging parameters") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = 1 + limit = 1 + + try: + response = get_batches(start=start , limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_PAGING_QUERY) + + def test_api_get_batch_list_limit(self, setup): + """Tests GET /batches is reachable using paging parameters + """ + LOGGER.info("Starting test for batch with paging parameters") + signer_key = setup['signer_key'] + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + expected_id = batch_ids[0] + start = setup['start'] + address = setup['address'] + limit = 1 + + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + try: + response = get_batches(limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + batches = response['data'][:-1] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) + + + def 
test_api_get_batch_list_invalid_start(self, setup): + """Tests that GET /batches is unreachable with invalid start parameter + """ + LOGGER.info("Starting test for batch with invalid start parameter") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = -1 + + try: + response = get_batches(start=start) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_PAGING_QUERY) + + + def test_api_get_batch_list_invalid_limit(self, setup): + """Tests that GET /batches is unreachable with bad limit parameter + """ + LOGGER.info("Starting test for batch with bad limit parameter") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + limit = 0 + + try: + response = get_batches(limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_COUNT_QUERY) + + def test_api_get_batch_list_reversed(self, setup): + """verifies that GET /batches with the reverse parameter returns batches in reverse order + """ + LOGGER.info("Starting test for batch with bad head parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + setup_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_batch_length'] + payload = setup['payload'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + expected_batches = setup_batches[::-1] + + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + reverse = True + + try: + response = get_batches(reverse=reverse) + except urllib.error.HTTPError as error: + assert response.code == 400 + + 
batches = response['data'][:-1] + + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response) + + def test_api_get_batch_key_params(self, setup): + """Tests/ validate the block key parameters with data, head, link and paging + """ + response = get_batches() + assert 'link' in response + assert 'data' in response + assert 'paging' in response + assert 'head' in response + + def test_api_get_batch_param_link_val(self, setup): + """Tests/ validate the batch parameters with batches, head, start and limit + """ + try: + batch_list = get_batches() + for link in batch_list: + if(link == 'link'): + assert 'head' in batch_list['link'] + assert 'start' in batch_list['link'] + assert 'limit' in batch_list['link'] + assert 'batches' in batch_list['link'] + except urllib.error.HTTPError as error: + assert response.code == 400 + LOGGER.info("Link is not proper for batch and parameters are missing") + + def test_rest_api_check_batches_count(self, setup): + """Tests batches count from batch list + """ + count =0 + try: + batch_list = get_batches() + for batch in enumerate(batch_list['data']): + count = count+1 + except urllib.error.HTTPError as error: + LOGGER.info("Batch count not able to collect") + +class TestBatchGet(RestApiBaseTest): + def test_api_get_batch_id(self, setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_batch_length'] + batch_ids = setup['batch_ids'] + expected_id = batch_ids[0] + payload = setup['payload'] + address = setup['address'] + + expected_link = '{}/batches/{}'.format(address, expected_batches[0]) + + try: + response = get_batch_id(expected_batches[0]) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is 
Unreachable") + + batches = response['data'] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + self.assert_valid_link(response, expected_link) + + def test_api_get_bad_batch_id(self, setup): + """verifies that GET /batches/{bad_batch_id} + is unreachable with bad head parameter + """ + try: + batch_list = get_batches(head_id=BAD_ID) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_RESOURCE_ID) + +class TestBatchStatusesList(RestApiBaseTest): + """This class tests the batch status list with different parameters + """ + def test_api_post_batch_status_15ids(self, setup): + """verifies that POST /batches_statuses with more than 15 ids + """ + LOGGER.info("Starting test for batch with bad head parameter") + data = {} + batch_ids = setup['batch_ids'] + data['batch_ids'] = batch_ids + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + data_str=json.dumps(data['batch_ids']).encode() + + try: + response = post_batch_statuses(data_str) + assert response['data'][0]['status'] == "COMMITTED" + except urllib.error.HTTPError as error: + assert response.code == 400 + + def test_api_post_batch_status_10ids(self, setup): + """verifies that POST /batches_status with less than 15 ids + """ + LOGGER.info("Starting test for batch with bad head parameter") + data = {} + values = [] + batch_ids = setup['batch_ids'] + data['batch_ids'] = batch_ids + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + for i in range(10): + values.append(data['batch_ids'][i]) + data_str=json.dumps(values).encode() + + try: + response = post_batch_statuses(data_str) + assert response['data'][0]['status'] == "COMMITTED" + except urllib.error.HTTPError as error: + assert response.code == 400 + + def test_api_get_batch_statuses(self,setup): + signer_key = 
setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + status = "COMMITTED" + + + expected_link = '{}/batch_statuses?id={}'.format(address, expected_batches[0]) + + try: + response = get_batch_statuses([expected_batches[0]]) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + def test_api_get_batch_statuses_many_ids(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + status = "COMMITTED" + + batches = ",".join(expected_batches) + + expected_link = '{}/batch_statuses?id={}'.format(address, batches) + + try: + response = get_batch_statuses(expected_batches) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + def test_api_get_batch_statuses_bad_id(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + + try: + response = get_batch_statuses(BAD_ID) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_RESOURCE_ID) + + def test_api_get_batch_statuses_invalid_query(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + + try: + response = get_batch_statuses() + except 
urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, STATUS_ID_QUERY_INVALID) + + def test_api_get_batch_statuses_wait(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + status = "COMMITTED" + + expected_link = '{}/batch_statuses?id={}&wait={}'.format(address, expected_batches[0], WAIT) + + try: + response = get_batch_statuses([expected_batches[0]],WAIT) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + + def test_api_get_batch_statuses_invalid(self, invalid_batch): + expected_batches = invalid_batch['expected_batches'] + address = invalid_batch['address'] + status = "INVALID" + + expected_link = '{}/batch_statuses?id={}'.format(address, expected_batches[0]) + + try: + response = get_batch_statuses([expected_batches[0]]) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + + def test_api_get_batch_statuses_unknown(self, setup): + address = setup['address'] + expected_batches = setup['expected_batches'] + unknown_batch = expected_batches[0] + status = "UNKNOWN" + + expected_link = '{}/batch_statuses?id={}'.format(address, unknown_batch) + + try: + response = get_batch_statuses([unknown_batch]) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + 
self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + def test_api_get_batch_statuses_default_wait(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + status = "COMMITTED" + + expected_link = '{}/batch_statuses?id={}&wait=300'.format(address, expected_batches[0]) + + try: + response = get_batch_statuses([expected_batches[0]],300) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) \ No newline at end of file diff --git a/rest_api/tests/api_test/get/test_rest_api_get_block.py b/rest_api/tests/api_test/get/test_rest_api_get_block.py new file mode 100644 index 0000000000..cf54b22d59 --- /dev/null +++ b/rest_api/tests/api_test/get/test_rest_api_get_block.py @@ -0,0 +1,410 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error + +from utils import get_blocks, get_block_id, get_batches, get_transactions + +from base import RestApiBaseTest + + +pytestmark = [pytest.mark.get , pytest.mark.block] + + +START = 1 +LIMIT = 1 +COUNT = 0 +BAD_HEAD = 'f' +BAD_ID = 'f' +INVALID_START = -1 +INVALID_LIMIT = 0 +INVALID_RESOURCE_ID = 60 +INVALID_PAGING_QUERY = 54 +INVALID_COUNT_QUERY = 53 +VALIDATOR_NOT_READY = 15 +BLOCK_NOT_FOUND = 70 +HEAD_LENGTH = 128 +MAX_BATCH_IN_BLOCK = 100 +FAMILY_NAME = 'xo' + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + + +class TestBlockList(RestApiBaseTest): + """This class tests the blocks list with different parameters + """ + def test_api_get_block_list(self, setup): + """Tests the block list by submitting intkey batches + """ + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + + try: + response = get_blocks() + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + + blocks = response['data'][:-1] + + def test_api_get_block_list_head(self, setup): + """Tests that GET /blocks is reachable with head parameter + """ + LOGGER.info("Starting test for blocks with head parameter") + expected_head = setup['expected_head'] + + try: + response = get_blocks(head_id=expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + assert response['head'] == expected_head , "request is not correct" + + def test_api_get_block_list_bad_head(self, setup): + """Tests that GET /blocks is unreachable with bad head parameter + """ + LOGGER.info("Starting test for blocks with bad 
head parameter") + + try: + batch_list = get_blocks(head_id=BAD_HEAD) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + + def test_api_get_block_list_id(self, setup): + """Tests that GET /blocks is reachable with id as parameter + """ + LOGGER.info("Starting test for blocks with id parameter") + + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + + try: + response = get_blocks(id=expected_id) + except: + LOGGER.info("Rest Api is not reachable") + + + assert response['head'] == expected_head, "request is not correct" + assert response['paging']['start'] == None , "request is not correct" + assert response['paging']['limit'] == None , "request is not correct" + + def test_api_get_block_list_bad_id(self, setup): + """Tests that GET /blocks is unreachable with bad id parameter + """ + LOGGER.info("Starting test for blocks with bad id parameter") + bad_id = 'f' + + try: + batch_list = get_blocks(head_id=bad_id) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + + def test_api_get_block_list_head_and_id(self, setup): + """Tests GET /blocks is reachable with head and id as parameters + """ + LOGGER.info("Starting test for blocks with head and id parameter") + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + + + response = get_blocks(head_id=expected_head , id=expected_id) + + assert response['head'] == expected_head , "head is not matching" + assert response['paging']['start'] == None , "start parameter is not correct" + assert response['paging']['limit'] == None , "request is not correct" + assert bool(response['data']) == True + + + def 
test_api_get_paginated_block_list(self, setup): + """Tests GET /blocks is reachable using paging parameters + """ + LOGGER.info("Starting test for blocks with paging parameters") + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + start = 1 + limit = 1 + + try: + response = get_blocks(start=start , limit=limit, id=expected_id) + except urllib.error.HTTPError as error: + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) + + def test_api_get_block_list_start_id(self, setup): + """Tests GET /blocks is reachable using paging parameters + """ + LOGGER.info("Starting test for blocks with paging parameters") + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + start = 1 + limit = 1 + + try: + response = get_blocks(start=start , limit=limit, id=expected_id) + except urllib.error.HTTPError as error: + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) + + def test_api_get_block_list_invalid_start(self, setup): + """Tests that GET /blocks is unreachable with invalid start parameter + """ + LOGGER.info("Starting test for batch with invalid start parameter") + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + start = -1 + + try: + response = get_blocks(start=start) + except urllib.error.HTTPError as error: + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) + + def test_api_get_block_list_limit(self, setup): + """Tests that GET /blocks is unreachable with invalid start parameter 
+ """ + LOGGER.info("Starting test for batch with invalid start parameter") + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + start = -1 + + try: + response = get_blocks(start=start) + except urllib.error.HTTPError as error: + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) + + def test_api_get_block_list_invalid_limit(self, setup): + """Tests that GET /blocks is unreachable with bad limit parameter + """ + LOGGER.info("Starting test for batch with bad limit parameter") + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + limit = 0 + + try: + response = get_blocks(limit=limit) + except urllib.error.HTTPError as error: + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + self.assert_valid_error(response, INVALID_COUNT_QUERY) + + + def test_api_get_block_list_reversed(self, setup): + """verifies that GET /blocks is unreachable with bad head parameter + """ + LOGGER.info("Starting test for batch with bad head parameter") + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + reverse = True + + try: + response = get_blocks(reverse=reverse) + except urllib.error.HTTPError as error: + assert response.code == 400 + + assert response['head'] == expected_head , "request is not correct" + assert response['paging']['start'] == None , "request is not correct" + assert response['paging']['limit'] == None , "request is not correct" + assert bool(response['data']) == True + + def test_api_get_block_link_val(self, setup): + """Tests/ validate the block parameters with blocks, head, start and limit + """ + try: + block_list = get_blocks() + for link in block_list: + if(link == 'link'): 
+ assert 'head' in block_list['link'] + assert 'start' in block_list['link'] + assert 'limit' in block_list['link'] + assert 'blocks' in block_list['link'] + except urllib.error.HTTPError as error: + assert response.code == 400 + LOGGER.info("Link is not proper for state and parameters are missing") + + def test_api_get_block_key_params(self, setup): + """Tests/ validate the block key parameters with data, head, link and paging + """ + response = get_blocks() + assert 'link' in response + assert 'data' in response + assert 'paging' in response + assert 'head' in response + + def test_api_get_each_batch_id_length(self, setup): + """Tests the each batch id length should be 128 hex character long + """ + try: + block_list = get_blocks() + for batch in block_list['data']: + expected_head = batch['header']['batch_ids'][0] + head_len = len(expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Batch id length is not 128 hex character long") + assert head_len == HEAD_LENGTH + + def test_api_get_first_block_id_length(self, setup): + """Tests the first block id length should be 128 hex character long + """ + try: + for block_list in get_blocks(): + batch_list = get_batches() + for block in batch_list: + expected_head = batch_list['head'] + head_len = len(expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Block id length is not 128 hex character long") + assert head_len == HEAD_LENGTH + + def test_rest_api_check_post_max_batches(self, setup): + """Tests that allow max post batches in block + Handled max 100 batches post in block and handle for extra batch + """ + block_list = get_blocks()['data'] + for batchcount, _ in enumerate(block_list, start=1): + if batchcount == MAX_BATCH_IN_BLOCK: + print("Max 100 Batches are present in Block") + + def test_rest_api_check_head_signature(self, setup): + """Tests that head signature of each batch of the block + should be not none + """ + block_list = get_blocks()['data'] + head_signature = 
[block['batches'][0]['header_signature'] for block in block_list] + for i, _ in enumerate(block_list): + head_sig = json.dumps(head_signature[i]).encode('utf8') + assert head_signature[i] is not None, "Head signature is available for all batches in block" + + def test_rest_api_check_family_version(self, setup): + """Test batch transaction family version should be present + for each transaction header + """ + block_list = get_blocks()['data'] + family_version = [block['batches'][0]['transactions'][0]['header']['family_version'] for block in block_list] + for i, _ in enumerate(block_list): + assert family_version[i] is not None, "family version present for all batches in block" + + def test_rest_api_check_input_output_content(self,setup): + """Test batch input and output content should be same for + each batch and unique from other + """ + block_list = get_blocks()['data'] + txn_input = [block['batches'][0]['transactions'][0]['header']['inputs'][0] for block in block_list] + txn_output = [block['batches'][0]['transactions'][0]['header']['outputs'][0] for block in block_list] + if(txn_input == txn_output): + return True + def test_rest_api_check_signer_public_key(self, setup): + """Tests that signer public key is calculated for a block + properly + """ + block_list = get_blocks()['data'] + signer_public_key = [block['batches'][0]['header']['signer_public_key'] for block in block_list] + assert signer_public_key is not None, "signer public key is available" + + def test_rest_api_check_blocks_count(self, setup): + """Tests blocks count from block list + """ + count =0 + try: + block_list = get_blocks() + for block in enumerate(block_list['data']): + count = count+1 + except urllib.error.HTTPError as error: + LOGGER.info("BLock count not able to collect") + + def test_rest_api_blk_content_head_signature(self, setup): + """Tests that head signature of each batch of the block + should be not none + """ + try: + block_list = get_blocks() + for batch in block_list['data']: + 
batch_list = get_batches() + for block in batch_list: + transaction_list = get_transactions() + for trans in transaction_list['data']: + head_signature = trans['header_signature'] + except urllib.error.HTTPError as error: + LOGGER.info("Header signature is missing in some of the batches") + assert head_signature is not None, "Head signature is available for all batches in block" + +class TestBlockGet(RestApiBaseTest): + def test_api_get_block_id(self, setup): + """Tests that GET /blocks/{block_id} is reachable + """ + LOGGER.info("Starting test for blocks/{block_id}") + expected_head = setup['expected_head'] + expected_block_id = setup['block_ids'][0] + + try: + response = get_block_id(block_id=expected_block_id) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + def test_api_get_bad_block_id(self, setup): + """Tests that GET /blocks/{bad_block_id} is not reachable + with bad id + """ + LOGGER.info("Starting test for blocks/{bad_block_id}") + + try: + response = get_block_id(block_id=BAD_ID) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + diff --git a/rest_api/tests/api_test/get/test_rest_api_get_peers.py b/rest_api/tests/api_test/get/test_rest_api_get_peers.py new file mode 100644 index 0000000000..55fd908aaf --- /dev/null +++ b/rest_api/tests/api_test/get/test_rest_api_get_peers.py @@ -0,0 +1,48 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error + +from utils import get_peers + +from base import RestApiBaseTest + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +pytestmark = [pytest.mark.get , pytest.mark.peers] + +PEER_LIST = [] + +class TestPeerList(RestApiBaseTest): + """This class tests the peer list with different parameters + """ + def test_api_get_peer_list(self, setup): + """Tests the peer list + """ + address = setup['address'] + expected_link = '{}/peers'.format(address) + + try: + response = get_peers() + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + + self.assert_valid_link(response, expected_link) + \ No newline at end of file diff --git a/rest_api/tests/api_test/get/test_rest_api_get_receipts.py b/rest_api/tests/api_test/get/test_rest_api_get_receipts.py new file mode 100644 index 0000000000..749fdbf951 --- /dev/null +++ b/rest_api/tests/api_test/get/test_rest_api_get_receipts.py @@ -0,0 +1,137 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ +# +import pytest +import logging +import json +import urllib.request +import urllib.error + +from conftest import setup +from utils import get_state_list, get_reciepts, post_receipts +from base import RestApiBaseTest +from fixtures import setup_batch_multiple_transaction + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +pytestmark = [pytest.mark.get , pytest.mark.receipts] + +RECEIPT_NOT_FOUND = 80 +RECEIPT_WRONG_CONTENT_TYPE = 81 +RECEIPT_BODY_INVALID = 82 +RECEIPT_Id_QUERYINVALID = 83 +INVALID_RESOURCE_ID = 60 + + +class TestReceiptsList(RestApiBaseTest): + """This class tests the receipt list with different parameters + """ + def test_api_get_reciept_invalid_id(self): + """Tests the reciepts after submitting invalid transaction + """ + transaction_id="s" + try: + response = get_reciepts(transaction_id) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + assert response['error']['code'] == INVALID_RESOURCE_ID + assert response['error']['title'] == 'Invalid Resource Id' + + def test_api_get_reciepts_multiple_transactions(self, setup_batch_multiple_transaction): + """Test the get reciepts for multiple transaction. 
+ """ + transaction_list="" + li=setup_batch_multiple_transaction + for txn in li: + transaction_list=txn+","+transaction_list + + trans_list = str(transaction_list)[:-1] + try: + response = get_reciepts(trans_list) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + response = json.loads(error.fp.read().decode('utf-8')) + + for res,txn in zip(response['data'],reversed(li)): + assert str(res['id']) == txn + + def test_api_get_reciepts_single_transactions(self,setup): + """Tests get reciepts response for single transaction""" + + expected_transaction=setup['expected_txns'] + + transaction_id=str(expected_transaction)[2:-2] + try: + response = get_reciepts(transaction_id) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + assert response['error']['code'] == RECEIPT_NOT_FOUND + assert response['error']['title'] == 'Invalid Resource Id' + + def test_api_post_reciepts_single_transactions(self,setup): + """Test post reciepts response for single transaction""" + + expected_transaction=setup['expected_txns'] + + transaction_json=json.dumps(expected_transaction).encode() + try: + response = post_receipts(transaction_json) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + assert response['error']['code'] == INVALID_RESOURCE_ID + assert response['error']['title'] == 'Invalid Resource Id' + + def test_api_post_reciepts_invalid_transactions(self): + """test reciepts post for invalid transaction""" + + expected_transaction="few" + transaction_json=json.dumps(expected_transaction).encode() + try: + response = post_receipts(transaction_json) + except urllib.error.HTTPError as error: + 
LOGGER.info("Rest Api is Unreachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + assert response['error']['code'] == RECEIPT_BODY_INVALID + assert response['error']['title'] == 'Bad Receipts Request' + + def test_api_post_reciepts_multiple_transactions(self, setup_batch_multiple_transaction): + """Test the post reciepts response for multiple transaction. + """ + + transaction_list=setup_batch_multiple_transaction + + json_list=json.dumps(transaction_list).encode() + + try: + response= post_receipts(json_list) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + response = json.loads(error.fp.read().decode('utf-8')) + + for res,txn in zip(response['data'], transaction_list): + assert str(res['id']) == txn \ No newline at end of file diff --git a/rest_api/tests/api_test/get/test_rest_api_get_state.py b/rest_api/tests/api_test/get/test_rest_api_get_state.py new file mode 100644 index 0000000000..e3a40a8005 --- /dev/null +++ b/rest_api/tests/api_test/get/test_rest_api_get_state.py @@ -0,0 +1,488 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error + +from utils import get_state_list, get_state_address +from fixtures import invalid_batch + + +from base import RestApiBaseTest + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +pytestmark = [pytest.mark.get, pytest.mark.state] + +START = 1 +LIMIT = 1 +COUNT = 0 +BAD_HEAD = 'f' +BAD_ID = 'f' +BAD_ADDRESS = 'f' +INVALID_START = -1 +INVALID_LIMIT = 0 +INVALID_RESOURCE_ID = 60 +INVALID_PAGING_QUERY = 54 +INVALID_COUNT_QUERY = 53 +VALIDATOR_NOT_READY = 15 +STATE_ADDRESS_LENGTH = 70 +STATE_NOT_FOUND = 75 +INVALID_STATE_ADDRESS = 62 +HEAD_LENGTH = 128 + + +class TestStateList(RestApiBaseTest): + """This class tests the state list with different parameters + """ + def test_api_get_state_list(self, setup): + """Tests the state list by submitting intkey batches + """ + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + + try: + response = get_state_list() + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + + state_list = response['data'][:-1] + + self.assert_valid_head(response , expected_head) + + def test_api_get_state_list_invalid_batch(self, invalid_batch): + """Tests that transactions are submitted and committed for + each block that are created by submitting invalid intkey batches + """ + batches = invalid_batch['expected_batches'] + try: + response = get_state_list() + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + def test_api_get_state_list_head(self, setup): + """Tests that GET /state is reachable with head parameter + """ + LOGGER.info("Starting test for state with head parameter") + expected_head = 
setup['expected_head'] + + try: + response = get_state_list(head_id=expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + assert response['head'] == expected_head , "request is not correct" + + def test_api_get_state_list_bad_head(self, setup): + """Tests that GET /state is unreachable with bad head parameter + """ + LOGGER.info("Starting test for state with bad head parameter") + bad_head = 'f' + + try: + batch_list = get_state_list(head_id=bad_head) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data , INVALID_RESOURCE_ID) + + + def test_api_get_state_list_address(self, setup): + """Tests that GET /state is reachable with address parameter + """ + LOGGER.info("Starting test for state with address parameter") + expected_head = setup['expected_head'] + address = setup['state_address'][0] + + try: + response = get_state_list(address=address) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + assert response['head'] == expected_head , "request is not correct" + + def test_api_get_state_list_bad_address(self, setup): + """Tests that GET /state is unreachable with bad address parameter + """ + LOGGER.info("Starting test for state with bad address parameter") + bad_address = 'f' + + try: + batch_list = get_state_list(address=bad_address) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + 
LOGGER.info(data['error']['message']) + + self.assert_valid_error(data , INVALID_RESOURCE_ID) + + def test_api_get_paginated_state_list(self, setup): + """Tests GET /state is reachbale using paging parameters + """ + LOGGER.info("Starting test for state with paging parameters") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = 1 + limit = 1 + + try: + response = get_state_list(start=start , limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data , INVALID_PAGING_QUERY) + + def test_api_get_paginated_state_list_limit(self, setup): + """Tests GET /state is reachbale using paging parameters + """ + LOGGER.info("Starting test for state with paging parameters") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + limit = 1 + + try: + response = get_state_list(limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + + def test_api_get_paginated_state_list_start(self, setup): + """Tests GET /state is reachbale using paging parameters + """ + LOGGER.info("Starting test for state with paging parameters") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + limit = 1 + + try: + response = get_state_list(limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + + def test_api_get_state_list_bad_paging(self, setup): + """Tests GET /state is reachbale using bad paging parameters + """ + LOGGER.info("Starting test for state with bad paging parameters") + batch_ids = setup['batch_ids'] + 
expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = -1 + limit = -1 + + try: + response = get_state_list(start=start , limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data , INVALID_COUNT_QUERY) + + + def test_api_get_state_list_invalid_start(self, setup): + """Tests that GET /state is unreachable with invalid start parameter + """ + LOGGER.info("Starting test for state with invalid start parameter") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = -1 + + try: + response = get_state_list(start=start) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data , INVALID_PAGING_QUERY) + + + def test_api_get_state_list_invalid_limit(self, setup): + """Tests that GET /state is unreachable with bad limit parameter + """ + LOGGER.info("Starting test for state with bad limit parameter") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + limit = 0 + + try: + response = get_state_list(limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data , INVALID_COUNT_QUERY) + + def test_api_get_state_list_reversed(self, setup): + """verifies that GET /state is unreachable with bad head parameter + """ + LOGGER.info("Starting test for state with bad head parameter") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + reverse = True + + try: + response = get_state_list(reverse=reverse) + except urllib.error.HTTPError as error: + 
assert response.code == 400 + + assert response['paging']['start'] == None , "request is not correct" + assert response['paging']['limit'] == None , "request is not correct" + assert bool(response['data']) == True + + def test_api_get_state_data_address_prefix_namespace(self, setup): + """Tests the state data address with 6 hex characters long + namespace prefix + """ + try: + for state in get_state_list()['data']: + #Access each address using namespace prefix + namespace = state['address'][:6] + res=get_state_list(address=namespace) + except urllib.error.HTTPError as error: + LOGGER.info("Not able to access related state address using namespace prefix") + + def test_api_get_state_data_head_wildcard_character(self, setup): + """Tests the state head with wildcard_character ***STL-1345*** + """ + pass +# try: +# for _ in get_state_list()['data']: +# expected_head = setup['expected_head'][:6] +# addressList = list(expected_head) +# addressList[2]='?' +# expected_head = ''.join(addressList) +# print("\nVALUE is: ", expected_head) +# res=get_state_list(head_id=expected_head) +# except urllib.error.HTTPError as error: +# LOGGER.info("Not able to access ") +# data = json.loads(error.fp.read().decode('utf-8')) +# if data: +# LOGGER.info(data['error']['title']) +# LOGGER.info(data['error']['message']) +# assert data['error']['code'] == 60 +# assert data['error']['title'] == 'Invalid Resource Id' + + + def test_api_get_state_data_head_partial_character(self, setup): + """Tests the state head with partial head address ***STL-1345*** + """ + try: + for _ in get_state_list()['data']: + expected_head = setup['expected_head'][:6] + res=get_state_list(head_id=expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Not able to access ") + data = json.loads(error.fp.read().decode('utf-8')) + if data: + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + assert data['error']['code'] == 60 + assert data['error']['title'] == 'Invalid Resource 
Id' + + def test_api_get_state_data_address_partial_character(self, setup): + """Tests the state address with partial head address ***STL-1346*** + """ + try: + for _ in get_state_list()['data']: + expected_head = setup['expected_head'][:6] + res=get_state_list(head_id=expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Not able to access ") + data = json.loads(error.fp.read().decode('utf-8')) + if data: + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + assert data['error']['code'] == 62 + assert data['error']['title'] == 'Invalid State Address' + + + def test_api_get_state_data_address_length(self, setup): + """Tests the state data address length is 70 hex character long + with proper prefix namespace + """ + try: + response = get_state_list() + for state in get_state_list()['data']: + #Access each address using of state + address = len(response['data'][0]['address']) + except urllib.error.HTTPError as error: + LOGGER.info("State address is not 70 character long") + assert address == STATE_ADDRESS_LENGTH + + + def test_api_get_state_data_address_with_odd_hex_value(self, setup): + """Tests the state data address fail with odd hex character + address + """ + try: + response = get_state_list() + for state in get_state_list()['data']: + #Access each address using of state + address = len(response['data'][0]['address']) + if(address%2 == 0): + pass + except urllib.error.HTTPError as error: + LOGGER.info("Odd state address is not correct") + + def test_api_get_state_data_address_with_reduced_length(self, setup): + """Tests the state data address with reduced even length hex character long + """ + try: + response = get_state_list() + for state in get_state_list()['data']: + #Access each address using of state + address = response['data'][0]['address'] + nhex = address[:-4] + get_state_list(address = nhex) + except urllib.error.HTTPError as error: + LOGGER.info("Reduced length data address failed to processed") + + + def 
test_api_get_state_data_address_64_Hex(self, setup): + """Tests the state data address with 64 hex give empty data + """ + try: + response = get_state_list() + for state in get_state_list()['data']: + #Access each address using of state + address = response['data'][0]['address'] + nhex = address[6:70] + naddress = get_state_list(address = nhex) + assert naddress['data'] == [] + except urllib.error.HTTPError as error: + LOGGER.info("state data address with 64 hex characters not processed ") + + + def test_api_get_state_data_address_alter_bytes(self, setup): + """Tests the state data address with alter bytes give empty data + """ + try: + response = get_state_list() + for state in get_state_list()['data']: + #Access each address using of state + address = response['data'][0]['address'] + nhex = address[6:8] + naddress = get_state_list(address = nhex) + addressList = list(naddress) + addressList[2]='z' + naddress = ''.join(addressList) + except urllib.error.HTTPError as error: + LOGGER.info("state data address with altered bytes not processed ") + + + def test_api_get_state_link_val(self, setup): + """Tests/ validate the state parameters with state, head, start and limit + """ + try: + state_list = get_state_list() + for link in state_list: + if(link == 'link'): + assert 'head' in state_list['link'] + assert 'start' in state_list['link'] + assert 'limit' in state_list['link'] + assert 'state' in state_list['link'] + except urllib.error.HTTPError as error: + assert response.code == 400 + LOGGER.info("Link is not proper for state and parameters are missing") + + def test_api_get_state_key_params(self, setup): + """Tests/ validate the state key parameters with data, head, link and paging + """ + response = get_state_list() + assert 'link' in response + assert 'data' in response + assert 'paging' in response + assert 'head' in response + + def test_api_get_each_state_head_length(self, setup): + """Tests the each state head length should be 128 hex character long + """ + 
try: + for _ in get_state_list()['data']: + expected_head = setup['expected_head'] + head_len = len(expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("State Head length is not 128 hex character long") + assert head_len == HEAD_LENGTH + + def test_rest_api_check_state_count(self, setup): + """Tests state count from state list + """ + count = 0 + try: + state_list = get_state_list()['data'] + for batch in enumerate(state_list): + count = count+1 + except urllib.error.HTTPError as error: + LOGGER.info("State count not able to collect") + + +class TestStateGet(RestApiBaseTest): + def test_api_get_state_address(self, setup): + """Tests/ validate the state key parameters with data, head, link and paging + """ + address = setup['state_address'][0] + try: + response = get_state_address(address=address) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + def test_api_get_bad_address(self, setup): + """Tests /state/{bad_state_address} + """ + try: + response = get_state_address(address=BAD_ADDRESS) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_STATE_ADDRESS) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_transaction.py b/rest_api/tests/api_test/get/test_rest_api_get_transaction.py new file mode 100644 index 0000000000..6480a6f94f --- /dev/null +++ b/rest_api/tests/api_test/get/test_rest_api_get_transaction.py @@ -0,0 +1,379 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error + +from fixtures import break_genesis + +from utils import get_transactions, get_transaction_id + +from base import RestApiBaseTest + +pytestmark = [pytest.mark.get , pytest.mark.transactions] + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +START = 1 +LIMIT = 1 +COUNT = 0 +BAD_HEAD = 'f' +BAD_ID = 'f' +INVALID_START = -1 +INVALID_LIMIT = 0 +INVALID_RESOURCE_ID = 60 +INVALID_PAGING_QUERY = 54 +INVALID_COUNT_QUERY = 53 +VALIDATOR_NOT_READY = 15 +TRANSACTION_NOT_FOUND = 72 +HEAD_LENGTH = 128 + + +class TestTransactionList(RestApiBaseTest): + def test_api_get_transaction_list(self, setup): + """Tests the transaction list after submitting intkey batches + """ + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_trn_length'] + payload = setup['payload'][0] + address = setup['address'] + start = expected_txns[::-1][0] + + expected_link = '{}/transactions?head={}&start={}&limit={}'.format(address,\ + expected_head, start, LIMIT) + + try: + response = get_transactions() + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + txns = response['data'][:-1] + +# self.assert_check_transaction_seq(txns, expected_txns, 
+# payload, signer_key) +# self.assert_valid_head(response , expected_head) +# self.assert_valid_paging(response) + + + def test_api_get_transaction_list_head(self, setup): + """Tests that GET /transactions is reachable with head parameter + """ + LOGGER.info("Starting test for transactions with head parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_trn_length'] + payload = setup['payload'][0] + address = setup['address'] + start = expected_txns[::-1][0] + + expected_link = '{}/transactions?head={}&start={}&limit={}'.format(address,\ + expected_head, start, LIMIT) + + try: + response = get_transactions(head_id=expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + txns = response['data'][:-1] + + self.assert_check_transaction_seq(txns, expected_txns, + payload, signer_key) + self.assert_valid_head(response , expected_head) + + def test_api_get_transaction_list_bad_head(self, setup): + """Tests that GET /transactions is unreachable with bad head parameter + """ + LOGGER.info("Starting test for transactions with bad head parameter") + + try: + response = get_transactions(head_id=BAD_HEAD) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_RESOURCE_ID) + + def test_api_get_transaction_list_id(self, setup): + """Tests that GET /transactions is reachable with id as parameter + """ + LOGGER.info("Starting test for transactions with id parameter") + + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_txns = setup['expected_txns'] + expected_length 
= setup['expected_trn_length'] + payload = setup['payload'][0] + address = setup['address'] + start = expected_txns[::-1][0] + transaction_ids = setup['transaction_ids'] + expected_id = transaction_ids[0] + expected_length = len([expected_id]) + + expected_link = '{}/transactions?head={}&start={}&limit={}&id={}'.format(address,\ + expected_head, start, LIMIT, expected_id) + + try: + response = get_transactions(id=expected_id) + except: + LOGGER.info("Rest Api is not reachable") + + + txns = response['data'][:-1] + + self.assert_check_transaction_seq(txns, expected_txns, + payload, signer_key) + + def test_api_get_transaction_list_bad_id(self, setup): + """Tests that GET /transactions is unreachable with bad id parameter + """ + LOGGER.info("Starting test for transactions with bad id parameter") + bad_id = 'f' + + try: + response = get_transactions(head_id=bad_id) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + + self.assert_valid_error(data, INVALID_RESOURCE_ID) + + def test_api_get_transaction_list_head_and_id(self, setup): + """Tests GET /transactions is reachable with head and id as parameters + """ + LOGGER.info("Starting test for transactions with head and id parameter") + + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_trn_length'] + payload = setup['payload'][0] + address = setup['address'] + start = expected_txns[::-1][0] + transaction_ids = setup['transaction_ids'] + expected_id = transaction_ids[0] + expected_length = len([expected_id]) + + expected_link = '{}/transactions?head={}&start={}&limit={}&id={}'.format(address,\ + expected_head, start, LIMIT, expected_id) + + try: + response = get_transactions(head_id=expected_head , id=expected_id) + except: + LOGGER.info("Rest Api not reachable") + + + txns = response['data'][:-1] + + 
self.assert_check_transaction_seq(txns, expected_txns, + payload, signer_key) + self.assert_valid_head(response , expected_head) + + def test_api_get_paginated_transaction_list(self, setup): + """Tests GET /transactions is reachbale using paging parameters + """ + LOGGER.info("Starting test for transactions with paging parameters") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = 1 + limit = 1 + + try: + response = get_transactions(start=start , limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_PAGING_QUERY) + + def test_api_get_transaction_bad_paging(self, setup): + """Tests GET /transactions is reachbale using bad paging parameters + """ + LOGGER.info("Starting test for transactions with bad paging parameters") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = -1 + limit = -1 + + try: + response = get_transactions(start=start , limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_COUNT_QUERY) + + def test_api_get_transaction_list_invalid_start(self, setup): + """Tests that GET /transactions is unreachable with invalid start parameter + """ + LOGGER.info("Starting test for transactions with invalid start parameter") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = -1 + + try: + response = get_transactions(start=start) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, 
INVALID_PAGING_QUERY) + + def test_api_get_transaction_list_invalid_limit(self, setup): + """Tests that GET /transactions is unreachable with bad limit parameter + """ + LOGGER.info("Starting test for transactions with bad limit parameter") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + limit = 0 + + try: + response = get_transactions(limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_COUNT_QUERY) + + + def test_api_get_transaction_list_reversed(self, setup): + """verifies that GET /transactions with list reversed + """ + LOGGER.info("Starting test for transactions with list reversed") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + reverse = True + + try: + response = get_transactions(reverse=reverse) + except urllib.error.HTTPError as error: + assert response.code == 400 + + assert response['paging']['start'] == None , "request is not correct" + assert response['paging']['limit'] == None , "request is not correct" + assert bool(response['data']) == True + + def test_api_get_transactions_link_val(self, setup): + """Tests/ validate the transactions parameters with transactions, head, start and limit + """ + try: + transactions_list = get_transactions() + for link in transactions_list: + if(link == 'link'): + assert 'head' in transactions_list['link'] + assert 'start' in transactions_list['link'] + assert 'limit' in transactions_list['link'] + assert 'transactions' in transactions_list['link'] + except urllib.error.HTTPError as error: + assert response.code == 400 + LOGGER.info("Link is not proper for transactions and parameters are missing") + + def test_api_get_transactions_key_params(self, setup): + """Tests/ validate the state key parameters with data, head, link and paging + 
""" + response = get_transactions() + assert 'link' in response + assert 'data' in response + assert 'paging' in response + assert 'head' in response + + def test_api_get_transaction_id_length(self, setup): + """Tests the transaction id length should be 128 hex character long + """ + try: + transaction_list = get_transactions() + for trans in transaction_list['data']: + transaction_ids = trans['header_signature'] + head_len = len(transaction_ids) + except urllib.error.HTTPError as error: + LOGGER.info("Transaction id length is not 128 hex character long") + assert head_len == HEAD_LENGTH + + def test_rest_api_check_transactions_count(self, setup): + """Tests transaction count from transaction list + """ + count =0 + try: + batch_list = get_transactions() + for batch in enumerate(batch_list['data']): + count = count+1 + except urllib.error.HTTPError as error: + LOGGER.info("Transaction count not able to collect") + +class TesttransactionGet(RestApiBaseTest): + def test_api_get_transaction_id(self, setup): + """Tests that GET /transactions/{transaction_id} is reachable + """ + LOGGER.info("Starting test for transaction/{transaction_id}") + expected_head = setup['expected_head'] + expected_id = setup['transaction_ids'][0] + address = setup['address'] + expected_length = 1 + + expected_link = '{}/transactions/{}'.format(address,expected_id) + + try: + response = get_transaction_id(transaction_id=expected_id) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + self.assert_valid_link(response, expected_link) + assert bool(response['data']) == True + + def test_api_get_transaction_bad_id(self, setup): + """Tests that GET /transactions/{transaction_id} is not reachable + with bad id + """ + LOGGER.info("Starting test for transactions/{transaction_id}") + try: + response = 
get_transaction_id(transaction_id=BAD_ID) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + + + + \ No newline at end of file diff --git a/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py b/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py new file mode 100644 index 0000000000..6a7c73281a --- /dev/null +++ b/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py @@ -0,0 +1,128 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ +import pytest +import logging +import json +import urllib.request +import urllib.error +import base64 +import argparse +import cbor +import subprocess +import shlex +import requests +import time +import paramiko +import sys +import threading +import os +import signal + + +from google.protobuf.json_format import MessageToDict + +from base import RestApiBaseTest +from payload import get_signer, create_intkey_transaction , create_batch +from utils import _get_client_address, _send_cmd, _get_node_list, \ + _get_node_chain, check_for_consensus, _stop_validator\ + +from workload import Workload +from ssh import SSH +from thread import Workload_thread, SSH_thread, Consensus_Thread,\ + wait_for_event, wait_for_event_timeout + + + +logging.basicConfig(level=logging.INFO, + format='[%(levelname)s] (%(threadName)-10s) %(message)s', + ) + +WAIT_TIME = 10 +PORT =22 +USERNAME = 'test' +PASSWORD = 'aditya9971' + +BLOCK_TO_CHECK_CONSENSUS = 1 + +pytestmark = pytest.mark.mul + + +class TestMultiple(RestApiBaseTest): + def test_rest_api_mul_val_intk(self): + """Tests that transactions are submitted and committed for + each block that are created by submitting intkey and XO batches + """ + signer = get_signer() + expected_trxns = {} + expected_batches = [] + node_list = [{_get_client_address()}] + + logging.info('Starting Test for Intkey payload') + + logging.info("Creating intkey batches") + + txns = [ + create_intkey_transaction("set", [] , 50 , signer), + create_intkey_transaction("set", [] , 50 , signer), + ] + + for txn in txns: + dict = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + expected_trxns['trxn_id'] = [dict['header_signature']] + expected_trxns['payload'] = [dict['payload']] + + logging.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + node_list = _get_node_list() + + 
chains = _get_node_chain(node_list) + check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) + + def test_rest_api_mul_val_Node(self): + """Tests that leaf nodes are brought up/down in a network + and checks are performed on the respective nodes + """ + leaf_nodes = ['10.223.155.134', '10.223.155.25'] + threads = [] + + workload_thread = Workload_thread() + workload_thread.setName('workload_thread') + workload_thread.start() + + consensus_thread = Consensus_Thread(leaf_nodes) + consensus_thread.setName('consensus_thread') + consensus_thread.setDaemon(True) + consensus_thread.start() + + for node in leaf_nodes: + ssh_thread = SSH_thread(node,PORT,USERNAME,PASSWORD) + ssh_thread.setName('ssh_thread') + threads.append(ssh_thread) + + for thread in threads: + thread.start() + thread.join() + + consensus_thread.join() + workload_thread.join() + + + \ No newline at end of file diff --git a/rest_api/tests/api_test/payload.py b/rest_api/tests/api_test/payload.py new file mode 100644 index 0000000000..8d8f9ef560 --- /dev/null +++ b/rest_api/tests/api_test/payload.py @@ -0,0 +1,415 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import base64 +import argparse +import cbor +import hashlib +import os +import time +import random +import string +import urllib + + +from sawtooth_signing import create_context +from sawtooth_signing import CryptoFactory +from sawtooth_signing import ParseError +from sawtooth_signing.secp256k1 import Secp256k1PrivateKey + +from sawtooth_rest_api.protobuf.validator_pb2 import Message +from sawtooth_rest_api.protobuf import client_batch_submit_pb2 +from sawtooth_rest_api.protobuf import client_batch_pb2 +from sawtooth_rest_api.protobuf import client_list_control_pb2 + +from sawtooth_rest_api.protobuf.batch_pb2 import Batch +from sawtooth_rest_api.protobuf.batch_pb2 import BatchList +from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction + +from google.protobuf.message import DecodeError +from google.protobuf.json_format import MessageToDict +from utils import batch_count, transaction_count, get_batch_statuses, post_batch + +INTKEY_ADDRESS_PREFIX = hashlib.sha512( + 'intkey'.encode('utf-8')).hexdigest()[0:6] + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +WAIT = 300 + + +class IntKeyPayload(object): + def __init__(self, verb, name, value): + self._verb = verb + self._name = name + self._value = value + + self._cbor = None + self._sha512 = None + + def to_hash(self): + return { + 'Verb': self._verb, + 'Name': self._name, + 'Value': self._value + } + + def to_cbor(self): + if self._cbor is None: + self._cbor = cbor.dumps(self.to_hash(), sort_keys=True) + return self._cbor + + def sha512(self): + if self._sha512 is None: + self._sha512 = hashlib.sha512(self.to_cbor()).hexdigest() + return self._sha512 + +class Transactions: + + def __init__(self, invalidtype): + 
self.signer = get_signer() + self.data = {} + self.invalidtype = invalidtype + + def get_batch_valinv_txns(self): + """Setup method for posting batches and returning the + response + """ + txns = [ + self.create_intkey_transaction("set",[],30, self.signer), + self.create_intkey_transaction("set",[],30, self.signer), + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + ] + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_batch_invval_txns(self): + """Setup method for posting batches and returning the + response + """ + txns = [ + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + self.create_intkey_transaction("set",[],30, self.signer), + self.create_intkey_transaction("set",[],30, self.signer), + ] + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_batch_invalid_txns(self): + """Setup method for posting batches and returning the + response + """ + + txns = [ + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + ] + + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_batch_valid_one_txns(self): + """Setup method for posting batches and returning the + response + """ + txns = [ + self.create_intkey_transaction("set",[],30, self.signer), + ] + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_batch_valid_txns(self): + """Setup method for posting batches and returning the + response + """ + txns = [ + self.create_intkey_transaction("set",[],30, self.signer), + self.create_intkey_transaction("set",[],30, self.signer), + self.create_intkey_transaction("set",[],30, self.signer), + ] + self.data = 
self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_txns_commit_data(self, txns, signer, data): + """Setup method for posting batches and returning the + response + """ + expected_trxn_ids = [] + expected_batch_ids = [] + expected_trxns = {} + expected_batches = [] + initial_batch_length = batch_count() + initial_transaction_length = transaction_count() + + LOGGER.info("Creating intkey transactions with set operations") + + for txn in txns: + dict = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = dict['header_signature'] + expected_trxn_ids.append(trxn_id) + + self.data['expected_trxn_ids'] = expected_trxn_ids + expected_trxns['trxn_id'] = [dict['header_signature']] + expected_trxns['payload'] = [dict['payload']] + + LOGGER.info("Creating batches for transactions 3trn/batch") + + batches = [create_batch(txns, signer)] + for batch in batches: + dict = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = dict['header_signature'] + expected_batches.append(batch_id) + length_batches = len(expected_batches) + length_transactions = len(expected_trxn_ids) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + try: + for batch in post_batch_list: + response = post_batch(batch) + batch_id = dict['header_signature'] + expected_batches.append(batch_id) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + json_data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(json_data['error']['title']) + LOGGER.info(json_data['error']['message']) + LOGGER.info(json_data['error']['code']) + + self.data['response'] = response['data'][0]['status'] + self.data['initial_batch_length'] = initial_batch_length + self.data['initial_trn_length'] = initial_transaction_length + self.data['expected_batch_length'] = initial_batch_length + length_batches + 
self.data['expected_trn_length'] = initial_transaction_length + length_transactions + return self.data + + def create_intkey_transaction(self, verb, deps, count, signer): + words = random_word_list(count) + name=random.choice(words) + payload = IntKeyPayload( + verb=verb,name=name,value=1) + + addr = make_intkey_address(name) + data = self.get_txns_data(addr,deps, payload) + return data + + def create_invalid_intkey_transaction(self, verb, deps, count, signer, invalidtye): + words = random_word_list(count) + name=random.choice(words) + + if invalidtye=="addr": + payload = IntKeyPayload( + verb=verb,name=name,value=1) + + INVALID_INTKEY_ADDRESS_PREFIX = hashlib.sha512( + 'invalid'.encode('utf-8')).hexdigest()[0:6] + + addr = INVALID_INTKEY_ADDRESS_PREFIX + hashlib.sha512( + name.encode('utf-8')).hexdigest()[-64:] + + elif invalidtye=="min": + payload = IntKeyPayload( + verb=verb,name=name,value=-1) + addr = make_intkey_address(name) + + elif invalidtye=="str": + payload = IntKeyPayload( + verb=verb,name=name,value="str") + addr = make_intkey_address(name) + + elif invalidtye=="max": + payload = IntKeyPayload( + verb=verb,name=name,value=4294967296) + addr = make_intkey_address(name) + + elif invalidtye=="attr": + payload = IntKeyPayload( + verb="verb",name=name,value=1) + addr = make_intkey_address(name) + + data = self.get_txns_data(addr,deps, payload) + return data + + def get_txns_data(self, addr, deps, payload): + + header = TransactionHeader( + signer_public_key=self.signer.get_public_key().as_hex(), + family_name='intkey', + family_version='1.0', + inputs=[addr], + outputs=[addr], + dependencies=deps, + payload_sha512=payload.sha512(), + batcher_public_key=self.signer.get_public_key().as_hex()) + + header_bytes = header.SerializeToString() + + signature = self.signer.sign(header_bytes) + + transaction = Transaction( + header=header_bytes, + payload=payload.to_cbor(), + header_signature=signature) + + return transaction + + + + +def 
create_intkey_transaction(verb, deps, count, signer): + words = random_word_list(count) + name=random.choice(words) + payload = IntKeyPayload( + verb=verb,name=name,value=1) + + addr = make_intkey_address(name) + + header = TransactionHeader( + signer_public_key=signer.get_public_key().as_hex(), + family_name='intkey', + family_version='1.0', + inputs=[addr], + outputs=[addr], + dependencies=deps, + payload_sha512=payload.sha512(), + batcher_public_key=signer.get_public_key().as_hex()) + + header_bytes = header.SerializeToString() + + signature = signer.sign(header_bytes) + + transaction = Transaction( + header=header_bytes, + payload=payload.to_cbor(), + header_signature=signature) + + return transaction + +def create_invalid_intkey_transaction(verb, deps, count, signer): + words = random_word_list(count) + name=random.choice(words) + payload = IntKeyPayload( + verb=verb,name=name,value=1) + + INVALID_INTKEY_ADDRESS_PREFIX = hashlib.sha512( + 'invalid'.encode('utf-8')).hexdigest()[0:6] + + addr = INVALID_INTKEY_ADDRESS_PREFIX + hashlib.sha512( + name.encode('utf-8')).hexdigest()[-64:] + + header = TransactionHeader( + signer_public_key=signer.get_public_key().as_hex(), + family_name='intkey', + family_version='1.0', + inputs=[addr], + outputs=[addr], + dependencies=deps, + payload_sha512=payload.sha512(), + batcher_public_key=signer.get_public_key().as_hex()) + + header_bytes = header.SerializeToString() + + signature = signer.sign(header_bytes) + + transaction = Transaction( + header=header_bytes, + payload=payload.to_cbor(), + header_signature=signature) + + return transaction + +def create_intkey_same_transaction(verb, deps, count, signer): + name='a' + payload = IntKeyPayload( + verb=verb,name=name,value=1) + + addr = make_intkey_address(name) + + header = TransactionHeader( + signer_public_key=signer.get_public_key().as_hex(), + family_name='intkey', + family_version='1.0', + inputs=[addr], + outputs=[addr], + dependencies=deps, + 
payload_sha512=payload.sha512(), + batcher_public_key=signer.get_public_key().as_hex()) + + header_bytes = header.SerializeToString() + + signature = signer.sign(header_bytes) + + transaction = Transaction( + header=header_bytes, + payload=payload.to_cbor(), + header_signature=signature) + + return transaction + + +def create_batch(transactions, signer): + transaction_signatures = [t.header_signature for t in transactions] + + header = BatchHeader( + signer_public_key=signer.get_public_key().as_hex(), + transaction_ids=transaction_signatures) + + header_bytes = header.SerializeToString() + + signature = signer.sign(header_bytes) + + batch = Batch( + header=header_bytes, + transactions=transactions, + header_signature=signature) + + return batch + +def get_signer(): + context = create_context('secp256k1') + private_key = context.new_random_private_key() + crypto_factory = CryptoFactory(context) + return crypto_factory.new_signer(private_key) + + +def make_intkey_address(name): + return INTKEY_ADDRESS_PREFIX + hashlib.sha512( + name.encode('utf-8')).hexdigest()[-64:] + + +def random_word(): + return ''.join([random.choice(string.ascii_letters) for _ in range(0, 6)]) + + +def random_word_list(count): + if os.path.isfile('/usr/share/dict/words'): + with open('/usr/share/dict/words', 'r') as fd: + return [x.strip() for x in fd.readlines()[0:count]] + else: + return [random_word() for _ in range(0, count)] \ No newline at end of file diff --git a/rest_api/tests/api_test/post/test_rest_api_post.py b/rest_api/tests/api_test/post/test_rest_api_post.py new file mode 100644 index 0000000000..63c4035566 --- /dev/null +++ b/rest_api/tests/api_test/post/test_rest_api_post.py @@ -0,0 +1,408 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ +import pytest +import logging +import json +import urllib.request +import urllib.error +import base64 +import argparse +import cbor +import subprocess +import shlex +import requests +import hashlib + +from google.protobuf.json_format import MessageToDict + + +from sawtooth_signing import create_context +from sawtooth_signing import CryptoFactory +from sawtooth_signing import ParseError +from sawtooth_signing.secp256k1 import Secp256k1PrivateKey + +from sawtooth_rest_api.protobuf.batch_pb2 import Batch +from sawtooth_rest_api.protobuf.batch_pb2 import BatchList +from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction + +from utils import post_batch, get_state_list , get_blocks , get_transactions, \ + get_batches , get_state_address, check_for_consensus,\ + _get_node_list, _get_node_chains, post_batch_no_endpoint + + +from payload import get_signer, create_intkey_transaction, create_batch,\ + create_intkey_same_transaction + +from base import RestApiBaseTest +from fixtures import setup_valinv_txns, setup_invval_txns, setup_invalid_txns + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +BAD_PROTOBUF = b'BAD_PROTOBUF' +EMPTY_BATCH = b'' +NO_BATCHES_SUBMITTED = 34 +BAD_PROTOBUF_SUBMITTED = 35 +BATCH_QUEUE_FULL = 31 +INVALID_BATCH = 30 +WRONG_CONTENT_TYPE = 43 + +BLOCK_TO_CHECK_CONSENSUS = 1 + +pytestmark = 
pytest.mark.post +data = {} + +class TestPost(RestApiBaseTest): + + def test_rest_api_post_batch(self): + """Tests that transactions are submitted and committed for + each block that are created by submitting intkey batches + with set operations + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + initial_state_length = len(get_state_list()['data']) + + LOGGER.info("Creating intkey transactions with set operations") + txns = [ + create_intkey_transaction("set", [] , 50 , signer), + create_intkey_transaction("set", [] , 50 , signer), + ] + + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + for batch in post_batch_list: + try: + response = post_batch(batch) + except urllib.error.HTTPError as error: + data = error.fp.read().decode('utf-8') + LOGGER.info(data) + + block_batch_ids = [block['header']['batch_ids'][0] for block in get_blocks()['data']] + state_addresses = [state['address'] for state in get_state_list()['data']] + state_head_list = [get_state_address(address)['head'] for address in state_addresses] + committed_transaction_list = get_transactions()['data'] + + if response['data'][0]['status'] == 'COMMITTED': + LOGGER.info('Batch is committed') + + for batch in expected_batch_ids: + if batch in block_batch_ids: + LOGGER.info("Block is created for the 
respective batch") + + elif response['data'][0]['status'] == 'INVALID': + LOGGER.info('Batch submission failed') + + if any(['message' in response['data'][0]['invalid_transactions'][0]]): + message = response['data'][0]['invalid_transactions'][0]['message'] + LOGGER.info(message) + + for batch in batch_ids: + if batch in block_batch_ids: + LOGGER.info("Block is created for the respective batch") + + final_state_length = len(get_state_list()['data']) + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert final_state_length == initial_state_length + len(expected_batch_ids) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + def test_rest_api_no_batches(self): + LOGGER.info("Starting test for batch with bad protobuf") + + try: + response = post_batch(batch=EMPTY_BATCH) + except urllib.error.HTTPError as error: + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + self.assert_valid_error(response, NO_BATCHES_SUBMITTED) + + def test_rest_api_bad_protobuf(self): + LOGGER.info("Starting test for batch with bad protobuf") + + try: + response = post_batch(batch=BAD_PROTOBUF) + except urllib.error.HTTPError as error: + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + self.assert_valid_error(response, BAD_PROTOBUF_SUBMITTED) + + def test_rest_api_post_wrong_header(self,setup): + """Tests rest api by posting with wrong header + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + initial_state_length = len(get_state_list()) + + LOGGER.info("Creating intkey transactions with set operations") + txns = [ + create_intkey_transaction("set", [] , 50 , signer), + create_intkey_transaction("set", [] , 50 , signer), + create_intkey_transaction("set", [] , 50 , signer), + ] + + 
for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + for batch in post_batch_list: + try: + response = post_batch(batch,headers="True") + except urllib.error.HTTPError as e: + errdata = e.file.read().decode("utf-8") + error = json.loads(errdata) + LOGGER.info(error['error']['message']) + assert (json.loads(errdata)['error']['code']) == 42 + assert e.code == 400 + + def test_rest_api_post_same_txns(self, setup): + """Tests the rest-api by submitting multiple transactions with same key + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + initial_state_length = len(get_state_list()) + + LOGGER.info("Creating intkey transactions with set operations") + txns = [ + create_intkey_same_transaction("set", [] , 50 , signer), + create_intkey_same_transaction("set", [] , 50 , signer), + create_intkey_same_transaction("set", [] , 50 , signer), + ] + + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + 
preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + for batch in post_batch_list: + try: + response = post_batch(batch,headers="None") + assert response['data'][0]['status'] == "INVALID" + except urllib.error.HTTPError as e: + errdata = e.file.read().decode("utf-8") + error = json.loads(errdata) + LOGGER.info(error['error']['message']) + assert (json.loads(errdata)['error']['code']) == 42 + assert e.code == 400 + + def test_rest_api_multiple_txns_batches(self, setup): + """Tests rest-api state by submitting multiple + transactions in multiple batches + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + initial_state_length = len(get_state_list()) + + LOGGER.info("Creating intkey transactions with set operations") + txns = [ + create_intkey_transaction("set", [] , 50 , signer), + create_intkey_transaction("set", [] , 50 , signer), + create_intkey_transaction("set", [] , 50 , signer), + ] + + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txns], signer)] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + for batch in post_batch_list: + try: + response = post_batch(batch,headers="None") + response = get_state_list() + except 
urllib.error.HTTPError as e: + errdata = e.file.read().decode("utf-8") + error = json.loads(errdata) + LOGGER.info(error['error']['message']) + assert (json.loads(errdata)['error']['code']) == 17 + assert e.code == 400 + final_state_length = len(get_state_list()) + assert initial_state_length == final_state_length + + def test_api_post_batch_different_signer(self, setup): + signer_trans = get_signer() + intkey=create_intkey_transaction("set",[],50,signer_trans) + translist=[intkey] + signer_batch = get_signer() + batch= create_batch(translist,signer_batch) + batch_list=[BatchList(batches=[batch]).SerializeToString()] + for batc in batch_list: + try: + response = post_batch(batc) + print(response) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + assert data['error']['code'] == 30 + assert data['error']['title'] =='Submitted Batches Invalid' + + def test_rest_api_post_no_endpoint(self, setup): + + signer_trans = get_signer() + intkey=create_intkey_transaction("set",[],50,signer_trans) + translist=[intkey] + batch= create_batch(translist,signer_trans) + batch_list=[BatchList(batches=[batch]).SerializeToString()] + for batc in batch_list: + try: + response = post_batch_no_endpoint(batc) + except urllib.error.HTTPError as e: + errdata = e.file.read().decode("utf-8") + errcode = e.code + assert errcode == 404 + +class TestPostMulTxns(RestApiBaseTest): + def test_txn_invalid_addr(self, setup_invalid_txns): + initial_batch_length = setup_invalid_txns['initial_batch_length'] + expected_batch_length = setup_invalid_txns['expected_batch_length'] + initial_trn_length = setup_invalid_txns['initial_trn_length'] + expected_trn_length = setup_invalid_txns['expected_trn_length'] + assert initial_batch_length < expected_batch_length + assert initial_trn_length < expected_trn_length + assert 
setup_invalid_txns['response'] == 'INVALID' + + def test_txn_valid_invalid_txns(self, setup_valinv_txns): + #data=Txns.setup_batch_valinv_txns() + initial_batch_length = setup_valinv_txns['initial_batch_length'] + expected_batch_length = setup_valinv_txns['expected_batch_length'] + initial_trn_length = setup_valinv_txns['initial_trn_length'] + expected_trn_length = setup_valinv_txns['expected_trn_length'] + assert initial_batch_length < expected_batch_length + assert initial_trn_length < expected_trn_length + assert setup_valinv_txns['response'] == 'INVALID' + + def test_txn_invalid_valid_txns(self, setup_invval_txns): + initial_batch_length = setup_invval_txns['initial_batch_length'] + expected_batch_length = setup_invval_txns['expected_batch_length'] + initial_trn_length = setup_invval_txns['initial_trn_length'] + expected_trn_length = setup_invval_txns['expected_trn_length'] + assert initial_batch_length < expected_batch_length + assert initial_trn_length < expected_trn_length + assert setup_invval_txns['response'] == 'INVALID' + + + + + + + diff --git a/rest_api/tests/api_test/pytest.ini b/rest_api/tests/api_test/pytest.ini new file mode 100644 index 0000000000..a444254e4b --- /dev/null +++ b/rest_api/tests/api_test/pytest.ini @@ -0,0 +1,10 @@ +[pytest] +addopts = -s -v --json-report --json-report-file=report.json +python_files = test_rest*.py +log_cli_date_format = %Y-%m-%d %H:%M:%S +log_cli_format = %(asctime)s %(levelname)s %(message)s +log_date_format = %Y-%m-%d %H:%M:%S +log_file = pytest-logs.txt +log_file_date_format = %Y-%m-%d %H:%M:%S +log_file_format = %(asctime)s %(levelname)s %(message)s +log_format = %(asctime)s %(levelname)s %(message)s diff --git a/rest_api/tests/api_test/scenario/test_rest_api_scenario.py b/rest_api/tests/api_test/scenario/test_rest_api_scenario.py new file mode 100644 index 0000000000..bc4510c05d --- /dev/null +++ b/rest_api/tests/api_test/scenario/test_rest_api_scenario.py @@ -0,0 +1,130 @@ +# Copyright 2018 Intel Corporation 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error +import base64 +import argparse +import cbor +import subprocess +import shlex +import requests +import time + +from google.protobuf.json_format import MessageToDict + +from payload import get_signer, create_intkey_transaction , create_batch +from utils import _get_client_address, _send_cmd, _get_node_list, \ + _get_node_chain, check_for_consensus + +from base import RestApiBaseTest + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) +WAIT = 300 + +WORKLOAD_TIME = 5 + +BLOCK_TO_CHECK_CONSENSUS = 1 + +INTKEY_PREFIX = '1cf126' +XO_PREFIX = '5b7349' + + +pytestmark = pytest.mark.scenario + +class TestScenario(RestApiBaseTest): + def test_rest_api_mul_val_intk_xo(self): + """Tests that transactions are submitted and committed for + each block that are created by submitting intkey and XO batches + """ + signer = get_signer() + expected_trxns = {} + expected_batches = [] + node_list = [{_get_client_address()}] + + LOGGER.info('Starting Test for Intkey and Xo as payload') + + LOGGER.info("Creating intkey batches") + + txns = [ + create_intkey_transaction("set", [] , 50 , signer), + create_intkey_transaction("set", [] , 50 , signer), + ] + + for txn in txns: + dict = MessageToDict( + txn, + including_default_value_fields=True, + 
preserving_proto_field_name=True) + + expected_trxns['trxn_id'] = [dict['header_signature']] + expected_trxns['payload'] = [dict['payload']] + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + LOGGER.info("Creating keys for xo users") + + for username in ('aditya', 'singh'): + _send_cmd('sawtooth keygen {} --force'.format(username)) + + + LOGGER.info("Submitting xo batches to the handlers") + + + xo_cmds = ( + 'xo create game-1 --username aditya', + 'xo take game-1 1 --username singh', + 'xo take game-1 4 --username aditya', + 'xo take game-1 2 --username singh', + ) + + for cmd in xo_cmds: + _send_cmd( + '{} --url {} --wait {}'.format( + cmd, + _get_client_address(), + WAIT)) + xo_cli_cmds = ( + 'xo list', + 'xo show game-1', + ) + + for cmd in xo_cli_cmds: + _send_cmd( + '{} --url {}'.format( + cmd, + _get_client_address())) + + xo_delete_cmds = ( + 'xo delete game-1 --username aditya', + ) + + for cmd in xo_delete_cmds: + _send_cmd( + '{} --url {} --wait {}'.format( + cmd, + _get_client_address(), + WAIT)) + + node_list = _get_node_list() + + chains = _get_node_chain(node_list) + check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) diff --git a/rest_api/tests/api_test/ssh.py b/rest_api/tests/api_test/ssh.py new file mode 100644 index 0000000000..1f01284da4 --- /dev/null +++ b/rest_api/tests/api_test/ssh.py @@ -0,0 +1,36 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ + +import paramiko + + +class SSH(): + def do_ssh(self,hostname,port,username,password): + try: + ssh=paramiko.SSHClient() + ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + ssh.connect(hostname,port,username,password) + except paramiko.AuthenticationException: + print("Failed to connect to {} due to wrong username/password".format(hostname)) + exit(1) + except: + print("Failed to connect to {}".format(hostname)) + exit(2) + + command = 'ps aux | grep sawtooth' + stdin,stdout,stderr=ssh.exec_command(command) + outlines=stdout.readlines() + resp=''.join(outlines) + ssh.close() \ No newline at end of file diff --git a/rest_api/tests/api_test/thread.py b/rest_api/tests/api_test/thread.py new file mode 100644 index 0000000000..e58ed4a121 --- /dev/null +++ b/rest_api/tests/api_test/thread.py @@ -0,0 +1,116 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ +import queue +import threading +import os +import logging + + +from workload import Workload +from ssh import SSH +from utils import _get_node_chains + +logging.basicConfig(level=logging.INFO, + format='(%(threadName)-10s) %(message)s', + ) + + +def wait_for_event(e): + """Wait for the event to be set before doing anything""" + logging.debug('wait_for_event starting') + event_is_set = e.wait() + logging.debug('event set: %s', event_is_set) + + +def wait_for_event_timeout(e, t): + """Wait t seconds and then timeout""" + while not e.isSet(): + logging.debug('wait_for_event_timeout starting') + event_is_set = e.wait(t) + logging.debug('event set: %s', event_is_set) + if event_is_set: + logging.debug('processing event') + else: + logging.debug('doing other work') + + +class Workload_thread(threading.Thread): + def __init__(self): + threading.Thread.__init__(self) + self.shutdown_flag = threading.Event() + + def run(self): + logging.info('Starting Workload') + workload = Workload() + workload.do_workload() + return + + def stop(self): + pass + + +class SSH_thread(threading.Thread): + def __init__(self, hostname, port, username, password): + threading.Thread.__init__(self) + self.hostname = hostname + self.port = port + self.username = username + self.password = password + + def run(self): + logging.info('starting ssh thread') + logging.info('Logging into Validation Network') + self.ssh() + logging.info('Exiting ssh thread') + return + + def ssh(self): + logging.info('creating ssh object') + ssh = SSH() + logging.info('performing ssh') + ssh.do_ssh(self.hostname, self.port, self.username, self.password) + + def stop_validator(self): + loggin.info("stopping validator service") + + def start_validator(self): + loggin.info("starting validator service") + + +class Consensus_Thread(threading.Thread): + def __init__(self, nodes): + threading.Thread.__init__(self) + self.shutdown_flag = 
threading.Event() + self.nodes = nodes + + def run(self): + logging.info('starting consensus thread') + logging.info('calculating block list from the nodes') + chains = self.calculate_block_list() + self.compare_chains(chains) + return + + def calculate_block_list(self): + logging.info('getting block list from the nodes') + node_list = ['http://10.223.155.43:8008'] + chains = _get_node_chains(node_list) + return chains + + def compare_chains(self, chains): + logging.info('comparing chains for equality') + + + def calculate_sync_time(self): + pass \ No newline at end of file diff --git a/rest_api/tests/api_test/utils.py b/rest_api/tests/api_test/utils.py new file mode 100644 index 0000000000..3cf6a3c2b1 --- /dev/null +++ b/rest_api/tests/api_test/utils.py @@ -0,0 +1,450 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error +from urllib.request import urlopen +from urllib.error import HTTPError +from urllib.error import URLError +import base64 +import argparse +import cbor +import subprocess +import shlex +import requests +import hashlib +import os +import time +import socket +import netifaces + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +WAIT = 300 + + +def get_blocks(head_id=None , id=None , start=None , limit=None , reverse=None): + if all(v is not None for v in [head_id , id]): + response = query_rest_api('/blocks?head={}&id={}'.format(head_id , id)) + return response + if all(v is not None for v in [start , limit]): + response = query_rest_api('/blocks?start={}&limit={}'.format(start , limit)) + return response + if limit is not None: + response = query_rest_api('/blocks?limit=%s'% limit) + return response + if start is not None: + response = query_rest_api('/blocks?start=%s'% start) + return response + if head_id is not None: + response = query_rest_api('/blocks?head=%s'% head_id) + return response + if id is not None: + response = query_rest_api('/blocks?id=%s'% id) + return response + if reverse: + response = query_rest_api('/blocks?reverse') + return response + else: + response = query_rest_api('/blocks') + return response + + +def get_batches(head_id=None , id=None , start=None , limit=None, reverse=None): + if all(v is not None for v in [head_id , id]): + response = query_rest_api('/batches?head={}&id={}'.format(head_id , id)) + return response + if all(v is not None for v in [start , limit]): + response = query_rest_api('/batches?start={}&limit={}'.format(start , limit)) + return response + if limit is not None: + response = query_rest_api('/batches?limit=%s'% limit) + return response + if start is not None: + response = query_rest_api('/batches?start=%s'% start) + return 
response + if head_id is not None: + response = query_rest_api('/batches?head=%s'% head_id) + return response + if id is not None: + response = query_rest_api('/batches?id=%s'% id) + return response + if reverse: + response = query_rest_api('/batches?reverse') + return response + else: + response = query_rest_api('/batches') + return response + +def get_batch_id(batch_id): + response = query_rest_api('/batches/%s' % batch_id) + return response + +def get_block_id(block_id): + response = query_rest_api('/blocks/%s' % block_id) + return response + +def get_transaction_id(transaction_id): + response = query_rest_api('/transactions/%s' % transaction_id) + return response + +def get_peers(): + response = query_rest_api('/peers') + return response + +def get_transactions(head_id=None , id=None , start=None , limit=None , reverse=None): + if all(v is not None for v in [head_id , id]): + response = query_rest_api('/transactions?head={}&id={}'.format(head_id , id)) + return response + if all(v is not None for v in [start , limit]): + response = query_rest_api('/transactions?start={}&limit={}'.format(start , limit)) + return response + if limit is not None: + response = query_rest_api('/transactions?limit=%s'% limit) + return response + if start is not None: + response = query_rest_api('/transactions?start=%s'% start) + return response + if head_id is not None: + response = query_rest_api('/transactions?head=%s'% head_id) + return response + if id is not None: + response = query_rest_api('/transactions?id=%s'% id) + return response + if reverse: + response = query_rest_api('/transactions?reverse') + return response + else: + response = query_rest_api('/transactions') + return response + +def get_state_list(head_id=None , address=None , start=None , limit=None , reverse=None): + if all(v is not None for v in [head_id , address]): + response = query_rest_api('/state?head={}&address={}'.format(head_id , address)) + return response + if all(v is not None for v in [start , 
limit]): + response = query_rest_api('/state?start={}&limit={}'.format(start , limit)) + return response + if limit is not None: + response = query_rest_api('/state?limit=%s'% limit) + return response + if start is not None: + response = query_rest_api('/state?start=%s'% start) + return response + if head_id is not None: + response = query_rest_api('/state?head=%s'% head_id) + return response + if address is not None: + response = query_rest_api('/state?address=%s'% address) + return response + if reverse: + response = query_rest_api('/state?reverse') + return response + else: + response = query_rest_api('/state') + return response + +def get_state_address(address): + response = query_rest_api('/state/%s' % address) + return response + +def post_batch(batch, headers="None"): + if headers=="True": + headers = {'Content-Type': 'application/json'} + else: + headers = {'Content-Type': 'application/octet-stream'} + + response = query_rest_api( + '/batches', data=batch, headers=headers) + + response = submit_request('{}&wait={}'.format(response['link'], WAIT)) + return response + +def post_batch_no_endpoint(batch, headers="None"): + if headers=="True": + headers = {'Content-Type': 'application/json'} + else: + headers = {'Content-Type': 'application/octet-stream'} + + response = query_rest_api( + '/', data=batch, headers=headers) + + response = submit_request('{}&wait={}'.format(response['link'], WAIT)) + return response + +def query_rest_api(suffix='', data=None, headers=None): + if headers is None: + headers = {} + url = _get_client_address() + suffix + return submit_request(urllib.request.Request(url, data, headers)) + +def submit_request(request): + response = urllib.request.urlopen(request).read().decode('utf-8') + return json.loads(response) + +def _delete_genesis(): + folder = '/var/lib/sawtooth' + for the_file in os.listdir(folder): + file_path = os.path.join(folder, the_file) + try: + if os.path.isfile(file_path): + os.unlink(file_path) + except Exception as e: 
+ print(e) + +def _get_node_chain(node_list): + chain_list = [] + for node in node_list: + try: + result = requests.get(node + "/blocks").json() + chain_list.append(result['data']) + except: + LOGGER.warning("Couldn't connect to %s REST API", node) + return chain_list + +def _get_node_list(): + client_address = _get_client_address() + node_list = [_make_http_address(peer) for peer in _get_peers_list(client_address)] + node_list.append(_get_client_address()) + return node_list + + +def _get_peers_list(rest_client, fmt='json'): + cmd_output = _run_peer_command( + 'sawtooth peer list --url {} --format {}'.format( + rest_client, + fmt)) + + if fmt == 'json': + parsed = json.loads(cmd_output) + + elif fmt == 'csv': + parsed = cmd_output.split(',') + + return set(parsed) + +def _get_node_chains(node_list): + chain_list = [] + for node in node_list: + try: + result = requests.get(node + "/blocks").json() + chain_list.append(result['data']) + except: + LOGGER.warning("Couldn't connect to %s REST API", node) + return chain_list + +def check_for_consensus(chains , block_num): + LOGGER.info("Checking Consensus on block number %s" , block_num) + blocks = [] + for chain in chains: + if chain is not None: + block = chain[-(block_num + 1)] + blocks.append(block) + else: + return False + block0 = blocks[0] + for block in blocks[1:]: + if block0["header_signature"] != block["header_signature"]: + LOGGER.error("Validators not in consensus on block %s", block_num) + LOGGER.error("BLOCK DUMP: %s", blocks) + return False + else: + LOGGER.info('Validators in Consensus on block number %s' , block_num) + return True + +def _run_peer_command(command): + return subprocess.check_output( + shlex.split(command) + ).decode().strip().replace("'", '"') + +def _send_cmd(cmd_str): + LOGGER.info('Sending %s', cmd_str) + + subprocess.run( + shlex.split(cmd_str), + check=True) + +def _make_http_address(node_number): + node = node_number.replace('tcp' , 'http') + node_number = node.replace('8800' , 
'8008') + return node_number + +def _get_client_address(): + command = "hostname -I | awk '{print $1}'" + node_ip = subprocess.check_output(command , shell=True).decode().strip().replace("'", '"') + return 'http://' + node_ip + ':8008' + +def _start_validator(): + LOGGER.info('Starting the validator') + cmd = "sudo -u sawtooth sawtooth-validator -vv" + subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) + +def _stop_validator(): + LOGGER.info('Stopping the validator') + cmd = "sudo kill -9 $(ps aux | grep 'sawtooth-validator' | awk '{print $2}')" + subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) + + +def _start_settings_tp(): + LOGGER.info('Starting settings-tp') + cmd = " sudo -u sawtooth settings-tp -vv " + subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) + +def _stop_settings_tp(): + LOGGER.info('Stopping the settings-tp') + cmd = "sudo kill -9 $(ps aux | grep 'settings-tp' | awk '{print $2}')" + subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) + +def _create_genesis(): + LOGGER.info("creating the genesis data") + _create_genesis_batch() + os.chdir("/home/aditya") + cmd = "sawadm genesis config-genesis.batch" + subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) + + +def _create_genesis_batch(): + LOGGER.info("creating the config genesis batch") + os.chdir("/home/aditya") + cmd = "sawset genesis --force" + subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) + + +def post_batch_statuses(batch): + headers = {'content-type': 'application/json'} + response = query_rest_api( + '/batch_statuses', data=batch, headers=headers) + return response + +def get_batch_statuses(batch_ids=None, wait=None): + try: + batches = ",".join(batch_ids) + except: + batches = None + + if batches: + if wait == 'default': + response = query_rest_api('/batch_statuses?wait&id={}'.format(batches)) + return response + elif wait: + response = query_rest_api('/batch_statuses?id={}&wait={}'.format(batches,wait)) + return response + else: + response = 
query_rest_api('/batch_statuses?id=%s' % batches) + return response + else: + response = query_rest_api('/batch_statuses') + return response + +def get_state_limit(limit): + response = query_rest_api('/state?limit=%s' % limit) + return response + + +def get_reciepts(reciept_id): + response = query_rest_api('/receipts?id=%s' % reciept_id) + return response + +def post_receipts(receipts): + headers = {'Content-Type': 'application/json'} + response = query_rest_api('/receipts', data=receipts, headers=headers) + return response + +def batch_count(): + batch_list = get_batches() + count = len(batch_list['data']) + try: + next_position = batch_list['paging']['next_position'] + except: + next_position = None + + while(next_position): + batch_list = get_batches(start=next_position) + try: + next_position = batch_list['paging']['next_position'] + except: + next_position = None + + count += len(batch_list['data']) + return count + +def transaction_count(): + transaction_list = get_transactions() + count = len(transaction_list['data']) + try: + next_position = transaction_list['paging']['next_position'] + except: + next_position = None + + while(next_position): + transaction_list = get_transactions(start=next_position) + try: + next_position = transaction_list['paging']['next_position'] + except: + next_position = None + + count += len(transaction_list['data']) + return count + +def _create_expected_link(expected_ids): + for id in expected_ids: + link = '{}/batch_statuses?id={},{}'.format(address, id) + return link + +def _get_batch_list(response): + batch_list = response['data'] + + try: + next_position = response['paging']['next_position'] + except: + next_position = None + + while(next_position): + response = get_batches(start=next_position) + data_list = response['data'] + try: + next_position = response['paging']['next_position'] + except: + next_position = None + + batch_list += data_list + + return batch_list + + +def _get_transaction_list(response): + transaction_list 
= response['data'] + + try: + next_position = response['paging']['next_position'] + except: + next_position = None + + while(next_position): + response = get_transactions(start=next_position) + data_list = response['data'] + try: + next_position = response['paging']['next_position'] + except: + next_position = None + + transaction_list += data_list + + return transaction_list diff --git a/rest_api/tests/api_test/validators_down.sh b/rest_api/tests/api_test/validators_down.sh new file mode 100644 index 0000000000..ffc1472ced --- /dev/null +++ b/rest_api/tests/api_test/validators_down.sh @@ -0,0 +1,3 @@ + #!/bin/bash + sudo kill -9 $(ps aux | grep 'sawtooth' | awk '{print $2}') + echo "$(ps aux | grep 'sawtooth')" diff --git a/rest_api/tests/api_test/validators_up.sh b/rest_api/tests/api_test/validators_up.sh new file mode 100644 index 0000000000..41529247f0 --- /dev/null +++ b/rest_api/tests/api_test/validators_up.sh @@ -0,0 +1,6 @@ + #!/bin/bash + +sudo -u sawtooth sawtooth-validator -vv & +sudo -u sawtooth settings-tp -vv & +sudo -u sawtooth intkey-tp-python -C tcp://127.0.0.1:4004 -v & +sudo -u sawtooth xo-tp-python -C tcp://127.0.0.1:4004 -v & diff --git a/rest_api/tests/api_test/workload.py b/rest_api/tests/api_test/workload.py new file mode 100644 index 0000000000..7dbfd41591 --- /dev/null +++ b/rest_api/tests/api_test/workload.py @@ -0,0 +1,29 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ + +import subprocess +import logging + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +class Workload(): + def do_workload(self): + LOGGER.info('Starting Intkey Workload') +# cmd = "intkey workload --url 10.223.155.43:8008 --rate 1 -d 1" +# subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) + + def stop_workload(self): + pass \ No newline at end of file diff --git a/sdk/examples/devmode_rust/src/engine.rs b/sdk/examples/devmode_rust/src/engine.rs index 21c6666a1c..f3257a39e0 100644 --- a/sdk/examples/devmode_rust/src/engine.rs +++ b/sdk/examples/devmode_rust/src/engine.rs @@ -27,13 +27,23 @@ use sawtooth_sdk::consensus::{engine::*, service::Service}; const DEFAULT_WAIT_TIME: u64 = 0; +#[derive(Default)] +struct LogGuard { + not_ready_to_summarize: bool, + not_ready_to_finalize: bool, +} + pub struct DevmodeService { service: Box, + log_guard: LogGuard, } impl DevmodeService { pub fn new(service: Box) -> Self { - DevmodeService { service } + DevmodeService { + service, + log_guard: LogGuard::default(), + } } fn get_chain_head(&mut self) -> Block { @@ -63,18 +73,26 @@ impl DevmodeService { debug!("Finalizing block"); let mut summary = self.service.summarize_block(); while let Err(Error::BlockNotReady) = summary { - warn!("Block not ready to summarize"); + if !self.log_guard.not_ready_to_summarize { + self.log_guard.not_ready_to_summarize = true; + warn!("Block not ready to summarize"); + } sleep(time::Duration::from_secs(1)); summary = self.service.summarize_block(); } + self.log_guard.not_ready_to_summarize = false; let consensus: Vec = create_consensus(&summary.expect("Failed to summarize block")); let mut block_id = self.service.finalize_block(consensus.clone()); while let Err(Error::BlockNotReady) = block_id { - warn!("Block not ready to finalize"); + if !self.log_guard.not_ready_to_finalize { + self.log_guard.not_ready_to_finalize = true; + 
warn!("Block not ready to finalize"); + } sleep(time::Duration::from_secs(1)); block_id = self.service.finalize_block(consensus.clone()); } + self.log_guard.not_ready_to_finalize = false; block_id.expect("Failed to finalize block") } diff --git a/validator/src/database/lmdb_ffi.rs b/validator/src/database/lmdb_ffi.rs index 6adb38aa33..9ac3874e8d 100644 --- a/validator/src/database/lmdb_ffi.rs +++ b/validator/src/database/lmdb_ffi.rs @@ -34,7 +34,7 @@ pub enum ErrorCode { } #[no_mangle] -pub extern "C" fn lmdb_database_new( +pub unsafe extern "C" fn lmdb_database_new( path: *const c_char, file_size: usize, indexes_ptr: *mut py_ffi::PyObject, @@ -44,7 +44,7 @@ pub extern "C" fn lmdb_database_new( return ErrorCode::NullPointerProvided; } - let indexes: Vec = unsafe { + let indexes: Vec = { let py = Python::assume_gil_acquired(); let py_obj = PyObject::from_borrowed_ptr(py, indexes_ptr); let py_list: PyList = py_obj.extract(py).unwrap(); @@ -54,11 +54,9 @@ pub extern "C" fn lmdb_database_new( .collect() }; - let db_path = unsafe { - match CStr::from_ptr(path).to_str() { - Ok(s) => s, - Err(_) => return ErrorCode::InvalidFilePath, - } + let db_path = match CStr::from_ptr(path).to_str() { + Ok(s) => s, + Err(_) => return ErrorCode::InvalidFilePath, }; let ctx = match LmdbContext::new(Path::new(&db_path), indexes.len(), Some(file_size)) { @@ -74,9 +72,8 @@ pub extern "C" fn lmdb_database_new( match LmdbDatabase::new(ctx, &indexes) { Ok(db) => { - unsafe { - *db_ptr = Box::into_raw(Box::new(db)) as *const c_void; - } + *db_ptr = Box::into_raw(Box::new(db)) as *const c_void; + ErrorCode::Success } Err(err) => { @@ -87,11 +84,11 @@ pub extern "C" fn lmdb_database_new( } #[no_mangle] -pub extern "C" fn lmdb_database_drop(lmdb_database: *mut c_void) -> ErrorCode { +pub unsafe extern "C" fn lmdb_database_drop(lmdb_database: *mut c_void) -> ErrorCode { if lmdb_database.is_null() { return ErrorCode::NullPointerProvided; } - unsafe { Box::from_raw(lmdb_database as *mut 
LmdbDatabase) }; + Box::from_raw(lmdb_database as *mut LmdbDatabase); ErrorCode::Success } diff --git a/validator/src/journal/block_scheduler.rs b/validator/src/journal/block_scheduler.rs new file mode 100644 index 0000000000..b37caca8ad --- /dev/null +++ b/validator/src/journal/block_scheduler.rs @@ -0,0 +1,445 @@ +/* + * Copyright 2018 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ------------------------------------------------------------------------------ + */ + +use std::collections::{HashMap, HashSet}; +use std::sync::{Arc, Mutex}; + +use block::Block; +use journal::block_validator::BlockStatusStore; +use journal::block_wrapper::BlockStatus; +use journal::{block_manager::BlockManager, NULL_BLOCK_IDENTIFIER}; +use metrics; + +lazy_static! 
{ + static ref COLLECTOR: metrics::MetricsCollectorHandle = + metrics::get_collector("sawtooth_validator.block_validator"); +} + +#[derive(Clone)] +pub struct BlockScheduler { + state: Arc>>, +} + +impl BlockScheduler { + pub fn new(block_manager: BlockManager, block_status_store: B) -> Self { + BlockScheduler { + state: Arc::new(Mutex::new(BlockSchedulerState { + block_manager, + block_status_store, + pending: HashSet::new(), + processing: HashSet::new(), + descendants_by_previous_id: HashMap::new(), + })), + } + } + + /// Schedule the blocks, returning those that are directly ready to + /// validate + pub fn schedule(&self, blocks: Vec) -> Vec { + self.state + .lock() + .expect("The BlockScheduler Mutex was poisoned") + .schedule(blocks) + } + + /// Mark the block associated with block_id as having completed block + /// validation, returning any blocks that are not available for processing + pub fn done(&self, block_id: &str) -> Vec { + self.state + .lock() + .expect("The BlockScheduler Mutex was poisoned") + .done(block_id) + } + + pub fn contains(&self, block_id: &str) -> bool { + self.state + .lock() + .expect("The BlockScheduler Mutex was poisoned") + .contains(block_id) + } +} + +struct BlockSchedulerState { + pub block_manager: BlockManager, + pub block_status_store: B, + pub pending: HashSet, + pub processing: HashSet, + pub descendants_by_previous_id: HashMap>, +} + +impl BlockSchedulerState { + fn schedule(&mut self, blocks: Vec) -> Vec { + let mut ready = vec![]; + for block in blocks { + if self.processing.contains(&block.header_signature) { + debug!( + "During block scheduling, block already in process: {}", + &block.header_signature + ); + continue; + } + + if self.pending.contains(&block.header_signature) { + debug!( + "During block scheduling, block already in pending: {}", + &block.header_signature + ); + continue; + } + + if self.processing.contains(&block.previous_block_id) { + debug!( + "During block scheduling, previous block {} in process, 
adding block {} to pending", + &block.previous_block_id, + &block.header_signature); + self.add_block_to_pending(block); + continue; + } + + if self.pending.contains(&block.previous_block_id) { + debug!( + "During block scheduling, previous block {} is pending, adding block {} to pending", + &block.previous_block_id, + &block.header_signature); + + self.add_block_to_pending(block); + continue; + } + + if &block.previous_block_id != NULL_BLOCK_IDENTIFIER + && self.block_status_store.status(&block.previous_block_id) == BlockStatus::Unknown + { + info!( + "During block scheduling, predecessor of block {} status is unknown. Scheduling all blocks since last predecessor with known status", + &block.header_signature); + + let blocks_previous_to_previous = self.block_manager + .branch(&block.previous_block_id) + .expect("Block id of block previous to block being scheduled is unknown to the block manager"); + self.add_block_to_pending(block); + + let mut to_be_scheduled = vec![]; + for predecessor in blocks_previous_to_previous { + eprintln!("{}", &predecessor.header_signature); + if self + .block_status_store + .status(&predecessor.header_signature) + != BlockStatus::Unknown + { + break; + } + to_be_scheduled.push(predecessor); + } + + to_be_scheduled.reverse(); + + for block in self.schedule(to_be_scheduled) { + if !ready.contains(&block) { + self.processing.insert(block.header_signature.clone()); + ready.push(block); + } + } + } else { + debug!("Adding block {} for processing", &block.header_signature); + + self.processing.insert(block.header_signature.clone()); + ready.push(block); + } + } + self.update_gauges(); + ready + } + + fn done(&mut self, block_id: &str) -> Vec { + self.processing.remove(block_id); + let ready = self + .descendants_by_previous_id + .remove(block_id) + .unwrap_or(vec![]); + + for blk in &ready { + self.pending.remove(&blk.header_signature); + } + self.update_gauges(); + ready + } + + fn contains(&self, block_id: &str) -> bool { + 
self.pending.contains(block_id) || self.processing.contains(block_id) + } + + fn add_block_to_pending(&mut self, block: Block) { + self.pending.insert(block.header_signature.clone()); + if let Some(ref mut waiting_descendants) = self + .descendants_by_previous_id + .get_mut(&block.previous_block_id) + { + if !waiting_descendants.contains(&block) { + waiting_descendants.push(block); + } + return; + } + + self.descendants_by_previous_id + .insert(block.previous_block_id.clone(), vec![block]); + } + + fn update_gauges(&self) { + let mut blocks_processing = COLLECTOR.gauge("BlockScheduler.blocks_processing", None, None); + blocks_processing.set_value(self.processing.len()); + let mut blocks_pending = COLLECTOR.gauge("BlockScheduler.blocks_pending", None, None); + blocks_pending.set_value(self.pending.len()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use journal::NULL_BLOCK_IDENTIFIER; + use std::sync::{Arc, Mutex}; + + #[test] + fn test_block_scheduler_simple() { + let block_manager = BlockManager::new(); + let block_status_store = MockStore::new(); + let block_a = create_block("A", NULL_BLOCK_IDENTIFIER, 0); + let block_a1 = create_block("A1", "A", 1); + let block_a2 = create_block("A2", "A", 1); + let block_b2 = create_block("B2", "A2", 2); + + let block_unknown = create_block("UNKNOWN", "A", 1); + let block_b = create_block("B", "UNKNOWN", 2); + block_manager + .put(vec![block_a.clone(), block_unknown.clone()]) + .expect("The block manager failed to `put` a branch"); + + let block_scheduler = BlockScheduler::new(block_manager, block_status_store); + + assert_eq!( + block_scheduler.schedule(vec![ + block_a.clone(), + block_a1.clone(), + block_a2.clone(), + block_b2.clone(), + ]), + vec![block_a.clone()] + ); + + assert_eq!( + block_scheduler.done(&block_a.header_signature), + vec![block_a1, block_a2] + ); + + assert_eq!(block_scheduler.schedule(vec![block_b]), vec![block_unknown]); + } + + #[test] + fn test_block_scheduler_multiple_forks() { + let 
block_manager = BlockManager::new(); + let block_status_store: Arc>> = + Arc::new(Mutex::new(HashMap::new())); + + let block_a = create_block("A", NULL_BLOCK_IDENTIFIER, 0); + let block_b = create_block("B", "A", 1); + let block_c1 = create_block("C1", "B", 2); + let block_c2 = create_block("C2", "B", 2); + let block_c3 = create_block("C3", "B", 2); + let block_d1 = create_block("D11", "C1", 3); + let block_d2 = create_block("D12", "C1", 3); + let block_d3 = create_block("D13", "C1", 3); + + block_manager + .put(vec![ + block_a.clone(), + block_b.clone(), + block_c1.clone(), + block_d1.clone(), + ]) + .expect("The block manager failed to `put` a branch"); + block_manager + .put(vec![block_b.clone(), block_c2.clone()]) + .expect("The block manager failed to put a branch"); + + block_manager + .put(vec![block_b.clone(), block_c3.clone()]) + .expect("The block manager failed to put a block"); + + block_manager + .put(vec![block_c1.clone(), block_d2.clone()]) + .expect("The block manager failed to `put` a branch"); + + block_manager + .put(vec![block_c1.clone(), block_d3.clone()]) + .expect("The block manager failed to put a branch"); + + let block_scheduler = BlockScheduler::new(block_manager, block_status_store); + + assert_eq!( + block_scheduler.schedule(vec![block_a.clone()]), + vec![block_a.clone()], + "The genesis block's predecessor does not need to be validated" + ); + + assert_eq!( + block_scheduler.schedule(vec![ + block_b.clone(), + block_c1.clone(), + block_c2.clone(), + block_c3.clone(), + ]), + vec![], + "Block A has not been validated yet" + ); + + assert_eq!( + block_scheduler.done(&block_a.header_signature), + vec![block_b.clone()], + "Marking Block A as complete, makes Block B available" + ); + + assert_eq!( + block_scheduler.schedule(vec![block_d1.clone(), block_d2.clone(), block_d3.clone()]), + vec![], + "None of Blocks D1, D2, D3 are available" + ); + + assert_eq!( + block_scheduler.done(&block_b.header_signature), + vec![block_c1.clone(), 
block_c2.clone(), block_c3.clone()], + "Marking Block B as complete, makes Block C1, C2, C3 available" + ); + + assert_eq!( + block_scheduler.done(&block_c2.header_signature), + vec![], + "No Blocks are available" + ); + + assert_eq!( + block_scheduler.done(&block_c3.header_signature), + vec![], + "No Blocks are available" + ); + + assert_eq!( + block_scheduler.done(&block_c1.header_signature), + vec![block_d1.clone(), block_d2.clone(), block_d3.clone()], + "Blocks D1, D2, D3 are available" + ); + } + + #[test] + fn test_cache_misses() { + let block_manager = BlockManager::new(); + let block_status_store: Arc>> = + Arc::new(Mutex::new(HashMap::new())); + + let block_a = create_block("A", NULL_BLOCK_IDENTIFIER, 0); + let block_b = create_block("B", "A", 1); + let block_c1 = create_block("C1", "B", 2); + let block_c2 = create_block("C2", "B", 2); + let block_c3 = create_block("C3", "B", 2); + + block_manager + .put(vec![block_a.clone(), block_b.clone(), block_c1.clone()]) + .expect("Block manager errored trying to put a branch"); + + block_manager + .put(vec![block_b.clone(), block_c2.clone()]) + .expect("Block manager errored trying to put a branch"); + + block_manager + .put(vec![block_b.clone(), block_c3.clone()]) + .expect("Block manager errored trying to put a branch"); + + let block_scheduler = BlockScheduler::new(block_manager, Arc::clone(&block_status_store)); + + assert_eq!( + block_scheduler.schedule(vec![block_a.clone(), block_b.clone()]), + vec![block_a.clone()], + "Block A is ready, but block b is not" + ); + + block_status_store + .lock() + .expect("Mutex was poisoned") + .insert(block_a.header_signature.clone(), BlockStatus::Valid); + + assert_eq!( + block_scheduler.done(&block_a.header_signature), + vec![block_b.clone()], + "Now Block B is ready" + ); + + // We are not inserting a status for block b so there will be a later miss + + assert_eq!( + block_scheduler.done(&block_b.header_signature), + vec![], + "Block B is done and there are no further 
blocks" + ); + + // Now a cache miss + + assert_eq!( + block_scheduler.schedule(vec![block_c1.clone(), block_c2.clone(), block_c3.clone()]), + vec![block_b.clone()], + "Since there was a cache miss, block b must be scheduled again" + ); + } + + fn create_block(header_signature: &str, previous_block_id: &str, block_num: u64) -> Block { + Block { + header_signature: header_signature.into(), + batches: vec![], + state_root_hash: "".into(), + consensus: vec![], + batch_ids: vec![], + signer_public_key: "".into(), + previous_block_id: previous_block_id.into(), + block_num, + header_bytes: vec![], + } + } + + impl BlockStatusStore for Arc>> { + fn status(&self, block_id: &str) -> BlockStatus { + self.lock() + .expect("Mutex was poisoned") + .get(block_id) + .cloned() + .unwrap_or(BlockStatus::Unknown) + } + } + + struct MockStore {} + + impl MockStore { + fn new() -> Self { + MockStore {} + } + } + + impl BlockStatusStore for MockStore { + fn status(&self, block_id: &str) -> BlockStatus { + if block_id == "UNKNOWN" { + return BlockStatus::Unknown; + } + BlockStatus::Valid + } + } +} diff --git a/validator/src/journal/block_validator.rs b/validator/src/journal/block_validator.rs index 05943552f3..dac42251e1 100644 --- a/validator/src/journal/block_validator.rs +++ b/validator/src/journal/block_validator.rs @@ -42,6 +42,10 @@ pub trait BlockValidator: Sync + Send + Clone { fn process_pending(&self, block: &Block, response_sender: Sender); } +pub trait BlockStatusStore { + fn status(&self, block_id: &str) -> BlockStatus; +} + #[derive(Clone, Debug)] pub struct BlockValidationResult { pub block_id: String, diff --git a/validator/src/journal/chain_ffi.rs b/validator/src/journal/chain_ffi.rs index 804ba8c52c..dd6917caae 100644 --- a/validator/src/journal/chain_ffi.rs +++ b/validator/src/journal/chain_ffi.rs @@ -59,7 +59,7 @@ macro_rules! 
check_null { } #[no_mangle] -pub extern "C" fn chain_controller_new( +pub unsafe extern "C" fn chain_controller_new( block_store: *mut py_ffi::PyObject, block_manager: *const c_void, block_validator: *mut py_ffi::PyObject, @@ -83,21 +83,18 @@ pub extern "C" fn chain_controller_new( data_directory ); - let data_dir = unsafe { - match CStr::from_ptr(data_directory).to_str() { - Ok(s) => s, - Err(_) => return ErrorCode::InvalidDataDir, - } + let data_dir = match CStr::from_ptr(data_directory).to_str() { + Ok(s) => s, + Err(_) => return ErrorCode::InvalidDataDir, }; - let py = unsafe { Python::assume_gil_acquired() }; + let py = Python::assume_gil_acquired(); - let py_block_store_reader = unsafe { PyObject::from_borrowed_ptr(py, block_store) }; - let py_block_validator = unsafe { PyObject::from_borrowed_ptr(py, block_validator) }; - let py_observers = unsafe { PyObject::from_borrowed_ptr(py, observers) }; - let chain_head_lock_ref = - unsafe { (chain_head_lock as *const ChainHeadLock).as_ref().unwrap() }; - let py_consensus_notifier = unsafe { PyObject::from_borrowed_ptr(py, consensus_notifier) }; + let py_block_store_reader = PyObject::from_borrowed_ptr(py, block_store); + let py_block_validator = PyObject::from_borrowed_ptr(py, block_validator); + let py_observers = PyObject::from_borrowed_ptr(py, observers); + let chain_head_lock_ref = (chain_head_lock as *const ChainHeadLock).as_ref().unwrap(); + let py_consensus_notifier = PyObject::from_borrowed_ptr(py, consensus_notifier); let observer_wrappers = if let Ok(py_list) = py_observers.extract::(py) { let mut res: Vec> = Vec::with_capacity(py_list.len(py)); @@ -109,8 +106,8 @@ pub extern "C" fn chain_controller_new( return ErrorCode::InvalidPythonObject; }; - let block_manager = unsafe { (*(block_manager as *const BlockManager)).clone() }; - let state_database = unsafe { (*(state_database as *const LmdbDatabase)).clone() }; + let block_manager = (*(block_manager as *const BlockManager)).clone(); + let state_database = 
(*(state_database as *const LmdbDatabase)).clone(); let state_pruning_manager = StatePruningManager::new(state_database); @@ -127,28 +124,24 @@ pub extern "C" fn chain_controller_new( Duration::from_secs(fork_cache_keep_time as u64), ); - unsafe { - *chain_controller_ptr = Box::into_raw(Box::new(chain_controller)) as *const c_void; - } + *chain_controller_ptr = Box::into_raw(Box::new(chain_controller)) as *const c_void; ErrorCode::Success } #[no_mangle] -pub extern "C" fn chain_controller_drop(chain_controller: *mut c_void) -> ErrorCode { +pub unsafe extern "C" fn chain_controller_drop(chain_controller: *mut c_void) -> ErrorCode { check_null!(chain_controller); - unsafe { Box::from_raw(chain_controller as *mut ChainController) }; + Box::from_raw(chain_controller as *mut ChainController); ErrorCode::Success } #[no_mangle] -pub extern "C" fn chain_controller_start(chain_controller: *mut c_void) -> ErrorCode { +pub unsafe extern "C" fn chain_controller_start(chain_controller: *mut c_void) -> ErrorCode { check_null!(chain_controller); - unsafe { - (*(chain_controller as *mut ChainController)).start(); - } + (*(chain_controller as *mut ChainController)).start(); ErrorCode::Success } @@ -175,12 +168,11 @@ pub unsafe extern "C" fn chain_controller_block_validation_result( } #[no_mangle] -pub extern "C" fn chain_controller_stop(chain_controller: *mut c_void) -> ErrorCode { +pub unsafe extern "C" fn chain_controller_stop(chain_controller: *mut c_void) -> ErrorCode { check_null!(chain_controller); - unsafe { - (*(chain_controller as *mut ChainController)).stop(); - } + (*(chain_controller as *mut ChainController)).stop(); + ErrorCode::Success } @@ -218,22 +210,18 @@ chain_controller_block_ffi!(chain_controller_fail_block, fail_block, block, &blo chain_controller_block_ffi!(chain_controller_commit_block, commit_block, block, block); #[no_mangle] -pub extern "C" fn chain_controller_queue_block( +pub unsafe extern "C" fn chain_controller_queue_block( chain_controller: *mut 
c_void, block_id: *const c_char, ) -> ErrorCode { check_null!(chain_controller, block_id); - let block_id = unsafe { - match CStr::from_ptr(block_id).to_str() { - Ok(s) => s, - Err(_) => return ErrorCode::InvalidBlockId, - } + let block_id = match CStr::from_ptr(block_id).to_str() { + Ok(s) => s, + Err(_) => return ErrorCode::InvalidBlockId, }; - unsafe { - (*(chain_controller as *mut ChainController)).queue_block(block_id); - } + (*(chain_controller as *mut ChainController)).queue_block(block_id); ErrorCode::Success } @@ -241,26 +229,22 @@ pub extern "C" fn chain_controller_queue_block( /// This is only exposed for the current python tests, it should be removed /// when proper rust tests are written for the ChainController #[no_mangle] -pub extern "C" fn chain_controller_on_block_received( +pub unsafe extern "C" fn chain_controller_on_block_received( chain_controller: *mut c_void, block_id: *const c_char, ) -> ErrorCode { check_null!(chain_controller, block_id); - let block_id = unsafe { - match CStr::from_ptr(block_id).to_str() { - Ok(s) => s, - Err(_) => return ErrorCode::InvalidBlockId, - } + let block_id = match CStr::from_ptr(block_id).to_str() { + Ok(s) => s, + Err(_) => return ErrorCode::InvalidBlockId, }; - unsafe { - if let Err(err) = (*(chain_controller as *mut ChainController)) - .on_block_received(block_id.into()) - { - error!("ChainController.on_block_received error: {:?}", err); - return ErrorCode::Unknown; - } + if let Err(err) = (*(chain_controller as *mut ChainController)) + .on_block_received(block_id.into()) + { + error!("ChainController.on_block_received error: {:?}", err); + return ErrorCode::Unknown; } ErrorCode::Success @@ -301,16 +285,16 @@ pub unsafe extern "C" fn chain_controller_chain_head( } #[no_mangle] -pub extern "C" fn sender_drop(sender: *const c_void) -> ErrorCode { +pub unsafe extern "C" fn sender_drop(sender: *const c_void) -> ErrorCode { check_null!(sender); - unsafe { Box::from_raw(sender as *mut Sender) }; + 
Box::from_raw(sender as *mut Sender); ErrorCode::Success } #[no_mangle] -pub extern "C" fn sender_send( +pub unsafe extern "C" fn sender_send( sender: *const c_void, validation_result: *mut py_ffi::PyObject, ) -> ErrorCode { @@ -319,19 +303,17 @@ pub extern "C" fn sender_send( let gil_guard = Python::acquire_gil(); let py = gil_guard.python(); - let py_result = unsafe { PyObject::from_borrowed_ptr(py, validation_result) }; + let py_result = PyObject::from_borrowed_ptr(py, validation_result); let result: BlockValidationResult = py_result.extract(py).expect("Unable to extract block"); - unsafe { - let sender = (*(sender as *mut Sender)).clone(); - py.allow_threads(move || match sender.send(result) { - Ok(_) => ErrorCode::Success, - Err(err) => { - error!("Unable to send validation result: {:?}", err); - ErrorCode::Unknown - } - }) - } + let sender = (*(sender as *mut Sender)).clone(); + py.allow_threads(move || match sender.send(result) { + Ok(_) => ErrorCode::Success, + Err(err) => { + error!("Unable to send validation result: {:?}", err); + ErrorCode::Unknown + } + }) } struct PyBlockValidator { diff --git a/validator/src/journal/incoming_batch_queue_ffi.rs b/validator/src/journal/incoming_batch_queue_ffi.rs index 10584683bd..80f1ce8360 100644 --- a/validator/src/journal/incoming_batch_queue_ffi.rs +++ b/validator/src/journal/incoming_batch_queue_ffi.rs @@ -39,7 +39,7 @@ pub enum ErrorCode { } #[no_mangle] -pub extern "C" fn incoming_batch_sender_send( +pub unsafe extern "C" fn incoming_batch_sender_send( sender_ptr: *mut c_void, pyobj_ptr: *mut py_ffi::PyObject, ) -> ErrorCode { @@ -48,7 +48,7 @@ pub extern "C" fn incoming_batch_sender_send( let gil = Python::acquire_gil(); let py = gil.python(); let batch: Batch = { - let pyobj = unsafe { PyObject::from_borrowed_ptr(py, pyobj_ptr) }; + let pyobj = PyObject::from_borrowed_ptr(py, pyobj_ptr); match pyobj.extract(py) { Ok(batch) => batch, @@ -58,7 +58,7 @@ pub extern "C" fn incoming_batch_sender_send( } }; - let mut 
sender = unsafe { (*(sender_ptr as *mut IncomingBatchSender)).clone() }; + let mut sender = (*(sender_ptr as *mut IncomingBatchSender)).clone(); py.allow_threads(move || match sender.put(batch) { Ok(()) => ErrorCode::Success, @@ -67,35 +67,34 @@ pub extern "C" fn incoming_batch_sender_send( } #[no_mangle] -pub extern "C" fn incoming_batch_sender_has_batch( +pub unsafe extern "C" fn incoming_batch_sender_has_batch( sender_ptr: *mut c_void, batch_id: *const c_char, has: *mut bool, ) -> ErrorCode { check_null!(sender_ptr, batch_id); - let batch_id = match unsafe { CStr::from_ptr(batch_id).to_str() } { + let batch_id = match CStr::from_ptr(batch_id).to_str() { Ok(s) => s, Err(_) => return ErrorCode::InvalidInput, }; - unsafe { - *has = (*(sender_ptr as *mut IncomingBatchSender)) - .has_batch(batch_id) - .unwrap_or_else(|e| { - warn!("Unable to check for batch {:?}", e); - false - }); - } + + *has = (*(sender_ptr as *mut IncomingBatchSender)) + .has_batch(batch_id) + .unwrap_or_else(|e| { + warn!("Unable to check for batch {:?}", e); + false + }); ErrorCode::Success } #[no_mangle] -pub extern "C" fn incoming_batch_sender_drop(sender_ptr: *mut c_void) -> ErrorCode { +pub unsafe extern "C" fn incoming_batch_sender_drop(sender_ptr: *mut c_void) -> ErrorCode { if sender_ptr.is_null() { return ErrorCode::NullPointerProvided; } - unsafe { Box::from_raw(sender_ptr as *mut IncomingBatchSender) }; + Box::from_raw(sender_ptr as *mut IncomingBatchSender); ErrorCode::Success } diff --git a/validator/src/journal/mod.rs b/validator/src/journal/mod.rs index c1a7d34e70..0803429c9f 100644 --- a/validator/src/journal/mod.rs +++ b/validator/src/journal/mod.rs @@ -19,6 +19,7 @@ pub const NULL_BLOCK_IDENTIFIER: &str = "0000000000000000"; pub mod block_manager; pub mod block_manager_ffi; +mod block_scheduler; pub mod block_store; pub mod block_validator; pub mod block_wrapper; diff --git a/validator/src/journal/publisher_ffi.rs b/validator/src/journal/publisher_ffi.rs index 
a36b32155d..9cf7f45725 100644 --- a/validator/src/journal/publisher_ffi.rs +++ b/validator/src/journal/publisher_ffi.rs @@ -49,7 +49,7 @@ macro_rules! check_null { } #[no_mangle] -pub extern "C" fn block_publisher_new( +pub unsafe extern "C" fn block_publisher_new( block_manager_ptr: *const c_void, transaction_executor_ptr: *mut py_ffi::PyObject, batch_committed_ptr: *mut py_ffi::PyObject, @@ -85,25 +85,23 @@ pub extern "C" fn block_publisher_new( batch_injector_factory_ptr ); - let py = unsafe { Python::assume_gil_acquired() }; - - let block_manager = unsafe { (*(block_manager_ptr as *mut BlockManager)).clone() }; - let transaction_executor = unsafe { PyObject::from_borrowed_ptr(py, transaction_executor_ptr) }; - let batch_committed = unsafe { PyObject::from_borrowed_ptr(py, batch_committed_ptr) }; - let transaction_committed = - unsafe { PyObject::from_borrowed_ptr(py, transaction_committed_ptr) }; - let state_view_factory = unsafe { PyObject::from_borrowed_ptr(py, state_view_factory_ptr) }; - let settings_cache = unsafe { PyObject::from_borrowed_ptr(py, settings_cache_ptr) }; - let block_sender = unsafe { PyObject::from_borrowed_ptr(py, block_sender_ptr) }; - let batch_sender = unsafe { PyObject::from_borrowed_ptr(py, batch_sender_ptr) }; - let chain_head = unsafe { PyObject::from_borrowed_ptr(py, chain_head_ptr) }; - let identity_signer = unsafe { PyObject::from_borrowed_ptr(py, identity_signer_ptr) }; - let data_dir = unsafe { PyObject::from_borrowed_ptr(py, data_dir_ptr) }; - let config_dir = unsafe { PyObject::from_borrowed_ptr(py, config_dir_ptr) }; - let permission_verifier = unsafe { PyObject::from_borrowed_ptr(py, permission_verifier_ptr) }; - let batch_observers = unsafe { PyObject::from_borrowed_ptr(py, batch_observers_ptr) }; - let batch_injector_factory = - unsafe { PyObject::from_borrowed_ptr(py, batch_injector_factory_ptr) }; + let py = Python::assume_gil_acquired(); + + let block_manager = (*(block_manager_ptr as *mut BlockManager)).clone(); + let 
transaction_executor = PyObject::from_borrowed_ptr(py, transaction_executor_ptr); + let batch_committed = PyObject::from_borrowed_ptr(py, batch_committed_ptr); + let transaction_committed = PyObject::from_borrowed_ptr(py, transaction_committed_ptr); + let state_view_factory = PyObject::from_borrowed_ptr(py, state_view_factory_ptr); + let settings_cache = PyObject::from_borrowed_ptr(py, settings_cache_ptr); + let block_sender = PyObject::from_borrowed_ptr(py, block_sender_ptr); + let batch_sender = PyObject::from_borrowed_ptr(py, batch_sender_ptr); + let chain_head = PyObject::from_borrowed_ptr(py, chain_head_ptr); + let identity_signer = PyObject::from_borrowed_ptr(py, identity_signer_ptr); + let data_dir = PyObject::from_borrowed_ptr(py, data_dir_ptr); + let config_dir = PyObject::from_borrowed_ptr(py, config_dir_ptr); + let permission_verifier = PyObject::from_borrowed_ptr(py, permission_verifier_ptr); + let batch_observers = PyObject::from_borrowed_ptr(py, batch_observers_ptr); + let batch_injector_factory = PyObject::from_borrowed_ptr(py, batch_injector_factory_ptr); let chain_head = if chain_head == Python::None(py) { None @@ -169,103 +167,95 @@ pub extern "C" fn block_publisher_new( settings_view_class, ); - unsafe { - *block_publisher_ptr = Box::into_raw(Box::new(publisher)) as *const c_void; - } + *block_publisher_ptr = Box::into_raw(Box::new(publisher)) as *const c_void; ErrorCode::Success } #[no_mangle] -pub extern "C" fn block_publisher_drop(publisher: *mut c_void) -> ErrorCode { +pub unsafe extern "C" fn block_publisher_drop(publisher: *mut c_void) -> ErrorCode { check_null!(publisher); - unsafe { Box::from_raw(publisher as *mut BlockPublisher) }; + Box::from_raw(publisher as *mut BlockPublisher); ErrorCode::Success } // block_publisher_on_batch_received is used in tests #[no_mangle] -pub extern "C" fn block_publisher_on_batch_received( +pub unsafe extern "C" fn block_publisher_on_batch_received( publisher: *mut c_void, batch: *mut py_ffi::PyObject, ) 
-> ErrorCode { check_null!(publisher, batch); let gil = Python::acquire_gil(); let py = gil.python(); - let batch = unsafe { - PyObject::from_borrowed_ptr(py, batch) - .extract::(py) - .unwrap() - }; - let publisher = unsafe { (*(publisher as *mut BlockPublisher)).clone() }; + let batch = PyObject::from_borrowed_ptr(py, batch) + .extract::(py) + .unwrap(); + let publisher = (*(publisher as *mut BlockPublisher)).clone(); py.allow_threads(move || publisher.publisher.on_batch_received(batch)); ErrorCode::Success } #[no_mangle] -pub extern "C" fn block_publisher_start( +pub unsafe extern "C" fn block_publisher_start( publisher: *mut c_void, incoming_batch_sender: *mut *const c_void, ) -> ErrorCode { check_null!(publisher); - let batch_tx = unsafe { (*(publisher as *mut BlockPublisher)).start() }; + let batch_tx = (*(publisher as *mut BlockPublisher)).start(); let batch_tx_ptr: *mut IncomingBatchSender = Box::into_raw(Box::new(batch_tx)); - unsafe { - *incoming_batch_sender = batch_tx_ptr as *const c_void; - } + + *incoming_batch_sender = batch_tx_ptr as *const c_void; + ErrorCode::Success } #[no_mangle] -pub extern "C" fn block_publisher_stop(publisher: *mut c_void) -> ErrorCode { +pub unsafe extern "C" fn block_publisher_stop(publisher: *mut c_void) -> ErrorCode { check_null!(publisher); - unsafe { (*(publisher as *mut BlockPublisher)).stop() } + (*(publisher as *mut BlockPublisher)).stop(); ErrorCode::Success } #[no_mangle] -pub extern "C" fn block_publisher_chain_head_lock( +pub unsafe extern "C" fn block_publisher_chain_head_lock( publisher_ptr: *mut c_void, chain_head_lock_ptr: *mut *const c_void, ) -> ErrorCode { check_null!(publisher_ptr); - let chain_head_lock = - Box::new(unsafe { (*(publisher_ptr as *mut BlockPublisher)).chain_head_lock() }); - unsafe { - *chain_head_lock_ptr = Box::into_raw(chain_head_lock) as *const c_void; - }; + let chain_head_lock = Box::new((*(publisher_ptr as *mut BlockPublisher)).chain_head_lock()); + + *chain_head_lock_ptr = 
Box::into_raw(chain_head_lock) as *const c_void; ErrorCode::Success } #[no_mangle] -pub extern "C" fn block_publisher_pending_batch_info( +pub unsafe extern "C" fn block_publisher_pending_batch_info( publisher: *mut c_void, length: *mut i32, limit: *mut i32, ) -> ErrorCode { check_null!(publisher); - unsafe { - let info = (*(publisher as *mut BlockPublisher)).pending_batch_info(); - *length = info.0; - *limit = info.1; - } + + let info = (*(publisher as *mut BlockPublisher)).pending_batch_info(); + *length = info.0; + *limit = info.1; + ErrorCode::Success } #[no_mangle] -pub extern "C" fn block_publisher_initialize_block( +pub unsafe extern "C" fn block_publisher_initialize_block( publisher: *mut c_void, previous_block: *mut py_ffi::PyObject, ) -> ErrorCode { let gil = Python::acquire_gil(); let py = gil.python(); - let block = unsafe { - PyObject::from_borrowed_ptr(py, previous_block) - .extract::(py) - .unwrap() - }; + let block = PyObject::from_borrowed_ptr(py, previous_block) + .extract::(py) + .unwrap(); - let publisher = unsafe { (*(publisher as *mut BlockPublisher)).clone() }; + let publisher = (*(publisher as *mut BlockPublisher)).clone(); py.allow_threads(move || match publisher.initialize_block(block) { Err(InitializeBlockError::BlockInProgress) => ErrorCode::BlockInProgress, Err(InitializeBlockError::MissingPredecessor) => ErrorCode::MissingPredecessor, @@ -274,7 +264,7 @@ pub extern "C" fn block_publisher_initialize_block( } #[no_mangle] -pub extern "C" fn block_publisher_finalize_block( +pub unsafe extern "C" fn block_publisher_finalize_block( publisher: *mut c_void, consensus: *const u8, consensus_len: usize, @@ -283,23 +273,23 @@ pub extern "C" fn block_publisher_finalize_block( result_len: *mut usize, ) -> ErrorCode { check_null!(publisher, consensus); - let consensus = unsafe { slice::from_raw_parts(consensus, consensus_len).to_vec() }; - match unsafe { (*(publisher as *mut BlockPublisher)).finalize_block(consensus, force) } { + let consensus = 
slice::from_raw_parts(consensus, consensus_len).to_vec(); + match (*(publisher as *mut BlockPublisher)).finalize_block(consensus, force) { Err(FinalizeBlockError::BlockNotInitialized) => ErrorCode::BlockNotInitialized, Err(FinalizeBlockError::BlockEmpty) => ErrorCode::BlockEmpty, - Ok(block_id) => unsafe { + Ok(block_id) => { *result = block_id.as_ptr(); *result_len = block_id.as_bytes().len(); mem::forget(block_id); ErrorCode::Success - }, + } } } #[no_mangle] -pub extern "C" fn block_publisher_summarize_block( +pub unsafe extern "C" fn block_publisher_summarize_block( publisher: *mut c_void, force: bool, result: *mut *const u8, @@ -307,29 +297,29 @@ pub extern "C" fn block_publisher_summarize_block( ) -> ErrorCode { check_null!(publisher); - match unsafe { (*(publisher as *mut BlockPublisher)).summarize_block(force) } { + match (*(publisher as *mut BlockPublisher)).summarize_block(force) { Err(FinalizeBlockError::BlockEmpty) => ErrorCode::BlockEmpty, Err(FinalizeBlockError::BlockNotInitialized) => ErrorCode::BlockNotInitialized, - Ok(consensus) => unsafe { + Ok(consensus) => { *result = consensus.as_ptr(); *result_len = consensus.as_slice().len(); - mem::forget(result); + mem::forget(consensus); ErrorCode::Success - }, + } } } // convert_on_chain_updated_args is used in tests -pub fn convert_on_chain_updated_args( +pub unsafe fn convert_on_chain_updated_args( py: Python, chain_head_ptr: *mut py_ffi::PyObject, committed_batches_ptr: *mut py_ffi::PyObject, uncommitted_batches_ptr: *mut py_ffi::PyObject, ) -> (Block, Vec, Vec) { - let chain_head = unsafe { PyObject::from_borrowed_ptr(py, chain_head_ptr) }; - let py_committed_batches = unsafe { PyObject::from_borrowed_ptr(py, committed_batches_ptr) }; + let chain_head = PyObject::from_borrowed_ptr(py, chain_head_ptr); + let py_committed_batches = PyObject::from_borrowed_ptr(py, committed_batches_ptr); let committed_batches: Vec = if py_committed_batches == Python::None(py) { Vec::new() } else { @@ -340,8 +330,7 @@ 
pub fn convert_on_chain_updated_args( .map(|pyobj| pyobj.extract::(py).unwrap()) .collect() }; - let py_uncommitted_batches = - unsafe { PyObject::from_borrowed_ptr(py, uncommitted_batches_ptr) }; + let py_uncommitted_batches = PyObject::from_borrowed_ptr(py, uncommitted_batches_ptr); let uncommitted_batches: Vec = if py_uncommitted_batches == Python::None(py) { Vec::new() } else { @@ -361,7 +350,7 @@ pub fn convert_on_chain_updated_args( // block_publisher_on_chain_updated is used in tests #[no_mangle] -pub extern "C" fn block_publisher_on_chain_updated( +pub unsafe extern "C" fn block_publisher_on_chain_updated( publisher: *mut c_void, chain_head_ptr: *mut py_ffi::PyObject, committed_batches_ptr: *mut py_ffi::PyObject, @@ -385,7 +374,7 @@ pub extern "C" fn block_publisher_on_chain_updated( ) }; - let mut publisher = unsafe { (*(publisher as *mut BlockPublisher)).clone() }; + let mut publisher = (*(publisher as *mut BlockPublisher)).clone(); py.allow_threads(move || { publisher.publisher.on_chain_updated_internal( chain_head, @@ -398,30 +387,28 @@ pub extern "C" fn block_publisher_on_chain_updated( } #[no_mangle] -pub extern "C" fn block_publisher_has_batch( +pub unsafe extern "C" fn block_publisher_has_batch( publisher: *mut c_void, batch_id: *const c_char, has: *mut bool, ) -> ErrorCode { check_null!(publisher, batch_id); - let batch_id = match unsafe { CStr::from_ptr(batch_id).to_str() } { + let batch_id = match CStr::from_ptr(batch_id).to_str() { Ok(s) => s, Err(_) => return ErrorCode::InvalidInput, }; - unsafe { - *has = (*(publisher as *mut BlockPublisher)).has_batch(batch_id); - } + + *has = (*(publisher as *mut BlockPublisher)).has_batch(batch_id); + ErrorCode::Success } #[no_mangle] -pub extern "C" fn block_publisher_cancel_block(publisher: *mut c_void) -> ErrorCode { +pub unsafe extern "C" fn block_publisher_cancel_block(publisher: *mut c_void) -> ErrorCode { check_null!(publisher); - unsafe { - match (*(publisher as *mut BlockPublisher)).cancel_block() 
{ - Ok(_) => ErrorCode::Success, - Err(_) => ErrorCode::BlockNotInProgress, - } + match (*(publisher as *mut BlockPublisher)).cancel_block() { + Ok(_) => ErrorCode::Success, + Err(_) => ErrorCode::BlockNotInProgress, } }