From b7e07f91ed666ef243dd7a640c1fc0d234ad50b4 Mon Sep 17 00:00:00 2001 From: aditya singh Date: Wed, 22 Aug 2018 09:36:10 -0700 Subject: [PATCH 01/64] Implemented Structure For Rest Api Testing Contains base class for all test classes Contains conftest.py for pytest cli and session fixtures Contains utils.py for utility methods required in calling rest api endpoints and other functions for testing the endpoints Contains Test files inside test folders for rest api testing Includes thread module for various threads for multi validator testing Includes ssh, payload and workload modules for performing ssh, creating payload and generating workload using threads Signed-off-by: aditya singh --- rest_api/tests/api_test/base.py | 321 +++++++++ rest_api/tests/api_test/conftest.py | 239 +++++++ rest_api/tests/api_test/exceptions.py | 17 + rest_api/tests/api_test/fixtures.py | 179 +++++ .../api_test/get/test_rest_api_get_batch.py | 611 ++++++++++++++++++ .../api_test/get/test_rest_api_get_block.py | 410 ++++++++++++ .../api_test/get/test_rest_api_get_peers.py | 48 ++ .../get/test_rest_api_get_receipts.py | 137 ++++ .../api_test/get/test_rest_api_get_state.py | 488 ++++++++++++++ .../get/test_rest_api_get_transaction.py | 379 +++++++++++ .../mul/test_rest_api_mul_validator.py | 128 ++++ rest_api/tests/api_test/payload.py | 215 ++++++ .../tests/api_test/post/test_rest_api_post.py | 376 +++++++++++ rest_api/tests/api_test/pytest.ini | 10 + .../scenario/test_rest_api_scenario.py | 130 ++++ rest_api/tests/api_test/ssh.py | 36 ++ rest_api/tests/api_test/thread.py | 116 ++++ rest_api/tests/api_test/utils.py | 438 +++++++++++++ rest_api/tests/api_test/validators_down.sh | 3 + rest_api/tests/api_test/validators_up.sh | 6 + rest_api/tests/api_test/workload.py | 29 + 21 files changed, 4316 insertions(+) create mode 100644 rest_api/tests/api_test/base.py create mode 100644 rest_api/tests/api_test/conftest.py create mode 100644 rest_api/tests/api_test/exceptions.py create mode 100644 
rest_api/tests/api_test/fixtures.py create mode 100644 rest_api/tests/api_test/get/test_rest_api_get_batch.py create mode 100644 rest_api/tests/api_test/get/test_rest_api_get_block.py create mode 100644 rest_api/tests/api_test/get/test_rest_api_get_peers.py create mode 100644 rest_api/tests/api_test/get/test_rest_api_get_receipts.py create mode 100644 rest_api/tests/api_test/get/test_rest_api_get_state.py create mode 100644 rest_api/tests/api_test/get/test_rest_api_get_transaction.py create mode 100644 rest_api/tests/api_test/mul/test_rest_api_mul_validator.py create mode 100644 rest_api/tests/api_test/payload.py create mode 100644 rest_api/tests/api_test/post/test_rest_api_post.py create mode 100644 rest_api/tests/api_test/pytest.ini create mode 100644 rest_api/tests/api_test/scenario/test_rest_api_scenario.py create mode 100644 rest_api/tests/api_test/ssh.py create mode 100644 rest_api/tests/api_test/thread.py create mode 100644 rest_api/tests/api_test/utils.py create mode 100644 rest_api/tests/api_test/validators_down.sh create mode 100644 rest_api/tests/api_test/validators_up.sh create mode 100644 rest_api/tests/api_test/workload.py diff --git a/rest_api/tests/api_test/base.py b/rest_api/tests/api_test/base.py new file mode 100644 index 0000000000..6eb41a667e --- /dev/null +++ b/rest_api/tests/api_test/base.py @@ -0,0 +1,321 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ +import aiohttp +from base64 import b64decode + +CONSENSUS_ALGO = b'Devmode' +FAMILY_NAME = 'intkey' +FAMILY_VERSION = '1.0' +DEFAULT_LIMIT = 100 +TRACE = False +NONCE = '' + + +class RestApiBaseTest(object): + """Base class for Rest Api tests that simplifies making assertions + for the test cases + """ + def assert_status(self, response, status): + for data in response['data']: + assert data['status'] == status + + def assert_equal(self, response, data): + assert response == data + + def assert_check_nonce(self, response): + """Asserts response has nonce parameter + """ + assert 'nonce' in response['header'] + assert response['header']['nonce'] == NONCE + + def assert_check_family(self, response): + """Asserts family name and versions in response + """ + assert 'family_name' in response['header'] + assert 'family_version' in response['header'] + assert response['header']['family_name'] == FAMILY_NAME + assert response['header']['family_version'] == FAMILY_VERSION + + def assert_check_dependency(self, response): + """Asserts transaction dependencies in response + """ + assert 'dependencies' in response['header'] + + def assert_content(self, response): + """Asserts response has inputs and outputs parameter + """ + assert 'inputs' in response['header'] + assert 'outputs' in response['header'] + + def assert_payload_algo(self, response): + """Asserts payload has been created with + proper algorithm + """ + assert 'payload_sha512' in response['header'] + + def assert_payload(self, txn, payload): + """Asserts payload is constructed properly + """ + assert 'payload' in txn + assert payload == txn['payload'] + self.assert_payload_algo(txn) + + def assert_batcher_public_key(self, response, public_key): + """Asserts batcher public key in response + """ + assert 'signer_public_key' in response['header'] + assert public_key == response['header']['signer_public_key'] + + def 
assert_signer_public_key(self, response, public_key): + """Asserts that signer public key is proper + """ + assert 'signer_public_key' in response['header'] + assert public_key == response['header']['signer_public_key'] + + def assert_trace(self, response): + """Asserts whether the response has trace parameter + """ + assert 'trace' in response +# assert bool(response['trace']) + assert response['trace'] == TRACE + + def assert_check_consensus(self, response): + """Asserts response has consensus as parameter + """ + assert 'consensus' in response + assert response['consensus'] == CONSENSUS_ALGO + + def assert_state_root_hash(self, response): + """Asserts the response has state root hash + """ + assert 'state_root_hash' in response + + def assert_previous_block_id(self, response): + """Asserts that response has previous block id + """ + assert 'previous_block_id' in response + + def assert_block_num(self, response): + """Asserts that response has proper block number + """ + assert 'block_num' in response + + def assert_items(self, items, cls): + """Asserts that all items in a collection are instances of a class + """ + for item in items: + assert isinstance(item, cls) + + def assert_valid_head(self, response, expected): + """Asserts a response has a head string with an + expected value + """ + assert 'head' in response + head = response['head'] + assert isinstance(head, str) + assert head == expected + + def assert_valid_link(self, response, expected_link): + """Asserts a response has a link url string with an + expected ending + """ + assert 'link' in response + assert response['link'] == expected_link + self.assert_valid_url(response['link'], expected_link) + + def assert_valid_url(self, url, expected_link): + """Asserts a url is valid, and ends with the expected value + """ + assert isinstance(url, str) + assert url.startswith('http') + assert url.endswith(expected_link) + + def assert_transaction_ids(self, response, expected): + """Asserts a response has a link 
url string with an + expected ending + """ + assert 'transaction_ids' in response['header'] + assert response['header']['transaction_ids'][0] == expected + + def assert_valid_paging(self, response, expected_link): + """Asserts a response has a paging dict with the + expected values. + """ + assert 'paging' in response + paging = response['paging'] + + if 'next' in paging and expected_link is not None: + assert 'next' in paging + assert 'next_position' in paging + self.assert_valid_url(response['link'], expected_link) + else: + assert 'next' not in paging + assert paging['start'] == None + assert paging['limit'] == None + + def assert_valid_error(self, response, expected_code): + """Asserts a response has only an error dict with an + expected code + """ + assert 'error' in response + assert len(response) == 1 + + error = response['error'] + assert 'code' in error + assert error['code'] == expected_code + assert 'title' in error + assert isinstance(error['title'], str) + assert 'message' in error + assert isinstance(error['message'], str) + + def assert_valid_data(self, response): + """Asserts a response has a data list of dicts + """ + assert 'data' in response + data = response['data'] + assert isinstance(data, list) + self.assert_items(data, dict) + + def assert_valid_data_list(self, response, expected_length): + """Asserts a response has a data list of dicts of an + expected length. 
+ """ + assert len(response) == expected_length + + def assert_check_block_seq(self, blocks, expected_batches, expected_txns): + """Asserts block is constructed properly after submitting batches + """ + if not isinstance(blocks, list): + blocks = [blocks] + + consensus_algo = CONSENSUS_ALGO + + ep = list(zip(blocks, expected_batches, expected_txns)) + + for block, expected_batch, expected_txn in ep: + assert isinstance(block, dict) + assert isinstance(block['header'], dict) + assert consensus_algo == b64decode(block['header']['consensus']) + batches = block['batches'] + assert isinstance(batches, list) + assert len(batches) == 1 + assert isinstance(batches, dict) + self.assert_check_batch_seq(batches, expected_batch, expected_txn) + + def assert_check_batch_seq(self, batches, expected_batches, expected_txns, + payload, signer_key): + """Asserts batch is constructed properly + """ + + if not isinstance(batches, list): + batches = [batches] + + if not isinstance(expected_batches, list): + expected_batches = [expected_batches] + + if not isinstance(expected_txns, list): + expected_txns = [expected_txns] + + for batch, expected_batch , expected_txn in zip(batches, expected_batches , expected_txns): + assert expected_batch == batch['header_signature'] + assert isinstance(batch['header'], dict) + txns = batch['transactions'] + assert isinstance(txns, list) + assert len(txns) == 1 + self.assert_items(txns, dict) + self.assert_transaction_ids(batch, expected_txn) + self.assert_signer_public_key(batch, signer_key) + self.assert_trace(batch) + self.assert_check_transaction_seq(txns, expected_txn, + payload[0], signer_key) + + + def assert_check_transaction_seq(self, txns, expected_ids, + payload, signer_key): + """Asserts transactions are constructed properly + """ + if not isinstance(txns, list): + txns = [txns] + + if not isinstance(expected_ids, list): + expected_ids = [expected_ids] + + for txn, expected_id in zip(txns, expected_ids): + assert expected_id == 
txn['header_signature'] + assert isinstance(txn['header'], dict) + self.assert_payload(txn, payload) + self.assert_check_family(txn) + self.assert_check_nonce(txn) + self.assert_check_dependency(txn) + self.assert_content(txn) + self.assert_signer_public_key(txn, signer_key) + self.assert_batcher_public_key(txn, signer_key) + + def assert_check_state_seq(self, state, expected): + """Asserts state is updated properly + """ + pass + + def wait_until_status(url, status_code=200, tries=5): + """Pause the program until the given url returns the required status. + + Args: + url (str): The url to query. + status_code (int, optional): The required status code. Defaults to 200. + tries (int, optional): The number of attempts to request the url for + the given status. Defaults to 5. + Raises: + AssertionError: If the status is not recieved in the given number of + tries. + """ + attempts = tries + while attempts > 0: + try: + response = urlopen(url) + if response.getcode() == status_code: + return + + except HTTPError as err: + if err.code == status_code: + return + + LOGGER.debug('failed to read url: %s', str(err)) + except URLError as err: + LOGGER.debug('failed to read url: %s', str(err)) + + sleep_time = (tries - attempts + 1) * 2 + LOGGER.debug('Retrying in %s secs', sleep_time) + time.sleep(sleep_time) + + attempts -= 1 + + raise AssertionError( + "{} is not available within {} attempts".format(url, tries)) + + def wait_for_rest_apis(endpoints, tries=5): + """Pause the program until all the given REST API endpoints are available. + + Args: + endpoints (list of str): A list of host:port strings. + tries (int, optional): The number of attempts to request the url for + availability. 
+ """ + for endpoint in endpoints: + http = 'http://' + url = endpoint if endpoint.startswith(http) else http + endpoint + wait_until_status( + '{}/blocks'.format(url), + status_code=200, + tries=tries) diff --git a/rest_api/tests/api_test/conftest.py b/rest_api/tests/api_test/conftest.py new file mode 100644 index 0000000000..4a68272cf6 --- /dev/null +++ b/rest_api/tests/api_test/conftest.py @@ -0,0 +1,239 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ + +import pytest +import sys +import platform +import inspect +import logging +import urllib +import json +import os + +from sawtooth_signing import create_context +from sawtooth_signing import CryptoFactory +from sawtooth_signing import ParseError +from sawtooth_signing.secp256k1 import Secp256k1PrivateKey + +from sawtooth_rest_api.protobuf.validator_pb2 import Message +from sawtooth_rest_api.protobuf import client_batch_submit_pb2 +from sawtooth_rest_api.protobuf import client_batch_pb2 +from sawtooth_rest_api.protobuf import client_list_control_pb2 + +from sawtooth_rest_api.protobuf.batch_pb2 import Batch +from sawtooth_rest_api.protobuf.batch_pb2 import BatchList +from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction + +from google.protobuf.json_format import MessageToDict + +from utils import get_batches, get_transactions, get_state_address, post_batch, get_blocks,\ + get_state_list , _delete_genesis , _start_validator, \ + _stop_validator , _create_genesis , _get_client_address, \ + _stop_settings_tp, _start_settings_tp, _get_client_address, batch_count, transaction_count,\ + get_batch_statuses + +from payload import get_signer, create_intkey_transaction , create_batch + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + + +LIMIT = 100 + + +def pytest_addoption(parser): + """Contains parsers for pytest cli commands + """ + parser.addoption( + "--get", action="store_true", default=False, help="run get tests" + ) + + parser.addoption( + "--post", action="store_true", default=False, help="run post tests" + ) + + parser.addoption( + "--sn", action="store_true", default=False, help="run scenario based tests" + ) + + parser.addoption("--batch", action="store", metavar="NAME", + help="only run batch tests." 
+ ) + + parser.addoption("--transaction", action="store", metavar="NAME", + help="only run transaction tests." + ) + + parser.addoption("--state", action="store", metavar="NAME", + help="only run state tests." + ) + + parser.addoption("--block", action="store", metavar="NAME", + help="only run state tests." + ) + + parser.addoption("-E", action="store", metavar="NAME", + help="only run tests matching the environment NAME." + ) + + parser.addoption("-N", action="store", metavar="NAME", + help="only run tests matching the Number." + ) + + parser.addoption("-O", action="store", metavar="NAME", + help="only run tests matching the OS release version." + ) + + +def pytest_collection_modifyitems(config, items): + """Filters tests based on markers when parameters passed + through the cli + """ + try: + num = int(config.getoption("-N")) + except: + num = None + + selected_items = [] + deselected_items = [] + if config.getoption("--get"): + for item in items: + for marker in list(item.iter_markers()): + if marker.name == 'get': + selected_items.append(item) + else: + deselected_items.append(item) + + items[:] = selected_items[:num] + return items + elif config.getoption("--post"): + for item in items: + for marker in item.iter_markers(): + if marker.name == 'post': + selected_items.append(item) + else: + deselected_items.append(item) + + items[:] = selected_items[:num] + return items + elif config.getoption("--sn"): + for item in items: + for marker in item.iter_markers(): + if marker.name == 'scenario': + selected_items.append(item) + else: + deselected_items.append(item) + + items[:] = selected_items[:num] + return items + else: + selected_items = items[:num] + items[:] = selected_items + return items + +@pytest.fixture(scope="session", autouse=True) +def setup(request): + """Setup method for posting batches and returning the + response + """ + data = {} + signer = get_signer() + expected_trxns = {} + expected_batches = [] + transaction_list = [] + initial_state_length = 
len(get_state_list()) + initial_batch_length = batch_count() + initial_transaction_length = transaction_count() + address = _get_client_address() + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction("set", [] , 50 , signer), + create_intkey_transaction("set", [] , 50 , signer), + ] + + for txn in txns: + dict = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + expected_trxns['trxn_id'] = [dict['header_signature']] + expected_trxns['payload'] = [dict['payload']] + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + dict = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = dict['header_signature'] + expected_batches.append(batch_id) + + length_batches = len(expected_batches) + length_transactions = len(expected_trxns) + + data['expected_batch_length'] = initial_batch_length + length_batches + data['expected_trn_length'] = initial_transaction_length + length_transactions + data['expected_txns'] = expected_trxns['trxn_id'][::-1] + data['payload'] = expected_trxns['payload'][::-1] + data['expected_batches'] = expected_batches[::-1] + data['signer_key'] = signer.get_public_key().as_hex() + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + for batch in post_batch_list: + try: + response = post_batch(batch) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + + block_list = get_blocks() + data['block_list'] = block_list + batch_list = get_batches() + data['batch_list'] = batch_list + transaction_list = get_transactions() + 
data['transaction_list'] = transaction_list + transaction_ids = [trans['header_signature'] for trans in transaction_list['data']] + data['transaction_ids'] = transaction_ids + block_ids = [block['header_signature'] for block in block_list['data']] + data['block_ids'] = block_ids[:-1] + batch_ids = [block['header']['batch_ids'][0] for block in block_list['data']] + data['batch_ids'] = batch_ids + expected_head = block_ids[0] + data['expected_head'] = expected_head + state_addresses = [state['address'] for state in get_state_list()['data']] + data['state_address'] = state_addresses + state_head_list = [get_state_address(address)['head'] for address in state_addresses] + data['state_head'] = state_head_list + data['address'] = address + data['limit'] = LIMIT + data['start'] = expected_batches[::-1][0] + data['family_name']=[block['batches'][0]['transactions'][0]['header']['family_name'] for block in block_list['data']] + return data \ No newline at end of file diff --git a/rest_api/tests/api_test/exceptions.py b/rest_api/tests/api_test/exceptions.py new file mode 100644 index 0000000000..af131f05c1 --- /dev/null +++ b/rest_api/tests/api_test/exceptions.py @@ -0,0 +1,17 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ + +class RestApiError(Exception): + pass \ No newline at end of file diff --git a/rest_api/tests/api_test/fixtures.py b/rest_api/tests/api_test/fixtures.py new file mode 100644 index 0000000000..489cfc1b48 --- /dev/null +++ b/rest_api/tests/api_test/fixtures.py @@ -0,0 +1,179 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ + +import pytest +import logging +import urllib +import json +import os + +from sawtooth_signing import create_context +from sawtooth_signing import CryptoFactory +from sawtooth_signing import ParseError +from sawtooth_signing.secp256k1 import Secp256k1PrivateKey + +from sawtooth_rest_api.protobuf.validator_pb2 import Message +from sawtooth_rest_api.protobuf import client_batch_submit_pb2 +from sawtooth_rest_api.protobuf import client_batch_pb2 +from sawtooth_rest_api.protobuf import client_list_control_pb2 + +from sawtooth_rest_api.protobuf.batch_pb2 import Batch +from sawtooth_rest_api.protobuf.batch_pb2 import BatchList +from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction + +from google.protobuf.json_format import MessageToDict + + +from utils import get_batches, get_transactions, get_state_address, 
post_batch, get_blocks, \ + get_state_list , _delete_genesis , _start_validator, \ + _stop_validator , _create_genesis , wait_for_rest_apis , _get_client_address, \ + _stop_settings_tp, _start_settings_tp + +from payload import get_signer, create_intkey_transaction , create_batch,\ + create_invalid_intkey_transaction + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + + + +@pytest.fixture(scope="function") +def break_genesis(request): + """Setup Function for deleting the genesis data + and restarting the validator with no genesis + + Waits for services to start again before + sending the request again + """ + _stop_validator() + LOGGER.info("Deleting the genesis data") + _delete_genesis() + _start_validator() + + +@pytest.fixture(scope="function") +def setup_settings_tp(request): + _stop_settings_tp() + print("settings tp is connected") + + def teardown(): + print("Connecting settings tp") + _start_settings_tp() + + request.addfinalizer(teardown) + +@pytest.fixture(scope="function") +def invalid_batch(): + """Setup method for creating invalid batches + """ + signer = get_signer() + data = {} + expected_trxns = {} + expected_batches = [] + address = _get_client_address() + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_invalid_intkey_transaction("set", [] , 50 , signer), + ] + + for txn in txns: + dict = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + expected_trxns['trxn_id'] = [dict['header_signature']] + + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + dict = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = dict['header_signature'] + expected_batches.append(batch_id) + + data['expected_txns'] = expected_trxns['trxn_id'][::-1] + data['expected_batches'] = expected_batches[::-1] + 
data['address'] = address + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + for batch in post_batch_list: + try: + response = post_batch(batch) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + return data + + +@pytest.fixture(scope="function") +def setup_batch_multiple_transaction(): + data = {} + signer = get_signer() + transactions= [] + expected_trxns = [] + expected_batches = [] + initial_state_length = len(get_state_list()) + + LOGGER.info("Creating intkey transactions with set operations") + for val in range(15): + txns = create_intkey_transaction("set", [] , 50 , signer) + transactions.append(txns) + + + for txn in transactions: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxns.append(trxn_id) + + + batch_s= create_batch(transactions, signer) + post_batch_list = BatchList(batches=[batch_s]).SerializeToString() + + LOGGER.info("Submitting batches to the handlers") + + try: + response = post_batch(post_batch_list) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + return expected_trxns + + + + + \ No newline at end of file diff --git a/rest_api/tests/api_test/get/test_rest_api_get_batch.py b/rest_api/tests/api_test/get/test_rest_api_get_batch.py new file mode 100644 index 0000000000..a5a02403de --- /dev/null +++ b/rest_api/tests/api_test/get/test_rest_api_get_batch.py @@ -0,0 +1,611 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error + + +from fixtures import break_genesis, invalid_batch +from utils import get_batches, get_batch_id, post_batch,\ + get_batch_statuses, post_batch_statuses,\ + _create_expected_link, _get_batch_list + +from base import RestApiBaseTest + +pytestmark = [pytest.mark.get , pytest.mark.batch] + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + + +START = 1 +LIMIT = 1 +COUNT = 0 +BAD_HEAD = 'f' +BAD_ID = 'f' +INVALID_START = -1 +INVALID_LIMIT = 0 +INVALID_RESOURCE_ID = 60 +INVALID_PAGING_QUERY = 54 +INVALID_COUNT_QUERY = 53 +VALIDATOR_NOT_READY = 15 +BATCH_NOT_FOUND = 71 +STATUS_ID_QUERY_INVALID = 66 +STATUS_BODY_INVALID = 43 +STATUS_WRONG_CONTENT_TYPE = 46 +WAIT = 10 + + +class TestBatchList(RestApiBaseTest): + """This class tests the batch list with different parameters + """ + def test_api_get_batch_list(self, setup): + """Tests the batch list by submitting intkey batches + """ + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_batch_length'] + payload = setup['payload'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + paging_link = '{}/batches?head={}&start={}'.format(address,\ + 
expected_head, start) + + try: + response = get_batches() + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + + batches = _get_batch_list(response) + + self.assert_valid_data(response) + self.assert_valid_head(response, expected_head) + self.assert_valid_data_list(batches, expected_length) + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) + + def test_api_get_batch_list_head(self, setup): + """Tests that GET /batches is reachable with head parameter + """ + LOGGER.info("Starting test for batch with head parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + expected_head = setup['expected_head'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + try: + response = get_batches(head_id=expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + + batches = response['data'][:-1] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) + + def test_api_get_batch_list_bad_head(self, setup): + """Tests that GET /batches is unreachable with bad head parameter + """ + LOGGER.info("Starting test for batch with bad head parameter") + + try: + batch_list = get_batches(head_id=BAD_HEAD) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + 
self.assert_valid_error(data, INVALID_RESOURCE_ID) + + + def test_api_get_batch_list_id(self, setup): + """Tests that GET /batches is reachable with id as parameter + """ + LOGGER.info("Starting test for batch with id parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + batch_ids = setup['batch_ids'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + + expected_id = batch_ids[0] + expected_length = len([expected_id]) + + expected_link = '{}/batches?head={}&start={}&limit={}&id={}'.format(address,\ + expected_head, start, limit, expected_id) + + try: + response = get_batches(id=expected_id) + except: + LOGGER.info("Rest Api is not reachable") + + + batches = response['data'][:-1] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + + def test_api_get_batch_list_bad_id(self, setup): + """Tests that GET /batches is unreachable with bad id parameter + """ + LOGGER.info("Starting test for batch with bad id parameter") + + try: + batch_list = get_batches(head_id=BAD_ID) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_RESOURCE_ID) + + def test_api_get_batch_list_head_and_id(self, setup): + """Tests GET /batches is reachable with head and id as parameters + """ + LOGGER.info("Starting test for batch with head and id parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + batch_ids = setup['batch_ids'] + start = setup['start'] + limit 
= setup['limit'] + address = setup['address'] + + expected_id = batch_ids[0] + expected_length = len([expected_id]) + + expected_link = '{}/batches?head={}&start={}&limit={}&id={}'.format(address,\ + expected_head, start, limit, expected_id) + + try: + response = get_batches(head_id=expected_head , id=expected_id) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + + + batches = response['data'][:-1] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + + def test_api_get_paginated_batch_list(self, setup): + """Tests GET /batches is reachable using paging parameters + """ + LOGGER.info("Starting test for batch with paging parameters") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = 1 + limit = 1 + + try: + response = get_batches(start=start , limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_PAGING_QUERY) + + def test_api_get_batch_list_limit(self, setup): + """Tests GET /batches is reachable using paging parameters + """ + LOGGER.info("Starting test for batch with paging parameters") + signer_key = setup['signer_key'] + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + expected_id = batch_ids[0] + start = setup['start'] + address = setup['address'] + limit = 1 + + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + try: + response = get_batches(limit=limit) + except urllib.error.HTTPError as error: + data = 
json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + batches = response['data'][:-1] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) + + + def test_api_get_batch_list_invalid_start(self, setup): + """Tests that GET /batches is unreachable with invalid start parameter + """ + LOGGER.info("Starting test for batch with invalid start parameter") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = -1 + + try: + response = get_batches(start=start) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_PAGING_QUERY) + + + def test_api_get_batch_list_invalid_limit(self, setup): + """Tests that GET /batches is unreachable with bad limit parameter + """ + LOGGER.info("Starting test for batch with bad limit parameter") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + limit = 0 + + try: + response = get_batches(limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_COUNT_QUERY) + + def test_api_get_batch_list_reversed(self, setup): + """verifies that GET /batches is unreachable with bad head parameter + """ + LOGGER.info("Starting test for batch with bad head parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + setup_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + expected_length = 
setup['expected_batch_length'] + payload = setup['payload'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + expected_batches = setup_batches[::-1] + + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + reverse = True + + try: + response = get_batches(reverse=reverse) + except urllib.error.HTTPError as error: + assert response.code == 400 + + batches = response['data'][:-1] + + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response) + + def test_api_get_batch_key_params(self, setup): + """Tests/ validate the block key parameters with data, head, link and paging + """ + response = get_batches() + assert 'link' in response + assert 'data' in response + assert 'paging' in response + assert 'head' in response + + def test_api_get_batch_param_link_val(self, setup): + """Tests/ validate the batch parameters with batches, head, start and limit + """ + try: + batch_list = get_batches() + for link in batch_list: + if(link == 'link'): + assert 'head' in batch_list['link'] + assert 'start' in batch_list['link'] + assert 'limit' in batch_list['link'] + assert 'batches' in batch_list['link'] + except urllib.error.HTTPError as error: + assert response.code == 400 + LOGGER.info("Link is not proper for batch and parameters are missing") + + def test_rest_api_check_batches_count(self, setup): + """Tests batches count from batch list + """ + count =0 + try: + batch_list = get_batches() + for batch in enumerate(batch_list['data']): + count = count+1 + except urllib.error.HTTPError as error: + LOGGER.info("Batch count not able to collect") + +class TestBatchGet(RestApiBaseTest): + def test_api_get_batch_id(self, setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = 
setup['expected_batches'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_batch_length'] + batch_ids = setup['batch_ids'] + expected_id = batch_ids[0] + payload = setup['payload'] + address = setup['address'] + + expected_link = '{}/batches/{}'.format(address, expected_batches[0]) + + try: + response = get_batch_id(expected_batches[0]) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + + batches = response['data'] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + self.assert_valid_link(response, expected_link) + + def test_api_get_bad_batch_id(self, setup): + """verifies that GET /batches/{bad_batch_id} + is unreachable with bad head parameter + """ + try: + batch_list = get_batches(head_id=BAD_ID) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_RESOURCE_ID) + +class TestBatchStatusesList(RestApiBaseTest): + """This class tests the batch status list with different parameters + """ + def test_api_post_batch_status_15ids(self, setup): + """verifies that POST /batches_statuses with more than 15 ids + """ + LOGGER.info("Starting test for batch with bad head parameter") + data = {} + batch_ids = setup['batch_ids'] + data['batch_ids'] = batch_ids + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + data_str=json.dumps(data['batch_ids']).encode() + + try: + response = post_batch_statuses(data_str) + assert response['data'][0]['status'] == "COMMITTED" + except urllib.error.HTTPError as error: + assert response.code == 400 + + def test_api_post_batch_status_10ids(self, setup): + """verifies that POST /batches_status with less than 15 ids + """ + LOGGER.info("Starting test for batch with bad head parameter") + data = {} + values = [] + batch_ids = setup['batch_ids'] + 
data['batch_ids'] = batch_ids + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + for i in range(10): + values.append(data['batch_ids'][i]) + data_str=json.dumps(values).encode() + + try: + response = post_batch_statuses(data_str) + assert response['data'][0]['status'] == "COMMITTED" + except urllib.error.HTTPError as error: + assert response.code == 400 + + def test_api_get_batch_statuses(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + status = "COMMITTED" + + + expected_link = '{}/batch_statuses?id={}'.format(address, expected_batches[0]) + + try: + response = get_batch_statuses([expected_batches[0]]) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + def test_api_get_batch_statuses_many_ids(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + status = "COMMITTED" + + batches = ",".join(expected_batches) + + expected_link = '{}/batch_statuses?id={}'.format(address, batches) + + try: + response = get_batch_statuses(expected_batches) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + def test_api_get_batch_statuses_bad_id(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + + try: + response = get_batch_statuses(BAD_ID) + except urllib.error.HTTPError as error: + data = 
json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_RESOURCE_ID) + + def test_api_get_batch_statuses_invalid_query(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + + try: + response = get_batch_statuses() + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, STATUS_ID_QUERY_INVALID) + + def test_api_get_batch_statuses_wait(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + status = "COMMITTED" + + expected_link = '{}/batch_statuses?id={}&wait={}'.format(address, expected_batches[0], WAIT) + + try: + response = get_batch_statuses([expected_batches[0]],WAIT) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + + def test_api_get_batch_statuses_invalid(self, invalid_batch): + expected_batches = invalid_batch['expected_batches'] + address = invalid_batch['address'] + status = "INVALID" + + expected_link = '{}/batch_statuses?id={}'.format(address, expected_batches[0]) + + try: + response = get_batch_statuses([expected_batches[0]]) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + + def test_api_get_batch_statuses_unknown(self, setup): + address = 
setup['address'] + expected_batches = setup['expected_batches'] + unknown_batch = expected_batches[0] + status = "UNKNOWN" + + expected_link = '{}/batch_statuses?id={}'.format(address, unknown_batch) + + try: + response = get_batch_statuses([unknown_batch]) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + def test_api_get_batch_statuses_default_wait(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + status = "COMMITTED" + + expected_link = '{}/batch_statuses?id={}&wait=300'.format(address, expected_batches[0]) + + try: + response = get_batch_statuses([expected_batches[0]],300) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) \ No newline at end of file diff --git a/rest_api/tests/api_test/get/test_rest_api_get_block.py b/rest_api/tests/api_test/get/test_rest_api_get_block.py new file mode 100644 index 0000000000..cf54b22d59 --- /dev/null +++ b/rest_api/tests/api_test/get/test_rest_api_get_block.py @@ -0,0 +1,410 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error + +from utils import get_blocks, get_block_id, get_batches, get_transactions + +from base import RestApiBaseTest + + +pytestmark = [pytest.mark.get , pytest.mark.block] + + +START = 1 +LIMIT = 1 +COUNT = 0 +BAD_HEAD = 'f' +BAD_ID = 'f' +INVALID_START = -1 +INVALID_LIMIT = 0 +INVALID_RESOURCE_ID = 60 +INVALID_PAGING_QUERY = 54 +INVALID_COUNT_QUERY = 53 +VALIDATOR_NOT_READY = 15 +BLOCK_NOT_FOUND = 70 +HEAD_LENGTH = 128 +MAX_BATCH_IN_BLOCK = 100 +FAMILY_NAME = 'xo' + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + + +class TestBlockList(RestApiBaseTest): + """This class tests the blocks list with different parameters + """ + def test_api_get_block_list(self, setup): + """Tests the block list by submitting intkey batches + """ + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + + try: + response = get_blocks() + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + + blocks = response['data'][:-1] + + def test_api_get_block_list_head(self, setup): + """Tests that GET /blocks is reachable with head parameter + """ + LOGGER.info("Starting test for blocks with head parameter") + expected_head = setup['expected_head'] + + try: + response = get_blocks(head_id=expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + assert response['head'] == expected_head , "request is not correct" + + def test_api_get_block_list_bad_head(self, setup): + """Tests that 
GET /blocks is unreachable with bad head parameter + """ + LOGGER.info("Starting test for blocks with bad head parameter") + + try: + batch_list = get_blocks(head_id=BAD_HEAD) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + + def test_api_get_block_list_id(self, setup): + """Tests that GET /blocks is reachable with id as parameter + """ + LOGGER.info("Starting test for blocks with id parameter") + + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + + try: + response = get_blocks(id=expected_id) + except: + LOGGER.info("Rest Api is not reachable") + + + assert response['head'] == expected_head, "request is not correct" + assert response['paging']['start'] == None , "request is not correct" + assert response['paging']['limit'] == None , "request is not correct" + + def test_api_get_block_list_bad_id(self, setup): + """Tests that GET /blocks is unreachable with bad id parameter + """ + LOGGER.info("Starting test for blocks with bad id parameter") + bad_id = 'f' + + try: + batch_list = get_blocks(head_id=bad_id) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + + def test_api_get_block_list_head_and_id(self, setup): + """Tests GET /blocks is reachable with head and id as parameters + """ + LOGGER.info("Starting test for blocks with head and id parameter") + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + + + response = get_blocks(head_id=expected_head , id=expected_id) + + assert response['head'] == expected_head , "head is not matching" + assert response['paging']['start'] == None , "start parameter is not correct" + assert response['paging']['limit'] == None 
, "request is not correct" + assert bool(response['data']) == True + + + def test_api_get_paginated_block_list(self, setup): + """Tests GET /blocks is reachable using paging parameters + """ + LOGGER.info("Starting test for blocks with paging parameters") + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + start = 1 + limit = 1 + + try: + response = get_blocks(start=start , limit=limit, id=expected_id) + except urllib.error.HTTPError as error: + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) + + def test_api_get_block_list_start_id(self, setup): + """Tests GET /blocks is reachable using paging parameters + """ + LOGGER.info("Starting test for blocks with paging parameters") + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + start = 1 + limit = 1 + + try: + response = get_blocks(start=start , limit=limit, id=expected_id) + except urllib.error.HTTPError as error: + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) + + def test_api_get_block_list_invalid_start(self, setup): + """Tests that GET /blocks is unreachable with invalid start parameter + """ + LOGGER.info("Starting test for batch with invalid start parameter") + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + start = -1 + + try: + response = get_blocks(start=start) + except urllib.error.HTTPError as error: + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) + + def test_api_get_block_list_limit(self, 
setup): + """Tests that GET /blocks is unreachable with invalid start parameter + """ + LOGGER.info("Starting test for batch with invalid start parameter") + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + start = -1 + + try: + response = get_blocks(start=start) + except urllib.error.HTTPError as error: + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) + + def test_api_get_block_list_invalid_limit(self, setup): + """Tests that GET /blocks is unreachable with bad limit parameter + """ + LOGGER.info("Starting test for batch with bad limit parameter") + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + limit = 0 + + try: + response = get_blocks(limit=limit) + except urllib.error.HTTPError as error: + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + self.assert_valid_error(response, INVALID_COUNT_QUERY) + + + def test_api_get_block_list_reversed(self, setup): + """verifies that GET /blocks is unreachable with bad head parameter + """ + LOGGER.info("Starting test for batch with bad head parameter") + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + reverse = True + + try: + response = get_blocks(reverse=reverse) + except urllib.error.HTTPError as error: + assert response.code == 400 + + assert response['head'] == expected_head , "request is not correct" + assert response['paging']['start'] == None , "request is not correct" + assert response['paging']['limit'] == None , "request is not correct" + assert bool(response['data']) == True + + def test_api_get_block_link_val(self, setup): + """Tests/ validate the block parameters with blocks, head, start and limit + """ + 
try: + block_list = get_blocks() + for link in block_list: + if(link == 'link'): + assert 'head' in block_list['link'] + assert 'start' in block_list['link'] + assert 'limit' in block_list['link'] + assert 'blocks' in block_list['link'] + except urllib.error.HTTPError as error: + assert response.code == 400 + LOGGER.info("Link is not proper for state and parameters are missing") + + def test_api_get_block_key_params(self, setup): + """Tests/ validate the block key parameters with data, head, link and paging + """ + response = get_blocks() + assert 'link' in response + assert 'data' in response + assert 'paging' in response + assert 'head' in response + + def test_api_get_each_batch_id_length(self, setup): + """Tests the each batch id length should be 128 hex character long + """ + try: + block_list = get_blocks() + for batch in block_list['data']: + expected_head = batch['header']['batch_ids'][0] + head_len = len(expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Batch id length is not 128 hex character long") + assert head_len == HEAD_LENGTH + + def test_api_get_first_block_id_length(self, setup): + """Tests the first block id length should be 128 hex character long + """ + try: + for block_list in get_blocks(): + batch_list = get_batches() + for block in batch_list: + expected_head = batch_list['head'] + head_len = len(expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Block id length is not 128 hex character long") + assert head_len == HEAD_LENGTH + + def test_rest_api_check_post_max_batches(self, setup): + """Tests that allow max post batches in block + Handled max 100 batches post in block and handle for extra batch + """ + block_list = get_blocks()['data'] + for batchcount, _ in enumerate(block_list, start=1): + if batchcount == MAX_BATCH_IN_BLOCK: + print("Max 100 Batches are present in Block") + + def test_rest_api_check_head_signature(self, setup): + """Tests that head signature of each batch of the block + should 
be not none + """ + block_list = get_blocks()['data'] + head_signature = [block['batches'][0]['header_signature'] for block in block_list] + for i, _ in enumerate(block_list): + head_sig = json.dumps(head_signature[i]).encode('utf8') + assert head_signature[i] is not None, "Head signature is available for all batches in block" + + def test_rest_api_check_family_version(self, setup): + """Test batch transaction family version should be present + for each transaction header + """ + block_list = get_blocks()['data'] + family_version = [block['batches'][0]['transactions'][0]['header']['family_version'] for block in block_list] + for i, _ in enumerate(block_list): + assert family_version[i] is not None, "family version present for all batches in block" + + def test_rest_api_check_input_output_content(self,setup): + """Test batch input and output content should be same for + each batch and unique from other + """ + block_list = get_blocks()['data'] + txn_input = [block['batches'][0]['transactions'][0]['header']['inputs'][0] for block in block_list] + txn_output = [block['batches'][0]['transactions'][0]['header']['outputs'][0] for block in block_list] + if(txn_input == txn_output): + return True + def test_rest_api_check_signer_public_key(self, setup): + """Tests that signer public key is calculated for a block + properly + """ + block_list = get_blocks()['data'] + signer_public_key = [block['batches'][0]['header']['signer_public_key'] for block in block_list] + assert signer_public_key is not None, "signer public key is available" + + def test_rest_api_check_blocks_count(self, setup): + """Tests blocks count from block list + """ + count =0 + try: + block_list = get_blocks() + for block in enumerate(block_list['data']): + count = count+1 + except urllib.error.HTTPError as error: + LOGGER.info("BLock count not able to collect") + + def test_rest_api_blk_content_head_signature(self, setup): + """Tests that head signature of each batch of the block + should be not none + 
""" + try: + block_list = get_blocks() + for batch in block_list['data']: + batch_list = get_batches() + for block in batch_list: + transaction_list = get_transactions() + for trans in transaction_list['data']: + head_signature = trans['header_signature'] + except urllib.error.HTTPError as error: + LOGGER.info("Header signature is missing in some of the batches") + assert head_signature is not None, "Head signature is available for all batches in block" + +class TestBlockGet(RestApiBaseTest): + def test_api_get_block_id(self, setup): + """Tests that GET /blocks/{block_id} is reachable + """ + LOGGER.info("Starting test for blocks/{block_id}") + expected_head = setup['expected_head'] + expected_block_id = setup['block_ids'][0] + + try: + response = get_block_id(block_id=expected_block_id) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + def test_api_get_bad_block_id(self, setup): + """Tests that GET /blocks/{bad_block_id} is not reachable + with bad id + """ + LOGGER.info("Starting test for blocks/{bad_block_id}") + + try: + response = get_block_id(block_id=BAD_ID) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + diff --git a/rest_api/tests/api_test/get/test_rest_api_get_peers.py b/rest_api/tests/api_test/get/test_rest_api_get_peers.py new file mode 100644 index 0000000000..55fd908aaf --- /dev/null +++ b/rest_api/tests/api_test/get/test_rest_api_get_peers.py @@ -0,0 +1,48 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error + +from utils import get_peers + +from base import RestApiBaseTest + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +pytestmark = [pytest.mark.get , pytest.mark.peers] + +PEER_LIST = [] + +class TestPeerList(RestApiBaseTest): + """This class tests the peer list with different parameters + """ + def test_api_get_peer_list(self, setup): + """Tests the peer list + """ + address = setup['address'] + expected_link = '{}/peers'.format(address) + + try: + response = get_peers() + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + + self.assert_valid_link(response, expected_link) + \ No newline at end of file diff --git a/rest_api/tests/api_test/get/test_rest_api_get_receipts.py b/rest_api/tests/api_test/get/test_rest_api_get_receipts.py new file mode 100644 index 0000000000..749fdbf951 --- /dev/null +++ b/rest_api/tests/api_test/get/test_rest_api_get_receipts.py @@ -0,0 +1,137 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------

"""Tests for the REST API /receipts endpoint (GET and POST)."""

import json
import logging
import urllib.error
import urllib.request

import pytest

from base import RestApiBaseTest
from conftest import setup
from fixtures import setup_batch_multiple_transaction
# NOTE: the helper really is named get_reciepts (sic) in utils.py.
from utils import get_reciepts, get_state_list, post_receipts

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

pytestmark = [pytest.mark.get, pytest.mark.receipts]

# REST API error codes -- presumably matching the Sawtooth REST API error
# documentation; confirm against the deployed REST API version.
RECEIPT_NOT_FOUND = 80
RECEIPT_WRONG_CONTENT_TYPE = 81
RECEIPT_BODY_INVALID = 82
RECEIPT_Id_QUERYINVALID = 83
INVALID_RESOURCE_ID = 60


class TestReceiptsList(RestApiBaseTest):
    """Tests the /receipts endpoint with different parameters."""

    def test_api_get_reciept_invalid_id(self):
        """Requests receipts for a malformed transaction id and expects
        an 'Invalid Resource Id' (60) error response.
        """
        transaction_id = "s"
        try:
            response = get_reciepts(transaction_id)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is Unreachable")
            response = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(response['error']['title'])
            LOGGER.info(response['error']['message'])
            assert response['error']['code'] == INVALID_RESOURCE_ID
            assert response['error']['title'] == 'Invalid Resource Id'

    def test_api_get_reciepts_multiple_transactions(self, setup_batch_multiple_transaction):
        """Fetches receipts for several transaction ids in one request and
        checks each returned receipt id against the submitted ids.
        """
        txn_ids = setup_batch_multiple_transaction
        # The API takes a comma-separated id list; the original built the
        # string by prepending each id, i.e. in reverse submission order.
        trans_list = ",".join(reversed(txn_ids))
        try:
            response = get_reciepts(trans_list)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is Unreachable")
            response = json.loads(error.fp.read().decode('utf-8'))

        # Receipts come back in the same (reversed) order as the query list.
        for res, txn in zip(response['data'], reversed(txn_ids)):
            assert str(res['id']) == txn

    def test_api_get_reciepts_single_transactions(self, setup):
        """Fetches receipts for a single committed transaction id."""
        expected_transaction = setup['expected_txns']
        # expected_txns is a single-element list; strip the "['...']" wrapper.
        transaction_id = str(expected_transaction)[2:-2]
        try:
            response = get_reciepts(transaction_id)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is Unreachable")
            response = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(response['error']['title'])
            LOGGER.info(response['error']['message'])
            # NOTE(review): code 80 (RECEIPT_NOT_FOUND) is asserted together
            # with the 'Invalid Resource Id' title, which belongs to code 60.
            # Preserved as-is -- confirm the expected error pair.
            assert response['error']['code'] == RECEIPT_NOT_FOUND
            assert response['error']['title'] == 'Invalid Resource Id'

    def test_api_post_reciepts_single_transactions(self, setup):
        """POSTs a JSON body containing a single transaction id and expects
        an 'Invalid Resource Id' (60) error response.
        """
        expected_transaction = setup['expected_txns']
        transaction_json = json.dumps(expected_transaction).encode()
        try:
            response = post_receipts(transaction_json)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is Unreachable")
            response = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(response['error']['title'])
            LOGGER.info(response['error']['message'])
            assert response['error']['code'] == INVALID_RESOURCE_ID
            assert response['error']['title'] == 'Invalid Resource Id'

    def test_api_post_reciepts_invalid_transactions(self):
        """POSTs a malformed body and expects 'Bad Receipts Request' (82)."""
        expected_transaction = "few"
        transaction_json = json.dumps(expected_transaction).encode()
        try:
            response = post_receipts(transaction_json)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is Unreachable")
            response = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(response['error']['title'])
            LOGGER.info(response['error']['message'])
            assert response['error']['code'] == RECEIPT_BODY_INVALID
            assert response['error']['title'] == 'Bad Receipts Request'

    def test_api_post_reciepts_multiple_transactions(self, setup_batch_multiple_transaction):
        """POSTs a JSON list of transaction ids and checks each receipt id."""
        transaction_list = setup_batch_multiple_transaction
        json_list = json.dumps(transaction_list).encode()
        try:
            response = post_receipts(json_list)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is Unreachable")
            response = json.loads(error.fp.read().decode('utf-8'))

        for res, txn in zip(response['data'], transaction_list):
            assert str(res['id']) == txn
# Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------

"""Tests for the REST API /state endpoint (list and single address)."""

import json
import logging
import urllib.error
import urllib.request

import pytest

from base import RestApiBaseTest
from fixtures import invalid_batch
from utils import get_state_address, get_state_list

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

pytestmark = [pytest.mark.get, pytest.mark.state]

# Paging defaults and deliberately malformed query values.
START = 1
LIMIT = 1
COUNT = 0
BAD_HEAD = 'f'
BAD_ID = 'f'
BAD_ADDRESS = 'f'
INVALID_START = -1
INVALID_LIMIT = 0
# REST API error codes -- confirm against the deployed REST API version.
INVALID_RESOURCE_ID = 60
INVALID_PAGING_QUERY = 54
INVALID_COUNT_QUERY = 53
VALIDATOR_NOT_READY = 15
STATE_ADDRESS_LENGTH = 70
STATE_NOT_FOUND = 75
INVALID_STATE_ADDRESS = 62
HEAD_LENGTH = 128


class TestStateList(RestApiBaseTest):
    """Tests GET /state with different query parameters."""

    def test_api_get_state_list(self, setup):
        """Checks GET /state reports the expected head after committing
        intkey batches.
        """
        expected_head = setup['expected_head']
        try:
            response = get_state_list()
        except urllib.error.HTTPError:
            LOGGER.info("Rest Api is Unreachable")
        state_list = response['data'][:-1]
        self.assert_valid_head(response, expected_head)

    def test_api_get_state_list_invalid_batch(self, invalid_batch):
        """Submits invalid intkey batches and checks /state still responds."""
        try:
            response = get_state_list()
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])

    def test_api_get_state_list_head(self, setup):
        """GET /state with a valid head parameter echoes that head back."""
        LOGGER.info("Starting test for state with head parameter")
        expected_head = setup['expected_head']
        try:
            response = get_state_list(head_id=expected_head)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api not reachable")
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
        assert response['head'] == expected_head, "request is not correct"

    def test_api_get_state_list_bad_head(self, setup):
        """GET /state with a bad head id returns Invalid Resource Id (60)."""
        LOGGER.info("Starting test for state with bad head parameter")
        try:
            get_state_list(head_id=BAD_HEAD)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is not reachable")
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            self.assert_valid_error(data, INVALID_RESOURCE_ID)

    def test_api_get_state_list_address(self, setup):
        """GET /state filtered by a known address echoes the expected head."""
        LOGGER.info("Starting test for state with address parameter")
        expected_head = setup['expected_head']
        address = setup['state_address'][0]
        try:
            response = get_state_list(address=address)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api not reachable")
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
        assert response['head'] == expected_head, "request is not correct"

    def test_api_get_state_list_bad_address(self, setup):
        """GET /state with a malformed address returns an error (60)."""
        LOGGER.info("Starting test for state with bad address parameter")
        try:
            get_state_list(address=BAD_ADDRESS)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is not reachable")
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            self.assert_valid_error(data, INVALID_PAGING_QUERY) if False else \
                self.assert_valid_error(data, INVALID_RESOURCE_ID)

    def test_api_get_paginated_state_list(self, setup):
        """GET /state with start and limit paging parameters."""
        LOGGER.info("Starting test for state with paging parameters")
        try:
            get_state_list(start=START, limit=LIMIT)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            # Only meaningful when the request actually failed; asserting
            # outside the handler raised NameError on success (original bug).
            self.assert_valid_error(data, INVALID_PAGING_QUERY)

    def test_api_get_paginated_state_list_limit(self, setup):
        """GET /state with only a limit paging parameter."""
        LOGGER.info("Starting test for state with limit parameter")
        try:
            get_state_list(limit=LIMIT)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])

    def test_api_get_paginated_state_list_start(self, setup):
        """GET /state with only a start paging parameter."""
        LOGGER.info("Starting test for state with start parameter")
        try:
            # BUG FIX: the original duplicated the limit test here and
            # never exercised the start parameter.
            get_state_list(start=START)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])

    def test_api_get_state_list_bad_paging(self, setup):
        """GET /state with negative start and limit returns an error (53)."""
        LOGGER.info("Starting test for state with bad paging parameters")
        try:
            get_state_list(start=-1, limit=-1)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            self.assert_valid_error(data, INVALID_COUNT_QUERY)

    def test_api_get_state_list_invalid_start(self, setup):
        """GET /state with a negative start returns Invalid Paging (54)."""
        LOGGER.info("Starting test for state with invalid start parameter")
        try:
            get_state_list(start=INVALID_START)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            self.assert_valid_error(data, INVALID_PAGING_QUERY)

    def test_api_get_state_list_invalid_limit(self, setup):
        """GET /state with a zero limit returns Invalid Count (53)."""
        LOGGER.info("Starting test for state with bad limit parameter")
        try:
            get_state_list(limit=INVALID_LIMIT)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            self.assert_valid_error(data, INVALID_COUNT_QUERY)

    def test_api_get_state_list_reversed(self, setup):
        """GET /state with reverse=True still returns data and paging."""
        LOGGER.info("Starting test for state with reverse parameter")
        try:
            response = get_state_list(reverse=True)
        except urllib.error.HTTPError as error:
            # BUG FIX: the original read `.code` from an unbound `response`;
            # the HTTP status lives on the error object.
            assert error.code == 400
            return
        assert response['paging']['start'] is None, "request is not correct"
        assert response['paging']['limit'] is None, "request is not correct"
        assert bool(response['data']) == True

    def test_api_get_state_data_address_prefix_namespace(self, setup):
        """Each state entry should be reachable via its 6-hex-character
        namespace prefix.
        """
        try:
            for state in get_state_list()['data']:
                namespace = state['address'][:6]
                get_state_list(address=namespace)
        except urllib.error.HTTPError:
            LOGGER.info(
                "Not able to access related state address using namespace prefix")

    def test_api_get_state_data_head_wildcard_character(self, setup):
        """State head with a wildcard character ***STL-1345*** -- disabled."""
        # Scenario disabled pending STL-1345; see VCS history for the
        # original draft implementation.
        pass

    def test_api_get_state_data_head_partial_character(self, setup):
        """GET /state with a truncated head id returns Invalid Resource Id
        ***STL-1345***
        """
        try:
            for _ in get_state_list()['data']:
                partial_head = setup['expected_head'][:6]
                get_state_list(head_id=partial_head)
        except urllib.error.HTTPError as error:
            LOGGER.info("Not able to access ")
            data = json.loads(error.fp.read().decode('utf-8'))
            if data:
                LOGGER.info(data['error']['title'])
                LOGGER.info(data['error']['message'])
                assert data['error']['code'] == INVALID_RESOURCE_ID
                assert data['error']['title'] == 'Invalid Resource Id'

    def test_api_get_state_data_address_partial_character(self, setup):
        """GET /state with a truncated address returns Invalid State Address
        ***STL-1346***
        """
        try:
            for _ in get_state_list()['data']:
                # NOTE(review): the truncated value is passed as head_id,
                # not address, although the test asserts the address error
                # (62). Preserved as-is -- confirm the intended parameter.
                partial = setup['expected_head'][:6]
                get_state_list(head_id=partial)
        except urllib.error.HTTPError as error:
            LOGGER.info("Not able to access ")
            data = json.loads(error.fp.read().decode('utf-8'))
            if data:
                LOGGER.info(data['error']['title'])
                LOGGER.info(data['error']['message'])
                assert data['error']['code'] == INVALID_STATE_ADDRESS
                assert data['error']['title'] == 'Invalid State Address'

    def test_api_get_state_data_address_length(self, setup):
        """Every state entry address is 70 hex characters long."""
        address_length = None
        try:
            response = get_state_list()
            for entry in response['data']:
                address_length = len(entry['address'])
                assert address_length == STATE_ADDRESS_LENGTH
        except urllib.error.HTTPError:
            LOGGER.info("State address is not 70 character long")
        # Guarded init avoids NameError when the request fails (original bug).
        assert address_length == STATE_ADDRESS_LENGTH

    def test_api_get_state_data_address_with_odd_hex_value(self, setup):
        """State addresses must contain an even number of hex characters."""
        try:
            response = get_state_list()
            for entry in response['data']:
                # BUG FIX: the original only tested the condition and did
                # nothing with the result; assert it instead.
                assert len(entry['address']) % 2 == 0
        except urllib.error.HTTPError:
            LOGGER.info("Odd state address is not correct")

    def test_api_get_state_data_address_with_reduced_length(self, setup):
        """Querying with an even-length truncated address still succeeds."""
        try:
            response = get_state_list()
            for _ in response['data']:
                truncated = response['data'][0]['address'][:-4]
                get_state_list(address=truncated)
        except urllib.error.HTTPError:
            LOGGER.info("Reduced length data address failed to processed")

    def test_api_get_state_data_address_64_Hex(self, setup):
        """Querying with only the trailing 64 hex characters of an address
        returns an empty data list.
        """
        try:
            response = get_state_list()
            for _ in response['data']:
                tail = response['data'][0]['address'][6:70]
                naddress = get_state_list(address=tail)
                assert naddress['data'] == []
        except urllib.error.HTTPError:
            LOGGER.info("state data address with 64 hex characters not processed ")

    def test_api_get_state_data_address_alter_bytes(self, setup):
        """Querying with an altered (non-hex) address byte returns no data."""
        try:
            response = get_state_list()
            for _ in response['data']:
                prefix = response['data'][0]['address'][6:8]
                naddress = get_state_list(address=prefix)
                # NOTE(review): this mutates a list of the *response dict's
                # keys*, not the address string; preserved from the original
                # -- confirm the intended manipulation.
                altered = list(naddress)
                altered[2] = 'z'
                ''.join(altered)
        except urllib.error.HTTPError:
            LOGGER.info("state data address with altered bytes not processed ")

    def test_api_get_state_link_val(self, setup):
        """The link field of GET /state carries head/start/limit params."""
        try:
            state_list = get_state_list()
            for key in state_list:
                if key == 'link':
                    assert 'head' in state_list['link']
                    assert 'start' in state_list['link']
                    assert 'limit' in state_list['link']
                    assert 'state' in state_list['link']
        except urllib.error.HTTPError as error:
            # BUG FIX: original asserted on an undefined `response`.
            assert error.code == 400
            LOGGER.info("Link is not proper for state and parameters are missing")

    def test_api_get_state_key_params(self, setup):
        """GET /state response exposes link, data, paging and head keys."""
        response = get_state_list()
        assert 'link' in response
        assert 'data' in response
        assert 'paging' in response
        assert 'head' in response

    def test_api_get_each_state_head_length(self, setup):
        """The state head id is 128 hex characters long."""
        head_len = None
        try:
            for _ in get_state_list()['data']:
                head_len = len(setup['expected_head'])
        except urllib.error.HTTPError:
            LOGGER.info("State Head length is not 128 hex character long")
        # Guarded init avoids NameError when the request fails (original bug).
        assert head_len == HEAD_LENGTH

    def test_rest_api_check_state_count(self, setup):
        """Counts the entries returned by GET /state."""
        count = 0
        try:
            count = len(get_state_list()['data'])
        except urllib.error.HTTPError:
            LOGGER.info("State count not able to collect")


class TestStateGet(RestApiBaseTest):
    """Tests GET /state/{address}."""

    def test_api_get_state_address(self, setup):
        """Fetches a single state entry by its full address."""
        address = setup['state_address'][0]
        try:
            get_state_address(address=address)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])

    def test_api_get_bad_address(self, setup):
        """GET /state/{bad_address} returns Invalid State Address (62)."""
        try:
            get_state_address(address=BAD_ADDRESS)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            self.assert_valid_error(data, INVALID_STATE_ADDRESS)
# Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------

"""Tests for the REST API /transactions endpoint (list and single id)."""

import json
import logging
import urllib.error
import urllib.request

import pytest

from base import RestApiBaseTest
from fixtures import break_genesis
from utils import get_transaction_id, get_transactions

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

pytestmark = [pytest.mark.get, pytest.mark.transactions]

# Paging defaults and deliberately malformed query values.
START = 1
LIMIT = 1
COUNT = 0
BAD_HEAD = 'f'
BAD_ID = 'f'
INVALID_START = -1
INVALID_LIMIT = 0
# REST API error codes -- confirm against the deployed REST API version.
INVALID_RESOURCE_ID = 60
INVALID_PAGING_QUERY = 54
INVALID_COUNT_QUERY = 53
VALIDATOR_NOT_READY = 15
TRANSACTION_NOT_FOUND = 72
HEAD_LENGTH = 128


class TestTransactionList(RestApiBaseTest):
    """Tests GET /transactions with different query parameters."""

    def test_api_get_transaction_list(self, setup):
        """GET /transactions returns the committed intkey transactions."""
        expected_head = setup['expected_head']
        expected_txns = setup['expected_txns']
        address = setup['address']
        start = expected_txns[::-1][0]
        expected_link = '{}/transactions?head={}&start={}&limit={}'.format(
            address, expected_head, start, LIMIT)
        try:
            response = get_transactions()
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is Unreachable")
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
        txns = response['data'][:-1]
        # NOTE(review): sequence/head/paging assertions were commented out
        # in the original draft; re-enable once assert_check_transaction_seq
        # is stable.

    def test_api_get_transaction_list_head(self, setup):
        """GET /transactions with head parameter echoes that head back."""
        LOGGER.info("Starting test for transactions with head parameter")
        signer_key = setup['signer_key']
        expected_head = setup['expected_head']
        expected_txns = setup['expected_txns']
        payload = setup['payload'][0]
        try:
            response = get_transactions(head_id=expected_head)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api not reachable")
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
        txns = response['data'][:-1]
        self.assert_check_transaction_seq(txns, expected_txns,
                                          payload, signer_key)
        self.assert_valid_head(response, expected_head)

    def test_api_get_transaction_list_bad_head(self, setup):
        """GET /transactions with a bad head returns Invalid Resource Id."""
        LOGGER.info("Starting test for transactions with bad head parameter")
        try:
            get_transactions(head_id=BAD_HEAD)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is not reachable")
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            self.assert_valid_error(data, INVALID_RESOURCE_ID)

    def test_api_get_transaction_list_id(self, setup):
        """GET /transactions filtered by a known transaction id."""
        LOGGER.info("Starting test for transactions with id parameter")
        signer_key = setup['signer_key']
        expected_txns = setup['expected_txns']
        payload = setup['payload'][0]
        expected_id = setup['transaction_ids'][0]
        try:
            response = get_transactions(id=expected_id)
        except urllib.error.HTTPError:
            # BUG FIX: was a bare `except:`; catch only the HTTP failure.
            LOGGER.info("Rest Api is not reachable")
        txns = response['data'][:-1]
        self.assert_check_transaction_seq(txns, expected_txns,
                                          payload, signer_key)

    def test_api_get_transaction_list_bad_id(self, setup):
        """GET /transactions with a bad id returns Invalid Resource Id."""
        LOGGER.info("Starting test for transactions with bad id parameter")
        try:
            # NOTE(review): the original passed head_id although this test
            # targets the id filter; preserved to keep behavior -- both
            # yield error 60 here. Confirm the intended parameter.
            get_transactions(head_id=BAD_ID)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is not reachable")
            data = json.loads(error.fp.read().decode('utf-8'))
            self.assert_valid_error(data, INVALID_RESOURCE_ID)

    def test_api_get_transaction_list_head_and_id(self, setup):
        """GET /transactions with both head and id parameters."""
        LOGGER.info("Starting test for transactions with head and id parameter")
        signer_key = setup['signer_key']
        expected_head = setup['expected_head']
        expected_txns = setup['expected_txns']
        payload = setup['payload'][0]
        expected_id = setup['transaction_ids'][0]
        try:
            response = get_transactions(head_id=expected_head, id=expected_id)
        except urllib.error.HTTPError:
            # BUG FIX: was a bare `except:`; catch only the HTTP failure.
            LOGGER.info("Rest Api not reachable")
        txns = response['data'][:-1]
        self.assert_check_transaction_seq(txns, expected_txns,
                                          payload, signer_key)
        self.assert_valid_head(response, expected_head)

    def test_api_get_paginated_transaction_list(self, setup):
        """GET /transactions with start and limit paging parameters."""
        LOGGER.info("Starting test for transactions with paging parameters")
        try:
            get_transactions(start=START, limit=LIMIT)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            self.assert_valid_error(data, INVALID_PAGING_QUERY)

    def test_api_get_transaction_bad_paging(self, setup):
        """GET /transactions with negative paging values returns error 53."""
        LOGGER.info("Starting test for transactions with bad paging parameters")
        try:
            get_transactions(start=-1, limit=-1)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            self.assert_valid_error(data, INVALID_COUNT_QUERY)

    def test_api_get_transaction_list_invalid_start(self, setup):
        """GET /transactions with a negative start returns error 54."""
        LOGGER.info("Starting test for transactions with invalid start parameter")
        try:
            get_transactions(start=INVALID_START)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            self.assert_valid_error(data, INVALID_PAGING_QUERY)

    def test_api_get_transaction_list_invalid_limit(self, setup):
        """GET /transactions with a zero limit returns error 53."""
        LOGGER.info("Starting test for transactions with bad limit parameter")
        try:
            get_transactions(limit=INVALID_LIMIT)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            self.assert_valid_error(data, INVALID_COUNT_QUERY)

    def test_api_get_transaction_list_reversed(self, setup):
        """GET /transactions with reverse=True still returns data."""
        LOGGER.info("Starting test for transactions with list reversed")
        try:
            response = get_transactions(reverse=True)
        except urllib.error.HTTPError as error:
            # BUG FIX: the original read `.code` from an unbound `response`;
            # the HTTP status lives on the error object.
            assert error.code == 400
            return
        assert response['paging']['start'] is None, "request is not correct"
        assert response['paging']['limit'] is None, "request is not correct"
        assert bool(response['data']) == True

    def test_api_get_transactions_link_val(self, setup):
        """The link field carries head/start/limit/transactions params."""
        try:
            transactions_list = get_transactions()
            for key in transactions_list:
                if key == 'link':
                    assert 'head' in transactions_list['link']
                    assert 'start' in transactions_list['link']
                    assert 'limit' in transactions_list['link']
                    assert 'transactions' in transactions_list['link']
        except urllib.error.HTTPError as error:
            # BUG FIX: original asserted on an undefined `response`.
            assert error.code == 400
            LOGGER.info("Link is not proper for transactions and parameters are missing")

    def test_api_get_transactions_key_params(self, setup):
        """Response exposes link, data, paging and head keys."""
        response = get_transactions()
        assert 'link' in response
        assert 'data' in response
        assert 'paging' in response
        assert 'head' in response

    def test_api_get_transaction_id_length(self, setup):
        """Each transaction id is 128 hex characters long."""
        id_len = None
        try:
            for trans in get_transactions()['data']:
                id_len = len(trans['header_signature'])
        except urllib.error.HTTPError:
            LOGGER.info("Transaction id length is not 128 hex character long")
        # Guarded init avoids NameError when the request fails (original bug).
        assert id_len == HEAD_LENGTH

    def test_rest_api_check_transactions_count(self, setup):
        """Counts the transactions returned by GET /transactions."""
        count = 0
        try:
            count = len(get_transactions()['data'])
        except urllib.error.HTTPError:
            LOGGER.info("Transaction count not able to collect")


class TesttransactionGet(RestApiBaseTest):
    """Tests GET /transactions/{transaction_id}."""

    def test_api_get_transaction_id(self, setup):
        """Fetches a single transaction by id and validates the link."""
        LOGGER.info("Starting test for transaction/{transaction_id}")
        expected_id = setup['transaction_ids'][0]
        address = setup['address']
        expected_link = '{}/transactions/{}'.format(address, expected_id)
        try:
            response = get_transaction_id(transaction_id=expected_id)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api not reachable")
            response = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(response['error']['title'])
            LOGGER.info(response['error']['message'])
        self.assert_valid_link(response, expected_link)
        assert bool(response['data']) == True

    def test_api_get_transaction_bad_id(self, setup):
        """GET /transactions/{bad_id} returns Invalid Resource Id (60)."""
        LOGGER.info("Starting test for transactions/{transaction_id}")
        try:
            response = get_transaction_id(transaction_id=BAD_ID)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api not reachable")
            response = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(response['error']['title'])
            LOGGER.info(response['error']['message'])
        self.assert_valid_error(response, INVALID_RESOURCE_ID)
# ------------------------------------------------------------------------------

"""Multi-validator REST API tests: workload generation, SSH-driven node
control and consensus checks across a validator network.
"""

import argparse
import base64
import json
import logging
import os
import shlex
import signal
import subprocess
import sys
import threading
import time
import urllib.error
import urllib.request

import cbor
import paramiko
import pytest
import requests

from google.protobuf.json_format import MessageToDict

from base import RestApiBaseTest
from payload import get_signer, create_intkey_transaction, create_batch
from utils import _get_client_address, _send_cmd, _get_node_list, \
    _get_node_chain, check_for_consensus, _stop_validator

from workload import Workload
from ssh import SSH
from thread import Workload_thread, SSH_thread, Consensus_Thread, \
    wait_for_event, wait_for_event_timeout


logging.basicConfig(
    level=logging.INFO,
    format='[%(levelname)s] (%(threadName)-10s) %(message)s',
)

WAIT_TIME = 10
PORT = 22
USERNAME = 'test'
# SECURITY(review): hard-coded SSH credentials in source control; move
# these to environment variables or test configuration.
PASSWORD = 'aditya9971'

# Number of blocks the chains must agree on to declare consensus.
BLOCK_TO_CHECK_CONSENSUS = 1

pytestmark = pytest.mark.mul


class TestMultiple(RestApiBaseTest):
    """Multi-validator scenarios driven through the REST API."""

    def test_rest_api_mul_val_intk(self):
        """Submits intkey batches (one transaction per batch) and checks
        all validators reach consensus on the resulting chain.
        """
        signer = get_signer()
        # Accumulate ids/payloads per transaction; the original overwrote
        # these entries on every loop iteration.
        expected_trxns = {'trxn_id': [], 'payload': []}

        logging.info('Starting Test for Intkey payload')
        logging.info("Creating intkey batches")

        txns = [
            create_intkey_transaction("set", [], 50, signer),
            create_intkey_transaction("set", [], 50, signer),
        ]

        for txn in txns:
            # Renamed from `dict`, which shadowed the builtin.
            txn_dict = MessageToDict(
                txn,
                including_default_value_fields=True,
                preserving_proto_field_name=True)
            expected_trxns['trxn_id'].append(txn_dict['header_signature'])
            expected_trxns['payload'].append(txn_dict['payload'])

        logging.info("Creating batches for transactions 1trn/batch")
        batches = [create_batch([txn], signer) for txn in txns]

        node_list = _get_node_list()
        chains = _get_node_chain(node_list)
        check_for_consensus(chains, BLOCK_TO_CHECK_CONSENSUS)

    def test_rest_api_mul_val_Node(self):
        """Brings leaf nodes up/down over SSH while a workload runs, and
        verifies consensus checks on the respective nodes.
        """
        leaf_nodes = ['10.223.155.134', '10.223.155.25']
        threads = []

        workload_thread = Workload_thread()
        workload_thread.setName('workload_thread')
        workload_thread.start()

        consensus_thread = Consensus_Thread(leaf_nodes)
        consensus_thread.setName('consensus_thread')
        consensus_thread.setDaemon(True)
        consensus_thread.start()

        for node in leaf_nodes:
            ssh_thread = SSH_thread(node, PORT, USERNAME, PASSWORD)
            ssh_thread.setName('ssh_thread')
            threads.append(ssh_thread)

        # NOTE(review): join() inside the loop serialises the SSH threads
        # (each finishes before the next starts); preserved from the
        # original -- confirm whether parallel execution is intended.
        for thread in threads:
            thread.start()
            thread.join()

        consensus_thread.join()
        workload_thread.join()
# ------------------------------------------------------------------------------

import hashlib
import logging
import os
import random
import string

import cbor

from sawtooth_signing import create_context
from sawtooth_signing import CryptoFactory

from sawtooth_rest_api.protobuf.batch_pb2 import Batch
from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader
from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader
from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction

# First six hex chars of sha512('intkey'): the intkey namespace prefix.
INTKEY_ADDRESS_PREFIX = hashlib.sha512(
    'intkey'.encode('utf-8')).hexdigest()[0:6]

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

WAIT = 300


class IntKeyPayload(object):
    """CBOR-serializable intkey payload (verb, name, value)."""

    def __init__(self, verb, name, value):
        self._verb = verb
        self._name = name
        self._value = value
        self._cbor = None      # cached CBOR encoding
        self._sha512 = None    # cached hex digest of the CBOR encoding

    def to_hash(self):
        """Return the payload as a plain dict."""
        return {
            'Verb': self._verb,
            'Name': self._name,
            'Value': self._value
        }

    def to_cbor(self):
        """Return (and cache) the canonical CBOR encoding of the payload."""
        if self._cbor is None:
            self._cbor = cbor.dumps(self.to_hash(), sort_keys=True)
        return self._cbor

    def sha512(self):
        """Return (and cache) the hex sha512 digest of the CBOR payload."""
        if self._sha512 is None:
            self._sha512 = hashlib.sha512(self.to_cbor()).hexdigest()
        return self._sha512


def _build_transaction(payload, address, deps, signer):
    """Build and sign an intkey Transaction reading/writing one address.

    Factored out of the three create_* functions below, which previously
    repeated this header/sign/assemble sequence verbatim.
    """
    header = TransactionHeader(
        signer_public_key=signer.get_public_key().as_hex(),
        family_name='intkey',
        family_version='1.0',
        inputs=[address],
        outputs=[address],
        dependencies=deps,
        payload_sha512=payload.sha512(),
        batcher_public_key=signer.get_public_key().as_hex())

    header_bytes = header.SerializeToString()
    signature = signer.sign(header_bytes)

    return Transaction(
        header=header_bytes,
        payload=payload.to_cbor(),
        header_signature=signature)


def create_intkey_transaction(verb, deps, count, signer):
    """Create a signed intkey transaction setting a random name to 1."""
    name = random.choice(random_word_list(count))
    payload = IntKeyPayload(verb=verb, name=name, value=1)
    return _build_transaction(payload, make_intkey_address(name), deps, signer)


def create_invalid_intkey_transaction(verb, deps, count, signer):
    """Create a transaction whose address lies OUTSIDE the intkey
    namespace, used to provoke rejection by the validator."""
    name = random.choice(random_word_list(count))
    payload = IntKeyPayload(verb=verb, name=name, value=1)

    invalid_prefix = hashlib.sha512(
        'invalid'.encode('utf-8')).hexdigest()[0:6]
    address = invalid_prefix + hashlib.sha512(
        name.encode('utf-8')).hexdigest()[-64:]

    return _build_transaction(payload, address, deps, signer)


def create_intkey_same_transaction(verb, deps, count, signer):
    """Create a transaction that always targets the same key 'a', used to
    provoke duplicate/conflicting submissions."""
    name = 'a'
    payload = IntKeyPayload(verb=verb, name=name, value=1)
    return _build_transaction(payload, make_intkey_address(name), deps, signer)


def create_batch(transactions, signer):
    """Wrap *transactions* (a list of Transaction) in one signed Batch."""
    header = BatchHeader(
        signer_public_key=signer.get_public_key().as_hex(),
        transaction_ids=[t.header_signature for t in transactions])

    header_bytes = header.SerializeToString()
    signature = signer.sign(header_bytes)

    return Batch(
        header=header_bytes,
        transactions=transactions,
        header_signature=signature)


def get_signer():
    """Return a new signer backed by a random secp256k1 private key."""
    context = create_context('secp256k1')
    private_key = context.new_random_private_key()
    return CryptoFactory(context).new_signer(private_key)


def make_intkey_address(name):
    """Return the 70-character state address for intkey entry *name*."""
    return INTKEY_ADDRESS_PREFIX + hashlib.sha512(
        name.encode('utf-8')).hexdigest()[-64:]


def random_word():
    """Return a random six-letter ASCII word."""
    return ''.join(random.choice(string.ascii_letters) for _ in range(6))


def random_word_list(count):
    """Return *count* words from the system dictionary when available,
    otherwise *count* random words."""
    if os.path.isfile('/usr/share/dict/words'):
        with open('/usr/share/dict/words', 'r') as fd:
            return [x.strip() for x in fd.readlines()[0:count]]
    return [random_word() for _ in range(count)]
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
import json
import logging
import urllib.error

import pytest

from google.protobuf.json_format import MessageToDict

from sawtooth_rest_api.protobuf.batch_pb2 import BatchList

from utils import post_batch, get_state_list, get_blocks, get_transactions, \
    get_state_address, check_for_consensus, \
    _get_node_list, _get_node_chains

from payload import get_signer, create_intkey_transaction, create_batch, \
    create_intkey_same_transaction

from base import RestApiBaseTest

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

BAD_PROTOBUF = b'BAD_PROTOBUF'
EMPTY_BATCH = b''
# REST API error codes (see the Sawtooth REST API error reference)
NO_BATCHES_SUBMITTED = 34
BAD_PROTOBUF_SUBMITTED = 35
BATCH_QUEUE_FULL = 31
INVALID_BATCH = 30
WRONG_CONTENT_TYPE = 43

BLOCK_TO_CHECK_CONSENSUS = 1

pytestmark = pytest.mark.post


class TestPost(RestApiBaseTest):
    """POST /batches tests against a running REST API."""

    @staticmethod
    def _expected_ids(messages):
        """Return the header signatures of protobuf *messages*."""
        return [
            MessageToDict(
                msg,
                including_default_value_fields=True,
                preserving_proto_field_name=True)['header_signature']
            for msg in messages]

    def test_rest_api_post_batch(self):
        """Tests that transactions are submitted and committed for
        each block that are created by submitting intkey batches
        with set operations.
        """
        LOGGER.info('Starting test for batch post')

        signer = get_signer()
        initial_state_length = len(get_state_list()['data'])

        LOGGER.info("Creating intkey transactions with set operations")
        txns = [
            create_intkey_transaction("set", [], 50, signer),
            create_intkey_transaction("set", [], 50, signer),
        ]
        expected_trxn_ids = self._expected_ids(txns)

        LOGGER.info("Creating batches for transactions 1trn/batch")
        batches = [create_batch([txn], signer) for txn in txns]
        expected_batch_ids = self._expected_ids(batches)

        post_batch_list = [
            BatchList(batches=[batch]).SerializeToString()
            for batch in batches]

        LOGGER.info("Submitting batches to the handlers")
        # bug fix: 'response' could previously be referenced while unbound
        # if every submission raised HTTPError
        response = None
        for batch in post_batch_list:
            try:
                response = post_batch(batch)
            except urllib.error.HTTPError as error:
                LOGGER.info(error.fp.read().decode('utf-8'))

        assert response is not None, 'no batch submission succeeded'

        block_batch_ids = [
            block['header']['batch_ids'][0]
            for block in get_blocks()['data']]
        state_addresses = [
            state['address'] for state in get_state_list()['data']]
        state_head_list = [
            get_state_address(address)['head']
            for address in state_addresses]
        committed_transaction_list = get_transactions()['data']

        status = response['data'][0]['status']
        if status == 'COMMITTED':
            LOGGER.info('Batch is committed')
        elif status == 'INVALID':
            LOGGER.info('Batch submission failed')
            invalid = response['data'][0]['invalid_transactions'][0]
            if 'message' in invalid:
                LOGGER.info(invalid['message'])

        # bug fix: this loop previously iterated an undefined name
        # 'batch_ids' (NameError); it should check the expected ids
        for batch_id in expected_batch_ids:
            if batch_id in block_batch_ids:
                LOGGER.info("Block is created for the respective batch")

        final_state_length = len(get_state_list()['data'])
        node_list = _get_node_list()
        chains = _get_node_chains(node_list)
        assert final_state_length == \
            initial_state_length + len(expected_batch_ids)
        assert check_for_consensus(chains, BLOCK_TO_CHECK_CONSENSUS)

    def test_rest_api_no_batches(self):
        """An empty POST body must be rejected with error 34."""
        # bug fix: log message previously said "bad protobuf" (copy/paste)
        LOGGER.info("Starting test for post with no batches")

        response = None
        try:
            response = post_batch(batch=EMPTY_BATCH)
        except urllib.error.HTTPError as error:
            response = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(response['error']['title'])
            LOGGER.info(response['error']['message'])

        self.assert_valid_error(response, NO_BATCHES_SUBMITTED)

    def test_rest_api_bad_protobuf(self):
        """A malformed protobuf body must be rejected with error 35."""
        LOGGER.info("Starting test for batch with bad protobuf")

        response = None
        try:
            response = post_batch(batch=BAD_PROTOBUF)
        except urllib.error.HTTPError as error:
            response = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(response['error']['title'])
            LOGGER.info(response['error']['message'])

        self.assert_valid_error(response, BAD_PROTOBUF_SUBMITTED)

    def test_rest_api_post_wrong_header(self, setup):
        """Tests rest api by posting with wrong header: JSON Content-Type
        must be rejected with error 42 / HTTP 400."""
        LOGGER.info('Starting test for batch post with wrong header')

        signer = get_signer()
        LOGGER.info("Creating intkey transactions with set operations")
        txns = [create_intkey_transaction("set", [], 50, signer)
                for _ in range(3)]
        expected_trxn_ids = self._expected_ids(txns)

        LOGGER.info("Creating batches for transactions 1trn/batch")
        batches = [create_batch([txn], signer) for txn in txns]
        expected_batch_ids = self._expected_ids(batches)

        post_batch_list = [
            BatchList(batches=[batch]).SerializeToString()
            for batch in batches]

        LOGGER.info("Submitting batches to the handlers")
        for batch in post_batch_list:
            try:
                post_batch(batch, headers="True")
            except urllib.error.HTTPError as e:
                error = json.loads(e.file.read().decode("utf-8"))
                LOGGER.info(error['error']['message'])
                assert error['error']['code'] == 42
                assert e.code == 400

    def test_rest_api_post_same_txns(self, setup):
        """Tests the rest-api by submitting multiple transactions with
        the same key; later submissions must come back INVALID."""
        LOGGER.info('Starting test for duplicate-key batch post')

        signer = get_signer()
        txns = [create_intkey_same_transaction("set", [], 50, signer)
                for _ in range(3)]
        expected_trxn_ids = self._expected_ids(txns)

        batches = [create_batch([txn], signer) for txn in txns]
        expected_batch_ids = self._expected_ids(batches)

        post_batch_list = [
            BatchList(batches=[batch]).SerializeToString()
            for batch in batches]

        LOGGER.info("Submitting batches to the handlers")
        for batch in post_batch_list:
            try:
                response = post_batch(batch, headers="None")
                assert response['data'][0]['status'] == "INVALID"
            except urllib.error.HTTPError as e:
                error = json.loads(e.file.read().decode("utf-8"))
                LOGGER.info(error['error']['message'])
                assert error['error']['code'] == 42
                assert e.code == 400

    def test_rest_api_multiple_txns_batches(self, setup):
        """Tests rest-api state by submitting multiple transactions in
        a single batch."""
        LOGGER.info('Starting test for multiple transactions in one batch')

        signer = get_signer()
        initial_state_length = len(get_state_list())

        txns = [create_intkey_transaction("set", [], 50, signer)
                for _ in range(3)]
        expected_trxn_ids = self._expected_ids(txns)

        # bug fix: create_batch takes the transaction list directly; the
        # original passed [txns] (a list containing a list)
        batches = [create_batch(txns, signer)]
        expected_batch_ids = self._expected_ids(batches)

        post_batch_list = [
            BatchList(batches=[batch]).SerializeToString()
            for batch in batches]

        LOGGER.info("Submitting batches to the handlers")
        for batch in post_batch_list:
            try:
                post_batch(batch, headers="None")
                get_state_list()
            except urllib.error.HTTPError as e:
                error = json.loads(e.file.read().decode("utf-8"))
                LOGGER.info(error['error']['message'])
                assert error['error']['code'] == 17
                assert e.code == 400

        final_state_length = len(get_state_list())
        assert initial_state_length == final_state_length

    def test_api_post_batch_different_signer(self, setup):
        """A batch signed by a different key than the transactions'
        batcher key must be rejected as invalid (error 30).

        NOTE(review): this method appeared twice verbatim in the original
        file; the second definition silently shadowed the first, so the
        duplicate was removed.
        """
        signer_trans = get_signer()
        txn = create_intkey_transaction("set", [], 50, signer_trans)
        signer_batch = get_signer()
        batch = create_batch([txn], signer_batch)
        batch_list = [BatchList(batches=[batch]).SerializeToString()]

        for serialized in batch_list:
            try:
                response = post_batch(serialized)
                LOGGER.info(response)
            except urllib.error.HTTPError as error:
                data = json.loads(error.fp.read().decode('utf-8'))
                LOGGER.info(data['error']['title'])
                LOGGER.info(data['error']['message'])
                assert data['error']['code'] == INVALID_BATCH
                assert data['error']['title'] == 'Submitted Batches Invalid'
+log_cli_date_format = %Y-%m-%d %H:%M:%S +log_cli_format = %(asctime)s %(levelname)s %(message)s +log_date_format = %Y-%m-%d %H:%M:%S +log_file = pytest-logs.txt +log_file_date_format = %Y-%m-%d %H:%M:%S +log_file_format = %(asctime)s %(levelname)s %(message)s +log_format = %(asctime)s %(levelname)s %(message)s diff --git a/rest_api/tests/api_test/scenario/test_rest_api_scenario.py b/rest_api/tests/api_test/scenario/test_rest_api_scenario.py new file mode 100644 index 0000000000..bc4510c05d --- /dev/null +++ b/rest_api/tests/api_test/scenario/test_rest_api_scenario.py @@ -0,0 +1,130 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# ------------------------------------------------------------------------------

import logging

import pytest

from google.protobuf.json_format import MessageToDict

from payload import get_signer, create_intkey_transaction, create_batch
from utils import _get_client_address, _send_cmd, _get_node_list, \
    _get_node_chain, check_for_consensus

from base import RestApiBaseTest

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

WAIT = 300
WORKLOAD_TIME = 5
BLOCK_TO_CHECK_CONSENSUS = 1

# Transaction-family namespace prefixes.
INTKEY_PREFIX = '1cf126'
XO_PREFIX = '5b7349'

pytestmark = pytest.mark.scenario


class TestScenario(RestApiBaseTest):
    """End-to-end scenario mixing intkey and XO payloads."""

    def test_rest_api_mul_val_intk_xo(self):
        """Tests that transactions are submitted and committed for
        each block that are created by submitting intkey and XO batches.
        """
        signer = get_signer()
        expected_trxns = {}

        LOGGER.info('Starting Test for Intkey and Xo as payload')
        LOGGER.info("Creating intkey batches")

        txns = [
            create_intkey_transaction("set", [], 50, signer),
            create_intkey_transaction("set", [], 50, signer),
        ]

        for txn in txns:
            # renamed from 'dict', which shadowed the builtin
            txn_dict = MessageToDict(
                txn,
                including_default_value_fields=True,
                preserving_proto_field_name=True)
            expected_trxns['trxn_id'] = [txn_dict['header_signature']]
            expected_trxns['payload'] = [txn_dict['payload']]

        LOGGER.info("Creating batches for transactions 1trn/batch")
        # TODO(review): these intkey batches are built but never submitted;
        # only the xo CLI commands below exercise the network.
        batches = [create_batch([txn], signer) for txn in txns]

        LOGGER.info("Creating keys for xo users")
        for username in ('aditya', 'singh'):
            _send_cmd('sawtooth keygen {} --force'.format(username))

        LOGGER.info("Submitting xo batches to the handlers")

        xo_cmds = (
            'xo create game-1 --username aditya',
            'xo take game-1 1 --username singh',
            'xo take game-1 4 --username aditya',
            'xo take game-1 2 --username singh',
        )
        for cmd in xo_cmds:
            _send_cmd('{} --url {} --wait {}'.format(
                cmd, _get_client_address(), WAIT))

        xo_cli_cmds = (
            'xo list',
            'xo show game-1',
        )
        for cmd in xo_cli_cmds:
            _send_cmd('{} --url {}'.format(cmd, _get_client_address()))

        xo_delete_cmds = (
            'xo delete game-1 --username aditya',
        )
        for cmd in xo_delete_cmds:
            _send_cmd('{} --url {} --wait {}'.format(
                cmd, _get_client_address(), WAIT))

        node_list = _get_node_list()
        chains = _get_node_chain(node_list)
        # bug fix: the consensus result was previously discarded, so a
        # consensus failure could never fail the test
        assert check_for_consensus(chains, BLOCK_TO_CHECK_CONSENSUS)
+# ------------------------------------------------------------------------------ + +import paramiko + + +class SSH(): + def do_ssh(self,hostname,port,username,password): + try: + ssh=paramiko.SSHClient() + ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + ssh.connect(hostname,port,username,password) + except paramiko.AuthenticationException: + print("Failed to connect to {} due to wrong username/password".format(hostname)) + exit(1) + except: + print("Failed to connect to {}".format(hostname)) + exit(2) + + command = 'ps aux | grep sawtooth' + stdin,stdout,stderr=ssh.exec_command(command) + outlines=stdout.readlines() + resp=''.join(outlines) + ssh.close() \ No newline at end of file diff --git a/rest_api/tests/api_test/thread.py b/rest_api/tests/api_test/thread.py new file mode 100644 index 0000000000..e58ed4a121 --- /dev/null +++ b/rest_api/tests/api_test/thread.py @@ -0,0 +1,116 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# ------------------------------------------------------------------------------
import logging
import threading

logging.basicConfig(level=logging.INFO,
                    format='(%(threadName)-10s) %(message)s')


def wait_for_event(e):
    """Wait for the event to be set before doing anything."""
    logging.debug('wait_for_event starting')
    event_is_set = e.wait()
    logging.debug('event set: %s', event_is_set)


def wait_for_event_timeout(e, t):
    """Wait t seconds at a time until the event is set."""
    # is_set() replaces the deprecated camelCase isSet()
    while not e.is_set():
        logging.debug('wait_for_event_timeout starting')
        event_is_set = e.wait(t)
        logging.debug('event set: %s', event_is_set)
        if event_is_set:
            logging.debug('processing event')
        else:
            logging.debug('doing other work')


class Workload_thread(threading.Thread):
    """Runs the intkey workload generator to completion."""

    def __init__(self):
        threading.Thread.__init__(self)
        self.shutdown_flag = threading.Event()

    def run(self):
        # imported lazily so this module can be loaded without the
        # workload dependencies installed
        from workload import Workload
        logging.info('Starting Workload')
        Workload().do_workload()

    def stop(self):
        # placeholder -- the workload currently runs to completion
        pass


class SSH_thread(threading.Thread):
    """Opens an SSH session to one validator node."""

    def __init__(self, hostname, port, username, password):
        threading.Thread.__init__(self)
        self.hostname = hostname
        self.port = port
        self.username = username
        self.password = password

    def run(self):
        logging.info('starting ssh thread')
        logging.info('Logging into Validation Network')
        self.ssh()
        logging.info('Exiting ssh thread')

    def ssh(self):
        # imported lazily so this module can be loaded without paramiko
        from ssh import SSH
        logging.info('creating ssh object')
        client = SSH()
        logging.info('performing ssh')
        client.do_ssh(self.hostname, self.port, self.username, self.password)

    def stop_validator(self):
        # bug fix: 'loggin.info' (typo) raised NameError when called
        logging.info("stopping validator service")

    def start_validator(self):
        # bug fix: 'loggin.info' (typo) raised NameError when called
        logging.info("starting validator service")


class Consensus_Thread(threading.Thread):
    """Fetches chains from the nodes and compares them for equality."""

    def __init__(self, nodes):
        threading.Thread.__init__(self)
        self.shutdown_flag = threading.Event()
        self.nodes = nodes

    def run(self):
        logging.info('starting consensus thread')
        logging.info('calculating block list from the nodes')
        chains = self.calculate_block_list()
        self.compare_chains(chains)

    def calculate_block_list(self):
        # imported lazily so this module can be loaded without utils'
        # third-party dependencies installed
        from utils import _get_node_chains
        logging.info('getting block list from the nodes')
        # TODO(review): node address is hard-coded; should come from
        # self.nodes or configuration
        node_list = ['http://10.223.155.43:8008']
        return _get_node_chains(node_list)

    def compare_chains(self, chains):
        # TODO(review): comparison is not implemented yet
        logging.info('comparing chains for equality')

    def calculate_sync_time(self):
        pass
+# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error +from urllib.request import urlopen +from urllib.error import HTTPError +from urllib.error import URLError +import base64 +import argparse +import cbor +import subprocess +import shlex +import requests +import hashlib +import os +import time +import socket +import netifaces + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +WAIT = 300 + + +def get_blocks(head_id=None , id=None , start=None , limit=None , reverse=None): + if all(v is not None for v in [head_id , id]): + response = query_rest_api('/blocks?head={}&id={}'.format(head_id , id)) + return response + if all(v is not None for v in [start , limit]): + response = query_rest_api('/blocks?start={}&limit={}'.format(start , limit)) + return response + if limit is not None: + response = query_rest_api('/blocks?limit=%s'% limit) + return response + if start is not None: + response = query_rest_api('/blocks?start=%s'% start) + return response + if head_id is not None: + response = query_rest_api('/blocks?head=%s'% head_id) + return response + if id is not None: + response = query_rest_api('/blocks?id=%s'% id) + return response + if reverse: + response = query_rest_api('/blocks?reverse') + return response + else: + response = query_rest_api('/blocks') + return response + + +def get_batches(head_id=None , id=None , start=None , limit=None, reverse=None): + if all(v is not None for v in [head_id , id]): + response = query_rest_api('/batches?head={}&id={}'.format(head_id , id)) + return response + if all(v is not None for v in [start , limit]): + response = query_rest_api('/batches?start={}&limit={}'.format(start , limit)) + return response + if limit is not None: + response = query_rest_api('/batches?limit=%s'% limit) + return response + if start is not None: + response = query_rest_api('/batches?start=%s'% start) + return 
response + if head_id is not None: + response = query_rest_api('/batches?head=%s'% head_id) + return response + if id is not None: + response = query_rest_api('/batches?id=%s'% id) + return response + if reverse: + response = query_rest_api('/batches?reverse') + return response + else: + response = query_rest_api('/batches') + return response + +def get_batch_id(batch_id): + response = query_rest_api('/batches/%s' % batch_id) + return response + +def get_block_id(block_id): + response = query_rest_api('/blocks/%s' % block_id) + return response + +def get_transaction_id(transaction_id): + response = query_rest_api('/transactions/%s' % transaction_id) + return response + +def get_peers(): + response = query_rest_api('/peers') + return response + +def get_transactions(head_id=None , id=None , start=None , limit=None , reverse=None): + if all(v is not None for v in [head_id , id]): + response = query_rest_api('/transactions?head={}&id={}'.format(head_id , id)) + return response + if all(v is not None for v in [start , limit]): + response = query_rest_api('/transactions?start={}&limit={}'.format(start , limit)) + return response + if limit is not None: + response = query_rest_api('/transactions?limit=%s'% limit) + return response + if start is not None: + response = query_rest_api('/transactions?start=%s'% start) + return response + if head_id is not None: + response = query_rest_api('/transactions?head=%s'% head_id) + return response + if id is not None: + response = query_rest_api('/transactions?id=%s'% id) + return response + if reverse: + response = query_rest_api('/transactions?reverse') + return response + else: + response = query_rest_api('/transactions') + return response + +def get_state_list(head_id=None , address=None , start=None , limit=None , reverse=None): + if all(v is not None for v in [head_id , address]): + response = query_rest_api('/state?head={}&address={}'.format(head_id , address)) + return response + if all(v is not None for v in [start , 
def _get_node_chain(node_list):
    """Fetch the block list from each node's REST API.

    Args:
        node_list: iterable of REST API base URLs (``http://host:8008``).

    Returns:
        A list with one ``data`` payload (list of block dicts) per node
        that could be reached; unreachable or malformed nodes are logged
        and skipped.
    """
    chain_list = []
    for node in node_list:
        try:
            result = requests.get(node + "/blocks").json()
            # 'data' is the block list in the REST API response envelope.
            chain_list.append(result['data'])
        # Narrowed from a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit.  RequestException covers network
        # failures, ValueError covers a non-JSON body, KeyError a
        # response without 'data'.
        except (requests.exceptions.RequestException, ValueError, KeyError):
            LOGGER.warning("Couldn't connect to %s REST API", node)
    return chain_list
def check_for_consensus(chains, block_num):
    """Check that every validator chain agrees on a given block.

    Args:
        chains: list of chains, each a list of block dicts ordered with
            the newest block first (``None`` for an unreachable node).
        block_num: block height counted from the genesis end of the
            chain; block 0 is ``chain[-1]``.

    Returns:
        True when the ``header_signature`` of that block matches across
        all chains (trivially True for a single chain), False when any
        chain is ``None`` or any signature differs.
    """
    logger = logging.getLogger(__name__)
    logger.info("Checking Consensus on block number %s", block_num)
    blocks = []
    for chain in chains:
        if chain is None:
            return False
        blocks.append(chain[-(block_num + 1)])
    reference = blocks[0]
    # BUG FIX: the original returned True inside the loop's `else`
    # branch, declaring consensus after comparing only the first pair of
    # chains (so [A, A, B] passed).  Compare every chain first; only
    # then report success.
    for block in blocks[1:]:
        if reference["header_signature"] != block["header_signature"]:
            logger.error("Validators not in consensus on block %s", block_num)
            logger.error("BLOCK DUMP: %s", blocks)
            return False
    logger.info('Validators in Consensus on block number %s', block_num)
    return True
def get_batch_statuses(batch_ids=None, wait=None):
    """Query the REST API ``/batch_statuses`` endpoint.

    Args:
        batch_ids: optional iterable of batch id strings; joined with
            commas into the ``id`` query parameter.
        wait: ``'default'`` for a bare ``wait`` flag, any other truthy
            value for an explicit ``wait=<n>`` timeout, falsy for no wait.

    Returns:
        The decoded JSON response from ``query_rest_api``.
    """
    # Replaces a bare `except:` that was only guarding against
    # batch_ids being None — make the None/empty case explicit instead
    # of swallowing every exception.
    batches = ",".join(batch_ids) if batch_ids else None

    if batches:
        if wait == 'default':
            return query_rest_api('/batch_statuses?wait&id={}'.format(batches))
        if wait:
            return query_rest_api(
                '/batch_statuses?id={}&wait={}'.format(batches, wait))
        return query_rest_api('/batch_statuses?id=%s' % batches)
    return query_rest_api('/batch_statuses')
response + +def post_receipts(receipts): + headers = {'Content-Type': 'application/json'} + response = query_rest_api('/receipts', data=receipts, headers=headers) + return response + +def batch_count(): + batch_list = get_batches() + count = len(batch_list['data']) + try: + next_position = batch_list['paging']['next_position'] + except: + next_position = None + + while(next_position): + batch_list = get_batches(start=next_position) + try: + next_position = batch_list['paging']['next_position'] + except: + next_position = None + + count += len(batch_list['data']) + return count + +def transaction_count(): + transaction_list = get_transactions() + count = len(transaction_list['data']) + try: + next_position = transaction_list['paging']['next_position'] + except: + next_position = None + + while(next_position): + transaction_list = get_transactions(start=next_position) + try: + next_position = transaction_list['paging']['next_position'] + except: + next_position = None + + count += len(transaction_list['data']) + return count + +def _create_expected_link(expected_ids): + for id in expected_ids: + link = '{}/batch_statuses?id={},{}'.format(address, id) + return link + +def _get_batch_list(response): + batch_list = response['data'] + + try: + next_position = response['paging']['next_position'] + except: + next_position = None + + while(next_position): + response = get_batches(start=next_position) + data_list = response['data'] + try: + next_position = response['paging']['next_position'] + except: + next_position = None + + batch_list += data_list + + return batch_list + + +def _get_transaction_list(response): + transaction_list = response['data'] + + try: + next_position = response['paging']['next_position'] + except: + next_position = None + + while(next_position): + response = get_transactions(start=next_position) + data_list = response['data'] + try: + next_position = response['paging']['next_position'] + except: + next_position = None + + transaction_list += 
data_list + + return transaction_list diff --git a/rest_api/tests/api_test/validators_down.sh b/rest_api/tests/api_test/validators_down.sh new file mode 100644 index 0000000000..ffc1472ced --- /dev/null +++ b/rest_api/tests/api_test/validators_down.sh @@ -0,0 +1,3 @@ + #!/bin/bash + sudo kill -9 $(ps aux | grep 'sawtooth' | awk '{print $2}') + echo "$(ps aux | grep 'sawtooth')" diff --git a/rest_api/tests/api_test/validators_up.sh b/rest_api/tests/api_test/validators_up.sh new file mode 100644 index 0000000000..41529247f0 --- /dev/null +++ b/rest_api/tests/api_test/validators_up.sh @@ -0,0 +1,6 @@ + #!/bin/bash + +sudo -u sawtooth sawtooth-validator -vv & +sudo -u sawtooth settings-tp -vv & +sudo -u sawtooth intkey-tp-python -C tcp://127.0.0.1:4004 -v & +sudo -u sawtooth xo-tp-python -C tcp://127.0.0.1:4004 -v & diff --git a/rest_api/tests/api_test/workload.py b/rest_api/tests/api_test/workload.py new file mode 100644 index 0000000000..7dbfd41591 --- /dev/null +++ b/rest_api/tests/api_test/workload.py @@ -0,0 +1,29 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
class Workload():
    """Placeholder driver for a background intkey workload.

    The actual workload command (``intkey workload --url ... --rate ...``)
    was left commented out in the original; the commented-out dead code
    has been removed.  ``do_workload`` currently only logs that it was
    invoked — TODO(review): re-enable the subprocess launch (and have
    ``stop_workload`` terminate it) once the target URL is configurable.
    """

    def do_workload(self):
        """Log the workload start; no process is launched yet."""
        logging.getLogger(__name__).info('Starting Intkey Workload')

    def stop_workload(self):
        """No-op: there is nothing to stop while do_workload is a stub."""
        pass
a/rest_api/tests/api_test/fixtures.py +++ b/rest_api/tests/api_test/fixtures.py @@ -18,6 +18,8 @@ import urllib import json import os +import random +import hashlib from sawtooth_signing import create_context from sawtooth_signing import CryptoFactory @@ -40,17 +42,20 @@ from utils import get_batches, get_transactions, get_state_address, post_batch, get_blocks, \ get_state_list , _delete_genesis , _start_validator, \ - _stop_validator , _create_genesis, _get_client_address, \ - _stop_settings_tp, _start_settings_tp + _stop_validator , _create_genesis , _get_client_address, \ + _stop_settings_tp, _start_settings_tp, batch_count, transaction_count, get_batch_statuses from payload import get_signer, create_intkey_transaction , create_batch,\ - create_invalid_intkey_transaction - + create_invalid_intkey_transaction, create_intkey_same_transaction, random_word_list, IntKeyPayload, \ + make_intkey_address, Transactions LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.INFO) - +LIMIT = 100 + + +data = {} @pytest.fixture(scope="function") def break_genesis(request): @@ -132,6 +137,34 @@ def invalid_batch(): return data +@pytest.fixture(scope="function") +def setup_valinv_txns(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="addr") + data = Txns.get_batch_valinv_txns() + return data + +@pytest.fixture(scope="function") +def setup_invval_txns(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="addr") + data = Txns.get_batch_invval_txns() + return data + +@pytest.fixture(scope="function") +def setup_invalid_txns(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="addr") + data = Txns.get_batch_invalid_txns() + return data + + @pytest.fixture(scope="function") def setup_batch_multiple_transaction(): @@ -176,4 +209,6 @@ def setup_batch_multiple_transaction(): + + \ No newline 
at end of file diff --git a/rest_api/tests/api_test/payload.py b/rest_api/tests/api_test/payload.py index 9a61c158e3..8d8f9ef560 100644 --- a/rest_api/tests/api_test/payload.py +++ b/rest_api/tests/api_test/payload.py @@ -24,6 +24,7 @@ import time import random import string +import urllib from sawtooth_signing import create_context @@ -44,6 +45,7 @@ from google.protobuf.message import DecodeError from google.protobuf.json_format import MessageToDict +from utils import batch_count, transaction_count, get_batch_statuses, post_batch INTKEY_ADDRESS_PREFIX = hashlib.sha512( 'intkey'.encode('utf-8')).hexdigest()[0:6] @@ -80,6 +82,204 @@ def sha512(self): self._sha512 = hashlib.sha512(self.to_cbor()).hexdigest() return self._sha512 +class Transactions: + + def __init__(self, invalidtype): + self.signer = get_signer() + self.data = {} + self.invalidtype = invalidtype + + def get_batch_valinv_txns(self): + """Setup method for posting batches and returning the + response + """ + txns = [ + self.create_intkey_transaction("set",[],30, self.signer), + self.create_intkey_transaction("set",[],30, self.signer), + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + ] + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_batch_invval_txns(self): + """Setup method for posting batches and returning the + response + """ + txns = [ + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + self.create_intkey_transaction("set",[],30, self.signer), + self.create_intkey_transaction("set",[],30, self.signer), + ] + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_batch_invalid_txns(self): + """Setup method for posting batches and returning the + response + """ + + txns = [ + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + self.create_invalid_intkey_transaction("set",[],30, self.signer, 
self.invalidtype), + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + ] + + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_batch_valid_one_txns(self): + """Setup method for posting batches and returning the + response + """ + txns = [ + self.create_intkey_transaction("set",[],30, self.signer), + ] + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_batch_valid_txns(self): + """Setup method for posting batches and returning the + response + """ + txns = [ + self.create_intkey_transaction("set",[],30, self.signer), + self.create_intkey_transaction("set",[],30, self.signer), + self.create_intkey_transaction("set",[],30, self.signer), + ] + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_txns_commit_data(self, txns, signer, data): + """Setup method for posting batches and returning the + response + """ + expected_trxn_ids = [] + expected_batch_ids = [] + expected_trxns = {} + expected_batches = [] + initial_batch_length = batch_count() + initial_transaction_length = transaction_count() + + LOGGER.info("Creating intkey transactions with set operations") + + for txn in txns: + dict = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = dict['header_signature'] + expected_trxn_ids.append(trxn_id) + + self.data['expected_trxn_ids'] = expected_trxn_ids + expected_trxns['trxn_id'] = [dict['header_signature']] + expected_trxns['payload'] = [dict['payload']] + + LOGGER.info("Creating batches for transactions 3trn/batch") + + batches = [create_batch(txns, signer)] + for batch in batches: + dict = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = dict['header_signature'] + expected_batches.append(batch_id) + length_batches = len(expected_batches) + length_transactions = 
len(expected_trxn_ids) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + try: + for batch in post_batch_list: + response = post_batch(batch) + batch_id = dict['header_signature'] + expected_batches.append(batch_id) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + json_data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(json_data['error']['title']) + LOGGER.info(json_data['error']['message']) + LOGGER.info(json_data['error']['code']) + + self.data['response'] = response['data'][0]['status'] + self.data['initial_batch_length'] = initial_batch_length + self.data['initial_trn_length'] = initial_transaction_length + self.data['expected_batch_length'] = initial_batch_length + length_batches + self.data['expected_trn_length'] = initial_transaction_length + length_transactions + return self.data + + def create_intkey_transaction(self, verb, deps, count, signer): + words = random_word_list(count) + name=random.choice(words) + payload = IntKeyPayload( + verb=verb,name=name,value=1) + + addr = make_intkey_address(name) + data = self.get_txns_data(addr,deps, payload) + return data + + def create_invalid_intkey_transaction(self, verb, deps, count, signer, invalidtye): + words = random_word_list(count) + name=random.choice(words) + + if invalidtye=="addr": + payload = IntKeyPayload( + verb=verb,name=name,value=1) + + INVALID_INTKEY_ADDRESS_PREFIX = hashlib.sha512( + 'invalid'.encode('utf-8')).hexdigest()[0:6] + + addr = INVALID_INTKEY_ADDRESS_PREFIX + hashlib.sha512( + name.encode('utf-8')).hexdigest()[-64:] + + elif invalidtye=="min": + payload = IntKeyPayload( + verb=verb,name=name,value=-1) + addr = make_intkey_address(name) + + elif invalidtye=="str": + payload = IntKeyPayload( + verb=verb,name=name,value="str") + addr = make_intkey_address(name) + + elif invalidtye=="max": + payload = IntKeyPayload( + verb=verb,name=name,value=4294967296) + addr = make_intkey_address(name) + + 
elif invalidtye=="attr": + payload = IntKeyPayload( + verb="verb",name=name,value=1) + addr = make_intkey_address(name) + + data = self.get_txns_data(addr,deps, payload) + return data + + def get_txns_data(self, addr, deps, payload): + + header = TransactionHeader( + signer_public_key=self.signer.get_public_key().as_hex(), + family_name='intkey', + family_version='1.0', + inputs=[addr], + outputs=[addr], + dependencies=deps, + payload_sha512=payload.sha512(), + batcher_public_key=self.signer.get_public_key().as_hex()) + + header_bytes = header.SerializeToString() + + signature = self.signer.sign(header_bytes) + + transaction = Transaction( + header=header_bytes, + payload=payload.to_cbor(), + header_signature=signature) + + return transaction + + def create_intkey_transaction(verb, deps, count, signer): diff --git a/rest_api/tests/api_test/post/test_rest_api_post.py b/rest_api/tests/api_test/post/test_rest_api_post.py index 415da2c6af..63c4035566 100644 --- a/rest_api/tests/api_test/post/test_rest_api_post.py +++ b/rest_api/tests/api_test/post/test_rest_api_post.py @@ -41,13 +41,15 @@ from utils import post_batch, get_state_list , get_blocks , get_transactions, \ get_batches , get_state_address, check_for_consensus,\ - _get_node_list, _get_node_chains + _get_node_list, _get_node_chains, post_batch_no_endpoint from payload import get_signer, create_intkey_transaction, create_batch,\ create_intkey_same_transaction from base import RestApiBaseTest +from fixtures import setup_valinv_txns, setup_invval_txns, setup_invalid_txns + LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.INFO) @@ -63,9 +65,10 @@ BLOCK_TO_CHECK_CONSENSUS = 1 pytestmark = pytest.mark.post - +data = {} class TestPost(RestApiBaseTest): + def test_rest_api_post_batch(self): """Tests that transactions are submitted and committed for each block that are created by submitting intkey batches @@ -352,25 +355,54 @@ def test_api_post_batch_different_signer(self, setup): 
LOGGER.info(data['error']['message']) assert data['error']['code'] == 30 assert data['error']['title'] =='Submitted Batches Invalid' - - def test_api_post_batch_different_signer(self, setup): + + def test_rest_api_post_no_endpoint(self, setup): + signer_trans = get_signer() intkey=create_intkey_transaction("set",[],50,signer_trans) translist=[intkey] - signer_batch = get_signer() - batch= create_batch(translist,signer_batch) + batch= create_batch(translist,signer_trans) batch_list=[BatchList(batches=[batch]).SerializeToString()] for batc in batch_list: try: - response = post_batch(batc) - print(response) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is not reachable") - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - assert data['error']['code'] == 30 - assert data['error']['title'] =='Submitted Batches Invalid' - + response = post_batch_no_endpoint(batc) + except urllib.error.HTTPError as e: + errdata = e.file.read().decode("utf-8") + errcode = e.code + assert errcode == 404 - \ No newline at end of file +class TestPostMulTxns(RestApiBaseTest): + def test_txn_invalid_addr(self, setup_invalid_txns): + initial_batch_length = setup_invalid_txns['initial_batch_length'] + expected_batch_length = setup_invalid_txns['expected_batch_length'] + initial_trn_length = setup_invalid_txns['initial_trn_length'] + expected_trn_length = setup_invalid_txns['expected_trn_length'] + assert initial_batch_length < expected_batch_length + assert initial_trn_length < expected_trn_length + assert setup_invalid_txns['response'] == 'INVALID' + + def test_txn_valid_invalid_txns(self, setup_valinv_txns): + #data=Txns.setup_batch_valinv_txns() + initial_batch_length = setup_valinv_txns['initial_batch_length'] + expected_batch_length = setup_valinv_txns['expected_batch_length'] + initial_trn_length = setup_valinv_txns['initial_trn_length'] + expected_trn_length = 
setup_valinv_txns['expected_trn_length'] + assert initial_batch_length < expected_batch_length + assert initial_trn_length < expected_trn_length + assert setup_valinv_txns['response'] == 'INVALID' + + def test_txn_invalid_valid_txns(self, setup_invval_txns): + initial_batch_length = setup_invval_txns['initial_batch_length'] + expected_batch_length = setup_invval_txns['expected_batch_length'] + initial_trn_length = setup_invval_txns['initial_trn_length'] + expected_trn_length = setup_invval_txns['expected_trn_length'] + assert initial_batch_length < expected_batch_length + assert initial_trn_length < expected_trn_length + assert setup_invval_txns['response'] == 'INVALID' + + + + + + + diff --git a/rest_api/tests/api_test/utils.py b/rest_api/tests/api_test/utils.py index 4e3355c132..d1eed5e01c 100644 --- a/rest_api/tests/api_test/utils.py +++ b/rest_api/tests/api_test/utils.py @@ -177,6 +177,18 @@ def post_batch(batch, headers="None"): response = submit_request('{}&wait={}'.format(response['link'], WAIT)) return response +def post_batch_no_endpoint(batch, headers="None"): + if headers=="True": + headers = {'Content-Type': 'application/json'} + else: + headers = {'Content-Type': 'application/octet-stream'} + + response = query_rest_api( + '/', data=batch, headers=headers) + + response = submit_request('{}&wait={}'.format(response['link'], WAIT)) + return response + def query_rest_api(suffix='', data=None, headers=None): if headers is None: headers = {} @@ -274,7 +286,7 @@ def _make_http_address(node_number): node_number = node.replace('8800' , '8008') return node_number -def _get_client_address(): +def _get_client_address(): command = "hostname -I | awk '{print $1}'" node_ip = subprocess.check_output(command , shell=True).decode().strip().replace("'", '"') return 'http://' + node_ip + ':8008' From af6c0ebfb7f7c0b57bb53861011125ae429eab06 Mon Sep 17 00:00:00 2001 From: sandeeplandt Date: Thu, 6 Sep 2018 17:19:25 +0530 Subject: [PATCH 04/64] Added tests for sprint 19 
Signed-off-by: sandeeplandt --- rest_api/tests/api_test/fixtures.py | 53 +++++++++++ .../api_test/get/test_rest_api_get_block.py | 41 +++++++++ .../mul/test_rest_api_mul_validator.py | 2 +- rest_api/tests/api_test/payload.py | 91 +++++++++++++++++-- .../tests/api_test/post/test_rest_api_post.py | 77 ++++++++++++++-- rest_api/tests/api_test/utils.py | 3 +- 6 files changed, 250 insertions(+), 17 deletions(-) diff --git a/rest_api/tests/api_test/fixtures.py b/rest_api/tests/api_test/fixtures.py index d2d214ec49..5ed0b380f8 100644 --- a/rest_api/tests/api_test/fixtures.py +++ b/rest_api/tests/api_test/fixtures.py @@ -163,7 +163,60 @@ def setup_invalid_txns(request): Txns=Transactions(invalidtype="addr") data = Txns.get_batch_invalid_txns() return data + +@pytest.fixture(scope="function") +def setup_invalid_invaddr(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="invaddr") + data = Txns.get_batch_invalid_txns() + return data +@pytest.fixture(scope="function") +def setup_same_txns(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="addr") + data = Txns.get_batch_same_txns() + return data + +@pytest.fixture(scope="function") +def setup_invalid_txns_min(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="min") + data = Txns.get_batch_invalid_txns() + return data + +@pytest.fixture(scope="function") +def setup_invalid_txns_max(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="max") + data = Txns.get_batch_invalid_txns() + return data + +@pytest.fixture(scope="function") +def setup_valid_txns(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="addr") + data = Txns.get_batch_valid_txns() + return data + +@pytest.fixture(scope="function") +def 
setup_invalid_txns_fn(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="fn") + data = Txns.get_batch_invalid_txns_fam_name() + return data @pytest.fixture(scope="function") diff --git a/rest_api/tests/api_test/get/test_rest_api_get_block.py b/rest_api/tests/api_test/get/test_rest_api_get_block.py index cf54b22d59..8b10a4bd1c 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_block.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_block.py @@ -407,4 +407,45 @@ def test_api_get_bad_block_id(self, setup): response = json.loads(error.fp.read().decode('utf-8')) LOGGER.info(response['error']['title']) LOGGER.info(response['error']['message']) + + def test_api_blk_debug_flag_set_proper(self, setup): + """Tests that block debug flag should set proper + for true and false value + """ + try: + block_list = get_blocks() + for batch in block_list['data']: + batch_list = get_batches() + for trans in batch_list['data']: + trace = trans['trace'] + except urllib.error.HTTPError as error: + LOGGER.info("Debug flag is not set for tracing") + assert trace is not None, "Debug flag is set for tracing" + + def test_api_blk_payload_present_unique(self, setup): + """Tests that block payload is should be present + and unique for each batch in the block + """ + prev_line = '' + try: + with open ('payload.txt', 'w') as f: + block_list = get_blocks() + for batch in block_list['data']: + batch_list = get_batches() + for block in batch_list: + transaction_list = get_transactions() + for trans in transaction_list['data']: + payload = trans['payload'] + f.write(payload) + with open('payload.txt', 'r') as f: + payloads = f.readlines() + for payload in payloads: + if prev_line < payload: + LOGGER.info("Payload is unique in each transaction") + prev_line = payload + except urllib.error.HTTPError as error: + LOGGER.info("Payload is missing in some of the transactions") + assert payload is not None, "Payload is unique and 
available for all transactions in batch" + + diff --git a/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py b/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py index 6a7c73281a..5c6733387e 100644 --- a/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py +++ b/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py @@ -125,4 +125,4 @@ def test_rest_api_mul_val_Node(self): workload_thread.join() - \ No newline at end of file + diff --git a/rest_api/tests/api_test/payload.py b/rest_api/tests/api_test/payload.py index 8d8f9ef560..6241a692ff 100644 --- a/rest_api/tests/api_test/payload.py +++ b/rest_api/tests/api_test/payload.py @@ -45,7 +45,7 @@ from google.protobuf.message import DecodeError from google.protobuf.json_format import MessageToDict -from utils import batch_count, transaction_count, get_batch_statuses, post_batch +from utils import batch_count, transaction_count, get_batch_statuses, post_batch, get_reciepts,get_transactions, get_state_list INTKEY_ADDRESS_PREFIX = hashlib.sha512( 'intkey'.encode('utf-8')).hexdigest()[0:6] @@ -148,6 +148,32 @@ def get_batch_valid_txns(self): ] self.data = self.get_txns_commit_data(txns,self.signer, self.data) return self.data + + def get_batch_same_txns(self): + """Setup method for posting batches and returning the + response + """ + txns = [ + self.create_intkey_same_transaction("set",[],30, self.signer), + self.create_intkey_same_transaction("set",[],30, self.signer), + self.create_intkey_same_transaction("set",[],30, self.signer), + ] + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_batch_invalid_txns_fam_name(self): + """Setup method for posting batches and returning the + response + """ + + txns = [ + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + self.create_invalid_intkey_transaction("set",[],30, self.signer, 
self.invalidtype), + ] + + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data def get_txns_commit_data(self, txns, signer, data): """Setup method for posting batches and returning the @@ -174,6 +200,9 @@ def get_txns_commit_data(self, txns, signer, data): self.data['expected_trxn_ids'] = expected_trxn_ids expected_trxns['trxn_id'] = [dict['header_signature']] expected_trxns['payload'] = [dict['payload']] + #print(expected_trxns['trxn_id']) + print(expected_trxns['payload']) + LOGGER.info("Creating batches for transactions 3trn/batch") @@ -188,6 +217,7 @@ def get_txns_commit_data(self, txns, signer, data): expected_batches.append(batch_id) length_batches = len(expected_batches) length_transactions = len(expected_trxn_ids) + data['expected_txns'] = expected_trxns['trxn_id'][::-1] post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] try: @@ -195,14 +225,21 @@ def get_txns_commit_data(self, txns, signer, data): response = post_batch(batch) batch_id = dict['header_signature'] expected_batches.append(batch_id) + self.data['response'] = response['data'][0]['status'] except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") json_data = json.loads(error.fp.read().decode('utf-8')) + #print(json_data['error']['code']) + #print(json_data['error']['message']) LOGGER.info(json_data['error']['title']) LOGGER.info(json_data['error']['message']) - LOGGER.info(json_data['error']['code']) - - self.data['response'] = response['data'][0]['status'] + LOGGER.info(json_data['error']['code']) + self.data['code'] = json_data['error']['code'] + #receipts = get_reciepts(expected_trxns['trxn_id']) + #print(receipts) + self.state_addresses = [state['address'] for state in get_state_list()['data']] + self.data['state_address'] = self.state_addresses self.data['initial_batch_length'] = initial_batch_length self.data['initial_trn_length'] = initial_transaction_length self.data['expected_batch_length'] = 
initial_batch_length + length_batches @@ -213,11 +250,19 @@ def create_intkey_transaction(self, verb, deps, count, signer): words = random_word_list(count) name=random.choice(words) payload = IntKeyPayload( - verb=verb,name=name,value=1) + verb=verb,name=name,value=21) addr = make_intkey_address(name) data = self.get_txns_data(addr,deps, payload) return data + def create_intkey_same_transaction(self, verb, deps, count, signer): + name='a' + payload = IntKeyPayload( + verb=verb,name=name,value=1) + + addr = make_intkey_address(name) + data = self.get_txns_data(addr,deps, payload) + return data def create_invalid_intkey_transaction(self, verb, deps, count, signer, invalidtye): words = random_word_list(count) @@ -232,6 +277,16 @@ def create_invalid_intkey_transaction(self, verb, deps, count, signer, invalidty addr = INVALID_INTKEY_ADDRESS_PREFIX + hashlib.sha512( name.encode('utf-8')).hexdigest()[-64:] + + if invalidtye=="invaddr": + payload = IntKeyPayload( + verb=verb,name=name,value=1) + + INVALID_INTKEY_ADDRESS_PREFIX = hashlib.sha512( + 'invalid'.encode('utf-8')).hexdigest()[0:6] + + addr = INVALID_INTKEY_ADDRESS_PREFIX + hashlib.sha512( + name.encode('utf-8')).hexdigest()[-62:] elif invalidtye=="min": payload = IntKeyPayload( @@ -252,7 +307,31 @@ def create_invalid_intkey_transaction(self, verb, deps, count, signer, invalidty payload = IntKeyPayload( verb="verb",name=name,value=1) addr = make_intkey_address(name) - + + elif invalidtye=="fn": + payload = IntKeyPayload( + verb="verb",name=name,value=1) + addr = make_intkey_address(name) + header = TransactionHeader( + signer_public_key=self.signer.get_public_key().as_hex(), + family_name='abcd', + family_version='1.0', + inputs=[addr], + outputs=[addr], + dependencies=deps, + payload_sha512=payload.sha512(), + batcher_public_key=self.signer.get_public_key().as_hex()) + + header_bytes = header.SerializeToString() + + signature = self.signer.sign(header_bytes) + + transaction = Transaction( + header=header_bytes, + 
payload=payload.to_cbor(), + header_signature=signature) + return transaction + data = self.get_txns_data(addr,deps, payload) return data diff --git a/rest_api/tests/api_test/post/test_rest_api_post.py b/rest_api/tests/api_test/post/test_rest_api_post.py index 63c4035566..911a818149 100644 --- a/rest_api/tests/api_test/post/test_rest_api_post.py +++ b/rest_api/tests/api_test/post/test_rest_api_post.py @@ -41,14 +41,15 @@ from utils import post_batch, get_state_list , get_blocks , get_transactions, \ get_batches , get_state_address, check_for_consensus,\ - _get_node_list, _get_node_chains, post_batch_no_endpoint + _get_node_list, _get_node_chains, post_batch_no_endpoint, get_reciepts from payload import get_signer, create_intkey_transaction, create_batch,\ create_intkey_same_transaction from base import RestApiBaseTest -from fixtures import setup_valinv_txns, setup_invval_txns, setup_invalid_txns +from fixtures import setup_valinv_txns, setup_invval_txns, setup_invalid_txns, setup_same_txns, setup_invalid_txns_min, setup_invalid_txns_max,\ + setup_valid_txns, setup_invalid_txns_fn, setup_invalid_invaddr LOGGER = logging.getLogger(__name__) @@ -370,8 +371,9 @@ def test_rest_api_post_no_endpoint(self, setup): errdata = e.file.read().decode("utf-8") errcode = e.code assert errcode == 404 - + class TestPostMulTxns(RestApiBaseTest): + def test_txn_invalid_addr(self, setup_invalid_txns): initial_batch_length = setup_invalid_txns['initial_batch_length'] expected_batch_length = setup_invalid_txns['expected_batch_length'] @@ -381,6 +383,24 @@ def test_txn_invalid_addr(self, setup_invalid_txns): assert initial_trn_length < expected_trn_length assert setup_invalid_txns['response'] == 'INVALID' + def test_txn_invalid_min(self, setup_invalid_txns_min): + initial_batch_length = setup_invalid_txns_min['initial_batch_length'] + expected_batch_length = setup_invalid_txns_min['expected_batch_length'] + initial_trn_length = setup_invalid_txns_min['initial_trn_length'] + 
expected_trn_length = setup_invalid_txns_min['expected_trn_length'] + assert initial_batch_length < expected_batch_length + assert initial_trn_length < expected_trn_length + assert setup_invalid_txns_min['response'] == 'INVALID' + + def test_txn_invalid_max(self, setup_invalid_txns_max): + initial_batch_length = setup_invalid_txns_max['initial_batch_length'] + expected_batch_length = setup_invalid_txns_max['expected_batch_length'] + initial_trn_length = setup_invalid_txns_max['initial_trn_length'] + expected_trn_length = setup_invalid_txns_max['expected_trn_length'] + assert initial_batch_length < expected_batch_length + assert initial_trn_length < expected_trn_length + assert setup_invalid_txns_max['response'] == 'INVALID' + def test_txn_valid_invalid_txns(self, setup_valinv_txns): #data=Txns.setup_batch_valinv_txns() initial_batch_length = setup_valinv_txns['initial_batch_length'] @@ -391,7 +411,7 @@ def test_txn_valid_invalid_txns(self, setup_valinv_txns): assert initial_trn_length < expected_trn_length assert setup_valinv_txns['response'] == 'INVALID' - def test_txn_invalid_valid_txns(self, setup_invval_txns): + def test_txn_invalid_valid_txns(self, setup_invval_txns): initial_batch_length = setup_invval_txns['initial_batch_length'] expected_batch_length = setup_invval_txns['expected_batch_length'] initial_trn_length = setup_invval_txns['initial_trn_length'] @@ -399,10 +419,49 @@ def test_txn_invalid_valid_txns(self, setup_invval_txns): assert initial_batch_length < expected_batch_length assert initial_trn_length < expected_trn_length assert setup_invval_txns['response'] == 'INVALID' - - - - + + def test_txn_same_txns(self, setup_same_txns): + initial_batch_length = setup_same_txns['initial_batch_length'] + expected_batch_length = setup_same_txns['expected_batch_length'] + initial_trn_length = setup_same_txns['initial_trn_length'] + expected_trn_length = setup_same_txns['expected_trn_length'] + assert initial_batch_length < expected_batch_length + assert 
initial_trn_length < expected_trn_length + assert setup_same_txns['code'] == 30 + + def test_api_sent_commit_txns(self, setup_valid_txns): + expected_transaction=setup_valid_txns['expected_txns'] + + transaction_id=str(expected_transaction)[2:-2] + try: + response = get_reciepts(transaction_id) + assert transaction_id == response['data'][0]['id'] + assert response['data'][0]['state_changes'][0]['type'] == "SET" + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + assert response['error']['code'] == RECEIPT_NOT_FOUND + assert response['error']['title'] == 'Invalid Resource Id' + + def test_txn_invalid_family_name(self, setup_invalid_txns_fn): + initial_batch_length = setup_invalid_txns_fn['initial_batch_length'] + expected_batch_length = setup_invalid_txns_fn['expected_batch_length'] + initial_trn_length = setup_invalid_txns_fn['initial_trn_length'] + expected_trn_length = setup_invalid_txns_fn['expected_trn_length'] + assert initial_batch_length < expected_batch_length + assert initial_trn_length < expected_trn_length + assert setup_invalid_txns_fn['code'] == 17 - + def test_txn_invalid_bad_addr(self, setup_invalid_invaddr): + initial_batch_length = setup_invalid_invaddr['initial_batch_length'] + expected_batch_length = setup_invalid_invaddr['expected_batch_length'] + initial_trn_length = setup_invalid_invaddr['initial_trn_length'] + expected_trn_length = setup_invalid_invaddr['expected_trn_length'] + assert initial_batch_length < expected_batch_length + assert initial_trn_length < expected_trn_length + assert setup_invalid_invaddr['code'] == 17 + + \ No newline at end of file diff --git a/rest_api/tests/api_test/utils.py b/rest_api/tests/api_test/utils.py index d1eed5e01c..d4639c3d26 100644 --- a/rest_api/tests/api_test/utils.py +++ b/rest_api/tests/api_test/utils.py @@ -290,7 +290,7 @@ def 
_get_client_address(): command = "hostname -I | awk '{print $1}'" node_ip = subprocess.check_output(command , shell=True).decode().strip().replace("'", '"') return 'http://' + node_ip + ':8008' - + def _start_validator(): LOGGER.info('Starting the validator') cmd = "sudo -u sawtooth sawtooth-validator -vv" @@ -448,3 +448,4 @@ def _get_transaction_list(response): transaction_list += data_list return transaction_list + From 3778dcc468c327776f37a295f93713bed6d069bc Mon Sep 17 00:00:00 2001 From: sandeeplandt Date: Fri, 7 Sep 2018 11:01:28 +0530 Subject: [PATCH 05/64] Deleted multi validator tests Signed-off-by: sandeeplandt --- .../mul/test_rest_api_mul_validator.py | 128 ------------------ 1 file changed, 128 deletions(-) delete mode 100644 rest_api/tests/api_test/mul/test_rest_api_mul_validator.py diff --git a/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py b/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py deleted file mode 100644 index 5c6733387e..0000000000 --- a/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright 2018 Intel Corporation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ------------------------------------------------------------------------------ -import pytest -import logging -import json -import urllib.request -import urllib.error -import base64 -import argparse -import cbor -import subprocess -import shlex -import requests -import time -import paramiko -import sys -import threading -import os -import signal - - -from google.protobuf.json_format import MessageToDict - -from base import RestApiBaseTest -from payload import get_signer, create_intkey_transaction , create_batch -from utils import _get_client_address, _send_cmd, _get_node_list, \ - _get_node_chain, check_for_consensus, _stop_validator\ - -from workload import Workload -from ssh import SSH -from thread import Workload_thread, SSH_thread, Consensus_Thread,\ - wait_for_event, wait_for_event_timeout - - - -logging.basicConfig(level=logging.INFO, - format='[%(levelname)s] (%(threadName)-10s) %(message)s', - ) - -WAIT_TIME = 10 -PORT =22 -USERNAME = 'test' -PASSWORD = 'aditya9971' - -BLOCK_TO_CHECK_CONSENSUS = 1 - -pytestmark = pytest.mark.mul - - -class TestMultiple(RestApiBaseTest): - def test_rest_api_mul_val_intk(self): - """Tests that transactions are submitted and committed for - each block that are created by submitting intkey and XO batches - """ - signer = get_signer() - expected_trxns = {} - expected_batches = [] - node_list = [{_get_client_address()}] - - logging.info('Starting Test for Intkey payload') - - logging.info("Creating intkey batches") - - txns = [ - create_intkey_transaction("set", [] , 50 , signer), - create_intkey_transaction("set", [] , 50 , signer), - ] - - for txn in txns: - dict = MessageToDict( - txn, - including_default_value_fields=True, - preserving_proto_field_name=True) - - expected_trxns['trxn_id'] = [dict['header_signature']] - expected_trxns['payload'] = [dict['payload']] - - logging.info("Creating batches for transactions 1trn/batch") - - batches = [create_batch([txn], signer) for txn in txns] - - node_list = _get_node_list() - - 
chains = _get_node_chain(node_list) - check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) - - def test_rest_api_mul_val_Node(self): - """Tests that leaf nodes are brought up/down in a network - and checks are performed on the respective nodes - """ - leaf_nodes = ['10.223.155.134', '10.223.155.25'] - threads = [] - - workload_thread = Workload_thread() - workload_thread.setName('workload_thread') - workload_thread.start() - - consensus_thread = Consensus_Thread(leaf_nodes) - consensus_thread.setName('consensus_thread') - consensus_thread.setDaemon(True) - consensus_thread.start() - - for node in leaf_nodes: - ssh_thread = SSH_thread(node,PORT,USERNAME,PASSWORD) - ssh_thread.setName('ssh_thread') - threads.append(ssh_thread) - - for thread in threads: - thread.start() - thread.join() - - consensus_thread.join() - workload_thread.join() - - - From 48036fb888b87082a4c6bd3fa79058ece712b551 Mon Sep 17 00:00:00 2001 From: sandeeplandt Date: Thu, 6 Sep 2018 17:19:25 +0530 Subject: [PATCH 06/64] Added tests for sprint 19 Signed-off-by: sandeeplandt --- rest_api/tests/api_test/fixtures.py | 53 +++++++++++ .../api_test/get/test_rest_api_get_block.py | 41 +++++++++ .../mul/test_rest_api_mul_validator.py | 2 +- rest_api/tests/api_test/payload.py | 91 +++++++++++++++++-- .../tests/api_test/post/test_rest_api_post.py | 77 ++++++++++++++-- rest_api/tests/api_test/utils.py | 3 +- 6 files changed, 250 insertions(+), 17 deletions(-) diff --git a/rest_api/tests/api_test/fixtures.py b/rest_api/tests/api_test/fixtures.py index d2d214ec49..5ed0b380f8 100644 --- a/rest_api/tests/api_test/fixtures.py +++ b/rest_api/tests/api_test/fixtures.py @@ -163,7 +163,60 @@ def setup_invalid_txns(request): Txns=Transactions(invalidtype="addr") data = Txns.get_batch_invalid_txns() return data + +@pytest.fixture(scope="function") +def setup_invalid_invaddr(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="invaddr") + data = 
Txns.get_batch_invalid_txns() + return data +@pytest.fixture(scope="function") +def setup_same_txns(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="addr") + data = Txns.get_batch_same_txns() + return data + +@pytest.fixture(scope="function") +def setup_invalid_txns_min(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="min") + data = Txns.get_batch_invalid_txns() + return data + +@pytest.fixture(scope="function") +def setup_invalid_txns_max(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="max") + data = Txns.get_batch_invalid_txns() + return data + +@pytest.fixture(scope="function") +def setup_valid_txns(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="addr") + data = Txns.get_batch_valid_txns() + return data + +@pytest.fixture(scope="function") +def setup_invalid_txns_fn(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="fn") + data = Txns.get_batch_invalid_txns_fam_name() + return data @pytest.fixture(scope="function") diff --git a/rest_api/tests/api_test/get/test_rest_api_get_block.py b/rest_api/tests/api_test/get/test_rest_api_get_block.py index cf54b22d59..8b10a4bd1c 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_block.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_block.py @@ -407,4 +407,45 @@ def test_api_get_bad_block_id(self, setup): response = json.loads(error.fp.read().decode('utf-8')) LOGGER.info(response['error']['title']) LOGGER.info(response['error']['message']) + + def test_api_blk_debug_flag_set_proper(self, setup): + """Tests that block debug flag should set proper + for true and false value + """ + try: + block_list = get_blocks() + for batch in block_list['data']: + batch_list = get_batches() + for 
trans in batch_list['data']: + trace = trans['trace'] + except urllib.error.HTTPError as error: + LOGGER.info("Debug flag is not set for tracing") + assert trace is not None, "Debug flag is set for tracing" + + def test_api_blk_payload_present_unique(self, setup): + """Tests that block payload is should be present + and unique for each batch in the block + """ + prev_line = '' + try: + with open ('payload.txt', 'w') as f: + block_list = get_blocks() + for batch in block_list['data']: + batch_list = get_batches() + for block in batch_list: + transaction_list = get_transactions() + for trans in transaction_list['data']: + payload = trans['payload'] + f.write(payload) + with open('payload.txt', 'r') as f: + payloads = f.readlines() + for payload in payloads: + if prev_line < payload: + LOGGER.info("Payload is unique in each transaction") + prev_line = payload + except urllib.error.HTTPError as error: + LOGGER.info("Payload is missing in some of the transactions") + assert payload is not None, "Payload is unique and available for all transactions in batch" + + diff --git a/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py b/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py index 6a7c73281a..5c6733387e 100644 --- a/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py +++ b/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py @@ -125,4 +125,4 @@ def test_rest_api_mul_val_Node(self): workload_thread.join() - \ No newline at end of file + diff --git a/rest_api/tests/api_test/payload.py b/rest_api/tests/api_test/payload.py index 8d8f9ef560..6241a692ff 100644 --- a/rest_api/tests/api_test/payload.py +++ b/rest_api/tests/api_test/payload.py @@ -45,7 +45,7 @@ from google.protobuf.message import DecodeError from google.protobuf.json_format import MessageToDict -from utils import batch_count, transaction_count, get_batch_statuses, post_batch +from utils import batch_count, transaction_count, get_batch_statuses, post_batch, 
get_reciepts,get_transactions, get_state_list INTKEY_ADDRESS_PREFIX = hashlib.sha512( 'intkey'.encode('utf-8')).hexdigest()[0:6] @@ -148,6 +148,32 @@ def get_batch_valid_txns(self): ] self.data = self.get_txns_commit_data(txns,self.signer, self.data) return self.data + + def get_batch_same_txns(self): + """Setup method for posting batches and returning the + response + """ + txns = [ + self.create_intkey_same_transaction("set",[],30, self.signer), + self.create_intkey_same_transaction("set",[],30, self.signer), + self.create_intkey_same_transaction("set",[],30, self.signer), + ] + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_batch_invalid_txns_fam_name(self): + """Setup method for posting batches and returning the + response + """ + + txns = [ + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + ] + + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data def get_txns_commit_data(self, txns, signer, data): """Setup method for posting batches and returning the @@ -174,6 +200,9 @@ def get_txns_commit_data(self, txns, signer, data): self.data['expected_trxn_ids'] = expected_trxn_ids expected_trxns['trxn_id'] = [dict['header_signature']] expected_trxns['payload'] = [dict['payload']] + #print(expected_trxns['trxn_id']) + print(expected_trxns['payload']) + LOGGER.info("Creating batches for transactions 3trn/batch") @@ -188,6 +217,7 @@ def get_txns_commit_data(self, txns, signer, data): expected_batches.append(batch_id) length_batches = len(expected_batches) length_transactions = len(expected_trxn_ids) + data['expected_txns'] = expected_trxns['trxn_id'][::-1] post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] try: @@ -195,14 +225,21 @@ def 
get_txns_commit_data(self, txns, signer, data): response = post_batch(batch) batch_id = dict['header_signature'] expected_batches.append(batch_id) + self.data['response'] = response['data'][0]['status'] except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") json_data = json.loads(error.fp.read().decode('utf-8')) + #print(json_data['error']['code']) + #print(json_data['error']['message']) LOGGER.info(json_data['error']['title']) LOGGER.info(json_data['error']['message']) - LOGGER.info(json_data['error']['code']) - - self.data['response'] = response['data'][0]['status'] + LOGGER.info(json_data['error']['code']) + self.data['code'] = json_data['error']['code'] + #receipts = get_reciepts(expected_trxns['trxn_id']) + #print(receipts) + self.state_addresses = [state['address'] for state in get_state_list()['data']] + self.data['state_address'] = self.state_addresses self.data['initial_batch_length'] = initial_batch_length self.data['initial_trn_length'] = initial_transaction_length self.data['expected_batch_length'] = initial_batch_length + length_batches @@ -213,11 +250,19 @@ def create_intkey_transaction(self, verb, deps, count, signer): words = random_word_list(count) name=random.choice(words) payload = IntKeyPayload( - verb=verb,name=name,value=1) + verb=verb,name=name,value=21) addr = make_intkey_address(name) data = self.get_txns_data(addr,deps, payload) return data + def create_intkey_same_transaction(self, verb, deps, count, signer): + name='a' + payload = IntKeyPayload( + verb=verb,name=name,value=1) + + addr = make_intkey_address(name) + data = self.get_txns_data(addr,deps, payload) + return data def create_invalid_intkey_transaction(self, verb, deps, count, signer, invalidtye): words = random_word_list(count) @@ -232,6 +277,16 @@ def create_invalid_intkey_transaction(self, verb, deps, count, signer, invalidty addr = INVALID_INTKEY_ADDRESS_PREFIX + hashlib.sha512( name.encode('utf-8')).hexdigest()[-64:] + + if invalidtye=="invaddr": 
+ payload = IntKeyPayload( + verb=verb,name=name,value=1) + + INVALID_INTKEY_ADDRESS_PREFIX = hashlib.sha512( + 'invalid'.encode('utf-8')).hexdigest()[0:6] + + addr = INVALID_INTKEY_ADDRESS_PREFIX + hashlib.sha512( + name.encode('utf-8')).hexdigest()[-62:] elif invalidtye=="min": payload = IntKeyPayload( @@ -252,7 +307,31 @@ def create_invalid_intkey_transaction(self, verb, deps, count, signer, invalidty payload = IntKeyPayload( verb="verb",name=name,value=1) addr = make_intkey_address(name) - + + elif invalidtye=="fn": + payload = IntKeyPayload( + verb="verb",name=name,value=1) + addr = make_intkey_address(name) + header = TransactionHeader( + signer_public_key=self.signer.get_public_key().as_hex(), + family_name='abcd', + family_version='1.0', + inputs=[addr], + outputs=[addr], + dependencies=deps, + payload_sha512=payload.sha512(), + batcher_public_key=self.signer.get_public_key().as_hex()) + + header_bytes = header.SerializeToString() + + signature = self.signer.sign(header_bytes) + + transaction = Transaction( + header=header_bytes, + payload=payload.to_cbor(), + header_signature=signature) + return transaction + data = self.get_txns_data(addr,deps, payload) return data diff --git a/rest_api/tests/api_test/post/test_rest_api_post.py b/rest_api/tests/api_test/post/test_rest_api_post.py index 63c4035566..911a818149 100644 --- a/rest_api/tests/api_test/post/test_rest_api_post.py +++ b/rest_api/tests/api_test/post/test_rest_api_post.py @@ -41,14 +41,15 @@ from utils import post_batch, get_state_list , get_blocks , get_transactions, \ get_batches , get_state_address, check_for_consensus,\ - _get_node_list, _get_node_chains, post_batch_no_endpoint + _get_node_list, _get_node_chains, post_batch_no_endpoint, get_reciepts from payload import get_signer, create_intkey_transaction, create_batch,\ create_intkey_same_transaction from base import RestApiBaseTest -from fixtures import setup_valinv_txns, setup_invval_txns, setup_invalid_txns +from fixtures import 
setup_valinv_txns, setup_invval_txns, setup_invalid_txns, setup_same_txns, setup_invalid_txns_min, setup_invalid_txns_max,\ + setup_valid_txns, setup_invalid_txns_fn, setup_invalid_invaddr LOGGER = logging.getLogger(__name__) @@ -370,8 +371,9 @@ def test_rest_api_post_no_endpoint(self, setup): errdata = e.file.read().decode("utf-8") errcode = e.code assert errcode == 404 - + class TestPostMulTxns(RestApiBaseTest): + def test_txn_invalid_addr(self, setup_invalid_txns): initial_batch_length = setup_invalid_txns['initial_batch_length'] expected_batch_length = setup_invalid_txns['expected_batch_length'] @@ -381,6 +383,24 @@ def test_txn_invalid_addr(self, setup_invalid_txns): assert initial_trn_length < expected_trn_length assert setup_invalid_txns['response'] == 'INVALID' + def test_txn_invalid_min(self, setup_invalid_txns_min): + initial_batch_length = setup_invalid_txns_min['initial_batch_length'] + expected_batch_length = setup_invalid_txns_min['expected_batch_length'] + initial_trn_length = setup_invalid_txns_min['initial_trn_length'] + expected_trn_length = setup_invalid_txns_min['expected_trn_length'] + assert initial_batch_length < expected_batch_length + assert initial_trn_length < expected_trn_length + assert setup_invalid_txns_min['response'] == 'INVALID' + + def test_txn_invalid_max(self, setup_invalid_txns_max): + initial_batch_length = setup_invalid_txns_max['initial_batch_length'] + expected_batch_length = setup_invalid_txns_max['expected_batch_length'] + initial_trn_length = setup_invalid_txns_max['initial_trn_length'] + expected_trn_length = setup_invalid_txns_max['expected_trn_length'] + assert initial_batch_length < expected_batch_length + assert initial_trn_length < expected_trn_length + assert setup_invalid_txns_max['response'] == 'INVALID' + def test_txn_valid_invalid_txns(self, setup_valinv_txns): #data=Txns.setup_batch_valinv_txns() initial_batch_length = setup_valinv_txns['initial_batch_length'] @@ -391,7 +411,7 @@ def 
test_txn_valid_invalid_txns(self, setup_valinv_txns): assert initial_trn_length < expected_trn_length assert setup_valinv_txns['response'] == 'INVALID' - def test_txn_invalid_valid_txns(self, setup_invval_txns): + def test_txn_invalid_valid_txns(self, setup_invval_txns): initial_batch_length = setup_invval_txns['initial_batch_length'] expected_batch_length = setup_invval_txns['expected_batch_length'] initial_trn_length = setup_invval_txns['initial_trn_length'] @@ -399,10 +419,49 @@ def test_txn_invalid_valid_txns(self, setup_invval_txns): assert initial_batch_length < expected_batch_length assert initial_trn_length < expected_trn_length assert setup_invval_txns['response'] == 'INVALID' - - - - + + def test_txn_same_txns(self, setup_same_txns): + initial_batch_length = setup_same_txns['initial_batch_length'] + expected_batch_length = setup_same_txns['expected_batch_length'] + initial_trn_length = setup_same_txns['initial_trn_length'] + expected_trn_length = setup_same_txns['expected_trn_length'] + assert initial_batch_length < expected_batch_length + assert initial_trn_length < expected_trn_length + assert setup_same_txns['code'] == 30 + + def test_api_sent_commit_txns(self, setup_valid_txns): + expected_transaction=setup_valid_txns['expected_txns'] + + transaction_id=str(expected_transaction)[2:-2] + try: + response = get_reciepts(transaction_id) + assert transaction_id == response['data'][0]['id'] + assert response['data'][0]['state_changes'][0]['type'] == "SET" + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + assert response['error']['code'] == RECEIPT_NOT_FOUND + assert response['error']['title'] == 'Invalid Resource Id' + + def test_txn_invalid_family_name(self, setup_invalid_txns_fn): + initial_batch_length = setup_invalid_txns_fn['initial_batch_length'] + expected_batch_length = 
setup_invalid_txns_fn['expected_batch_length'] + initial_trn_length = setup_invalid_txns_fn['initial_trn_length'] + expected_trn_length = setup_invalid_txns_fn['expected_trn_length'] + assert initial_batch_length < expected_batch_length + assert initial_trn_length < expected_trn_length + assert setup_invalid_txns_fn['code'] == 17 - + def test_txn_invalid_bad_addr(self, setup_invalid_invaddr): + initial_batch_length = setup_invalid_invaddr['initial_batch_length'] + expected_batch_length = setup_invalid_invaddr['expected_batch_length'] + initial_trn_length = setup_invalid_invaddr['initial_trn_length'] + expected_trn_length = setup_invalid_invaddr['expected_trn_length'] + assert initial_batch_length < expected_batch_length + assert initial_trn_length < expected_trn_length + assert setup_invalid_invaddr['code'] == 17 + + \ No newline at end of file diff --git a/rest_api/tests/api_test/utils.py b/rest_api/tests/api_test/utils.py index d1eed5e01c..d4639c3d26 100644 --- a/rest_api/tests/api_test/utils.py +++ b/rest_api/tests/api_test/utils.py @@ -290,7 +290,7 @@ def _get_client_address(): command = "hostname -I | awk '{print $1}'" node_ip = subprocess.check_output(command , shell=True).decode().strip().replace("'", '"') return 'http://' + node_ip + ':8008' - + def _start_validator(): LOGGER.info('Starting the validator') cmd = "sudo -u sawtooth sawtooth-validator -vv" @@ -448,3 +448,4 @@ def _get_transaction_list(response): transaction_list += data_list return transaction_list + From 3d4bf6953e57cff3e4886061199dbc420529d3f3 Mon Sep 17 00:00:00 2001 From: sandeeplandt Date: Fri, 7 Sep 2018 11:01:28 +0530 Subject: [PATCH 07/64] Deleted multi validator tests Signed-off-by: sandeeplandt --- .../mul/test_rest_api_mul_validator.py | 128 ------------------ 1 file changed, 128 deletions(-) delete mode 100644 rest_api/tests/api_test/mul/test_rest_api_mul_validator.py diff --git a/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py 
b/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py deleted file mode 100644 index 5c6733387e..0000000000 --- a/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright 2018 Intel Corporation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ------------------------------------------------------------------------------ -import pytest -import logging -import json -import urllib.request -import urllib.error -import base64 -import argparse -import cbor -import subprocess -import shlex -import requests -import time -import paramiko -import sys -import threading -import os -import signal - - -from google.protobuf.json_format import MessageToDict - -from base import RestApiBaseTest -from payload import get_signer, create_intkey_transaction , create_batch -from utils import _get_client_address, _send_cmd, _get_node_list, \ - _get_node_chain, check_for_consensus, _stop_validator\ - -from workload import Workload -from ssh import SSH -from thread import Workload_thread, SSH_thread, Consensus_Thread,\ - wait_for_event, wait_for_event_timeout - - - -logging.basicConfig(level=logging.INFO, - format='[%(levelname)s] (%(threadName)-10s) %(message)s', - ) - -WAIT_TIME = 10 -PORT =22 -USERNAME = 'test' -PASSWORD = 'aditya9971' - -BLOCK_TO_CHECK_CONSENSUS = 1 - -pytestmark = pytest.mark.mul - - -class TestMultiple(RestApiBaseTest): - def test_rest_api_mul_val_intk(self): - """Tests that transactions are submitted and committed 
for - each block that are created by submitting intkey and XO batches - """ - signer = get_signer() - expected_trxns = {} - expected_batches = [] - node_list = [{_get_client_address()}] - - logging.info('Starting Test for Intkey payload') - - logging.info("Creating intkey batches") - - txns = [ - create_intkey_transaction("set", [] , 50 , signer), - create_intkey_transaction("set", [] , 50 , signer), - ] - - for txn in txns: - dict = MessageToDict( - txn, - including_default_value_fields=True, - preserving_proto_field_name=True) - - expected_trxns['trxn_id'] = [dict['header_signature']] - expected_trxns['payload'] = [dict['payload']] - - logging.info("Creating batches for transactions 1trn/batch") - - batches = [create_batch([txn], signer) for txn in txns] - - node_list = _get_node_list() - - chains = _get_node_chain(node_list) - check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) - - def test_rest_api_mul_val_Node(self): - """Tests that leaf nodes are brought up/down in a network - and checks are performed on the respective nodes - """ - leaf_nodes = ['10.223.155.134', '10.223.155.25'] - threads = [] - - workload_thread = Workload_thread() - workload_thread.setName('workload_thread') - workload_thread.start() - - consensus_thread = Consensus_Thread(leaf_nodes) - consensus_thread.setName('consensus_thread') - consensus_thread.setDaemon(True) - consensus_thread.start() - - for node in leaf_nodes: - ssh_thread = SSH_thread(node,PORT,USERNAME,PASSWORD) - ssh_thread.setName('ssh_thread') - threads.append(ssh_thread) - - for thread in threads: - thread.start() - thread.join() - - consensus_thread.join() - workload_thread.join() - - - From 43f4d300db3a716461e8ce8bd7ee65fdf89a0158 Mon Sep 17 00:00:00 2001 From: lntdev <36985109+lntdev@users.noreply.github.com> Date: Wed, 20 Jun 2018 16:27:55 +0530 Subject: [PATCH 08/64] Test files added for Rest Api testing (#30) Contains the implemented directory structure for rest api testing Includes utility files required for 
testing the rest api Includes fixtures for creating setup functions Signed-off-by: aditya singh --- rest_api/tests/api_test/base.py | 160 ++++++++ rest_api/tests/api_test/conftest.py | 121 ++++++ rest_api/tests/api_test/fixtures.py | 117 ++++++ .../api_test/get/test_rest_api_get_batch.py | 285 +++++++++++++ rest_api/tests/api_test/utils.py | 387 ++++++++++++++++++ 5 files changed, 1070 insertions(+) create mode 100644 rest_api/tests/api_test/base.py create mode 100644 rest_api/tests/api_test/conftest.py create mode 100644 rest_api/tests/api_test/fixtures.py create mode 100644 rest_api/tests/api_test/get/test_rest_api_get_batch.py create mode 100644 rest_api/tests/api_test/utils.py diff --git a/rest_api/tests/api_test/base.py b/rest_api/tests/api_test/base.py new file mode 100644 index 0000000000..8c7a389ab8 --- /dev/null +++ b/rest_api/tests/api_test/base.py @@ -0,0 +1,160 @@ + +class RestApiBaseTest(object): + def assert_items(self, items, cls): + """Asserts that all items in a collection are instances of a class + """ + for item in items: + assert isinstance(item, cls) + + def assert_has_valid_head(self, response, expected): + """Asserts a response has a head string with an expected value + """ + assert 'head' in response + head = response['head'] + assert isinstance(head, str) + assert head == expected + + def assert_has_valid_link(self, response, expected_ending): + """Asserts a response has a link url string with an expected ending + """ + assert link in response['link'] + self.assert_valid_url(link, expected_ending) + + + def assert_has_valid_paging(self, js_response, pb_paging, + next_link=None, previous_link=None): + """Asserts a response has a paging dict with the expected values. 
+ """ + assert 'paging' in js_response + js_paging = js_response['paging'] + + if pb_paging.next: + assert 'next_position' in js_paging + + if next_link is not None: + assert 'next' in js_paging + self.assert_valid_url(js_paging['next'], next_link) + else: + assert 'next' not in js_paging + + def assert_has_valid_error(self, response, expected_code): + """Asserts a response has only an error dict with an expected code + """ + assert 'error' in response + assert len(response) == 1 + + error = response['error'] + assert 'code' in error + assert error['code'] == expected_code + assert 'title' in error + assert isinstance(error['title'], str) + assert 'message' in error + assert isinstance(error['message'], str) + + def assert_has_valid_data_list(self, response, expected_length): + """Asserts a response has a data list of dicts of an expected length. + """ + assert 'data' in response + data = response['data'] + assert isinstance(data, list) + assert expected_length == len(data) + self.assert_items(data, dict) + + def assert_has_valid_url(self, url, expected_ending=''): + """Asserts a url is valid, and ends with the expected value + """ + assert isinstance(url, str) + assert url.startswith('http') + assert url.endswith(expected_ending) + + + def aasert_check_block_seq(blocks, *expected_ids): + if not isinstance(blocks, list): + blocks = [blocks] + + consensus = None + + for block, expected_id in zip(blocks, expected_ids): + assert isinstance(block, dict) + assert expected_id == block['header_signature'] + assert isinstance(block['header'], dict) + assert consensus == b64decode(block['header']['consensus']) + + batches = block['batches'] + assert isinstance(batches, list) + assert len(batches) == 1 + assert isinstance(batches, dict) + assert check_batch_seq(batches, expected_id) + + return True + + def assert_check_batch_seq(signer_key , batches , *expected_ids): + if not isinstance(batches, list): + batches = [batches] + + for batch, expected_id in zip(batches, 
expected_ids): + assert expected_id == batch['header_signature'] + assert isinstance(batch['header'], dict) + + + txns = batch['transactions'] + assert isinstance(txns, list) + assert len(txns) == 1 + assert isinstance(txns, dict) + assert check_transaction_seq(txns, expected_id) == True + + return True + + def assert_check_transaction_seq(txns , *expected_ids): + if not isinstance(txns, list): + txns = [txns] + + payload = None + + for txn, expected_id in zip(txns, expected_ids): + assert expected_id == txn['header_signature'] + assert payload == b64decode(txn['payload']) + assert isinstance(txn['header'], dict) + assert expected_id == txn['header']['nonce'] + + return True + + def assert_check_batch_nonce(self, response): + pass + + def assert_check_family(self, response): + assert 'family_name' in response + assert 'family_version' in response + + def assert_check_dependency(self, response): + pass + + def assert_check_content(self, response): + pass + + def assert_check_payload_algo(self): + pass + + def assert_check_payload(self, response): + pass + + def assert_batcher_public_key(self, signer_key , batch): + assert 'public_key' == batch['header']['signer_public_key'] + + def assert_signer_public_key(self, signer_key , batch): + assert 'public_key' == batch['header']['signer_public_key'] + + def aasert_check_batch_trace(self, trace): + assert bool(trace) + + def assert_check_consensus(self): + pass + + def assert_state_root_hash(self): + pass + + def assert_check_previous_block_id(self): + pass + + def assert_check_block_num(self): + pass \ No newline at end of file diff --git a/rest_api/tests/api_test/conftest.py b/rest_api/tests/api_test/conftest.py new file mode 100644 index 0000000000..9cfbd181e6 --- /dev/null +++ b/rest_api/tests/api_test/conftest.py @@ -0,0 +1,121 @@ +import pytest +import sys +import platform +import inspect + +ALL = set("darwin linux win32".split()) + + +def pytest_addoption(parser): + parser.addoption( + "--get", action="store_true", 
default=False, help="run get tests" + ) + + parser.addoption( + "--post", action="store_true", default=False, help="run post tests" + ) + + parser.addoption( + "--sn", action="store_true", default=False, help="run scenario based tests" + ) + + parser.addoption("--batch", action="store", metavar="NAME", + help="only run batch tests." + ) + + parser.addoption("--transaction", action="store", metavar="NAME", + help="only run transaction tests." + ) + + parser.addoption("--state", action="store", metavar="NAME", + help="only run state tests." + ) + + parser.addoption("--block", action="store", metavar="NAME", + help="only run state tests." + ) + + parser.addoption("-E", action="store", metavar="NAME", + help="only run tests matching the environment NAME." + ) + + parser.addoption("-N", action="store", metavar="NAME", + help="only run tests matching the Number." + ) + + parser.addoption("-O", action="store", metavar="NAME", + help="only run tests matching the OS release version." + ) + + +def pytest_collection_modifyitems(config, items): + try: + num = int(config.getoption("-N")) + except: + num = None + + selected_items = [] + deselected_items = [] + if config.getoption("--get"): + for item in items: + for marker in list(item.iter_markers()): + if marker.name == 'get': + selected_items.append(item) + else: + deselected_items.append(item) + + items[:] = selected_items[:num] + return items + elif config.getoption("--post"): + for item in items: + for marker in item.iter_markers(): + if marker.name == 'post': + selected_items.append(item) + else: + deselected_items.append(item) + + items[:] = selected_items[:num] + return items + elif config.getoption("--sn"): + for item in items: + for marker in item.iter_markers(): + if marker.name == 'scenario': + selected_items.append(item) + else: + deselected_items.append(item) + + items[:] = selected_items[:num] + return items + else: + selected_items = items[:num] + items[:] = selected_items + return items + +# def 
pytest_pycollect_makeitem(collector, name, obj): +# if _is_coroutine(obj): +# wrappped = pytest.mark.asyncio(obj) +# return pytest.Function(name=name, parent=collector) + + +# def pytest_configure(config): +# # register an additional marker +# config.addinivalue_line("markers", +# "env(name): mark test to run only on named environment") +# +def pytest_runtest_setup(item): + envnames = [mark.args[0] for mark in item.iter_markers(name='env')] + option = item.config.getoption("-E") + option = item.config.getoption("-O") + if option: + if option not in envnames: + pytest.skip("test requires env in %r" % envnames) + + +# supported_platforms = ALL.intersection(mark.name for mark in item.iter_markers()) +# plat = platform.platform() +# print(platform.system()) +# print(platform.release()) +# print(platform.linux_distribution()) +# print(platform.version()) +# if supported_platforms and plat not in supported_platforms: +# pytest.skip("cannot run on platform %s" % (plat)) diff --git a/rest_api/tests/api_test/fixtures.py b/rest_api/tests/api_test/fixtures.py new file mode 100644 index 0000000000..659eaf6da5 --- /dev/null +++ b/rest_api/tests/api_test/fixtures.py @@ -0,0 +1,117 @@ +import pytest +import logging +import urllib +import json + +from sawtooth_signing import create_context +from sawtooth_signing import CryptoFactory +from sawtooth_signing import ParseError +from sawtooth_signing.secp256k1 import Secp256k1PrivateKey + +from sawtooth_rest_api.protobuf.validator_pb2 import Message +from sawtooth_rest_api.protobuf import client_batch_submit_pb2 +from sawtooth_rest_api.protobuf import client_batch_pb2 +from sawtooth_rest_api.protobuf import client_list_control_pb2 + +from sawtooth_rest_api.protobuf.batch_pb2 import Batch +from sawtooth_rest_api.protobuf.batch_pb2 import BatchList +from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import 
Transaction + +from utils import get_batches, post_batch, get_signer , get_blocks , create_batch, \ + create_intkey_transaction , get_state_list , _delete_genesis , _start_validator, \ + _stop_validator + + +from google.protobuf.json_format import MessageToDict + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + + +@pytest.fixture(scope="module") +def setup_batch(request): + """Setup method for posting batches and returning the + response + """ + data = {} + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + initial_state_length = len(get_state_list()) + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction("set", 'a', 0, [], signer), + ] + + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + data['expected_trxn_ids'] = expected_trxn_ids + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + data['expected_batch_ids'] = expected_batch_ids + data['signer_key'] = signer.get_public_key().as_hex() + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + for batch in post_batch_list: + try: + response = post_batch(batch) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + block_list = get_blocks() + data['block_list'] = block_list + block_ids = [block['header_signature'] for block in 
block_list] + data['block_ids'] = block_ids + batch_ids = [block['header']['batch_ids'][0] for block in block_list] + data['batch_ids'] = batch_ids + expected_head_id = block_ids[0] + data['expected_head_id'] = expected_head_id + yield data + + +@pytest.fixture(scope="module") +def delete_genesis(): + LOGGER.info("Deleting the genesis data") + _delete_genesis() + _stop_validator() + _create_genesis() + _start_validator() + +def _create_genesis(): + LOGGER.info("creating the genesis data") + batch_path = '~' + os.chdir(batch_path) + cmd = "sudo -u sawadm config-genesis.batch" + subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) + + \ No newline at end of file diff --git a/rest_api/tests/api_test/get/test_rest_api_get_batch.py b/rest_api/tests/api_test/get/test_rest_api_get_batch.py new file mode 100644 index 0000000000..0d6fb7afaf --- /dev/null +++ b/rest_api/tests/api_test/get/test_rest_api_get_batch.py @@ -0,0 +1,285 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error + +from fixtures import setup_batch , delete_genesis +from utils import get_batches, _get_node_list, _get_node_chain, check_for_consensus + +from base import RestApiBaseTest + +pytestmark = [pytest.mark.get , pytest.mark.batch] + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + + +@pytest.mark.usefixtures('setup_batch') +class TestBatchList(RestApiBaseTest): + def test_api_get_batch_list(self, setup_batch): + """Tests the batch list by submitting intkey batches + """ + signer_key = setup_batch['signer_key'] + + try: + response = get_batches() + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is unreachable") + + print(response) + + self.assert_check_family(response) + self.assert_check_batch_nonce(response) + +# def test_api_get_batch_list_no_batches(self): +# """Tests that transactions are submitted and committed for +# each block that are created by submitting intkey batches +# """ +# batch=b'' +# try: +# response = post_batch(batch) +# except urllib.error.HTTPError as error: +# data = json.loads(error.fp.read().decode('utf-8')) +# LOGGER.info(data['error']['title']) +# LOGGER.info(data['error']['message']) +# assert data['error']['code'] == 34 +# +# def test_api_get_batch_list_invalid_batch(self): +# """Tests that transactions are submitted and committed for +# each block that are created by submitting intkey batches +# """ +# batch= b'' +# try: +# response = post_batch(batch) +# except urllib.error.HTTPError as error: +# data = json.loads(error.fp.read().decode('utf-8')) +# LOGGER.info(data['error']['title']) +# LOGGER.info(data['error']['message']) +# assert data['error']['code'] == 34 +# +# def test_api_get_batch_list_head(self , setup_batch): +# """Tests that GET /batches is reachable with head parameter +# """ +# LOGGER.info("Starting test for batch 
with head parameter") +# block_list = setup_batch[0] +# block_ids = setup_batch[1] +# batch_ids = setup_batch[2] +# expected_head_id = setup_batch[3] +# self.assert_has_valid_head() +# +# try: +# response = get_batches(head_id=expected_head_id) +# except urllib.error.HTTPError as error: +# LOGGER.info("Rest Api not reachable") +# data = json.loads(error.fp.read().decode('utf-8')) +# LOGGER.info(data['error']['title']) +# LOGGER.info(data['error']['message']) +# +# assert response['head'] == expected_head_id , "request is not correct" +# +# def test_api_get_batch_list_bad_head(self): +# """Tests that GET /batches is unreachable with bad head parameter +# """ +# LOGGER.info("Starting test for batch with bad head parameter") +# expected_head_id = setup_batch[3] +# bad_head = 'ff' +# +# try: +# batch_list = get_batches(head_id=bad_head) +# except urllib.error.HTTPError as error: +# LOGGER.info("Rest Api is not reachable") +# data = json.loads(error.fp.read().decode('utf-8')) +# if data: +# LOGGER.info(data['error']['title']) +# LOGGER.info(data['error']['message']) +# assert data['error']['code'] == 60 +# assert data['error']['title'] == 'Invalid Resource Id' +# +# def test_api_get_batch_list_id(self): +# """Tests that GET /batches is reachable with id as parameter +# """ +# LOGGER.info("Starting test for batch with id parameter") +# +# block_list = setup_batch[0] +# block_ids = setup_batch[1] +# batch_ids = setup_batch[2] +# expected_head_id = setup_batch[3] +# expected_id = batch_ids[0] +# +# try: +# response = get_batches(id=expected_id) +# except: +# LOGGER.info("Rest Api is not reachable") +# +# +# assert response['head'] == expected_head_id , "request is not correct" +# assert response['paging']['start'] == None , "request is not correct" +# assert response['paging']['limit'] == None , "request is not correct" +# +# def test_api_get_batch_list_bad_id(self): +# """Tests that GET /batches is unreachable with bad id parameter +# """ +# LOGGER.info("Starting test for 
batch with bad id parameter") +# block_list = setup_batch[0] +# block_ids = setup_batch[1] +# batch_ids = setup_batch[2] +# expected_head_id = setup_batch[3] +# expected_id = batch_ids[0] +# bad_id = 'ff' +# +# try: +# batch_list = get_batches(head_id=bad_id) +# except urllib.error.HTTPError as error: +# LOGGER.info("Rest Api is not reachable") +# data = json.loads(error.fp.read().decode('utf-8')) +# if data: +# LOGGER.info(data['error']['title']) +# LOGGER.info(data['error']['message']) +# assert data['error']['code'] == 60 +# assert data['error']['title'] == 'Invalid Resource Id' +# +# def test_api_get_batch_list_head_and_id(self): +# """Tests GET /batches is reachable with head and id as parameters +# """ +# LOGGER.info("Starting test for batch with head and id parameter") +# block_list = setup_batch[0] +# block_ids = setup_batch[1] +# batch_ids = setup_batch[2] +# expected_head_id = setup_batch[3] +# expected_id = batch_ids[0] +# +# +# response = get_batches(head_id=expected_head_id , id=expected_id) +# +# assert response['head'] == expected_head_id , "head is not matching" +# assert response['paging']['start'] == None , "start parameter is not correct" +# assert response['paging']['limit'] == None , "request is not correct" +# assert bool(response['data']) == True +# +# +# def test_api_get_paginated_batch_list(self): +# """Tests GET /batches is reachbale using paging parameters +# """ +# LOGGER.info("Starting test for batch with paging parameters") +# +# block_list = setup_batch[0] +# block_ids = setup_batch[1] +# batch_ids = setup_batch[2] +# expected_head_id = setup_batch[3] +# expected_id = batch_ids[0] +# start = 1 +# limit = 1 +# +# try: +# response = get_batches(start=start , limit=limit) +# except urllib.error.HTTPError as error: +# data = json.loads(error.fp.read().decode('utf-8')) +# LOGGER.info(data['error']['title']) +# LOGGER.info(data['error']['message']) +# assert data['error']['code'] == 54 +# +# def test_api_get_batch_list_invalid_start(self): 
+# """Tests that GET /batches is unreachable with invalid start parameter +# """ +# LOGGER.info("Starting test for batch with invalid start parameter") +# +# block_list = setup_batch[0] +# block_ids = setup_batch[1] +# batch_ids = setup_batch[2] +# expected_head_id = setup_batch[3] +# expected_id = batch_ids[0] +# start = -1 +# +# try: +# response = get_batches(start=start) +# except urllib.error.HTTPError as error: +# data = json.loads(error.fp.read().decode('utf-8')) +# LOGGER.info(data['error']['title']) +# LOGGER.info(data['error']['message']) +# assert data['error']['code'] == 54 +# +# def test_api_get_batch_list_invalid_limit(self): +# """Tests that GET /batches is unreachable with bad limit parameter +# """ +# LOGGER.info("Starting test for batch with bad limit parameter") +# +# block_list = setup_batch[0] +# block_ids = setup_batch[1] +# batch_ids = setup_batch[2] +# expected_head_id = setup_batch[3] +# expected_id = batch_ids[0] +# limit = 0 +# +# try: +# response = get_batches(limit=limit) +# except urllib.error.HTTPError as error: +# data = json.loads(error.fp.read().decode('utf-8')) +# LOGGER.info(data['error']['title']) +# LOGGER.info(data['error']['message']) +# assert data['error']['code'] == 53 +# +# def test_api_get_batch_list_reversed(self): +# """Tests that GET /batches is unreachable with bad head parameter +# """ +# LOGGER.info("Starting test for batch with bad head parameter") +# +# block_list = setup_batch[0] +# block_ids = setup_batch[1] +# batch_ids = setup_batch[2] +# expected_head_id = setup_batch[3] +# expected_id = batch_ids[0] +# reverse = True +# +# try: +# response = get_batches(reverse=reverse) +# except urllib.error.HTTPError as error: +# assert response.code == 400 +# +# assert response['head'] == expected_head_id , "request is not correct" +# assert response['paging']['start'] == None , "request is not correct" +# assert response['paging']['limit'] == None , "request is not correct" +# assert bool(response['data']) == True +# +# 
class BatchGetTest(): +# def test_api_get_batch_id(): +# """Tests that transactions are submitted and committed for +# each block that are created by submitting intkey batches +# """ +# LOGGER.info('Starting test for batch post') +# LOGGER.info("Creating batches") +# batches = make_batches('abcd') +# +# LOGGER.info("Submitting batches to the handlers") +# +# for i, batch in enumerate(batches): +# response = post_batch(batch) +# block_list = get_blocks() +# batch_ids = [block['header']['batch_ids'][0] for block in block_list] +# for id in batch_ids: +# data = get_batch(id) +# +# if response['data'][0]['status'] == 'COMMITTED': +# LOGGER.info('Batch is committed') +# assert response['data'][0]['id'] in batch_ids, "Block is not created for the given batch" +# +# batch_list = get_batches() +# for batch in batch_list: +# data = get_batch(batch['header_signature']) +# assert data , "No batches were submitted" diff --git a/rest_api/tests/api_test/utils.py b/rest_api/tests/api_test/utils.py new file mode 100644 index 0000000000..a49383d4f8 --- /dev/null +++ b/rest_api/tests/api_test/utils.py @@ -0,0 +1,387 @@ +import pytest +import logging +import json +import urllib.request +import urllib.error +import base64 +import argparse +import cbor +import subprocess +import shlex +import requests +import hashlib +import os + + +from sawtooth_signing import create_context +from sawtooth_signing import CryptoFactory +from sawtooth_signing import ParseError +from sawtooth_signing.secp256k1 import Secp256k1PrivateKey + +from sawtooth_rest_api.protobuf.validator_pb2 import Message +from sawtooth_rest_api.protobuf import client_batch_submit_pb2 +from sawtooth_rest_api.protobuf import client_batch_pb2 +from sawtooth_rest_api.protobuf import client_list_control_pb2 + +from sawtooth_rest_api.protobuf.batch_pb2 import Batch +from sawtooth_rest_api.protobuf.batch_pb2 import BatchList +from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader +from 
sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction + +from google.protobuf.message import DecodeError +from google.protobuf.json_format import MessageToDict + +INTKEY_ADDRESS_PREFIX = hashlib.sha512( + 'intkey'.encode('utf-8')).hexdigest()[0:6] + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +WAIT = 300 + +def get_blocks(): + response = query_rest_api('/blocks') + return response['data'] + +def get_batches(head_id=None , id=None , start=None , limit=None , reverse=None): + if all(v is not None for v in [head_id , id]): + response = query_rest_api('/batches?head={}&id={}'.format(head_id , id)) + return response + if all(v is not None for v in [start , limit]): + response = query_rest_api('/batches?start={}&limit={}'.format(start , limit)) + return response + if limit is not None: + response = query_rest_api('/batches?limit=%s'% limit) + return response + if start is not None: + response = query_rest_api('/batches?start=%s'% start) + return response + if head_id is not None: + response = query_rest_api('/batches?head=%s'% head_id) + return response + if id is not None: + response = query_rest_api('/batches?id=%s'% id) + return response + if reverse: + response = query_rest_api('/batches?reverse') + return response + else: + response = query_rest_api('/batches') + return response + +def get_batch(batch_id): + response = query_rest_api('/batches/%s' % batch_id) + return response['data'] + +def get_transactions(): + response = query_rest_api('/transactions') + return response['data'] + +def get_transaction(transaction_id): + response = query_rest_api('/transactions/%s' % transaction_id) + return response['data'] + +def get_state_list(head_id=None , id=None , start=None , limit=None , reverse=None): + if all(v is not None for v in [head_id , id]): + response = query_rest_api('/state?head={}&id={}'.format(head_id , id)) + return response + if all(v is not None for 
v in [start , limit]): + response = query_rest_api('/state?start={}&limit={}'.format(start , limit)) + return response + if limit is not None: + response = query_rest_api('/state?limit=%s'% limit) + return response + if start is not None: + response = query_rest_api('/state?start=%s'% start) + return response + if head_id is not None: + response = query_rest_api('/state?head=%s'% head_id) + return response + if id is not None: + response = query_rest_api('/state?id=%s'% id) + return response + if reverse: + response = query_rest_api('/state?reverse') + return response + else: + response = query_rest_api('/state') + return response + +def get_state(address): + response = query_rest_api('/state/%s' % address) + return response + +def post_batch(batch): + headers = {'Content-Type': 'application/octet-stream'} + + response = query_rest_api( + '/batches', data=batch, headers=headers) + + response = submit_request('{}&wait={}'.format(response['link'], WAIT)) + return response + +def query_rest_api(suffix='', data=None, headers=None): + if headers is None: + headers = {} + url = _get_client_address() + suffix + return submit_request(urllib.request.Request(url, data, headers)) + +def submit_request(request): + response = urllib.request.urlopen(request).read().decode('utf-8') + return json.loads(response) + +def _delete_genesis(): + folder = '/var/lib/sawtooth' + for the_file in os.listdir(folder): + file_path = os.path.join(folder, the_file) + try: + if os.path.isfile(file_path): + os.unlink(file_path) + except Exception as e: + print(e) + + +def _get_node_chain(node_list): + chain_list = [] + for node in node_list: + try: + result = requests.get(node + "/blocks").json() + chain_list.append(result['data']) + except: + LOGGER.warning("Couldn't connect to %s REST API", node) + return chain_list + +def check_for_consensus(chains , block_num): + LOGGER.info("Checking Consensus on block number %s" , block_num) + blocks = [] + for chain in chains: + if chain is not None: + block 
= chain[-(block_num + 1)] + blocks.append(block) + else: + return False + block0 = blocks[0] + for block in blocks[1:]: + if block0["header_signature"] != block["header_signature"]: + LOGGER.error("Validators not in consensus on block %s", block_num) + LOGGER.error("BLOCK DUMP: %s", blocks) + return False + else: + LOGGER.info('Validators in Consensus on block number %s' , block_num) + return True + + +def _get_node_list(): + client_address = _get_client_address() + node_list = [_make_http_address(peer) for peer in _get_peers_list(client_address)] + node_list.append(_get_client_address()) + return node_list + + +def _get_peers_list(rest_client, fmt='json'): + cmd_output = _run_peer_command( + 'sawtooth peer list --url {} --format {}'.format( + rest_client, + fmt)) + + if fmt == 'json': + parsed = json.loads(cmd_output) + + elif fmt == 'csv': + parsed = cmd_output.split(',') + + return set(parsed) + +def _get_node_chains(node_list): + chain_list = [] + for node in node_list: + try: + result = requests.get(node + "/blocks").json() + chain_list.append(result['data']) + except: + LOGGER.warning("Couldn't connect to %s REST API", node) + return chain_list + +def check_for_consensus(chains , block_num): + LOGGER.info("Checking Consensus on block number %s" , block_num) + blocks = [] + for chain in chains: + if chain is not None: + block = chain[-(block_num + 1)] + blocks.append(block) + else: + return False + block0 = blocks[0] + for block in blocks[1:]: + if block0["header_signature"] != block["header_signature"]: + LOGGER.error("Validators not in consensus on block %s", block_num) + LOGGER.error("BLOCK DUMP: %s", blocks) + return False + else: + LOGGER.info('Validators in Consensus on block number %s' , block_num) + return True + +def _run_peer_command(command): + return subprocess.check_output( + shlex.split(command) + ).decode().strip().replace("'", '"') + +def _send_cmd(cmd_str): + LOGGER.info('Sending %s', cmd_str) + + subprocess.run( + shlex.split(cmd_str), + 
check=True) + +def _make_http_address(node_number): + node = node_number.replace('tcp' , 'http') + node_number = node.replace('8800' , '8008') + return node_number + +def _get_client_address(): + command = "ifconfig lo | grep 'inet addr' | cut -d ':' -f 2 | cut -d ' ' -f 1" + node_ip = subprocess.check_output(command , shell=True).decode().strip().replace("'", '"') + return 'http://' + node_ip + ':8008' + +def _start_validator(): + LOGGER.info('Starting the validator') + cmd = "sudo -u sawtooth sawtooth-validator -vv" + subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) + +def _stop_validator(): + LOGGER.info('Stopping the validator') + cmd = "sudo kill -9 $(ps aux | grep 'sawtooth-validator' | awk '{print $2}')" + subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) + +def make_intkey_address(name): + return INTKEY_ADDRESS_PREFIX + hashlib.sha512( + name.encode('utf-8')).hexdigest()[-64:] + + +class IntKeyPayload(object): + def __init__(self, verb, name, value): + self._verb = verb + self._name = name + self._value = value + + self._cbor = None + self._sha512 = None + + def to_hash(self): + return { + 'Verb': self._verb, + 'Name': self._name, + 'Value': self._value + } + + def to_cbor(self): + if self._cbor is None: + self._cbor = cbor.dumps(self.to_hash(), sort_keys=True) + return self._cbor + + def sha512(self): + if self._sha512 is None: + self._sha512 = hashlib.sha512(self.to_cbor()).hexdigest() + return self._sha512 + + +def create_intkey_transaction(verb, name, value, deps, signer): + payload = IntKeyPayload( + verb=verb, name=name, value=value) + + # The prefix should eventually be looked up from the + # validator's namespace registry. 
+ addr = make_intkey_address(name) + + header = TransactionHeader( + signer_public_key=signer.get_public_key().as_hex(), + family_name='intkey', + family_version='1.0', + inputs=[addr], + outputs=[addr], + dependencies=deps, + payload_sha512=payload.sha512(), + batcher_public_key=signer.get_public_key().as_hex()) + + header_bytes = header.SerializeToString() + + signature = signer.sign(header_bytes) + + transaction = Transaction( + header=header_bytes, + payload=payload.to_cbor(), + header_signature=signature) + + return transaction + + +def create_batch(transactions, signer): + transaction_signatures = [t.header_signature for t in transactions] + + header = BatchHeader( + signer_public_key=signer.get_public_key().as_hex(), + transaction_ids=transaction_signatures) + + header_bytes = header.SerializeToString() + + signature = signer.sign(header_bytes) + + batch = Batch( + header=header_bytes, + transactions=transactions, + header_signature=signature) + + return batch + +def get_signer(): + context = create_context('secp256k1') + private_key = context.new_random_private_key() + crypto_factory = CryptoFactory(context) + return crypto_factory.new_signer(private_key) + + + +def _expand_block(cls, block): + """Deserializes a Block's header, and the header of its Batches. + """ + cls._parse_header(BlockHeader, block) + if 'batches' in block: + block['batches'] = [cls._expand_batch(b) for b in block['batches']] + return block + + +def _expand_batch(cls, batch): + """Deserializes a Batch's header, and the header of its Transactions. + """ + cls._parse_header(BatchHeader, batch) + if 'transactions' in batch: + batch['transactions'] = [ + cls._expand_transaction(t) for t in batch['transactions']] + return batch + + +def _expand_transaction(cls, transaction): + """Deserializes a Transaction's header. 
+ """ + return cls._parse_header(TransactionHeader, transaction) + + +def _parse_header(cls, header_proto, resource): + """Deserializes a resource's base64 encoded Protobuf header. + """ + header = header_proto() + try: + header_bytes = base64.b64decode(resource['header']) + header.ParseFromString(header_bytes) + except (KeyError, TypeError, ValueError, DecodeError): + header = resource.get('header', None) + LOGGER.error( + 'The validator sent a resource with %s %s', + 'a missing header' if header is None else 'an invalid header:', + header or '') + raise errors.ResourceHeaderInvalid() + + resource['header'] = cls._message_to_dict(header) + return resource + From 0a2849c1a51372d5d462ae6ccd7afdfcb1288d22 Mon Sep 17 00:00:00 2001 From: shresthichauhan Date: Tue, 14 Aug 2018 14:58:30 +0530 Subject: [PATCH 09/64] Head Length - count from 89 to 93 Signed-off-by: shresthichauhan --- .../api_test/get/test_rest_api_get_batch.py | 51 +++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_batch.py b/rest_api/tests/api_test/get/test_rest_api_get_batch.py index 0d6fb7afaf..0cdc004b5c 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_batch.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_batch.py @@ -47,6 +47,57 @@ def test_api_get_batch_list(self, setup_batch): self.assert_check_family(response) self.assert_check_batch_nonce(response) + + + def test_api_get_each_state_head_length(self, setup): + """Tests the each state head length should be 128 hex character long + """ + try: + for _ in get_state_list()['data']: + expected_head = setup['expected_head'] + head_len = len(expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("State Head length is not 128 hex character long") + assert head_len == head + + + def test_api_get_each_batch_id_length(self, setup): + """Tests the each batch id length should be 128 hex character long + """ + try: + block_list = get_blocks() + for batch in 
block_list['data']: + expected_head = batch['header']['batch_ids'][0] + head_len = len(expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Batch id length is not 128 hex character long") + assert head_len == head + + def test_api_get_first_block_id_length(self, setup): + """Tests the first block id length should be 128 hex character long + """ + try: + for block_list in get_blocks(): + batch_list = get_batches() + for block in batch_list: + expected_head = batch_list['head'] + head_len = len(expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Block id length is not 128 hex character long") + assert head_len == head + + def test_api_get_transaction_id_length(self, setup): + """Tests the transaction id length should be 128 hex character long + """ + try: + transaction_list = get_transactions() + for trans in transaction_list['data']: + transaction_ids = trans['header_signature'] + head_len = len(transaction_ids) + except urllib.error.HTTPError as error: + LOGGER.info("Transaction id length is not 128 hex character long") + assert head_len == head + # def test_api_get_batch_list_no_batches(self): # """Tests that transactions are submitted and committed for From 19303193000c34fca290de5c3b571f04825f2333 Mon Sep 17 00:00:00 2001 From: shresthichauhan Date: Fri, 17 Aug 2018 00:19:06 +0530 Subject: [PATCH 10/64] Block content and count Signed-off-by: shresthichauhan --- .../api_test/get/test_rest_api_get_batch.py | 92 ++++++++++++++++++- 1 file changed, 91 insertions(+), 1 deletion(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_batch.py b/rest_api/tests/api_test/get/test_rest_api_get_batch.py index 0cdc004b5c..76659b8ec2 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_batch.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_batch.py @@ -23,6 +23,7 @@ from utils import get_batches, _get_node_list, _get_node_chain, check_for_consensus from base import RestApiBaseTest +head = 128 pytestmark = 
[pytest.mark.get , pytest.mark.batch] @@ -47,7 +48,96 @@ def test_api_get_batch_list(self, setup_batch): self.assert_check_family(response) self.assert_check_batch_nonce(response) - + + def test_rest_api_blk_content_head_signature(self, setup): + """Tests that head signature of each batch of the block + should be not none + """ + try: + block_list = get_blocks() + for batch in block_list['data']: + batch_list = get_batches() + for block in batch_list: + transaction_list = get_transactions() + for trans in transaction_list['data']: + head_signature = trans['header_signature'] + except urllib.error.HTTPError as error: + LOGGER.info("Header signature is missing in some of the batches") + assert head_signature is not None, "Head signature is available for all batches in block" + + def test_rest_api_blk_signer_key_ids_list_head(self, setup): + """Tests block content with signer key, transaction, batch, + block lists and ids. + """ + try: + block_list = setup + for batchcount in enumerate(block_list, start=1): + assert 'expected_txns' in setup + assert 'signer_key' in setup + assert 'address' in setup + assert 'header' in setup + assert 'transaction_list' in setup + assert 'trace' in setup + assert 'block_list' in setup + assert 'transactions' in setup + assert 'batch_list' in setup + assert 'block_ids' in setup + assert 'expected_batches' in setup + assert 'batch_ids' in setup + assert 'state_head' in setup + assert 'header_signature' in setup + assert 'transaction_ids' in setup + assert 'expected_head' in setup + except urllib.error.HTTPError as error: + LOGGER.info("Some of the parameters of blocks are missing") + + def test_rest_api_check_blocks_count(self, setup): + """Tests blocks count from block list + """ + count =0 + try: + block_list = get_blocks() + for block in enumerate(block_list['data']): + count = count+1 + except urllib.error.HTTPError as error: + LOGGER.info("BLock count not able to collect") + #assert count == _get_batch_list + + def 
test_rest_api_check_batches_count(self, setup): + """Tests batches count from batch list + """ + count =0 + try: + batch_list = get_batches() + for batch in enumerate(batch_list['data']): + count = count+1 + except urllib.error.HTTPError as error: + LOGGER.info("Batch count not able to collect") + #assert count == _get_block_list + + def test_rest_api_check_transactions_count(self, setup): + """Tests transaction count from transaction list + """ + count =0 + try: + batch_list = get_transactions() + for batch in enumerate(batch_list['data']): + count = count+1 + except urllib.error.HTTPError as error: + LOGGER.info("Transaction count not able to collect") + #assert count == _get_transaction_list + + def test_rest_api_check_state_count(self, setup): + """Tests state count from state list + """ + count =0 + try: + state_list = get_state_list()['data'] + for batch in enumerate(state_list): + count = count+1 + except urllib.error.HTTPError as error: + LOGGER.info("State count not able to collect") + #assert count == _get_state_list def test_api_get_each_state_head_length(self, setup): """Tests the each state head length should be 128 hex character long From 85b5432bb3c4e65358fa80f7b86d1b1aa6b60879 Mon Sep 17 00:00:00 2001 From: sandeeplandt Date: Fri, 7 Sep 2018 17:04:40 +0530 Subject: [PATCH 11/64] Updated utils file Signed-off-by: sandeeplandt --- rest_api/tests/api_test/utils.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/rest_api/tests/api_test/utils.py b/rest_api/tests/api_test/utils.py index 817f1e0415..376ec92378 100644 --- a/rest_api/tests/api_test/utils.py +++ b/rest_api/tests/api_test/utils.py @@ -287,15 +287,10 @@ def _make_http_address(node_number): return node_number def _get_client_address(): - command = "ifconfig lo | grep 'inet addr' | cut -d ':' -f 2 | cut -d ' ' -f 1" - node_ip = subprocess.check_output(command , shell=True).decode().strip().replace("'", '"') - return 'http://' + node_ip + ':8008' - ''' command = "hostname 
-I | awk '{print $1}'" node_ip = subprocess.check_output(command , shell=True).decode().strip().replace("'", '"') return 'http://' + node_ip + ':8008' - ''' - + def _start_validator(): LOGGER.info('Starting the validator') cmd = "sudo -u sawtooth sawtooth-validator -vv" From 23f4d968b9cb6858961e8c1003378c07a17ae3c4 Mon Sep 17 00:00:00 2001 From: sandeeplandt Date: Tue, 11 Sep 2018 12:57:03 +0530 Subject: [PATCH 12/64] Added test to get txns based on signer key Signed-off-by: sandeeplandt --- .../get/test_rest_api_get_transaction.py | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_transaction.py b/rest_api/tests/api_test/get/test_rest_api_get_transaction.py index 6480a6f94f..32568d5007 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_transaction.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_transaction.py @@ -373,6 +373,29 @@ def test_api_get_transaction_bad_id(self, setup): LOGGER.info(response['error']['message']) self.assert_valid_error(response, INVALID_RESOURCE_ID) + + def test_api_get_transaction_signer_key(self, setup): + """Tests that GET /transactions/{transaction_id} is reachable + """ + LOGGER.info("Starting test for transaction/{transaction_id}") + expected_head = setup['expected_head'] + expected_id = setup['transaction_ids'][0] + address = setup['address'] + expected_length = 1 + + expected_link = '{}/transactions/{}'.format(address,expected_id) + + try: + response = get_transactions() + for i in range(len(response['data'])): + transaction=get_transaction_id(response['data'][i]['header_signature']) + assert response['data'][i]['header']['signer_public_key'] == transaction['data']['header']['signer_public_key'] + assert bool(transaction['data']) == True + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + 
LOGGER.info(response['error']['message']) From 24b7c618c3b942d167c826402af10fada8c4f65e Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Fri, 5 Oct 2018 12:17:12 +0530 Subject: [PATCH 13/64] Update test_rest_api_get_batch.py --- rest_api/tests/api_test/get/test_rest_api_get_batch.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_batch.py b/rest_api/tests/api_test/get/test_rest_api_get_batch.py index 2eb2de7f21..0cccb67655 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_batch.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_batch.py @@ -55,6 +55,7 @@ class TestBatchList(RestApiBaseTest): """This class tests the batch list with different parameters """ + ''' def test_api_get_batch_list(self, setup): """Tests the batch list by submitting intkey batches """ @@ -89,7 +90,7 @@ def test_api_get_batch_list(self, setup): signer_key) self.assert_valid_link(response, expected_link) self.assert_valid_paging(response, expected_link) - + ''' def test_api_get_batch_list_head(self, setup): """Tests that GET /batches is reachable with head parameter """ @@ -608,4 +609,4 @@ def test_api_get_batch_statuses_default_wait(self,setup): LOGGER.info(data['error']['message']) self.assert_status(response,status) - self.assert_valid_link(response, expected_link) \ No newline at end of file + self.assert_valid_link(response, expected_link) From c90c9d5c83c50696216fdbaa62a22bcbfbfb142d Mon Sep 17 00:00:00 2001 From: sandeeplandt <37058842+sandeeplandt@users.noreply.github.com> Date: Fri, 5 Oct 2018 12:21:28 +0530 Subject: [PATCH 14/64] Update test_rest_api_get_batch.py --- rest_api/tests/api_test/get/test_rest_api_get_batch.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_batch.py b/rest_api/tests/api_test/get/test_rest_api_get_batch.py index 0cccb67655..cfb568d797 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_batch.py +++ 
b/rest_api/tests/api_test/get/test_rest_api_get_batch.py @@ -314,7 +314,7 @@ def test_api_get_batch_list_invalid_limit(self, setup): LOGGER.info(data['error']['message']) self.assert_valid_error(data, INVALID_COUNT_QUERY) - + ''' def test_api_get_batch_list_reversed(self, setup): """verifies that GET /batches is unreachable with bad head parameter """ @@ -350,7 +350,7 @@ def test_api_get_batch_list_reversed(self, setup): self.assert_valid_head(response, expected_head) self.assert_valid_link(response, expected_link) self.assert_valid_paging(response) - + ''' def test_api_get_batch_key_params(self, setup): """Tests/ validate the block key parameters with data, head, link and paging """ From a99a65d0abe45e8057ec5445ba03a20c975c7fbd Mon Sep 17 00:00:00 2001 From: sandeeplandt <37058842+sandeeplandt@users.noreply.github.com> Date: Fri, 5 Oct 2018 12:24:59 +0530 Subject: [PATCH 15/64] Update test_rest_api_get_batch.py --- .../tests/api_test/get/test_rest_api_get_batch.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_batch.py b/rest_api/tests/api_test/get/test_rest_api_get_batch.py index cfb568d797..1dc99c79cb 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_batch.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_batch.py @@ -577,20 +577,22 @@ def test_api_get_batch_statuses_invalid(self, invalid_batch): def test_api_get_batch_statuses_unknown(self, setup): address = setup['address'] expected_batches = setup['expected_batches'] - unknown_batch = expected_batches[0] + batch = expected_batches[0] + unknown_batch = batch[:1] + "b" + batch[1+1:] status = "UNKNOWN" expected_link = '{}/batch_statuses?id={}'.format(address, unknown_batch) - - try: + + try: response = get_batch_statuses([unknown_batch]) except urllib.error.HTTPError as error: data = json.loads(error.fp.read().decode('utf-8')) LOGGER.info(data['error']['title']) LOGGER.info(data['error']['message']) - + 
self.assert_status(response,status) - self.assert_valid_link(response, expected_link) + self.assert_valid_link(response, expected_link + def test_api_get_batch_statuses_default_wait(self,setup): signer_key = setup['signer_key'] From d5ffedb8c5b30e0efc00cbf60ec578170ec4aa6c Mon Sep 17 00:00:00 2001 From: sandeeplandt <37058842+sandeeplandt@users.noreply.github.com> Date: Fri, 5 Oct 2018 12:27:34 +0530 Subject: [PATCH 16/64] Update test_rest_api_get_receipts.py --- .../tests/api_test/get/test_rest_api_get_receipts.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_receipts.py b/rest_api/tests/api_test/get/test_rest_api_get_receipts.py index 749fdbf951..95a87f7ecf 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_receipts.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_receipts.py @@ -53,7 +53,7 @@ def test_api_get_reciept_invalid_id(self): LOGGER.info(response['error']['message']) assert response['error']['code'] == INVALID_RESOURCE_ID assert response['error']['title'] == 'Invalid Resource Id' - + ''' def test_api_get_reciepts_multiple_transactions(self, setup_batch_multiple_transaction): """Test the get reciepts for multiple transaction. """ @@ -71,7 +71,7 @@ def test_api_get_reciepts_multiple_transactions(self, setup_batch_multiple_trans for res,txn in zip(response['data'],reversed(li)): assert str(res['id']) == txn - + ''' def test_api_get_reciepts_single_transactions(self,setup): """Tests get reciepts response for single transaction""" @@ -118,7 +118,7 @@ def test_api_post_reciepts_invalid_transactions(self): LOGGER.info(response['error']['message']) assert response['error']['code'] == RECEIPT_BODY_INVALID assert response['error']['title'] == 'Bad Receipts Request' - + ''' def test_api_post_reciepts_multiple_transactions(self, setup_batch_multiple_transaction): """Test the post reciepts response for multiple transaction. 
""" @@ -134,4 +134,5 @@ def test_api_post_reciepts_multiple_transactions(self, setup_batch_multiple_tran response = json.loads(error.fp.read().decode('utf-8')) for res,txn in zip(response['data'], transaction_list): - assert str(res['id']) == txn \ No newline at end of file + assert str(res['id']) == txn + ''' From cd9d9421348d4258a371e19e8714b82afe49348b Mon Sep 17 00:00:00 2001 From: sandeeplandt <37058842+sandeeplandt@users.noreply.github.com> Date: Fri, 5 Oct 2018 12:29:47 +0530 Subject: [PATCH 17/64] Update test_rest_api_get_state.py --- rest_api/tests/api_test/get/test_rest_api_get_state.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_state.py b/rest_api/tests/api_test/get/test_rest_api_get_state.py index e3a40a8005..1d14a09d61 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_state.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_state.py @@ -52,6 +52,7 @@ class TestStateList(RestApiBaseTest): """This class tests the state list with different parameters """ + ''' def test_api_get_state_list(self, setup): """Tests the state list by submitting intkey batches """ @@ -68,7 +69,7 @@ def test_api_get_state_list(self, setup): state_list = response['data'][:-1] self.assert_valid_head(response , expected_head) - + ''' def test_api_get_state_list_invalid_batch(self, invalid_batch): """Tests that transactions are submitted and committed for each block that are created by submitting invalid intkey batches From 71191168ee09cfed20273de35751db219e714539 Mon Sep 17 00:00:00 2001 From: sandeeplandt <37058842+sandeeplandt@users.noreply.github.com> Date: Fri, 5 Oct 2018 12:32:41 +0530 Subject: [PATCH 18/64] Update test_rest_api_post.py --- rest_api/tests/api_test/post/test_rest_api_post.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/rest_api/tests/api_test/post/test_rest_api_post.py b/rest_api/tests/api_test/post/test_rest_api_post.py index 911a818149..f0b2cbf58a 
100644 --- a/rest_api/tests/api_test/post/test_rest_api_post.py +++ b/rest_api/tests/api_test/post/test_rest_api_post.py @@ -226,7 +226,7 @@ def test_rest_api_post_wrong_header(self,setup): LOGGER.info(error['error']['message']) assert (json.loads(errdata)['error']['code']) == 42 assert e.code == 400 - + ''' def test_rest_api_post_same_txns(self, setup): """Tests the rest-api by submitting multiple transactions with same key """ @@ -280,7 +280,7 @@ def test_rest_api_post_same_txns(self, setup): LOGGER.info(error['error']['message']) assert (json.loads(errdata)['error']['code']) == 42 assert e.code == 400 - + def test_rest_api_multiple_txns_batches(self, setup): """Tests rest-api state by submitting multiple transactions in multiple batches @@ -337,7 +337,7 @@ def test_rest_api_multiple_txns_batches(self, setup): assert e.code == 400 final_state_length = len(get_state_list()) assert initial_state_length == final_state_length - + ''' def test_api_post_batch_different_signer(self, setup): signer_trans = get_signer() intkey=create_intkey_transaction("set",[],50,signer_trans) @@ -464,4 +464,4 @@ def test_txn_invalid_bad_addr(self, setup_invalid_invaddr): assert setup_invalid_invaddr['code'] == 17 - \ No newline at end of file + From d671654813edb4b29059f5edee7919c7b805f8bd Mon Sep 17 00:00:00 2001 From: sandeeplandt <37058842+sandeeplandt@users.noreply.github.com> Date: Fri, 5 Oct 2018 12:34:11 +0530 Subject: [PATCH 19/64] Update test_rest_api_scenario.py --- rest_api/tests/api_test/scenario/test_rest_api_scenario.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rest_api/tests/api_test/scenario/test_rest_api_scenario.py b/rest_api/tests/api_test/scenario/test_rest_api_scenario.py index bc4510c05d..56cd93df38 100644 --- a/rest_api/tests/api_test/scenario/test_rest_api_scenario.py +++ b/rest_api/tests/api_test/scenario/test_rest_api_scenario.py @@ -47,7 +47,7 @@ pytestmark = pytest.mark.scenario - +''' class TestScenario(RestApiBaseTest): def 
test_rest_api_mul_val_intk_xo(self): """Tests that transactions are submitted and committed for @@ -128,3 +128,4 @@ def test_rest_api_mul_val_intk_xo(self): chains = _get_node_chain(node_list) check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) +''' From b65d4f719b7d943bcd5f5a9edcd8594df68df1a6 Mon Sep 17 00:00:00 2001 From: sandeeplandt <37058842+sandeeplandt@users.noreply.github.com> Date: Fri, 5 Oct 2018 14:16:09 +0530 Subject: [PATCH 20/64] Update test_rest_api_get_batch.py --- rest_api/tests/api_test/get/test_rest_api_get_batch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_batch.py b/rest_api/tests/api_test/get/test_rest_api_get_batch.py index 1dc99c79cb..d8b80bb730 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_batch.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_batch.py @@ -591,7 +591,7 @@ def test_api_get_batch_statuses_unknown(self, setup): LOGGER.info(data['error']['message']) self.assert_status(response,status) - self.assert_valid_link(response, expected_link + self.assert_valid_link(response, expected_link) def test_api_get_batch_statuses_default_wait(self,setup): From 0f9dd51fd134e7f7379ad131b0700d81f94538c2 Mon Sep 17 00:00:00 2001 From: sandeeplandt <37058842+sandeeplandt@users.noreply.github.com> Date: Fri, 5 Oct 2018 15:28:23 +0530 Subject: [PATCH 21/64] Update test_rest_api_get_state.py --- rest_api/tests/api_test/get/test_rest_api_get_state.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_state.py b/rest_api/tests/api_test/get/test_rest_api_get_state.py index 1d14a09d61..814118d719 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_state.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_state.py @@ -131,7 +131,7 @@ def test_api_get_state_list_address(self, setup): LOGGER.info(data['error']['message']) assert response['head'] == expected_head , "request is not correct" - + 
''' def test_api_get_state_list_bad_address(self, setup): """Tests that GET /state is unreachable with bad address parameter """ @@ -147,7 +147,7 @@ def test_api_get_state_list_bad_address(self, setup): LOGGER.info(data['error']['message']) self.assert_valid_error(data , INVALID_RESOURCE_ID) - + ''' def test_api_get_paginated_state_list(self, setup): """Tests GET /state is reachbale using paging parameters """ From 00e8dfe95e9c33a69531280361c164883be7505f Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Fri, 12 Oct 2018 14:28:14 +0530 Subject: [PATCH 22/64] Update test_rest_api_get_batch.py --- .../api_test/get/test_rest_api_get_batch.py | 37 ++++++++----------- 1 file changed, 15 insertions(+), 22 deletions(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_batch.py b/rest_api/tests/api_test/get/test_rest_api_get_batch.py index d8b80bb730..ed2323acfd 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_batch.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_batch.py @@ -20,7 +20,8 @@ import urllib.error -from fixtures import break_genesis, invalid_batch +from fixtures import break_genesis, invalid_batch, setup_batch_statuses_10,\ + setup_batch_statuses_15 from utils import get_batches, get_batch_id, post_batch,\ get_batch_statuses, post_batch_statuses,\ _create_expected_link, _get_batch_list @@ -428,37 +429,29 @@ def test_api_get_bad_batch_id(self, setup): class TestBatchStatusesList(RestApiBaseTest): """This class tests the batch status list with different parameters """ - def test_api_post_batch_status_15ids(self, setup): + def test_api_post_batch_status_15ids(self, setup_batch_statuses_15): """verifies that POST /batches_statuses with more than 15 ids """ LOGGER.info("Starting test for batch with bad head parameter") data = {} - batch_ids = setup['batch_ids'] - data['batch_ids'] = batch_ids - expected_head = setup['expected_head'] - expected_id = batch_ids[0] - data_str=json.dumps(data['batch_ids']).encode() - + batch_ids = 
setup_batch_statuses_15['batch_ids'] + data_str=json.dumps(batch_ids).encode() + try: response = post_batch_statuses(data_str) - assert response['data'][0]['status'] == "COMMITTED" + for resp in response['data']: + assert resp['status'] == "COMMITTED" except urllib.error.HTTPError as error: assert response.code == 400 - - def test_api_post_batch_status_10ids(self, setup): + + def test_api_post_batch_status_10ids(self, setup_batch_statuses_10): """verifies that POST /batches_status with less than 15 ids """ - LOGGER.info("Starting test for batch with bad head parameter") - data = {} - values = [] - batch_ids = setup['batch_ids'] - data['batch_ids'] = batch_ids - expected_head = setup['expected_head'] - expected_id = batch_ids[0] - for i in range(10): - values.append(data['batch_ids'][i]) - data_str=json.dumps(values).encode() - + LOGGER.info("Starting test for post batch statuses with less than 15 ids") + batch_ids = setup_batch_statuses_10['batch_ids'] + + data_str=json.dumps(batch_ids).encode() + try: response = post_batch_statuses(data_str) assert response['data'][0]['status'] == "COMMITTED" From b54fc997a8be266de733f3d04b9ae415e348f492 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Fri, 12 Oct 2018 14:31:53 +0530 Subject: [PATCH 23/64] Update test_rest_api_get_batch.py --- rest_api/tests/api_test/get/test_rest_api_get_batch.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_batch.py b/rest_api/tests/api_test/get/test_rest_api_get_batch.py index ed2323acfd..0984537102 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_batch.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_batch.py @@ -433,7 +433,6 @@ def test_api_post_batch_status_15ids(self, setup_batch_statuses_15): """verifies that POST /batches_statuses with more than 15 ids """ LOGGER.info("Starting test for batch with bad head parameter") - data = {} batch_ids = setup_batch_statuses_15['batch_ids'] 
data_str=json.dumps(batch_ids).encode() @@ -449,12 +448,12 @@ def test_api_post_batch_status_10ids(self, setup_batch_statuses_10): """ LOGGER.info("Starting test for post batch statuses with less than 15 ids") batch_ids = setup_batch_statuses_10['batch_ids'] - data_str=json.dumps(batch_ids).encode() try: response = post_batch_statuses(data_str) - assert response['data'][0]['status'] == "COMMITTED" + for resp in response['data']: + assert resp['status'] == "COMMITTED" except urllib.error.HTTPError as error: assert response.code == 400 From 6e09f1c645db4d65e3002d5cad0460f46835b179 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Fri, 12 Oct 2018 14:34:39 +0530 Subject: [PATCH 24/64] Update fixtures.py --- rest_api/tests/api_test/fixtures.py | 111 ++++++++++++++++++++++++++++ 1 file changed, 111 insertions(+) diff --git a/rest_api/tests/api_test/fixtures.py b/rest_api/tests/api_test/fixtures.py index cd63718f90..de690108f4 100644 --- a/rest_api/tests/api_test/fixtures.py +++ b/rest_api/tests/api_test/fixtures.py @@ -259,6 +259,117 @@ def setup_batch_multiple_transaction(): LOGGER.info(data['error']['message']) return expected_trxns + +@pytest.fixture(scope="function") +def setup_batch_statuses_10(): + data = {} + signer = get_signer() + expected_trxns = {} + expected_batches = [] + transaction_list = [] + initial_state_length = len(get_state_list()) + address = _get_client_address() + + + txns = [create_intkey_transaction("set", [] , 50 , signer) for i in range(10)] + + for txn in txns: + dict = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + expected_trxns['trxn_id'] = [dict['header_signature']] + expected_trxns['payload'] = [dict['payload']] + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + dict = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = 
dict['header_signature'] + expected_batches.append(batch_id) + + length_batches = len(expected_batches) + length_transactions = len(expected_trxns) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + for batch in post_batch_list: + response = post_batch(batch) + try: + response = post_batch(batch) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + data['batch_ids'] = expected_batches + return data + + +@pytest.fixture(scope="function") +def setup_batch_statuses_15(): + data = {} + signer = get_signer() + expected_trxns = {} + expected_batches = [] + transaction_list = [] + initial_state_length = len(get_state_list()) + address = _get_client_address() + + + txns = [create_intkey_transaction("set", [] , 50 , signer) for i in range(15)] + + for txn in txns: + dict = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + expected_trxns['trxn_id'] = [dict['header_signature']] + expected_trxns['payload'] = [dict['payload']] + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + dict = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = dict['header_signature'] + expected_batches.append(batch_id) + + length_batches = len(expected_batches) + length_transactions = len(expected_trxns) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + for batch in post_batch_list: + response = post_batch(batch) + try: + response = post_batch(batch) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not 
reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + data['batch_ids'] = expected_batches + return data From f573512aaf852e51014ed2b431e073724e592c31 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Fri, 12 Oct 2018 16:04:31 +0530 Subject: [PATCH 25/64] Update test_rest_api_get_batch.py --- rest_api/tests/api_test/get/test_rest_api_get_batch.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_batch.py b/rest_api/tests/api_test/get/test_rest_api_get_batch.py index 0984537102..a3e8c601da 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_batch.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_batch.py @@ -438,8 +438,6 @@ def test_api_post_batch_status_15ids(self, setup_batch_statuses_15): try: response = post_batch_statuses(data_str) - for resp in response['data']: - assert resp['status'] == "COMMITTED" except urllib.error.HTTPError as error: assert response.code == 400 @@ -452,8 +450,6 @@ def test_api_post_batch_status_10ids(self, setup_batch_statuses_10): try: response = post_batch_statuses(data_str) - for resp in response['data']: - assert resp['status'] == "COMMITTED" except urllib.error.HTTPError as error: assert response.code == 400 From f3339e60386e84fe90280ff1cdb451e850ceb23f Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Sun, 14 Oct 2018 19:01:01 +0530 Subject: [PATCH 26/64] Update conftest.py --- rest_api/tests/api_test/conftest.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/rest_api/tests/api_test/conftest.py b/rest_api/tests/api_test/conftest.py index e3e9beb9bc..1fc1edc9cd 100644 --- a/rest_api/tests/api_test/conftest.py +++ b/rest_api/tests/api_test/conftest.py @@ -54,6 +54,7 @@ LIMIT = 100 +BATCH_SIZE = 15 def pytest_addoption(parser): @@ -164,10 +165,7 @@ def setup(request): LOGGER.info("Creating intkey transactions with set operations") - txns = [ - 
create_intkey_transaction("set", [] , 50 , signer), - create_intkey_transaction("set", [] , 50 , signer), - ] + txns = [create_intkey_transaction("set", [] , 50 , signer) for i in range(BATCH_SIZE)] for txn in txns: dict = MessageToDict( From 428e06a960947cbfd355d5167692031a63bc6c67 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Sun, 14 Oct 2018 19:01:34 +0530 Subject: [PATCH 27/64] Update fixtures.py --- rest_api/tests/api_test/fixtures.py | 118 +--------------------------- 1 file changed, 1 insertion(+), 117 deletions(-) diff --git a/rest_api/tests/api_test/fixtures.py b/rest_api/tests/api_test/fixtures.py index de690108f4..60088fb5e8 100644 --- a/rest_api/tests/api_test/fixtures.py +++ b/rest_api/tests/api_test/fixtures.py @@ -258,120 +258,4 @@ def setup_batch_multiple_transaction(): LOGGER.info(data['error']['title']) LOGGER.info(data['error']['message']) - return expected_trxns - -@pytest.fixture(scope="function") -def setup_batch_statuses_10(): - data = {} - signer = get_signer() - expected_trxns = {} - expected_batches = [] - transaction_list = [] - initial_state_length = len(get_state_list()) - address = _get_client_address() - - - txns = [create_intkey_transaction("set", [] , 50 , signer) for i in range(10)] - - for txn in txns: - dict = MessageToDict( - txn, - including_default_value_fields=True, - preserving_proto_field_name=True) - - expected_trxns['trxn_id'] = [dict['header_signature']] - expected_trxns['payload'] = [dict['payload']] - - LOGGER.info("Creating batches for transactions 1trn/batch") - - batches = [create_batch([txn], signer) for txn in txns] - - for batch in batches: - dict = MessageToDict( - batch, - including_default_value_fields=True, - preserving_proto_field_name=True) - - batch_id = dict['header_signature'] - expected_batches.append(batch_id) - - length_batches = len(expected_batches) - length_transactions = len(expected_trxns) - - post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] - - 
LOGGER.info("Submitting batches to the handlers") - - for batch in post_batch_list: - response = post_batch(batch) - try: - response = post_batch(batch) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is not reachable") - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) - - data['batch_ids'] = expected_batches - return data - - -@pytest.fixture(scope="function") -def setup_batch_statuses_15(): - data = {} - signer = get_signer() - expected_trxns = {} - expected_batches = [] - transaction_list = [] - initial_state_length = len(get_state_list()) - address = _get_client_address() - - - txns = [create_intkey_transaction("set", [] , 50 , signer) for i in range(15)] - - for txn in txns: - dict = MessageToDict( - txn, - including_default_value_fields=True, - preserving_proto_field_name=True) - - expected_trxns['trxn_id'] = [dict['header_signature']] - expected_trxns['payload'] = [dict['payload']] - - LOGGER.info("Creating batches for transactions 1trn/batch") - - batches = [create_batch([txn], signer) for txn in txns] - - for batch in batches: - dict = MessageToDict( - batch, - including_default_value_fields=True, - preserving_proto_field_name=True) - - batch_id = dict['header_signature'] - expected_batches.append(batch_id) - - length_batches = len(expected_batches) - length_transactions = len(expected_trxns) - - post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] - - LOGGER.info("Submitting batches to the handlers") - - for batch in post_batch_list: - response = post_batch(batch) - try: - response = post_batch(batch) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is not reachable") - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) - - data['batch_ids'] = expected_batches - return data - - - - - + return expected_trxn 
From 6a88af5f7ae45172a6b7efb63fb1cfa09060bf1e Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Sun, 14 Oct 2018 19:02:56 +0530 Subject: [PATCH 28/64] Update test_rest_api_get_batch.py --- .../tests/api_test/get/test_rest_api_get_batch.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_batch.py b/rest_api/tests/api_test/get/test_rest_api_get_batch.py index a3e8c601da..0ad3fb4805 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_batch.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_batch.py @@ -20,8 +20,7 @@ import urllib.error -from fixtures import break_genesis, invalid_batch, setup_batch_statuses_10,\ - setup_batch_statuses_15 +from fixtures import break_genesis, invalid_batch from utils import get_batches, get_batch_id, post_batch,\ get_batch_statuses, post_batch_statuses,\ _create_expected_link, _get_batch_list @@ -429,11 +428,11 @@ def test_api_get_bad_batch_id(self, setup): class TestBatchStatusesList(RestApiBaseTest): """This class tests the batch status list with different parameters """ - def test_api_post_batch_status_15ids(self, setup_batch_statuses_15): + def test_api_post_batch_status_15ids(self, setup): """verifies that POST /batches_statuses with more than 15 ids """ LOGGER.info("Starting test for batch with bad head parameter") - batch_ids = setup_batch_statuses_15['batch_ids'] + batch_ids = setup['batch_ids'] data_str=json.dumps(batch_ids).encode() try: @@ -441,11 +440,11 @@ def test_api_post_batch_status_15ids(self, setup_batch_statuses_15): except urllib.error.HTTPError as error: assert response.code == 400 - def test_api_post_batch_status_10ids(self, setup_batch_statuses_10): + def test_api_post_batch_status_10ids(self, setup): """verifies that POST /batches_status with less than 15 ids """ LOGGER.info("Starting test for post batch statuses with less than 15 ids") - batch_ids = setup_batch_statuses_10['batch_ids'] + batch_ids = setup['batch_ids'] 
data_str=json.dumps(batch_ids).encode() try: From 186b39dbd064c9a688e6ff0dfbfa7d41a42a2c84 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Tue, 16 Oct 2018 15:02:51 +0530 Subject: [PATCH 29/64] Update test_rest_api_get_batch.py --- .../api_test/get/test_rest_api_get_batch.py | 659 ++++++++++-------- 1 file changed, 360 insertions(+), 299 deletions(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_batch.py b/rest_api/tests/api_test/get/test_rest_api_get_batch.py index 0ad3fb4805..4650e4e783 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_batch.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_batch.py @@ -18,16 +18,18 @@ import json import urllib.request import urllib.error +import aiohttp - + from fixtures import break_genesis, invalid_batch + from utils import get_batches, get_batch_id, post_batch,\ get_batch_statuses, post_batch_statuses,\ _create_expected_link, _get_batch_list from base import RestApiBaseTest -pytestmark = [pytest.mark.get , pytest.mark.batch] +pytestmark = [pytest.mark.get , pytest.mark.batch, pytest.mark.second] LOGGER = logging.getLogger(__name__) @@ -55,10 +57,10 @@ class TestBatchList(RestApiBaseTest): """This class tests the batch list with different parameters """ - ''' - def test_api_get_batch_list(self, setup): + async def test_api_get_batch_list(self, setup): """Tests the batch list by submitting intkey batches """ + LOGGER.info("Starting tests for batch list") signer_key = setup['signer_key'] expected_head = setup['expected_head'] expected_batches = setup['expected_batches'] @@ -67,32 +69,34 @@ def test_api_get_batch_list(self, setup): payload = setup['payload'] start = setup['start'] limit = setup['limit'] - address = setup['address'] - + address = setup['address'] + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ expected_head, start, limit) - + paging_link = '{}/batches?head={}&start={}'.format(address,\ expected_head, start) - + try: - response = get_batches() - except 
urllib.error.HTTPError as error: + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batches', raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Rest Api is Unreachable") - - batches = _get_batch_list(response) - + + batches = _get_batch_list(response) + self.assert_valid_data(response) - self.assert_valid_head(response, expected_head) - self.assert_valid_data_list(batches, expected_length) - self.assert_check_batch_seq(batches, expected_batches, - expected_txns, payload, + self.assert_valid_head(response, expected_head) + self.assert_valid_data_length(batches, expected_length) + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, signer_key) self.assert_valid_link(response, expected_link) self.assert_valid_paging(response, expected_link) - ''' - def test_api_get_batch_list_head(self, setup): - """Tests that GET /batches is reachable with head parameter + + async def test_api_get_batch_list_head(self, setup): + """Tests that GET /batches is reachable with head parameter """ LOGGER.info("Starting test for batch with head parameter") signer_key = setup['signer_key'] @@ -104,145 +108,162 @@ def test_api_get_batch_list_head(self, setup): start = setup['start'] limit = setup['limit'] address = setup['address'] - + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ expected_head, start, limit) - + + params={'head': expected_head} + try: - response = get_batches(head_id=expected_head) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api not reachable") - + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batches', params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + batches = 
response['data'][:-1] - - self.assert_check_batch_seq(batches, expected_batches, - expected_txns, payload, + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, signer_key) - + self.assert_valid_head(response, expected_head) self.assert_valid_link(response, expected_link) self.assert_valid_paging(response, expected_link) - - def test_api_get_batch_list_bad_head(self, setup): - """Tests that GET /batches is unreachable with bad head parameter - """ + + async def test_api_get_batch_list_bad_head(self, setup): + """Tests that GET /batches is unreachable with bad head parameter + """ LOGGER.info("Starting test for batch with bad head parameter") - + params={'head': BAD_HEAD} + try: - batch_list = get_batches(head_id=BAD_HEAD) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - - self.assert_valid_error(data, INVALID_RESOURCE_ID) - - - def test_api_get_batch_list_id(self, setup): - """Tests that GET /batches is reachable with id as parameter + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batches', params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + + async def test_api_get_batch_list_id(self, setup): + """Tests that GET /batches is reachable with id as parameter """ LOGGER.info("Starting test for batch with id parameter") signer_key = setup['signer_key'] expected_head = setup['expected_head'] expected_batches = setup['expected_batches'] expected_txns = setup['expected_txns'] - payload = setup['payload'] + payload = setup['payload'] batch_ids = setup['batch_ids'] start = setup['start'] limit = setup['limit'] address = setup['address'] - + expected_id = batch_ids[0] expected_length = len([expected_id]) - + 
expected_link = '{}/batches?head={}&start={}&limit={}&id={}'.format(address,\ expected_head, start, limit, expected_id) - + + params={'id': expected_id} + try: - response = get_batches(id=expected_id) - except: - LOGGER.info("Rest Api is not reachable") - - + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batches', params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + batches = response['data'][:-1] - - self.assert_check_batch_seq(batches, expected_batches, - expected_txns, payload, + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, signer_key) - + self.assert_valid_head(response, expected_head) self.assert_valid_link(response, expected_link) - - def test_api_get_batch_list_bad_id(self, setup): - """Tests that GET /batches is unreachable with bad id parameter + + async def test_api_get_batch_list_bad_id(self, setup): + """Tests that GET /batches is unreachable with bad id parameter """ LOGGER.info("Starting test for batch with bad id parameter") - + + params={'head': BAD_ID} + try: - batch_list = get_batches(head_id=BAD_ID) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - - self.assert_valid_error(data, INVALID_RESOURCE_ID) - - def test_api_get_batch_list_head_and_id(self, setup): - """Tests GET /batches is reachable with head and id as parameters + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batches', params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + + async def test_api_get_batch_list_head_and_id(self, setup): 
+ """Tests GET /batches is reachable with head and id as parameters """ LOGGER.info("Starting test for batch with head and id parameter") signer_key = setup['signer_key'] expected_head = setup['expected_head'] expected_batches = setup['expected_batches'] expected_txns = setup['expected_txns'] - payload = setup['payload'] + payload = setup['payload'] batch_ids = setup['batch_ids'] start = setup['start'] limit = setup['limit'] address = setup['address'] - + expected_id = batch_ids[0] expected_length = len([expected_id]) - + expected_link = '{}/batches?head={}&start={}&limit={}&id={}'.format(address,\ expected_head, start, limit, expected_id) - + + params={'head':expected_head,'id':expected_id} + try: - response = get_batches(head_id=expected_head , id=expected_id) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api not reachable") - - + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batches', params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + batches = response['data'][:-1] - - self.assert_check_batch_seq(batches, expected_batches, - expected_txns, payload, + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, signer_key) - + self.assert_valid_head(response, expected_head) self.assert_valid_link(response, expected_link) - - def test_api_get_paginated_batch_list(self, setup): - """Tests GET /batches is reachable using paging parameters + + async def test_api_get_paginated_batch_list(self, setup): + """Tests GET /batches is reachable using paging parameters """ LOGGER.info("Starting test for batch with paging parameters") batch_ids = setup['batch_ids'] expected_head = setup['expected_head'] expected_id = batch_ids[0] - start = 1 - limit = 1 - + + params={'limit':1, 'start':1} + try: - response = get_batches(start=start , limit=limit) 
- except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - - self.assert_valid_error(data, INVALID_PAGING_QUERY) - - def test_api_get_batch_list_limit(self, setup): - """Tests GET /batches is reachable using paging parameters + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batches', params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) + + async def test_api_get_batch_list_limit(self, setup): + """Tests GET /batches is reachable using paging parameters """ LOGGER.info("Starting test for batch with paging parameters") signer_key = setup['signer_key'] @@ -255,139 +276,155 @@ def test_api_get_batch_list_limit(self, setup): expected_id = batch_ids[0] start = setup['start'] address = setup['address'] - limit = 1 - + params={'limit':1} + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ - expected_head, start, limit) - + expected_head, start, 1) + try: - response = get_batches(limit=limit) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batches', params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + batches = response['data'][:-1] - - self.assert_check_batch_seq(batches, expected_batches, - expected_txns, payload, + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, signer_key) - + self.assert_valid_head(response, expected_head) 
self.assert_valid_link(response, expected_link) self.assert_valid_paging(response, expected_link) - - - def test_api_get_batch_list_invalid_start(self, setup): - """Tests that GET /batches is unreachable with invalid start parameter + + + async def test_api_get_batch_list_invalid_start(self, setup): + """Tests that GET /batches is unreachable with invalid start parameter """ LOGGER.info("Starting test for batch with invalid start parameter") batch_ids = setup['batch_ids'] expected_head = setup['expected_head'] expected_id = batch_ids[0] - start = -1 - + params={'start':-1} + try: - response = get_batches(start=start) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - - self.assert_valid_error(data, INVALID_PAGING_QUERY) - - - def test_api_get_batch_list_invalid_limit(self, setup): - """Tests that GET /batches is unreachable with bad limit parameter + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batches', params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) + + + async def test_api_get_batch_list_invalid_limit(self, setup): + """Tests that GET /batches is unreachable with bad limit parameter """ LOGGER.info("Starting test for batch with bad limit parameter") batch_ids = setup['batch_ids'] expected_head = setup['expected_head'] expected_id = batch_ids[0] - limit = 0 - + params={'limit':0} + try: - response = get_batches(limit=limit) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - - self.assert_valid_error(data, INVALID_COUNT_QUERY) - ''' - def test_api_get_batch_list_reversed(self, setup): - """verifies that GET 
/batches is unreachable with bad head parameter + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batches', params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_COUNT_QUERY) + + async def test_api_get_batch_list_reversed(self, setup): + """verifies that GET /batches is unreachable with bad head parameter """ - LOGGER.info("Starting test for batch with bad head parameter") + LOGGER.info("Starting test for batch list as reversed") signer_key = setup['signer_key'] expected_head = setup['expected_head'] - setup_batches = setup['expected_batches'] + expected_batches = setup['expected_batches'] expected_txns = setup['expected_txns'] expected_length = setup['expected_batch_length'] - payload = setup['payload'] - start = setup['start'] + payload = setup['payload'] + start = setup['batch_ids'][::-1][0] limit = setup['limit'] address = setup['address'] - expected_batches = setup_batches[::-1] - - expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ + + expected_link = '{}/batches?head={}&start={}&limit={}&reverse'.format(address,\ expected_head, start, limit) - - reverse = True - + + params = 'reverse' + try: - response = get_batches(reverse=reverse) - except urllib.error.HTTPError as error: - assert response.code == 400 - - batches = response['data'][:-1] - + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batches', params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + batches = response['data'][::-1][:-1] + - self.assert_check_batch_seq(batches, expected_batches, - expected_txns, payload, + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, signer_key) 
- + self.assert_valid_head(response, expected_head) self.assert_valid_link(response, expected_link) - self.assert_valid_paging(response) - ''' - def test_api_get_batch_key_params(self, setup): - """Tests/ validate the block key parameters with data, head, link and paging + self.assert_valid_paging(response, expected_link) + + async def test_api_get_batch_key_params(self, setup): + """Tests/ validate the block key parameters with data, head, link and paging """ - response = get_batches() + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batches', raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + assert 'link' in response assert 'data' in response assert 'paging' in response assert 'head' in response - - def test_api_get_batch_param_link_val(self, setup): + + async def test_api_get_batch_param_link_val(self, setup): """Tests/ validate the batch parameters with batches, head, start and limit """ try: - batch_list = get_batches() - for link in batch_list: + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batches', raise_for_status=True) as data: + response = await data.json() + + for link in response: if(link == 'link'): - assert 'head' in batch_list['link'] - assert 'start' in batch_list['link'] - assert 'limit' in batch_list['link'] - assert 'batches' in batch_list['link'] - except urllib.error.HTTPError as error: + assert 'head' in response['link'] + assert 'start' in response['link'] + assert 'limit' in response['link'] + assert 'batches' in response['link'] + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") assert response.code == 400 LOGGER.info("Link is not proper for batch and parameters are missing") - - def test_rest_api_check_batches_count(self, setup): - """Tests 
batches count from batch list + + async def test_rest_api_check_batches_count(self, setup): + """Tests batches count from batch list """ count =0 try: - batch_list = get_batches() - for batch in enumerate(batch_list['data']): - count = count+1 - except urllib.error.HTTPError as error: - LOGGER.info("Batch count not able to collect") + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batches', raise_for_status=True) as data: + response = await data.json() + for batch in enumerate(response['data']): + count = count+1 + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + class TestBatchGet(RestApiBaseTest): - def test_api_get_batch_id(self, setup): + async def test_api_get_batch_id(self, setup): signer_key = setup['signer_key'] expected_head = setup['expected_head'] expected_batches = setup['expected_batches'] @@ -397,133 +434,150 @@ def test_api_get_batch_id(self, setup): expected_id = batch_ids[0] payload = setup['payload'] address = setup['address'] - + expected_link = '{}/batches/{}'.format(address, expected_batches[0]) - + try: - response = get_batch_id(expected_batches[0]) - except urllib.error.HTTPError as error: + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batches/{}'.format(expected_id), + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Rest Api is Unreachable") - + batches = response['data'] - - self.assert_check_batch_seq(batches, expected_batches, - expected_txns, payload, + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, signer_key) self.assert_valid_link(response, expected_link) - - def test_api_get_bad_batch_id(self, setup): - """verifies that GET /batches/{bad_batch_id} - is unreachable with bad head parameter - """ + + async def test_api_get_bad_batch_id(self, 
setup): + """verifies that GET /batches/{bad_batch_id} + is unreachable with bad head parameter + """ try: - batch_list = get_batches(head_id=BAD_ID) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - - self.assert_valid_error(data, INVALID_RESOURCE_ID) - + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batches/{}'.format(BAD_ID)) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + class TestBatchStatusesList(RestApiBaseTest): """This class tests the batch status list with different parameters """ - def test_api_post_batch_status_15ids(self, setup): + async def test_api_post_batch_status_15ids(self, setup): """verifies that POST /batches_statuses with more than 15 ids """ LOGGER.info("Starting test for batch with bad head parameter") batch_ids = setup['batch_ids'] data_str=json.dumps(batch_ids).encode() - + headers = {'content-type': 'application/json'} + try: - response = post_batch_statuses(data_str) - except urllib.error.HTTPError as error: - assert response.code == 400 - - def test_api_post_batch_status_10ids(self, setup): + async with aiohttp.ClientSession() as session: + async with session.post(url='http://10.223.155.43:8008/batch_statuses', + data=data_str,headers=headers) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + + async def test_api_post_batch_status_10ids(self,setup): """verifies that POST /batches_status with less than 15 ids """ LOGGER.info("Starting test for post batch statuses with less than 15 ids") batch_ids = setup['batch_ids'] data_str=json.dumps(batch_ids).encode() + headers = {'content-type': 'application/json'} try: - response = 
post_batch_statuses(data_str) - except urllib.error.HTTPError as error: - assert response.code == 400 - - def test_api_get_batch_statuses(self,setup): + async with aiohttp.ClientSession() as session: + async with session.post(url='http://10.223.155.43:8008/batch_statuses', + data=data_str,headers=headers) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + + async def test_api_get_batch_statuses(self,setup): signer_key = setup['signer_key'] expected_head = setup['expected_head'] expected_batches = setup['expected_batches'] address = setup['address'] status = "COMMITTED" - - expected_link = '{}/batch_statuses?id={}'.format(address, expected_batches[0]) - + params = {'id': expected_batches[0]} + try: - response = get_batch_statuses([expected_batches[0]]) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batch_statuses', + params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + self.assert_status(response,status) self.assert_valid_link(response, expected_link) - - def test_api_get_batch_statuses_many_ids(self,setup): + + async def test_api_get_batch_statuses_many_ids(self,setup): signer_key = setup['signer_key'] expected_head = setup['expected_head'] expected_batches = setup['expected_batches'] address = setup['address'] status = "COMMITTED" - batches = ",".join(expected_batches) + params = {'id': batches} expected_link = '{}/batch_statuses?id={}'.format(address, batches) - + try: - response = get_batch_statuses(expected_batches) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - 
LOGGER.info(data['error']['message']) - + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batch_statuses', + params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + self.assert_status(response,status) self.assert_valid_link(response, expected_link) - - def test_api_get_batch_statuses_bad_id(self,setup): + + async def test_api_get_batch_statuses_bad_id(self,setup): signer_key = setup['signer_key'] expected_head = setup['expected_head'] expected_batches = setup['expected_batches'] address = setup['address'] - + params = {'id': BAD_ID} + try: - response = get_batch_statuses(BAD_ID) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - - self.assert_valid_error(data, INVALID_RESOURCE_ID) - - def test_api_get_batch_statuses_invalid_query(self,setup): + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batch_statuses', + params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + + async def test_api_get_batch_statuses_invalid_query(self,setup): signer_key = setup['signer_key'] expected_head = setup['expected_head'] expected_batches = setup['expected_batches'] address = setup['address'] - + try: - response = get_batch_statuses() - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - - self.assert_valid_error(data, STATUS_ID_QUERY_INVALID) - - def test_api_get_batch_statuses_wait(self,setup): + async with aiohttp.ClientSession() as session: + async with 
session.get(url='http://10.223.155.43:8008/batch_statuses') as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + + self.assert_valid_error(response, STATUS_ID_QUERY_INVALID) + + async def test_api_get_batch_statuses_wait(self,setup): signer_key = setup['signer_key'] expected_head = setup['expected_head'] expected_batches = setup['expected_batches'] @@ -531,71 +585,78 @@ def test_api_get_batch_statuses_wait(self,setup): status = "COMMITTED" expected_link = '{}/batch_statuses?id={}&wait={}'.format(address, expected_batches[0], WAIT) - + + params = {'id': expected_batches[0], 'wait':WAIT} + try: - response = get_batch_statuses([expected_batches[0]],WAIT) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batch_statuses', + params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_status(response,status) self.assert_valid_link(response, expected_link) - - - def test_api_get_batch_statuses_invalid(self, invalid_batch): + + + async def test_api_get_batch_statuses_invalid(self, invalid_batch): expected_batches = invalid_batch['expected_batches'] address = invalid_batch['address'] status = "INVALID" - expected_link = '{}/batch_statuses?id={}'.format(address, expected_batches[0]) - + params = {'id': expected_batches[0]} + try: - response = get_batch_statuses([expected_batches[0]]) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batch_statuses', + 
params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + self.assert_status(response,status) self.assert_valid_link(response, expected_link) - - - def test_api_get_batch_statuses_unknown(self, setup): + + + async def test_api_get_batch_statuses_unknown(self, setup): address = setup['address'] expected_batches = setup['expected_batches'] batch = expected_batches[0] unknown_batch = batch[:1] + "b" + batch[1+1:] status = "UNKNOWN" + params = {'id': unknown_batch} expected_link = '{}/batch_statuses?id={}'.format(address, unknown_batch) - try: - response = get_batch_statuses([unknown_batch]) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='http://10.223.155.43:8008/batch_statuses', + params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) self.assert_status(response,status) self.assert_valid_link(response, expected_link) - - - def test_api_get_batch_statuses_default_wait(self,setup): + + async def test_api_get_batch_statuses_default_wait(self,setup): signer_key = setup['signer_key'] expected_head = setup['expected_head'] expected_batches = setup['expected_batches'] address = setup['address'] status = "COMMITTED" - expected_link = '{}/batch_statuses?id={}&wait=300'.format(address, expected_batches[0]) - + params = {'id': expected_batches[0], 'wait':300} + try: - response = get_batch_statuses([expected_batches[0]],300) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - + async with aiohttp.ClientSession() as session: + async with 
session.get(url='http://10.223.155.43:8008/batch_statuses', + params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + self.assert_status(response,status) self.assert_valid_link(response, expected_link) From ef25b0b74a7f779fe30ff087e63472c73eeca50e Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Wed, 24 Oct 2018 12:50:28 +0530 Subject: [PATCH 30/64] Update test_rest_api_post.py --- .../tests/api_test/post/test_rest_api_post.py | 365 +++++++++++------- 1 file changed, 228 insertions(+), 137 deletions(-) diff --git a/rest_api/tests/api_test/post/test_rest_api_post.py b/rest_api/tests/api_test/post/test_rest_api_post.py index f0b2cbf58a..52bbbc2623 100644 --- a/rest_api/tests/api_test/post/test_rest_api_post.py +++ b/rest_api/tests/api_test/post/test_rest_api_post.py @@ -24,6 +24,8 @@ import shlex import requests import hashlib +import aiohttp +import asyncio from google.protobuf.json_format import MessageToDict @@ -41,16 +43,20 @@ from utils import post_batch, get_state_list , get_blocks , get_transactions, \ get_batches , get_state_address, check_for_consensus,\ - _get_node_list, _get_node_chains, post_batch_no_endpoint, get_reciepts + _get_node_list, _get_node_chains, post_batch_no_endpoint,\ + get_reciepts, _get_client_address, state_count from payload import get_signer, create_intkey_transaction, create_batch,\ create_intkey_same_transaction from base import RestApiBaseTest -from fixtures import setup_valinv_txns, setup_invval_txns, setup_invalid_txns, setup_same_txns, setup_invalid_txns_min, setup_invalid_txns_max,\ - setup_valid_txns, setup_invalid_txns_fn, setup_invalid_invaddr +from fixtures import setup_empty_trxs_batch, setup_invalid_txns,setup_invalid_txns_min,\ + setup_invalid_txns_max, setup_valinv_txns, setup_invval_txns, \ + setup_same_txns, setup_valid_txns, setup_invalid_txns_fn,\ + setup_invalid_invaddr + LOGGER = logging.getLogger(__name__) 
LOGGER.setLevel(logging.INFO) @@ -59,18 +65,43 @@ EMPTY_BATCH = b'' NO_BATCHES_SUBMITTED = 34 BAD_PROTOBUF_SUBMITTED = 35 +WRONG_HEADER_TYPE=42 BATCH_QUEUE_FULL = 31 INVALID_BATCH = 30 WRONG_CONTENT_TYPE = 43 +WAIT=300 +RECEIPT_NOT_FOUND = 80 BLOCK_TO_CHECK_CONSENSUS = 1 -pytestmark = pytest.mark.post -data = {} +pytestmark = [pytest.mark.post,pytest.mark.last] -class TestPost(RestApiBaseTest): - - def test_rest_api_post_batch(self): +async def async_fetch_url(url, session,params=None): + try: + async with session.get(url) as response: + return await response.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + +async def async_post_batch(url, session, data, params=None,headers=None): + if headers: + headers=headers + else: + headers = {'Content-Type': 'application/octet-stream'} + try: + async with session.post(url,data=data,headers=headers) as response: + data = await response.json() + if 'link' in data: + link = data['link'] + return await async_fetch_url('{}&wait={}'.format(link, WAIT),session) + else: + return data + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + +class TestPostList(RestApiBaseTest): + async def test_rest_api_post_batch(self): """Tests that transactions are submitted and committed for each block that are created by submitting intkey batches with set operations @@ -80,12 +111,13 @@ def test_rest_api_post_batch(self): signer = get_signer() expected_trxn_ids = [] expected_batch_ids = [] - initial_state_length = len(get_state_list()['data']) + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] LOGGER.info("Creating intkey transactions with set operations") txns = [ create_intkey_transaction("set", [] , 50 , signer), - create_intkey_transaction("set", [] , 50 , signer), ] for txn in txns: @@ -114,67 +146,77 @@ def test_rest_api_post_batch(self): LOGGER.info("Submitting batches to the handlers") - for batch in post_batch_list: - try: - 
response = post_batch(batch) - except urllib.error.HTTPError as error: - data = error.fp.read().decode('utf-8') - LOGGER.info(data) - - block_batch_ids = [block['header']['batch_ids'][0] for block in get_blocks()['data']] - state_addresses = [state['address'] for state in get_state_list()['data']] - state_head_list = [get_state_address(address)['head'] for address in state_addresses] - committed_transaction_list = get_transactions()['data'] - + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + block_batch_ids = [block['header']['batch_ids'][0] for block in get_blocks()['data']] + state_addresses = [state['address'] for state in get_state_list()['data']] + state_head_list = [get_state_address(address)['head'] for address in state_addresses] + committed_transaction_list = get_transactions()['data'] + + for response in responses: if response['data'][0]['status'] == 'COMMITTED': LOGGER.info('Batch is committed') - + for batch in expected_batch_ids: if batch in block_batch_ids: LOGGER.info("Block is created for the respective batch") - + elif response['data'][0]['status'] == 'INVALID': - LOGGER.info('Batch submission failed') - + LOGGER.info('Batch is not committed') + if any(['message' in response['data'][0]['invalid_transactions'][0]]): message = response['data'][0]['invalid_transactions'][0]['message'] LOGGER.info(message) - + for batch in batch_ids: - if batch in block_batch_ids: - LOGGER.info("Block is created for the respective batch") - - final_state_length = len(get_state_list()['data']) + if batch not in block_batch_ids: + LOGGER.info("Block is not created for the respective batch") + node_list = _get_node_list() chains = _get_node_chains(node_list) - assert final_state_length == 
initial_state_length + len(expected_batch_ids) assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True - def test_rest_api_no_batches(self): + async def test_rest_api_no_batches(self): LOGGER.info("Starting test for batch with bad protobuf") - + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + try: - response = post_batch(batch=EMPTY_BATCH) - except urllib.error.HTTPError as error: - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) - - self.assert_valid_error(response, NO_BATCHES_SUBMITTED) - - def test_rest_api_bad_protobuf(self): + async with aiohttp.ClientSession() as session: + task = asyncio.ensure_future(async_post_batch(url,session,data=EMPTY_BATCH)) + tasks.append(task) + response = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + self.assert_valid_error(response[0], NO_BATCHES_SUBMITTED) + + async def test_rest_api_bad_protobuf(self): LOGGER.info("Starting test for batch with bad protobuf") + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + try: - response = post_batch(batch=BAD_PROTOBUF) - except urllib.error.HTTPError as error: - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) - - self.assert_valid_error(response, BAD_PROTOBUF_SUBMITTED) + async with aiohttp.ClientSession() as session: + task = asyncio.ensure_future(async_post_batch(url,session,data=BAD_PROTOBUF)) + tasks.append(task) + response = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + self.assert_valid_error(response[0], BAD_PROTOBUF_SUBMITTED) - def test_rest_api_post_wrong_header(self,setup): + async def test_rest_api_post_wrong_header(self,setup): """Tests 
rest api by posting with wrong header """ LOGGER.info('Starting test for batch post') @@ -182,13 +224,14 @@ def test_rest_api_post_wrong_header(self,setup): signer = get_signer() expected_trxn_ids = [] expected_batch_ids = [] - initial_state_length = len(get_state_list()) + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + headers = {'Content-Type': 'application/json'} LOGGER.info("Creating intkey transactions with set operations") txns = [ create_intkey_transaction("set", [] , 50 , signer), - create_intkey_transaction("set", [] , 50 , signer), - create_intkey_transaction("set", [] , 50 , signer), ] for txn in txns: @@ -216,32 +259,35 @@ def test_rest_api_post_wrong_header(self,setup): post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] LOGGER.info("Submitting batches to the handlers") - - for batch in post_batch_list: - try: - response = post_batch(batch,headers="True") - except urllib.error.HTTPError as e: - errdata = e.file.read().decode("utf-8") - error = json.loads(errdata) - LOGGER.info(error['error']['message']) - assert (json.loads(errdata)['error']['code']) == 42 - assert e.code == 400 - ''' - def test_rest_api_post_same_txns(self, setup): + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch,headers=headers)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + for response in responses: + self.assert_valid_error(response, WRONG_HEADER_TYPE) + + async def test_rest_api_post_same_txns(self, setup): """Tests the rest-api by submitting multiple transactions with same key """ LOGGER.info('Starting test for batch post') - + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] signer = get_signer() expected_trxn_ids = [] expected_batch_ids = [] - 
initial_state_length = len(get_state_list()) + initial_state_length = state_count() LOGGER.info("Creating intkey transactions with set operations") txns = [ create_intkey_same_transaction("set", [] , 50 , signer), create_intkey_same_transaction("set", [] , 50 , signer), - create_intkey_same_transaction("set", [] , 50 , signer), ] for txn in txns: @@ -270,18 +316,17 @@ def test_rest_api_post_same_txns(self, setup): LOGGER.info("Submitting batches to the handlers") - for batch in post_batch_list: - try: - response = post_batch(batch,headers="None") - assert response['data'][0]['status'] == "INVALID" - except urllib.error.HTTPError as e: - errdata = e.file.read().decode("utf-8") - error = json.loads(errdata) - LOGGER.info(error['error']['message']) - assert (json.loads(errdata)['error']['code']) == 42 - assert e.code == 400 - - def test_rest_api_multiple_txns_batches(self, setup): + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + + async def test_rest_api_multiple_txns_batches(self, setup): """Tests rest-api state by submitting multiple transactions in multiple batches """ @@ -290,13 +335,14 @@ def test_rest_api_multiple_txns_batches(self, setup): signer = get_signer() expected_trxn_ids = [] expected_batch_ids = [] - initial_state_length = len(get_state_list()) + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] LOGGER.info("Creating intkey transactions with set operations") txns = [ create_intkey_transaction("set", [] , 50 , signer), create_intkey_transaction("set", [] , 50 , signer), - create_intkey_transaction("set", [] , 50 , signer), ] for txn in txns: @@ -310,7 +356,7 @@ def test_rest_api_multiple_txns_batches(self, setup): LOGGER.info("Creating batches 
for transactions 1trn/batch") - batches = [create_batch([txns], signer)] + batches = [create_batch([txn], signer) for txn in txns] for batch in batches: data = MessageToDict( @@ -325,55 +371,104 @@ def test_rest_api_multiple_txns_batches(self, setup): LOGGER.info("Submitting batches to the handlers") - for batch in post_batch_list: - try: - response = post_batch(batch,headers="None") - response = get_state_list() - except urllib.error.HTTPError as e: - errdata = e.file.read().decode("utf-8") - error = json.loads(errdata) - LOGGER.info(error['error']['message']) - assert (json.loads(errdata)['error']['code']) == 17 - assert e.code == 400 - final_state_length = len(get_state_list()) - assert initial_state_length == final_state_length - ''' - def test_api_post_batch_different_signer(self, setup): + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + block_batch_ids = [block['header']['batch_ids'][0] for block in get_blocks()['data']] + state_addresses = [state['address'] for state in get_state_list()['data']] + state_head_list = [get_state_address(address)['head'] for address in state_addresses] + committed_transaction_list = get_transactions()['data'] + + for response in responses: + if response['data'][0]['status'] == 'COMMITTED': + LOGGER.info('Batch is committed') + + for batch in expected_batch_ids: + if batch in block_batch_ids: + LOGGER.info("Block is created for the respective batch") + + elif response['data'][0]['status'] == 'INVALID': + LOGGER.info('Batch is not committed') + + if any(['message' in response['data'][0]['invalid_transactions'][0]]): + message = response['data'][0]['invalid_transactions'][0]['message'] + LOGGER.info(message) + + for batch in expected_batch_ids: + if 
batch not in block_batch_ids: + LOGGER.info("Block is not created for the respective batch") + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + + async def test_api_post_empty_trxns_list(self, setup_empty_trxs_batch): + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + batch = setup_empty_trxs_batch + post_batch_list = [BatchList(batches=[batch]).SerializeToString()] + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + response = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + + async def test_api_post_batch_different_signer(self, setup): + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] signer_trans = get_signer() intkey=create_intkey_transaction("set",[],50,signer_trans) translist=[intkey] signer_batch = get_signer() batch= create_batch(translist,signer_batch) - batch_list=[BatchList(batches=[batch]).SerializeToString()] - for batc in batch_list: - try: - response = post_batch(batc) - print(response) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is not reachable") - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - assert data['error']['code'] == 30 - assert data['error']['title'] =='Submitted Batches Invalid' - - def test_rest_api_post_no_endpoint(self, setup): + post_batch_list=[BatchList(batches=[batch]).SerializeToString()] + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + response = await asyncio.gather(*tasks) + except 
aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + self.assert_valid_error(response[0], INVALID_BATCH) + + async def test_rest_api_post_no_endpoint(self, setup): + address = _get_client_address() + url='/'.format(address) + tasks=[] signer_trans = get_signer() intkey=create_intkey_transaction("set",[],50,signer_trans) translist=[intkey] batch= create_batch(translist,signer_trans) - batch_list=[BatchList(batches=[batch]).SerializeToString()] - for batc in batch_list: - try: - response = post_batch_no_endpoint(batc) - except urllib.error.HTTPError as e: - errdata = e.file.read().decode("utf-8") - errcode = e.code - assert errcode == 404 - -class TestPostMulTxns(RestApiBaseTest): - + post_batch_list=[BatchList(batches=[batch]).SerializeToString()] + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + response = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.InvalidURL as error: + LOGGER.info("Rest Api is Unreachable") + LOGGER.info("Url is not correct") + + +class TestPostInvalidTxns(RestApiBaseTest): def test_txn_invalid_addr(self, setup_invalid_txns): initial_batch_length = setup_invalid_txns['initial_batch_length'] expected_batch_length = setup_invalid_txns['expected_batch_length'] @@ -402,7 +497,6 @@ def test_txn_invalid_max(self, setup_invalid_txns_max): assert setup_invalid_txns_max['response'] == 'INVALID' def test_txn_valid_invalid_txns(self, setup_valinv_txns): - #data=Txns.setup_batch_valinv_txns() initial_batch_length = setup_valinv_txns['initial_batch_length'] expected_batch_length = setup_valinv_txns['expected_batch_length'] initial_trn_length = setup_valinv_txns['initial_trn_length'] @@ -442,8 +536,15 @@ def test_api_sent_commit_txns(self, setup_valid_txns): response = json.loads(error.fp.read().decode('utf-8')) LOGGER.info(response['error']['title']) 
LOGGER.info(response['error']['message']) - assert response['error']['code'] == RECEIPT_NOT_FOUND - assert response['error']['title'] == 'Invalid Resource Id' + assert response['error']['code'] == RECEIPT_NOT_FOUND + + def test_txn_invalid_bad_addr(self, setup_invalid_invaddr): + initial_batch_length = setup_invalid_invaddr['initial_batch_length'] + expected_batch_length = setup_invalid_invaddr['expected_batch_length'] + initial_trn_length = setup_invalid_invaddr['initial_trn_length'] + expected_trn_length = setup_invalid_invaddr['expected_trn_length'] + assert initial_batch_length < expected_batch_length + assert initial_trn_length < expected_trn_length def test_txn_invalid_family_name(self, setup_invalid_txns_fn): initial_batch_length = setup_invalid_txns_fn['initial_batch_length'] @@ -452,16 +553,6 @@ def test_txn_invalid_family_name(self, setup_invalid_txns_fn): expected_trn_length = setup_invalid_txns_fn['expected_trn_length'] assert initial_batch_length < expected_batch_length assert initial_trn_length < expected_trn_length - assert setup_invalid_txns_fn['code'] == 17 - def test_txn_invalid_bad_addr(self, setup_invalid_invaddr): - initial_batch_length = setup_invalid_invaddr['initial_batch_length'] - expected_batch_length = setup_invalid_invaddr['expected_batch_length'] - initial_trn_length = setup_invalid_invaddr['initial_trn_length'] - expected_trn_length = setup_invalid_invaddr['expected_trn_length'] - assert initial_batch_length < expected_batch_length - assert initial_trn_length < expected_trn_length - assert setup_invalid_invaddr['code'] == 17 - + - From 011d291c6766ca48d544f91502ce8e2963d6580b Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Wed, 24 Oct 2018 12:53:07 +0530 Subject: [PATCH 31/64] Update test_rest_api_get_batch.py --- .../api_test/get/test_rest_api_get_batch.py | 110 +++++++++++------- 1 file changed, 66 insertions(+), 44 deletions(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_batch.py 
b/rest_api/tests/api_test/get/test_rest_api_get_batch.py index 4650e4e783..4bee2de7f6 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_batch.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_batch.py @@ -19,6 +19,7 @@ import urllib.request import urllib.error import aiohttp +import asyncio from fixtures import break_genesis, invalid_batch @@ -53,6 +54,10 @@ STATUS_WRONG_CONTENT_TYPE = 46 WAIT = 10 +async def fetch(url, session,params=None): + async with session.get(url) as response: + return await response.json() + class TestBatchList(RestApiBaseTest): """This class tests the batch list with different parameters @@ -69,7 +74,9 @@ async def test_api_get_batch_list(self, setup): payload = setup['payload'] start = setup['start'] limit = setup['limit'] - address = setup['address'] + address = setup['address'] + url='{}/batches'.format(address) + tasks=[] expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ expected_head, start, limit) @@ -78,22 +85,24 @@ async def test_api_get_batch_list(self, setup): expected_head, start) try: - async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batches', raise_for_status=True) as data: - response = await data.json() + async with aiohttp.ClientSession() as session: + task = asyncio.ensure_future(fetch(url, session)) + tasks.append(task) + response = await asyncio.gather(*tasks) except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Rest Api is Unreachable") + - batches = _get_batch_list(response) + batches = _get_batch_list(response[0]) - self.assert_valid_data(response) - self.assert_valid_head(response, expected_head) + self.assert_valid_data(response[0]) + self.assert_valid_head(response[0], expected_head) self.assert_valid_data_length(batches, expected_length) self.assert_check_batch_seq(batches, expected_batches, expected_txns, payload, signer_key) - self.assert_valid_link(response, expected_link) - 
self.assert_valid_paging(response, expected_link) + self.assert_valid_link(response[0], expected_link) + self.assert_valid_paging(response[0], expected_link) async def test_api_get_batch_list_head(self, setup): """Tests that GET /batches is reachable with head parameter @@ -116,7 +125,7 @@ async def test_api_get_batch_list_head(self, setup): try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batches', params=params, + async with session.get(url='{}/batches'.format(address), params=params, raise_for_status=True) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: @@ -137,10 +146,11 @@ async def test_api_get_batch_list_bad_head(self, setup): """ LOGGER.info("Starting test for batch with bad head parameter") params={'head': BAD_HEAD} + address = setup['address'] try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batches', params=params) as data: + async with session.get(url='{}/batches'.format(address), params=params) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info(error) @@ -171,7 +181,7 @@ async def test_api_get_batch_list_id(self, setup): try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batches', params=params, + async with session.get(url='{}/batches'.format(address), params=params, raise_for_status=True) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: @@ -191,12 +201,12 @@ async def test_api_get_batch_list_bad_id(self, setup): """Tests that GET /batches is unreachable with bad id parameter """ LOGGER.info("Starting test for batch with bad id parameter") - + address = setup['address'] params={'head': BAD_ID} try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batches', 
params=params) as data: + async with session.get(url='{}/batches'.format(address), params=params) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info(error) @@ -227,7 +237,7 @@ async def test_api_get_batch_list_head_and_id(self, setup): try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batches', params=params, + async with session.get(url='{}/batches'.format(address), params=params, raise_for_status=True) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: @@ -248,6 +258,7 @@ async def test_api_get_paginated_batch_list(self, setup): """ LOGGER.info("Starting test for batch with paging parameters") batch_ids = setup['batch_ids'] + address = setup['address'] expected_head = setup['expected_head'] expected_id = batch_ids[0] @@ -255,7 +266,7 @@ async def test_api_get_paginated_batch_list(self, setup): try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batches', params=params) as data: + async with session.get(url='{}/batches'.format(address), params=params) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info(error) @@ -283,7 +294,7 @@ async def test_api_get_batch_list_limit(self, setup): try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batches', params=params, + async with session.get(url='{}/batches'.format(address), params=params, raise_for_status=True) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: @@ -305,13 +316,14 @@ async def test_api_get_batch_list_invalid_start(self, setup): """ LOGGER.info("Starting test for batch with invalid start parameter") batch_ids = setup['batch_ids'] + address = setup['address'] expected_head = setup['expected_head'] expected_id = batch_ids[0] 
params={'start':-1} try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batches', params=params) as data: + async with session.get(url='{}/batches'.format(address), params=params) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info(error) @@ -324,13 +336,14 @@ async def test_api_get_batch_list_invalid_limit(self, setup): """ LOGGER.info("Starting test for batch with bad limit parameter") batch_ids = setup['batch_ids'] + address = setup['address'] expected_head = setup['expected_head'] expected_id = batch_ids[0] params={'limit':0} try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batches', params=params) as data: + async with session.get(url='{}/batches'.format(address), params=params) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info(error) @@ -346,8 +359,10 @@ async def test_api_get_batch_list_reversed(self, setup): expected_batches = setup['expected_batches'] expected_txns = setup['expected_txns'] expected_length = setup['expected_batch_length'] - payload = setup['payload'] + payload = setup['payload'] start = setup['batch_ids'][::-1][0] + print(setup['batch_ids']) + print(start) limit = setup['limit'] address = setup['address'] @@ -355,15 +370,16 @@ async def test_api_get_batch_list_reversed(self, setup): expected_head, start, limit) params = 'reverse' - + try: - async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batches', params=params, + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches'.format(address), params=params, raise_for_status=True) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Rest Api is Unreachable") - + + batches = response['data'][::-1][:-1] @@ -374,13 
+390,15 @@ async def test_api_get_batch_list_reversed(self, setup): self.assert_valid_head(response, expected_head) self.assert_valid_link(response, expected_link) self.assert_valid_paging(response, expected_link) - + + async def test_api_get_batch_key_params(self, setup): """Tests/ validate the block key parameters with data, head, link and paging """ + address = setup['address'] try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batches', raise_for_status=True) as data: + async with session.get(url='{}/batches'.format(address), raise_for_status=True) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Rest Api is Unreachable") @@ -393,9 +411,10 @@ async def test_api_get_batch_key_params(self, setup): async def test_api_get_batch_param_link_val(self, setup): """Tests/ validate the batch parameters with batches, head, start and limit """ + address = setup['address'] try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batches', raise_for_status=True) as data: + async with session.get(url='{}/batches'.format(address), raise_for_status=True) as data: response = await data.json() for link in response: @@ -412,10 +431,11 @@ async def test_api_get_batch_param_link_val(self, setup): async def test_rest_api_check_batches_count(self, setup): """Tests batches count from batch list """ + address = setup['address'] count =0 try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batches', raise_for_status=True) as data: + async with session.get(url='{}/batches'.format(address), raise_for_status=True) as data: response = await data.json() for batch in enumerate(response['data']): @@ -439,7 +459,7 @@ async def test_api_get_batch_id(self, setup): try: async with aiohttp.ClientSession() as session: - async with 
session.get(url='http://10.223.155.43:8008/batches/{}'.format(expected_id), + async with session.get(url='{}/batches/{}'.format(address,expected_id), raise_for_status=True) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: @@ -455,10 +475,11 @@ async def test_api_get_batch_id(self, setup): async def test_api_get_bad_batch_id(self, setup): """verifies that GET /batches/{bad_batch_id} is unreachable with bad head parameter - """ + """ + address = setup['address'] try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batches/{}'.format(BAD_ID)) as data: + async with session.get(url='{}/batches/{}'.format(address,BAD_ID)) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info(error) @@ -473,12 +494,13 @@ async def test_api_post_batch_status_15ids(self, setup): """ LOGGER.info("Starting test for batch with bad head parameter") batch_ids = setup['batch_ids'] + address = setup['address'] data_str=json.dumps(batch_ids).encode() headers = {'content-type': 'application/json'} try: async with aiohttp.ClientSession() as session: - async with session.post(url='http://10.223.155.43:8008/batch_statuses', + async with session.post(url='{}/batch_statuses'.format(address), data=data_str,headers=headers) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: @@ -490,12 +512,13 @@ async def test_api_post_batch_status_10ids(self,setup): """ LOGGER.info("Starting test for post batch statuses with less than 15 ids") batch_ids = setup['batch_ids'] + address = setup['address'] data_str=json.dumps(batch_ids).encode() headers = {'content-type': 'application/json'} try: async with aiohttp.ClientSession() as session: - async with session.post(url='http://10.223.155.43:8008/batch_statuses', + async with session.post(url='{}/batch_statuses'.format(address), data=data_str,headers=headers) as 
data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: @@ -504,16 +527,16 @@ async def test_api_post_batch_status_10ids(self,setup): async def test_api_get_batch_statuses(self,setup): signer_key = setup['signer_key'] + address = setup['address'] expected_head = setup['expected_head'] expected_batches = setup['expected_batches'] - address = setup['address'] status = "COMMITTED" expected_link = '{}/batch_statuses?id={}'.format(address, expected_batches[0]) params = {'id': expected_batches[0]} try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batch_statuses', + async with session.get(url='{}/batch_statuses'.format(address), params=params) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: @@ -535,7 +558,7 @@ async def test_api_get_batch_statuses_many_ids(self,setup): try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batch_statuses', + async with session.get(url='{}/batch_statuses'.format(address), params=params) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: @@ -553,7 +576,7 @@ async def test_api_get_batch_statuses_bad_id(self,setup): try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batch_statuses', + async with session.get(url='{}/batch_statuses'.format(address), params=params) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: @@ -569,12 +592,11 @@ async def test_api_get_batch_statuses_invalid_query(self,setup): try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batch_statuses') as data: + async with session.get(url='{}/batch_statuses'.format(address)) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as 
error: LOGGER.info(error) - - + self.assert_valid_error(response, STATUS_ID_QUERY_INVALID) async def test_api_get_batch_statuses_wait(self,setup): @@ -590,7 +612,7 @@ async def test_api_get_batch_statuses_wait(self,setup): try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batch_statuses', + async with session.get(url='{}/batch_statuses'.format(address), params=params) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: @@ -610,7 +632,7 @@ async def test_api_get_batch_statuses_invalid(self, invalid_batch): try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batch_statuses', + async with session.get(url='{}/batch_statuses'.format(address), params=params) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: @@ -632,7 +654,7 @@ async def test_api_get_batch_statuses_unknown(self, setup): try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batch_statuses', + async with session.get(url='{}/batch_statuses'.format(address), params=params) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: @@ -652,7 +674,7 @@ async def test_api_get_batch_statuses_default_wait(self,setup): try: async with aiohttp.ClientSession() as session: - async with session.get(url='http://10.223.155.43:8008/batch_statuses', + async with session.get(url='{}/batch_statuses'.format(address), params=params) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: From 4c47c1fb2afd4c04c2eb6cb291d2ca893c45a124 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Wed, 24 Oct 2018 12:53:50 +0530 Subject: [PATCH 32/64] Update test_rest_api_get_block.py --- .../api_test/get/test_rest_api_get_block.py | 548 ++++++++++-------- 1 file changed, 315 insertions(+), 
233 deletions(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_block.py b/rest_api/tests/api_test/get/test_rest_api_get_block.py index 8b10a4bd1c..f48f2b5a6f 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_block.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_block.py @@ -18,13 +18,15 @@ import json import urllib.request import urllib.error +import aiohttp from utils import get_blocks, get_block_id, get_batches, get_transactions from base import RestApiBaseTest -pytestmark = [pytest.mark.get , pytest.mark.block] +pytestmark = [pytest.mark.get , pytest.mark.block, pytest.mark.fourth] + START = 1 @@ -42,6 +44,7 @@ HEAD_LENGTH = 128 MAX_BATCH_IN_BLOCK = 100 FAMILY_NAME = 'xo' +TIMEOUT=5 LOGGER = logging.getLogger(__name__) @@ -51,401 +54,480 @@ class TestBlockList(RestApiBaseTest): """This class tests the blocks list with different parameters """ - def test_api_get_block_list(self, setup): + async def test_api_get_block_list(self, setup): """Tests the block list by submitting intkey batches """ signer_key = setup['signer_key'] expected_head = setup['expected_head'] expected_batches = setup['expected_batches'] expected_txns = setup['expected_txns'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + payload = setup['payload'] + + expected_link = '{}/blocks?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + paging_link = '{}/blocks?head={}&start={}'.format(address,\ + expected_head, start) - try: - response = get_blocks() - except urllib.error.HTTPError as error: + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Rest Api is Unreachable") - blocks = response['data'][:-1] + blocks = response['data'][:-1] + + self.assert_check_block_seq(blocks,expected_batches, + 
expected_txns,payload,signer_key) + self.assert_valid_head(response, expected_head) - def test_api_get_block_list_head(self, setup): + async def test_api_get_block_list_head(self, setup): """Tests that GET /blocks is reachable with head parameter """ LOGGER.info("Starting test for blocks with head parameter") + address = setup['address'] expected_head = setup['expected_head'] + params={'head': expected_head} try: - response = get_blocks(head_id=expected_head) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api not reachable") - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) - - assert response['head'] == expected_head , "request is not correct" + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + blocks = response['data'][:-1] + self.assert_valid_head(response, expected_head) - def test_api_get_block_list_bad_head(self, setup): + async def test_api_get_block_list_bad_head(self, setup): """Tests that GET /blocks is unreachable with bad head parameter """ LOGGER.info("Starting test for blocks with bad head parameter") - + address = setup['address'] + params={'head': BAD_HEAD} + try: - batch_list = get_blocks(head_id=BAD_HEAD) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is not reachable") - response = json.loads(error.fp.read().decode('utf-8')) - + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + self.assert_valid_error(response, INVALID_RESOURCE_ID) - def test_api_get_block_list_id(self, setup): + async 
def test_api_get_block_list_id(self, setup): """Tests that GET /blocks is reachable with id as parameter """ LOGGER.info("Starting test for blocks with id parameter") - + address = setup['address'] + signer_key = setup['signer_key'] block_ids = setup['block_ids'] expected_head = setup['expected_head'] expected_id = block_ids[0] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + address = setup['address'] + payload = setup['payload'] + + expected_link = '{}/blocks?head={}&start&limit=0&id={}'.format(address,\ + expected_head, expected_id) + params={'id': expected_id} + try: - response = get_blocks(id=expected_id) - except: - LOGGER.info("Rest Api is not reachable") - + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + blocks = response['data'][:-1] - assert response['head'] == expected_head, "request is not correct" - assert response['paging']['start'] == None , "request is not correct" - assert response['paging']['limit'] == None , "request is not correct" + self.assert_check_block_seq(blocks,expected_batches, + expected_txns,payload,signer_key) + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) - def test_api_get_block_list_bad_id(self, setup): + async def test_api_get_block_list_bad_id(self, setup): """Tests that GET /blocks is unreachable with bad id parameter """ LOGGER.info("Starting test for blocks with bad id parameter") - bad_id = 'f' - + address = setup['address'] + params={'head': BAD_ID} + try: - batch_list = get_blocks(head_id=bad_id) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is not reachable") - response = json.loads(error.fp.read().decode('utf-8')) - + async with aiohttp.ClientSession() as session: + 
async with session.get(url='{}/blocks'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + self.assert_valid_error(response, INVALID_RESOURCE_ID) - def test_api_get_block_list_head_and_id(self, setup): - """Tests GET /blocks is reachable with head and id as parameters - """ - LOGGER.info("Starting test for blocks with head and id parameter") - block_ids = setup['block_ids'] - expected_head = setup['expected_head'] - expected_id = block_ids[0] - - - response = get_blocks(head_id=expected_head , id=expected_id) - - assert response['head'] == expected_head , "head is not matching" - assert response['paging']['start'] == None , "start parameter is not correct" - assert response['paging']['limit'] == None , "request is not correct" - assert bool(response['data']) == True - + - def test_api_get_paginated_block_list(self, setup): + async def test_api_get_paginated_block_list(self, setup): """Tests GET /blocks is reachable using paging parameters """ LOGGER.info("Starting test for blocks with paging parameters") + address = setup['address'] block_ids = setup['block_ids'] expected_head = setup['expected_head'] expected_id = block_ids[0] - start = 1 - limit = 1 + params={'limit':1, 'start':1} + try: - response = get_blocks(start=start , limit=limit, id=expected_id) - except urllib.error.HTTPError as error: - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) - + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + self.assert_valid_error(response, INVALID_PAGING_QUERY) - def test_api_get_block_list_start_id(self, setup): - """Tests GET /blocks is reachable using paging parameters + async 
def test_api_get_block_list_limit(self, setup): + """Tests GET /batches is reachable using paging parameters """ - LOGGER.info("Starting test for blocks with paging parameters") - block_ids = setup['block_ids'] + LOGGER.info("Starting test for batch with paging parameters") + signer_key = setup['signer_key'] expected_head = setup['expected_head'] - expected_id = block_ids[0] - start = 1 - limit = 1 - + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + payload = setup['payload'] + params={'limit':1} + try: - response = get_blocks(start=start , limit=limit, id=expected_id) - except urllib.error.HTTPError as error: - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) - - self.assert_valid_error(response, INVALID_PAGING_QUERY) + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + blocks = response['data'][:-1] + + self.assert_check_block_seq(blocks,expected_batches, + expected_txns,payload,signer_key) + self.assert_valid_head(response, expected_head) + - def test_api_get_block_list_invalid_start(self, setup): + async def test_api_get_block_list_invalid_start(self, setup): """Tests that GET /blocks is unreachable with invalid start parameter """ - LOGGER.info("Starting test for batch with invalid start parameter") + LOGGER.info("Starting test for block with invalid start parameter") + address = setup['address'] block_ids = setup['block_ids'] expected_head = setup['expected_head'] expected_id = block_ids[0] - start = -1 - - try: - response = get_blocks(start=start) - except urllib.error.HTTPError as error: - response = 
json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) - + params={'start':-1} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + self.assert_valid_error(response, INVALID_PAGING_QUERY) - def test_api_get_block_list_limit(self, setup): - """Tests that GET /blocks is unreachable with invalid start parameter - """ - LOGGER.info("Starting test for batch with invalid start parameter") - block_ids = setup['block_ids'] - expected_head = setup['expected_head'] - expected_id = block_ids[0] - start = -1 - - try: - response = get_blocks(start=start) - except urllib.error.HTTPError as error: - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) - - self.assert_valid_error(response, INVALID_PAGING_QUERY) - def test_api_get_block_list_invalid_limit(self, setup): + async def test_api_get_block_list_invalid_limit(self, setup): """Tests that GET /blocks is unreachable with bad limit parameter """ - LOGGER.info("Starting test for batch with bad limit parameter") + LOGGER.info("Starting test for block with bad limit parameter") + address = setup['address'] block_ids = setup['block_ids'] expected_head = setup['expected_head'] expected_id = block_ids[0] - limit = 0 - - try: - response = get_blocks(limit=limit) - except urllib.error.HTTPError as error: - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) - + params={'limit':0} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches'.format(address), params=params) as data: + response = await data.json() + except 
aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + self.assert_valid_error(response, INVALID_COUNT_QUERY) - def test_api_get_block_list_reversed(self, setup): + async def test_api_get_block_list_reversed(self, setup): """verifies that GET /blocks is unreachable with bad head parameter """ - LOGGER.info("Starting test for batch with bad head parameter") + LOGGER.info("Starting test for blocks with reversed list") + address = setup['address'] block_ids = setup['block_ids'] expected_head = setup['expected_head'] expected_id = block_ids[0] - reverse = True + params = 'reverse' + try: - response = get_blocks(reverse=reverse) - except urllib.error.HTTPError as error: - assert response.code == 400 + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") assert response['head'] == expected_head , "request is not correct" assert response['paging']['start'] == None , "request is not correct" assert response['paging']['limit'] == None , "request is not correct" assert bool(response['data']) == True - def test_api_get_block_link_val(self, setup): + async def test_api_get_block_link_val(self, setup): """Tests/ validate the block parameters with blocks, head, start and limit """ + address = setup['address'] try: - block_list = get_blocks() - for link in block_list: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for link in response: if(link == 'link'): - assert 'head' in block_list['link'] - assert 'start' in block_list['link'] - assert 'limit' in block_list['link'] - assert 'blocks' in block_list['link'] - except urllib.error.HTTPError as error: - assert response.code == 400 + assert 'head' 
in response['link'] + assert 'start' in response['link'] + assert 'limit' in response['link'] + assert 'blocks' in response['link'] + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) LOGGER.info("Link is not proper for state and parameters are missing") - def test_api_get_block_key_params(self, setup): + async def test_api_get_block_key_params(self, setup): """Tests/ validate the block key parameters with data, head, link and paging """ - response = get_blocks() + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + assert 'link' in response assert 'data' in response assert 'paging' in response assert 'head' in response - def test_api_get_each_batch_id_length(self, setup): + async def test_api_get_each_block_batch_id_length(self, setup): """Tests the each batch id length should be 128 hex character long - """ + """ + address = setup['address'] try: - block_list = get_blocks() - for batch in block_list['data']: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for batch in response['data']: expected_head = batch['header']['batch_ids'][0] head_len = len(expected_head) - except urllib.error.HTTPError as error: + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Batch id length is not 128 hex character long") assert head_len == HEAD_LENGTH - def test_api_get_first_block_id_length(self, setup): + async def test_api_get_first_block_id_length(self, setup): """Tests the first block id length should be 128 hex character long """ + address = setup['address'] try: + async with aiohttp.ClientSession() as session: + async with 
session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + for block_list in get_blocks(): batch_list = get_batches() for block in batch_list: expected_head = batch_list['head'] head_len = len(expected_head) - except urllib.error.HTTPError as error: + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Block id length is not 128 hex character long") assert head_len == HEAD_LENGTH - def test_rest_api_check_post_max_batches(self, setup): + async def test_rest_api_check_post_max_batches(self, setup): """Tests that allow max post batches in block Handled max 100 batches post in block and handle for extra batch """ - block_list = get_blocks()['data'] - for batchcount, _ in enumerate(block_list, start=1): - if batchcount == MAX_BATCH_IN_BLOCK: - print("Max 100 Batches are present in Block") + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + + block_list = response['data'] + for batchcount, _ in enumerate(block_list, start=1): + if batchcount == MAX_BATCH_IN_BLOCK: + print("Max 100 Batches are present in Block") + + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) - def test_rest_api_check_head_signature(self, setup): + async def test_rest_api_check_head_signature(self, setup): """Tests that head signature of each batch of the block should be not none """ - block_list = get_blocks()['data'] - head_signature = [block['batches'][0]['header_signature'] for block in block_list] - for i, _ in enumerate(block_list): - head_sig = json.dumps(head_signature[i]).encode('utf8') - assert head_signature[i] is not None, "Head signature is available for all batches in block" + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + 
raise_for_status=True) as data: + response = await data.json() + + block_list = response['data'] + head_signature = [block['batches'][0]['header_signature'] for block in block_list] + for i, _ in enumerate(block_list): + head_sig = json.dumps(head_signature[i]).encode('utf8') + assert head_signature[i] is not None, "Head signature is available for all batches in block" + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) - def test_rest_api_check_family_version(self, setup): + async def test_rest_api_check_family_version(self, setup): """Test batch transaction family version should be present for each transaction header """ - block_list = get_blocks()['data'] - family_version = [block['batches'][0]['transactions'][0]['header']['family_version'] for block in block_list] - for i, _ in enumerate(block_list): - assert family_version[i] is not None, "family version present for all batches in block" + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + + block_list = response['data'] + family_version = [block['batches'][0]['transactions'][0]['header']['family_version'] for block in block_list] + for i, _ in enumerate(block_list): + assert family_version[i] is not None, "family version present for all batches in block" + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) - def test_rest_api_check_input_output_content(self,setup): + async def test_rest_api_check_input_output_content(self,setup): """Test batch input and output content should be same for each batch and unique from other """ - block_list = get_blocks()['data'] - txn_input = [block['batches'][0]['transactions'][0]['header']['inputs'][0] for block in block_list] - txn_output = [block['batches'][0]['transactions'][0]['header']['outputs'][0] for block in block_list] - if(txn_input == 
txn_output): - return True - def test_rest_api_check_signer_public_key(self, setup): + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + + block_list = response['data'] + txn_input = [block['batches'][0]['transactions'][0]['header']['inputs'][0] for block in block_list] + txn_output = [block['batches'][0]['transactions'][0]['header']['outputs'][0] for block in block_list] + if(txn_input == txn_output): + return True + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + async def test_rest_api_check_signer_public_key(self, setup): """Tests that signer public key is calculated for a block properly """ - block_list = get_blocks()['data'] - signer_public_key = [block['batches'][0]['header']['signer_public_key'] for block in block_list] - assert signer_public_key is not None, "signer public key is available" + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + + block_list = response['data'] + signer_public_key = [block['batches'][0]['header']['signer_public_key'] for block in block_list] + assert signer_public_key is not None, "signer public key is available" + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) - def test_rest_api_check_blocks_count(self, setup): + async def test_rest_api_check_blocks_count(self, setup): """Tests blocks count from block list """ + address = setup['address'] count =0 try: - block_list = get_blocks() - for block in enumerate(block_list['data']): + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for block in enumerate(response['data']): 
count = count+1 - except urllib.error.HTTPError as error: + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("BLock count not able to collect") - def test_rest_api_blk_content_head_signature(self, setup): + async def test_rest_api_blk_content_head_signature(self, setup): """Tests that head signature of each batch of the block should be not none """ + address = setup['address'] try: - block_list = get_blocks() - for batch in block_list['data']: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for batch in response['data']: batch_list = get_batches() - for block in batch_list: + for batch in batch_list: transaction_list = get_transactions() for trans in transaction_list['data']: head_signature = trans['header_signature'] - except urllib.error.HTTPError as error: + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Header signature is missing in some of the batches") assert head_signature is not None, "Head signature is available for all batches in block" class TestBlockGet(RestApiBaseTest): - def test_api_get_block_id(self, setup): + async def test_api_get_block_id(self, setup): """Tests that GET /blocks/{block_id} is reachable """ LOGGER.info("Starting test for blocks/{block_id}") + signer_key = setup['signer_key'] expected_head = setup['expected_head'] - expected_block_id = setup['block_ids'][0] + expected_id = setup['block_ids'][0] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + address = setup['address'] + expected_link = '{}/blocks/{}'.format(address, expected_id) try: - response = get_block_id(block_id=expected_block_id) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api not reachable") - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - 
LOGGER.info(response['error']['message']) + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks/{}'.format(address,expected_id), + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + blocks = response['data'] + + self.assert_check_block_seq(blocks,expected_batches, + expected_txns,payload,signer_key) - def test_api_get_bad_block_id(self, setup): + async def test_api_get_bad_block_id(self, setup): """Tests that GET /blocks/{bad_block_id} is not reachable with bad id """ LOGGER.info("Starting test for blocks/{bad_block_id}") + address = setup['address'] try: - response = get_block_id(block_id=BAD_ID) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api not reachable") - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) - - def test_api_blk_debug_flag_set_proper(self, setup): - """Tests that block debug flag should set proper - for true and false value - """ - try: - block_list = get_blocks() - for batch in block_list['data']: - batch_list = get_batches() - for trans in batch_list['data']: - trace = trans['trace'] - except urllib.error.HTTPError as error: - LOGGER.info("Debug flag is not set for tracing") - assert trace is not None, "Debug flag is set for tracing" - - def test_api_blk_payload_present_unique(self, setup): - """Tests that block payload is should be present - and unique for each batch in the block - """ - prev_line = '' - try: - with open ('payload.txt', 'w') as f: - block_list = get_blocks() - for batch in block_list['data']: - batch_list = get_batches() - for block in batch_list: - transaction_list = get_transactions() - for trans in transaction_list['data']: - payload = trans['payload'] - f.write(payload) - with open('payload.txt', 'r') as f: - payloads = f.readlines() - for payload in payloads: 
- if prev_line < payload: - LOGGER.info("Payload is unique in each transaction") - prev_line = payload - except urllib.error.HTTPError as error: - LOGGER.info("Payload is missing in some of the transactions") - assert payload is not None, "Payload is unique and available for all transactions in batch" - - + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks/{}'.format(address,BAD_ID)) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) From 5c14ef1cbb4c454deb9dd52d0532bd87f6b74847 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Wed, 24 Oct 2018 12:54:18 +0530 Subject: [PATCH 33/64] Update test_rest_api_get_peers.py --- .../tests/api_test/get/test_rest_api_get_peers.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_peers.py b/rest_api/tests/api_test/get/test_rest_api_get_peers.py index 55fd908aaf..826e1c37dd 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_peers.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_peers.py @@ -18,6 +18,7 @@ import json import urllib.request import urllib.error +import aiohttp from utils import get_peers @@ -33,16 +34,18 @@ class TestPeerList(RestApiBaseTest): """This class tests the peer list with different parameters """ - def test_api_get_peer_list(self, setup): + async def test_api_get_peer_list(self, setup): """Tests the peer list """ address = setup['address'] expected_link = '{}/peers'.format(address) - try: - response = get_peers() - except urllib.error.HTTPError as error: + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/peers'.format(address), raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Rest Api is Unreachable") 
self.assert_valid_link(response, expected_link) - \ No newline at end of file + From c8aca0782a821708b693104f1a57d85c16c8a038 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Wed, 24 Oct 2018 12:54:41 +0530 Subject: [PATCH 34/64] Update test_rest_api_get_receipts.py --- .../get/test_rest_api_get_receipts.py | 145 ++++++++++-------- 1 file changed, 78 insertions(+), 67 deletions(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_receipts.py b/rest_api/tests/api_test/get/test_rest_api_get_receipts.py index 95a87f7ecf..d7ae7f7de7 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_receipts.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_receipts.py @@ -18,121 +18,132 @@ import json import urllib.request import urllib.error +import aiohttp -from conftest import setup from utils import get_state_list, get_reciepts, post_receipts from base import RestApiBaseTest -from fixtures import setup_batch_multiple_transaction LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.INFO) -pytestmark = [pytest.mark.get , pytest.mark.receipts] +pytestmark = [pytest.mark.get , pytest.mark.receipts, pytest.mark.fifth] RECEIPT_NOT_FOUND = 80 RECEIPT_WRONG_CONTENT_TYPE = 81 RECEIPT_BODY_INVALID = 82 RECEIPT_Id_QUERYINVALID = 83 INVALID_RESOURCE_ID = 60 +TIMEOUT=5 class TestReceiptsList(RestApiBaseTest): """This class tests the receipt list with different parameters """ - def test_api_get_reciept_invalid_id(self): + async def test_api_get_reciept_invalid_id(self,setup): """Tests the reciepts after submitting invalid transaction """ + address = setup['address'] transaction_id="s" - try: - response = get_reciepts(transaction_id) - except urllib.error.HTTPError as error: + params={'id':transaction_id} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/receipts'.format(address),params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Rest Api is 
Unreachable") - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) - assert response['error']['code'] == INVALID_RESOURCE_ID - assert response['error']['title'] == 'Invalid Resource Id' - ''' - def test_api_get_reciepts_multiple_transactions(self, setup_batch_multiple_transaction): + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + + async def test_api_get_reciepts_multiple_transactions(self, setup): """Test the get reciepts for multiple transaction. """ transaction_list="" - li=setup_batch_multiple_transaction - for txn in li: + expected_txns = setup['expected_txns'] + address = setup['address'] + print(expected_txns) + + for txn in expected_txns: transaction_list=txn+","+transaction_list trans_list = str(transaction_list)[:-1] + params={'id':trans_list} + try: - response = get_reciepts(trans_list) - except urllib.error.HTTPError as error: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/receipts'.format(address),params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Rest Api is Unreachable") - response = json.loads(error.fp.read().decode('utf-8')) + - for res,txn in zip(response['data'],reversed(li)): + for res,txn in zip(response['data'],reversed(expected_txns)): assert str(res['id']) == txn - ''' - def test_api_get_reciepts_single_transactions(self,setup): + + async def test_api_get_reciepts_single_transactions(self,setup): """Tests get reciepts response for single transaction""" expected_transaction=setup['expected_txns'] - + address = setup['address'] transaction_id=str(expected_transaction)[2:-2] - try: - response = get_reciepts(transaction_id) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is Unreachable") - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - 
LOGGER.info(response['error']['message']) - assert response['error']['code'] == RECEIPT_NOT_FOUND - assert response['error']['title'] == 'Invalid Resource Id' + params={'id':transaction_id} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/receipts'.format(address),params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") - def test_api_post_reciepts_single_transactions(self,setup): + async def test_api_post_reciepts_single_transactions(self,setup): """Test post reciepts response for single transaction""" expected_transaction=setup['expected_txns'] - - transaction_json=json.dumps(expected_transaction).encode() - try: - response = post_receipts(transaction_json) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is Unreachable") - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) - assert response['error']['code'] == INVALID_RESOURCE_ID - assert response['error']['title'] == 'Invalid Resource Id' - - def test_api_post_reciepts_invalid_transactions(self): + address = setup['address'] + transaction_json=json.dumps(expected_transaction).encode() + headers = {'content-type': 'application/json'} + + try: + async with aiohttp.ClientSession() as session: + async with session.post(url='{}/receipts'.format(address), + data=transaction_json,headers=headers) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + + async def test_api_post_reciepts_invalid_transactions(self,setup): """test reciepts post for invalid transaction""" expected_transaction="few" + address = setup['address'] transaction_json=json.dumps(expected_transaction).encode() - try: - response = post_receipts(transaction_json) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api 
is Unreachable") - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) - assert response['error']['code'] == RECEIPT_BODY_INVALID - assert response['error']['title'] == 'Bad Receipts Request' - ''' - def test_api_post_reciepts_multiple_transactions(self, setup_batch_multiple_transaction): + headers = {'content-type': 'application/json'} + + try: + async with aiohttp.ClientSession() as session: + async with session.post(url='{}/receipts'.format(address), + data=transaction_json,headers=headers) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + async def test_api_post_reciepts_multiple_transactions(self, setup): """Test the post reciepts response for multiple transaction. """ - - transaction_list=setup_batch_multiple_transaction - - json_list=json.dumps(transaction_list).encode() + address = setup['address'] + expected_txns = setup['expected_txns'] + json_list=json.dumps(expected_txns).encode() + headers = {'content-type': 'application/json'} try: - response= post_receipts(json_list) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is Unreachable") - response = json.loads(error.fp.read().decode('utf-8')) + async with aiohttp.ClientSession() as session: + async with session.post(url='{}/receipts'.format(address), + data=json_list,headers=headers) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) - for res,txn in zip(response['data'], transaction_list): + for res,txn in zip(response['data'], expected_txns): assert str(res['id']) == txn - ''' From 8d94f9c855bfb31514c955a8c7355d1c4cb6aadb Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Wed, 24 Oct 2018 12:55:15 +0530 Subject: [PATCH 35/64] Update test_rest_api_get_state.py --- .../api_test/get/test_rest_api_get_state.py | 547 ++++++++++-------- 1 file changed, 
318 insertions(+), 229 deletions(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_state.py b/rest_api/tests/api_test/get/test_rest_api_get_state.py index 814118d719..516de086e5 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_state.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_state.py @@ -18,7 +18,10 @@ import json import urllib.request import urllib.error - +import aiohttp +import asyncio + + from utils import get_state_list, get_state_address from fixtures import invalid_batch @@ -29,7 +32,8 @@ LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.INFO) -pytestmark = [pytest.mark.get, pytest.mark.state] +pytestmark = [pytest.mark.get, pytest.mark.state, pytest.mark.third] + START = 1 LIMIT = 1 @@ -47,365 +51,421 @@ STATE_NOT_FOUND = 75 INVALID_STATE_ADDRESS = 62 HEAD_LENGTH = 128 +TIMEOUT=5 class TestStateList(RestApiBaseTest): """This class tests the state list with different parameters """ - ''' - def test_api_get_state_list(self, setup): + async def test_api_get_state_list(self, setup): """Tests the state list by submitting intkey batches """ + address = setup['address'] signer_key = setup['signer_key'] expected_head = setup['expected_head'] - expected_batches = setup['expected_batches'] - expected_txns = setup['expected_txns'] - - try: - response = get_state_list() - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is Unreachable") - - state_list = response['data'][:-1] - - self.assert_valid_head(response , expected_head) - ''' - def test_api_get_state_list_invalid_batch(self, invalid_batch): - """Tests that transactions are submitted and committed for - each block that are created by submitting invalid intkey batches - """ - batches = invalid_batch['expected_batches'] + expected_address = setup['state_address'][0] + expected_link = "{}/state?head={}&start={}&limit=100".format(address, expected_head,\ + expected_address) try: - response = get_state_list() + async with aiohttp.ClientSession() as session: + 
async with session.get(url='{}/state'.format(address), raise_for_status=True) as data: + response = await data.json() except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) + LOGGER.info("Rest Api is Unreachable") - def test_api_get_state_list_head(self, setup): + + state_list = response['data'][::-1] + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + + async def test_api_get_state_list_head(self, setup): """Tests that GET /state is reachable with head parameter """ LOGGER.info("Starting test for state with head parameter") + + address = setup['address'] + signer_key = setup['signer_key'] expected_head = setup['expected_head'] - + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + expected_head = setup['expected_head'] + state_address = setup['state_address'][0] + expected_link = "{}/state?head={}&start={}&limit=100".format(address, expected_head,\ + state_address) + params={'head': expected_head} + + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address),params=params) as data: + response = await data.json() + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + + async def test_api_get_state_list_invalid_batch(self, invalid_batch): + """Tests that state is not updated for when + submitting invalid intkey batches + """ + address = invalid_batch['address'] + batches = invalid_batch['expected_batches'] try: - response = get_state_list(head_id=expected_head) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api not reachable") - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - - assert response['head'] == expected_head , "request is not correct" - - def 
test_api_get_state_list_bad_head(self, setup): + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address)) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + async def test_api_get_state_list_bad_head(self, setup): """Tests that GET /state is unreachable with bad head parameter - """ + """ + address = setup['address'] LOGGER.info("Starting test for state with bad head parameter") bad_head = 'f' + params={'head': BAD_HEAD} + try: - batch_list = get_state_list(head_id=bad_head) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is not reachable") - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) - self.assert_valid_error(data , INVALID_RESOURCE_ID) + self.assert_valid_error(response, INVALID_RESOURCE_ID) - def test_api_get_state_list_address(self, setup): + async def test_api_get_state_list_address(self, setup): """Tests that GET /state is reachable with address parameter """ + address = setup['address'] LOGGER.info("Starting test for state with address parameter") expected_head = setup['expected_head'] - address = setup['state_address'][0] - + state_address = setup['state_address'][0] + params = {'address': state_address} + try: - response = get_state_list(address=address) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api not reachable") - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - - assert response['head'] == expected_head , "request is not correct" - ''' - def 
test_api_get_state_list_bad_address(self, setup): + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_head(response , expected_head) + + async def test_api_get_state_list_bad_address(self, setup): """Tests that GET /state is unreachable with bad address parameter - """ + """ + address = setup['address'] LOGGER.info("Starting test for state with bad address parameter") - bad_address = 'f' + params = {'address': BAD_ADDRESS} try: - batch_list = get_state_list(address=bad_address) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is not reachable") - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) - self.assert_valid_error(data , INVALID_RESOURCE_ID) - ''' - def test_api_get_paginated_state_list(self, setup): + self.assert_valid_error(response , INVALID_STATE_ADDRESS) + + async def test_api_get_paginated_state_list(self, setup): """Tests GET /state is reachbale using paging parameters """ + address = setup['address'] LOGGER.info("Starting test for state with paging parameters") batch_ids = setup['batch_ids'] expected_head = setup['expected_head'] expected_id = batch_ids[0] - start = 1 - limit = 1 + params={'limit':1, 'start':1} + try: - response = get_state_list(start=start , limit=limit) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - - self.assert_valid_error(data , INVALID_PAGING_QUERY) + 
async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) - def test_api_get_paginated_state_list_limit(self, setup): + async def test_api_get_paginated_state_list_limit(self, setup): """Tests GET /state is reachbale using paging parameters """ + address = setup['address'] LOGGER.info("Starting test for state with paging parameters") batch_ids = setup['batch_ids'] expected_head = setup['expected_head'] expected_id = batch_ids[0] - limit = 1 + params={'limit':1} try: - response = get_state_list(limit=limit) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") - def test_api_get_paginated_state_list_start(self, setup): + async def test_api_get_paginated_state_list_start(self, setup): """Tests GET /state is reachbale using paging parameters """ + address = setup['address'] LOGGER.info("Starting test for state with paging parameters") batch_ids = setup['batch_ids'] expected_head = setup['expected_head'] expected_id = batch_ids[0] - limit = 1 + params={'limit':1} try: - response = get_state_list(limit=limit) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), params=params, + raise_for_status=True) as 
data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") - def test_api_get_state_list_bad_paging(self, setup): + async def test_api_get_state_list_bad_paging(self, setup): """Tests GET /state is reachbale using bad paging parameters """ + address = setup['address'] LOGGER.info("Starting test for state with bad paging parameters") batch_ids = setup['batch_ids'] expected_head = setup['expected_head'] - expected_id = batch_ids[0] - start = -1 - limit = -1 + expected_id = batch_ids[0] + params = {'start':-1 , 'limit':-1} try: - response = get_state_list(start=start , limit=limit) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - - self.assert_valid_error(data , INVALID_COUNT_QUERY) + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_COUNT_QUERY) - def test_api_get_state_list_invalid_start(self, setup): + async def test_api_get_state_list_invalid_start(self, setup): """Tests that GET /state is unreachable with invalid start parameter """ + address = setup['address'] LOGGER.info("Starting test for state with invalid start parameter") batch_ids = setup['batch_ids'] expected_head = setup['expected_head'] expected_id = batch_ids[0] - start = -1 - - try: - response = get_state_list(start=start) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - - self.assert_valid_error(data , INVALID_PAGING_QUERY) + params = {'start':-1 } + + try: + async with aiohttp.ClientSession() as session: + async with 
session.get(url='{}/state'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) - def test_api_get_state_list_invalid_limit(self, setup): + async def test_api_get_state_list_invalid_limit(self, setup): """Tests that GET /state is unreachable with bad limit parameter """ + address = setup['address'] LOGGER.info("Starting test for state with bad limit parameter") batch_ids = setup['batch_ids'] expected_head = setup['expected_head'] expected_id = batch_ids[0] - limit = 0 + params = {'limit': 0 } - try: - response = get_state_list(limit=limit) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - - self.assert_valid_error(data , INVALID_COUNT_QUERY) + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_COUNT_QUERY) - def test_api_get_state_list_reversed(self, setup): + async def test_api_get_state_list_reversed(self, setup): """verifies that GET /state is unreachable with bad head parameter """ + address = setup['address'] LOGGER.info("Starting test for state with bad head parameter") batch_ids = setup['batch_ids'] expected_head = setup['expected_head'] expected_id = batch_ids[0] - reverse = True + params = 'reverse' try: - response = get_state_list(reverse=reverse) - except urllib.error.HTTPError as error: - assert response.code == 400 + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except 
aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") assert response['paging']['start'] == None , "request is not correct" assert response['paging']['limit'] == None , "request is not correct" assert bool(response['data']) == True - def test_api_get_state_data_address_prefix_namespace(self, setup): + async def test_api_get_state_data_address_prefix_namespace(self, setup): """Tests the state data address with 6 hex characters long namespace prefix """ + address = setup['address'] try: - for state in get_state_list()['data']: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for state in response['data']: #Access each address using namespace prefix namespace = state['address'][:6] res=get_state_list(address=namespace) - except urllib.error.HTTPError as error: + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Not able to access related state address using namespace prefix") - def test_api_get_state_data_head_wildcard_character(self, setup): + async def test_api_get_state_data_head_wildcard_character(self, setup): """Tests the state head with wildcard_character ***STL-1345*** - """ - pass -# try: -# for _ in get_state_list()['data']: -# expected_head = setup['expected_head'][:6] -# addressList = list(expected_head) -# addressList[2]='?' 
-# expected_head = ''.join(addressList) -# print("\nVALUE is: ", expected_head) -# res=get_state_list(head_id=expected_head) -# except urllib.error.HTTPError as error: -# LOGGER.info("Not able to access ") -# data = json.loads(error.fp.read().decode('utf-8')) -# if data: -# LOGGER.info(data['error']['title']) -# LOGGER.info(data['error']['message']) -# assert data['error']['code'] == 60 -# assert data['error']['title'] == 'Invalid Resource Id' + """ + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for _ in response['data']: + expected_head = setup['expected_head'][:6] + addressList = list(expected_head) + addressList[2]='?' + expected_head = ''.join(addressList) + print("\nVALUE is: ", expected_head) + res=get_state_list(head_id=expected_head) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) - def test_api_get_state_data_head_partial_character(self, setup): + async def test_api_get_state_data_head_partial_character(self, setup): """Tests the state head with partial head address ***STL-1345*** - """ + """ + address = setup['address'] try: - for _ in get_state_list()['data']: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for _ in response['data']: expected_head = setup['expected_head'][:6] res=get_state_list(head_id=expected_head) - except urllib.error.HTTPError as error: - LOGGER.info("Not able to access ") - data = json.loads(error.fp.read().decode('utf-8')) - if data: - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - assert data['error']['code'] == 60 - assert data['error']['title'] == 'Invalid Resource Id' + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) - def 
test_api_get_state_data_address_partial_character(self, setup): + async def test_api_get_state_data_address_partial_character(self, setup): """Tests the state address with partial head address ***STL-1346*** - """ + """ + address = setup['address'] try: - for _ in get_state_list()['data']: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for _ in response['data']: expected_head = setup['expected_head'][:6] res=get_state_list(head_id=expected_head) - except urllib.error.HTTPError as error: - LOGGER.info("Not able to access ") - data = json.loads(error.fp.read().decode('utf-8')) - if data: - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - assert data['error']['code'] == 62 - assert data['error']['title'] == 'Invalid State Address' + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) - def test_api_get_state_data_address_length(self, setup): + async def test_api_get_state_data_address_length(self, setup): """Tests the state data address length is 70 hex character long with proper prefix namespace - """ + """ + address = setup['address'] try: - response = get_state_list() - for state in get_state_list()['data']: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for state in response['data']: #Access each address using of state address = len(response['data'][0]['address']) - except urllib.error.HTTPError as error: + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("State address is not 70 character long") assert address == STATE_ADDRESS_LENGTH - def test_api_get_state_data_address_with_odd_hex_value(self, setup): + async def test_api_get_state_data_address_with_odd_hex_value(self, setup): """Tests the state data address fail with odd 
hex character address """ + address = setup['address'] try: - response = get_state_list() - for state in get_state_list()['data']: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for state in response['data']: #Access each address using of state address = len(response['data'][0]['address']) if(address%2 == 0): pass - except urllib.error.HTTPError as error: + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Odd state address is not correct") - def test_api_get_state_data_address_with_reduced_length(self, setup): + async def test_api_get_state_data_address_with_reduced_length(self, setup): """Tests the state data address with reduced even length hex character long - """ + """ + address = setup['address'] try: - response = get_state_list() - for state in get_state_list()['data']: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for state in response['data']: #Access each address using of state address = response['data'][0]['address'] nhex = address[:-4] get_state_list(address = nhex) - except urllib.error.HTTPError as error: + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Reduced length data address failed to processed") - def test_api_get_state_data_address_64_Hex(self, setup): + async def test_api_get_state_data_address_64_Hex(self, setup): """Tests the state data address with 64 hex give empty data - """ + """ + address = setup['address'] try: - response = get_state_list() - for state in get_state_list()['data']: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for state in response['data']: #Access each address using of state address = 
response['data'][0]['address'] nhex = address[6:70] naddress = get_state_list(address = nhex) assert naddress['data'] == [] - except urllib.error.HTTPError as error: + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("state data address with 64 hex characters not processed ") - def test_api_get_state_data_address_alter_bytes(self, setup): + async def test_api_get_state_data_address_alter_bytes(self, setup): """Tests the state data address with alter bytes give empty data - """ + """ + address = setup['address'] try: - response = get_state_list() - for state in get_state_list()['data']: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for state in response['data']: #Access each address using of state address = response['data'][0]['address'] nhex = address[6:8] @@ -413,77 +473,106 @@ def test_api_get_state_data_address_alter_bytes(self, setup): addressList = list(naddress) addressList[2]='z' naddress = ''.join(addressList) - except urllib.error.HTTPError as error: + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("state data address with altered bytes not processed ") - def test_api_get_state_link_val(self, setup): + async def test_api_get_state_link_val(self, setup): """Tests/ validate the state parameters with state, head, start and limit """ + address = setup['address'] try: - state_list = get_state_list() - for link in state_list: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for link in response: if(link == 'link'): - assert 'head' in state_list['link'] - assert 'start' in state_list['link'] - assert 'limit' in state_list['link'] - assert 'state' in state_list['link'] - except urllib.error.HTTPError as error: - assert response.code == 400 + assert 'head' in 
response['link'] + assert 'start' in response['link'] + assert 'limit' in response['link'] + assert 'state' in response['link'] + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) LOGGER.info("Link is not proper for state and parameters are missing") - def test_api_get_state_key_params(self, setup): + async def test_api_get_state_key_params(self, setup): """Tests/ validate the state key parameters with data, head, link and paging """ - response = get_state_list() + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + assert 'link' in response assert 'data' in response assert 'paging' in response assert 'head' in response - def test_api_get_each_state_head_length(self, setup): + async def test_api_get_each_state_head_length(self, setup): """Tests the each state head length should be 128 hex character long - """ + """ + address = setup['address'] try: - for _ in get_state_list()['data']: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for _ in response['data']: expected_head = setup['expected_head'] head_len = len(expected_head) - except urllib.error.HTTPError as error: + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("State Head length is not 128 hex character long") assert head_len == HEAD_LENGTH - def test_rest_api_check_state_count(self, setup): + async def test_rest_api_check_state_count(self, setup): """Tests state count from state list """ + address = setup['address'] count = 0 try: - state_list = get_state_list()['data'] + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), + 
raise_for_status=True) as data: + response = await data.json() + + state_list = response['data'] for batch in enumerate(state_list): count = count+1 - except urllib.error.HTTPError as error: + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("State count not able to collect") class TestStateGet(RestApiBaseTest): - def test_api_get_state_address(self, setup): + async def test_api_get_state_address(self, setup): """Tests/ validate the state key parameters with data, head, link and paging """ - address = setup['state_address'][0] + address = setup['address'] + state_address = setup['state_address'][0] try: - response = get_state_address(address=address) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state/{}'.format(address,state_address), + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") - def test_api_get_bad_address(self, setup): + async def test_api_get_bad_address(self, setup): """Tests /state/{bad_state_address} """ + address = setup['address'] + LOGGER.info("Starting test for state/{bad_address}") try: - response = get_state_address(address=BAD_ADDRESS) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state/{}'.format(address,BAD_ADDRESS)) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) - self.assert_valid_error(data, INVALID_STATE_ADDRESS) + self.assert_valid_error(response, INVALID_STATE_ADDRESS) From 
afa991cd3be48dc17dd2b0258fcdc1c0d4602546 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Wed, 24 Oct 2018 12:55:46 +0530 Subject: [PATCH 36/64] Update test_rest_api_get_transaction.py --- .../get/test_rest_api_get_transaction.py | 423 +++++++++++------- 1 file changed, 250 insertions(+), 173 deletions(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_transaction.py b/rest_api/tests/api_test/get/test_rest_api_get_transaction.py index 32568d5007..aea78be0e5 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_transaction.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_transaction.py @@ -12,12 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. # ------------------------------------------------------------------------------ - + import pytest import logging import json import urllib.request import urllib.error +import aiohttp from fixtures import break_genesis @@ -25,7 +26,8 @@ from base import RestApiBaseTest -pytestmark = [pytest.mark.get , pytest.mark.transactions] +pytestmark = [pytest.mark.get , pytest.mark.transactions, pytest.mark.first] + LOGGER = logging.getLogger(__name__) @@ -44,40 +46,47 @@ VALIDATOR_NOT_READY = 15 TRANSACTION_NOT_FOUND = 72 HEAD_LENGTH = 128 +TIMEOUT=5 class TestTransactionList(RestApiBaseTest): - def test_api_get_transaction_list(self, setup): + async def test_api_get_transaction_list(self, setup): """Tests the transaction list after submitting intkey batches """ signer_key = setup['signer_key'] expected_head = setup['expected_head'] expected_txns = setup['expected_txns'] expected_length = setup['expected_trn_length'] - payload = setup['payload'][0] + payload = setup['payload'] address = setup['address'] - start = expected_txns[::-1][0] + start = setup['start'] + limit = setup['limit'] + start = expected_txns[0] expected_link = '{}/transactions?head={}&start={}&limit={}'.format(address,\ - expected_head, start, LIMIT) + expected_head, start, limit) + 
+ paging_link = '{}/transactions?head={}&start={}'.format(address,\ + expected_head, start) - try: - response = get_transactions() - except urllib.error.HTTPError as error: + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Rest Api is Unreachable") - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) txns = response['data'][:-1] -# self.assert_check_transaction_seq(txns, expected_txns, -# payload, signer_key) -# self.assert_valid_head(response , expected_head) -# self.assert_valid_paging(response) + self.assert_check_transaction_seq(txns, expected_txns, + payload, signer_key) + self.assert_valid_head(response , expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) - def test_api_get_transaction_list_head(self, setup): + async def test_api_get_transaction_list_head(self, setup): """Tests that GET /transactions is reachable with head parameter """ LOGGER.info("Starting test for transactions with head parameter") @@ -87,41 +96,47 @@ def test_api_get_transaction_list_head(self, setup): expected_length = setup['expected_trn_length'] payload = setup['payload'][0] address = setup['address'] - start = expected_txns[::-1][0] + start = expected_txns[0] + limit = setup['limit'] expected_link = '{}/transactions?head={}&start={}&limit={}'.format(address,\ - expected_head, start, LIMIT) + expected_head, start, limit) + params={'head': expected_head} + try: - response = get_transactions(head_id=expected_head) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api not reachable") - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) + async with 
aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") txns = response['data'][:-1] self.assert_check_transaction_seq(txns, expected_txns, payload, signer_key) self.assert_valid_head(response , expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) - def test_api_get_transaction_list_bad_head(self, setup): + async def test_api_get_transaction_list_bad_head(self, setup): """Tests that GET /transactions is unreachable with bad head parameter """ LOGGER.info("Starting test for transactions with bad head parameter") + address = setup['address'] + params={'head': BAD_HEAD} try: - response = get_transactions(head_id=BAD_HEAD) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is not reachable") - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) - self.assert_valid_error(data, INVALID_RESOURCE_ID) + self.assert_valid_error(response, INVALID_RESOURCE_ID) - def test_api_get_transaction_list_id(self, setup): + async def test_api_get_transaction_list_id(self, setup): """Tests that GET /transactions is reachable with id as parameter """ LOGGER.info("Starting test for transactions with id parameter") @@ -132,18 +147,24 @@ def test_api_get_transaction_list_id(self, setup): expected_length = setup['expected_trn_length'] payload = setup['payload'][0] address = setup['address'] - start = expected_txns[::-1][0] + start = expected_txns[0] transaction_ids 
= setup['transaction_ids'] expected_id = transaction_ids[0] expected_length = len([expected_id]) + limit = setup['limit'] expected_link = '{}/transactions?head={}&start={}&limit={}&id={}'.format(address,\ - expected_head, start, LIMIT, expected_id) + expected_head, start, limit, expected_id) + params={'id': expected_id} + try: - response = get_transactions(id=expected_id) - except: - LOGGER.info("Rest Api is not reachable") + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") txns = response['data'][:-1] @@ -151,21 +172,23 @@ def test_api_get_transaction_list_id(self, setup): self.assert_check_transaction_seq(txns, expected_txns, payload, signer_key) - def test_api_get_transaction_list_bad_id(self, setup): + async def test_api_get_transaction_list_bad_id(self, setup): """Tests that GET /transactions is unreachable with bad id parameter """ LOGGER.info("Starting test for transactions with bad id parameter") - bad_id = 'f' - + address = setup['address'] + params={'head': BAD_ID} + try: - response = get_transactions(head_id=bad_id) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is not reachable") - data = json.loads(error.fp.read().decode('utf-8')) + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) - self.assert_valid_error(data, INVALID_RESOURCE_ID) + self.assert_valid_error(response, INVALID_RESOURCE_ID) - def test_api_get_transaction_list_head_and_id(self, setup): + async def test_api_get_transaction_list_head_and_id(self, setup): """Tests GET /transactions is reachable with head and id as parameters """ 
LOGGER.info("Starting test for transactions with head and id parameter") @@ -176,18 +199,24 @@ def test_api_get_transaction_list_head_and_id(self, setup): expected_length = setup['expected_trn_length'] payload = setup['payload'][0] address = setup['address'] - start = expected_txns[::-1][0] + start = expected_txns[0] transaction_ids = setup['transaction_ids'] expected_id = transaction_ids[0] expected_length = len([expected_id]) + limit = setup['limit'] expected_link = '{}/transactions?head={}&start={}&limit={}&id={}'.format(address,\ - expected_head, start, LIMIT, expected_id) + expected_head, start, limit, expected_id) - try: - response = get_transactions(head_id=expected_head , id=expected_id) - except: - LOGGER.info("Rest Api not reachable") + params={'head':expected_head,'id':expected_id} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") txns = response['data'][:-1] @@ -196,207 +225,255 @@ def test_api_get_transaction_list_head_and_id(self, setup): payload, signer_key) self.assert_valid_head(response , expected_head) - def test_api_get_paginated_transaction_list(self, setup): - """Tests GET /transactions is reachbale using paging parameters + async def test_api_get_paginated_transaction_list(self, setup): + """Tests GET /transactions is reachable using paging parameters """ LOGGER.info("Starting test for transactions with paging parameters") + address = setup['address'] batch_ids = setup['batch_ids'] expected_head = setup['expected_head'] expected_id = batch_ids[0] start = 1 limit = 1 + params={'limit':1, 'start':1} + try: - response = get_transactions(start=start , limit=limit) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - 
LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) - - self.assert_valid_error(data, INVALID_PAGING_QUERY) + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) + + async def test_api_get_transaction_list_limit(self, setup): + """Tests GET /batches is reachable using paging parameters + """ + LOGGER.info("Starting test for batch with paging parameters") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_trn_length'] + payload = setup['payload'][0] + address = setup['address'] + start = expected_txns[0] + transaction_ids = setup['transaction_ids'] + expected_id = transaction_ids[0] + expected_length = len([expected_id]) + limit = setup['limit'] + + params={'limit':1} + + expected_link = '{}/transactions?head={}&start={}&limit={}'.format(address,\ + expected_head, start, 1) + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + + txns = response['data'][:-1] + + self.assert_check_transaction_seq(txns, expected_txns, + payload, signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) - def test_api_get_transaction_bad_paging(self, setup): + async def test_api_get_transaction_bad_paging(self, setup): """Tests GET /transactions is reachbale using bad paging parameters """ LOGGER.info("Starting test for transactions with bad paging 
parameters") - batch_ids = setup['batch_ids'] - expected_head = setup['expected_head'] - expected_id = batch_ids[0] - start = -1 - limit = -1 + address = setup['address'] + params = {'start':-1 , 'limit':-1} try: - response = get_transactions(start=start , limit=limit) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) - self.assert_valid_error(data, INVALID_COUNT_QUERY) + self.assert_valid_error(response, INVALID_COUNT_QUERY) - def test_api_get_transaction_list_invalid_start(self, setup): + async def test_api_get_transaction_list_invalid_start(self, setup): """Tests that GET /transactions is unreachable with invalid start parameter """ - LOGGER.info("Starting test for transactions with invalid start parameter") - batch_ids = setup['batch_ids'] - expected_head = setup['expected_head'] - expected_id = batch_ids[0] - start = -1 - - try: - response = get_transactions(start=start) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) + LOGGER.info("Starting test for transactions with invalid start parameter") + address = setup['address'] + params = {'start':-1 } + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) - self.assert_valid_error(data, INVALID_PAGING_QUERY) + self.assert_valid_error(response, INVALID_PAGING_QUERY) - def 
test_api_get_transaction_list_invalid_limit(self, setup): + async def test_api_get_transaction_list_invalid_limit(self, setup): """Tests that GET /transactions is unreachable with bad limit parameter """ LOGGER.info("Starting test for transactions with bad limit parameter") - batch_ids = setup['batch_ids'] - expected_head = setup['expected_head'] - expected_id = batch_ids[0] - limit = 0 - - try: - response = get_transactions(limit=limit) - except urllib.error.HTTPError as error: - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) + address = setup['address'] + params = {'limit': 0 } + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) - self.assert_valid_error(data, INVALID_COUNT_QUERY) + self.assert_valid_error(response, INVALID_COUNT_QUERY) - def test_api_get_transaction_list_reversed(self, setup): + async def test_api_get_transaction_list_reversed(self, setup): """verifies that GET /transactions with list reversed """ LOGGER.info("Starting test for transactions with list reversed") - batch_ids = setup['batch_ids'] + signer_key = setup['signer_key'] expected_head = setup['expected_head'] - expected_id = batch_ids[0] - reverse = True + expected_txns = setup['expected_txns'] + expected_length = setup['expected_trn_length'] + payload = setup['payload'][0] + address = setup['address'] + start = expected_txns[::-1][0] + transaction_ids = setup['transaction_ids'] + expected_id = transaction_ids[0] + expected_length = len([expected_id]) + limit = setup['limit'] + expected_link = '{}/transactions?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + params = 'reverse' try: - response = get_transactions(reverse=reverse) - except urllib.error.HTTPError as error: - 
assert response.code == 400 + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") assert response['paging']['start'] == None , "request is not correct" assert response['paging']['limit'] == None , "request is not correct" assert bool(response['data']) == True - def test_api_get_transactions_link_val(self, setup): + async def test_api_get_transactions_link_val(self, setup): """Tests/ validate the transactions parameters with transactions, head, start and limit """ + address = setup['address'] try: - transactions_list = get_transactions() - for link in transactions_list: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), raise_for_status=True) as data: + response = await data.json() + + for link in response: if(link == 'link'): - assert 'head' in transactions_list['link'] - assert 'start' in transactions_list['link'] - assert 'limit' in transactions_list['link'] - assert 'transactions' in transactions_list['link'] + assert 'head' in response['link'] + assert 'start' in response['link'] + assert 'limit' in response['link'] + assert 'transactions' in response['link'] except urllib.error.HTTPError as error: assert response.code == 400 LOGGER.info("Link is not proper for transactions and parameters are missing") - def test_api_get_transactions_key_params(self, setup): + async def test_api_get_transactions_key_params(self, setup): """Tests/ validate the state key parameters with data, head, link and paging """ - response = get_transactions() + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), raise_for_status=True) as data: + response = await data.json() + except 
aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + assert 'link' in response assert 'data' in response assert 'paging' in response assert 'head' in response - def test_api_get_transaction_id_length(self, setup): + async def test_api_get_transaction_id_length(self, setup): """Tests the transaction id length should be 128 hex character long - """ + """ + address = setup['address'] try: - transaction_list = get_transactions() - for trans in transaction_list['data']: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), raise_for_status=True) as data: + response = await data.json() + + for trans in response['data']: transaction_ids = trans['header_signature'] head_len = len(transaction_ids) - except urllib.error.HTTPError as error: + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Transaction id length is not 128 hex character long") assert head_len == HEAD_LENGTH - def test_rest_api_check_transactions_count(self, setup): + async def test_rest_api_check_transactions_count(self, setup): """Tests transaction count from transaction list """ + address = setup['address'] count =0 try: - batch_list = get_transactions() - for batch in enumerate(batch_list['data']): + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), raise_for_status=True) as data: + response = await data.json() + + for trans in enumerate(response['data']): count = count+1 - except urllib.error.HTTPError as error: + except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Transaction count not able to collect") -class TesttransactionGet(RestApiBaseTest): - def test_api_get_transaction_id(self, setup): +class TestTransactionGet(RestApiBaseTest): + async def test_api_get_transaction_id(self, setup): """Tests that GET /transactions/{transaction_id} is reachable """ LOGGER.info("Starting test for 
transaction/{transaction_id}") + signer_key = setup['signer_key'] expected_head = setup['expected_head'] - expected_id = setup['transaction_ids'][0] + expected_txns = setup['expected_txns'] + expected_id = expected_txns[0] address = setup['address'] + payload = setup['payload'] expected_length = 1 expected_link = '{}/transactions/{}'.format(address,expected_id) try: - response = get_transaction_id(transaction_id=expected_id) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api not reachable") - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions/{}'.format(address,expected_id), + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + txns = response['data'] - self.assert_valid_link(response, expected_link) - assert bool(response['data']) == True + self.assert_check_transaction_seq(txns, expected_txns, + payload, signer_key) + self.assert_valid_link(response, expected_link) - def test_api_get_transaction_bad_id(self, setup): + async def test_api_get_transaction_bad_id(self, setup): """Tests that GET /transactions/{transaction_id} is not reachable with bad id """ - LOGGER.info("Starting test for transactions/{transaction_id}") - try: - response = get_transaction_id(transaction_id=BAD_ID) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api not reachable") - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) - - self.assert_valid_error(response, INVALID_RESOURCE_ID) - - def test_api_get_transaction_signer_key(self, setup): - """Tests that GET /transactions/{transaction_id} is reachable - """ - LOGGER.info("Starting test for transaction/{transaction_id}") - 
expected_head = setup['expected_head'] - expected_id = setup['transaction_ids'][0] + LOGGER.info("Starting test for transactions/{bad_id}") address = setup['address'] - expected_length = 1 - - expected_link = '{}/transactions/{}'.format(address,expected_id) - try: - response = get_transactions() - for i in range(len(response['data'])): - transaction=get_transaction_id(response['data'][i]['header_signature']) - assert response['data'][i]['header']['signer_public_key'] == transaction['data']['header']['signer_public_key'] - assert bool(transaction['data']) == True - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api not reachable") - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions/{}'.format(address,BAD_ID)) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) - - - \ No newline at end of file From 23f8d9207949bc963d35ee253618f7700dca1e56 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Wed, 24 Oct 2018 13:11:05 +0530 Subject: [PATCH 37/64] Update pytest.ini --- rest_api/tests/api_test/pytest.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rest_api/tests/api_test/pytest.ini b/rest_api/tests/api_test/pytest.ini index a444254e4b..830bd0e1be 100644 --- a/rest_api/tests/api_test/pytest.ini +++ b/rest_api/tests/api_test/pytest.ini @@ -1,5 +1,5 @@ [pytest] -addopts = -s -v --json-report --json-report-file=report.json +addopts = -s -v --json-report --json-report-file=report.json -p no:warnings python_files = test_rest*.py log_cli_date_format = %Y-%m-%d %H:%M:%S log_cli_format = %(asctime)s %(levelname)s %(message)s From b8c3cd266d06581ef892938a3942d719221b0e92 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Wed, 24 Oct 
2018 13:42:28 +0530 Subject: [PATCH 38/64] Update utils.py --- rest_api/tests/api_test/utils.py | 170 ++++++++++++++++++++----------- 1 file changed, 108 insertions(+), 62 deletions(-) diff --git a/rest_api/tests/api_test/utils.py b/rest_api/tests/api_test/utils.py index 376ec92378..69a15f8585 100644 --- a/rest_api/tests/api_test/utils.py +++ b/rest_api/tests/api_test/utils.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # ------------------------------------------------------------------------------ - + import pytest import logging import json @@ -30,17 +30,16 @@ import hashlib import os import time -import socket -import netifaces - +import aiohttp + LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.INFO) - + WAIT = 300 -def get_blocks(head_id=None , id=None , start=None , limit=None , reverse=None): +def get_blocks(head_id=None , id=None , start=None , limit=None , reverse=None): if all(v is not None for v in [head_id , id]): response = query_rest_api('/blocks?head={}&id={}'.format(head_id , id)) return response @@ -49,13 +48,13 @@ def get_blocks(head_id=None , id=None , start=None , limit=None , reverse=None): return response if limit is not None: response = query_rest_api('/blocks?limit=%s'% limit) - return response + return response if start is not None: response = query_rest_api('/blocks?start=%s'% start) - return response + return response if head_id is not None: response = query_rest_api('/blocks?head=%s'% head_id) - return response + return response if id is not None: response = query_rest_api('/blocks?id=%s'% id) return response @@ -67,7 +66,7 @@ def get_blocks(head_id=None , id=None , start=None , limit=None , reverse=None): return response -def get_batches(head_id=None , id=None , start=None , limit=None, reverse=None): +def get_batches(head_id=None , id=None , start=None , limit=None, reverse=None): if all(v is not None for v in [head_id , id]): response = 
query_rest_api('/batches?head={}&id={}'.format(head_id , id)) return response @@ -76,13 +75,13 @@ def get_batches(head_id=None , id=None , start=None , limit=None, reverse=None): return response if limit is not None: response = query_rest_api('/batches?limit=%s'% limit) - return response + return response if start is not None: response = query_rest_api('/batches?start=%s'% start) - return response + return response if head_id is not None: response = query_rest_api('/batches?head=%s'% head_id) - return response + return response if id is not None: response = query_rest_api('/batches?id=%s'% id) return response @@ -118,13 +117,13 @@ def get_transactions(head_id=None , id=None , start=None , limit=None , reverse= return response if limit is not None: response = query_rest_api('/transactions?limit=%s'% limit) - return response + return response if start is not None: response = query_rest_api('/transactions?start=%s'% start) - return response + return response if head_id is not None: response = query_rest_api('/transactions?head=%s'% head_id) - return response + return response if id is not None: response = query_rest_api('/transactions?id=%s'% id) return response @@ -144,13 +143,13 @@ def get_state_list(head_id=None , address=None , start=None , limit=None , rever return response if limit is not None: response = query_rest_api('/state?limit=%s'% limit) - return response + return response if start is not None: response = query_rest_api('/state?start=%s'% start) - return response + return response if head_id is not None: response = query_rest_api('/state?head=%s'% head_id) - return response + return response if address is not None: response = query_rest_api('/state?address=%s'% address) return response @@ -167,25 +166,13 @@ def get_state_address(address): def post_batch(batch, headers="None"): if headers=="True": - headers = {'Content-Type': 'application/json'} + headers = {'Content-Type': 'application/json'} else: headers = {'Content-Type': 'application/octet-stream'} - 
+ response = query_rest_api( '/batches', data=batch, headers=headers) - - response = submit_request('{}&wait={}'.format(response['link'], WAIT)) - return response - -def post_batch_no_endpoint(batch, headers="None"): - if headers=="True": - headers = {'Content-Type': 'application/json'} - else: - headers = {'Content-Type': 'application/octet-stream'} - - response = query_rest_api( - '/', data=batch, headers=headers) - + response = submit_request('{}&wait={}'.format(response['link'], WAIT)) return response @@ -218,13 +205,13 @@ def _get_node_chain(node_list): except: LOGGER.warning("Couldn't connect to %s REST API", node) return chain_list - + def _get_node_list(): client_address = _get_client_address() node_list = [_make_http_address(peer) for peer in _get_peers_list(client_address)] node_list.append(_get_client_address()) return node_list - + def _get_peers_list(rest_client, fmt='json'): cmd_output = _run_peer_command( @@ -249,7 +236,7 @@ def _get_node_chains(node_list): except: LOGGER.warning("Couldn't connect to %s REST API", node) return chain_list - + def check_for_consensus(chains , block_num): LOGGER.info("Checking Consensus on block number %s" , block_num) blocks = [] @@ -286,16 +273,16 @@ def _make_http_address(node_number): node_number = node.replace('8800' , '8008') return node_number -def _get_client_address(): +def _get_client_address(): command = "hostname -I | awk '{print $1}'" node_ip = subprocess.check_output(command , shell=True).decode().strip().replace("'", '"') return 'http://' + node_ip + ':8008' - + def _start_validator(): LOGGER.info('Starting the validator') cmd = "sudo -u sawtooth sawtooth-validator -vv" subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) - + def _stop_validator(): LOGGER.info('Stopping the validator') cmd = "sudo kill -9 $(ps aux | grep 'sawtooth-validator' | awk '{print $2}')" @@ -310,7 +297,7 @@ def _start_settings_tp(): def _stop_settings_tp(): LOGGER.info('Stopping the settings-tp') cmd = "sudo kill -9 $(ps aux 
| grep 'settings-tp' | awk '{print $2}')" - subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) + subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) def _create_genesis(): LOGGER.info("creating the genesis data") @@ -318,15 +305,15 @@ def _create_genesis(): os.chdir("/home/aditya") cmd = "sawadm genesis config-genesis.batch" subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) - - + + def _create_genesis_batch(): LOGGER.info("creating the config genesis batch") os.chdir("/home/aditya") cmd = "sawset genesis --force" subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) - - + + def post_batch_statuses(batch): headers = {'content-type': 'application/json'} response = query_rest_api( @@ -338,7 +325,7 @@ def get_batch_statuses(batch_ids=None, wait=None): batches = ",".join(batch_ids) except: batches = None - + if batches: if wait == 'default': response = query_rest_api('/batch_statuses?wait&id={}'.format(batches)) @@ -348,11 +335,11 @@ def get_batch_statuses(batch_ids=None, wait=None): return response else: response = query_rest_api('/batch_statuses?id=%s' % batches) - return response + return response else: response = query_rest_api('/batch_statuses') return response - + def get_state_limit(limit): response = query_rest_api('/state?limit=%s' % limit) return response @@ -367,6 +354,25 @@ def post_receipts(receipts): response = query_rest_api('/receipts', data=receipts, headers=headers) return response +def state_count(): + state_list = get_state_list() + count = len(state_list['data']) + try: + next_position = state_list['paging']['next_position'] + except: + next_position = None + + while(next_position): + state_list = get_state_list(start=next_position) + try: + next_position = state_list['paging']['next_position'] + except: + next_position = None + + count += len(state_list['data']) + return count + + def batch_count(): batch_list = get_batches() count = len(batch_list['data']) @@ -374,16 +380,17 @@ def batch_count(): next_position = 
batch_list['paging']['next_position'] except: next_position = None - + while(next_position): batch_list = get_batches(start=next_position) try: next_position = batch_list['paging']['next_position'] except: next_position = None - + count += len(batch_list['data']) - return count + return count + def transaction_count(): transaction_list = get_transactions() @@ -392,51 +399,56 @@ def transaction_count(): next_position = transaction_list['paging']['next_position'] except: next_position = None - + while(next_position): transaction_list = get_transactions(start=next_position) try: next_position = transaction_list['paging']['next_position'] except: next_position = None - + count += len(transaction_list['data']) - return count + return count def _create_expected_link(expected_ids): for id in expected_ids: link = '{}/batch_statuses?id={},{}'.format(address, id) return link + def _get_batch_list(response): batch_list = response['data'] - + try: next_position = response['paging']['next_position'] + print(next_position) except: next_position = None - + while(next_position): response = get_batches(start=next_position) + print(response) data_list = response['data'] try: next_position = response['paging']['next_position'] except: next_position = None - + + print(next_position) + batch_list += data_list - + return batch_list def _get_transaction_list(response): transaction_list = response['data'] - + try: next_position = response['paging']['next_position'] except: next_position = None - + while(next_position): response = get_transactions(start=next_position) data_list = response['data'] @@ -444,7 +456,41 @@ def _get_transaction_list(response): next_position = response['paging']['next_position'] except: next_position = None - + transaction_list += data_list - + return transaction_list + + +def _get_state_list(response): + state_list = response['data'] + + try: + next_position = response['paging']['next_position'] + except: + next_position = None + + while(next_position): + 
response = get_state_list(start=next_position) + data_list = response['data'] + try: + next_position = response['paging']['next_position'] + except: + next_position = None + + state_list += data_list + + return state_list + + +def post_batch_no_endpoint(batch, headers="None"): + if headers=="True": + headers = {'Content-Type': 'application/json'} + else: + headers = {'Content-Type': 'application/octet-stream'} + + response = query_rest_api( + '/', data=batch, headers=headers) + + response = submit_request('{}&wait={}'.format(response['link'], WAIT)) + return response From c5476a67e881b1169029b1b05e33f9038eaccbf3 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Wed, 24 Oct 2018 13:44:54 +0530 Subject: [PATCH 39/64] Update fixtures.py --- rest_api/tests/api_test/fixtures.py | 99 ++++++++++++----------------- 1 file changed, 41 insertions(+), 58 deletions(-) diff --git a/rest_api/tests/api_test/fixtures.py b/rest_api/tests/api_test/fixtures.py index 60088fb5e8..4a06e42956 100644 --- a/rest_api/tests/api_test/fixtures.py +++ b/rest_api/tests/api_test/fixtures.py @@ -1,4 +1,3 @@ - # Copyright 2018 Intel Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,13 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# ------------------------------------------------------------------------------ + import pytest import logging import urllib import json import os -import random -import hashlib from sawtooth_signing import create_context from sawtooth_signing import CryptoFactory @@ -37,27 +35,23 @@ from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction - from google.protobuf.json_format import MessageToDict from utils import get_batches, get_transactions, get_state_address, post_batch, get_blocks, \ get_state_list , _delete_genesis , _start_validator, \ _stop_validator , _create_genesis , _get_client_address, \ - _stop_settings_tp, _start_settings_tp, batch_count, transaction_count, get_batch_statuses + _stop_settings_tp, _start_settings_tp from payload import get_signer, create_intkey_transaction , create_batch,\ create_invalid_intkey_transaction, create_intkey_same_transaction, random_word_list, IntKeyPayload, \ make_intkey_address, Transactions + LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.INFO) -LIMIT = 100 - - -data = {} - + @pytest.fixture(scope="function") def break_genesis(request): """Setup Function for deleting the genesis data @@ -83,6 +77,7 @@ def teardown(): request.addfinalizer(teardown) + @pytest.fixture(scope="function") def invalid_batch(): """Setup method for creating invalid batches @@ -92,26 +87,26 @@ def invalid_batch(): expected_trxns = {} expected_batches = [] address = _get_client_address() - + LOGGER.info("Creating intkey transactions with set operations") - + txns = [ create_invalid_intkey_transaction("set", [] , 50 , signer), ] - + for txn in txns: dict = MessageToDict( txn, including_default_value_fields=True, preserving_proto_field_name=True) - + expected_trxns['trxn_id'] = [dict['header_signature']] - + LOGGER.info("Creating batches for transactions 1trn/batch") batches = [create_batch([txn], signer) for txn in txns] - + for batch in batches: dict = 
MessageToDict( batch, @@ -120,13 +115,13 @@ def invalid_batch(): batch_id = dict['header_signature'] expected_batches.append(batch_id) - + data['expected_txns'] = expected_trxns['trxn_id'][::-1] data['expected_batches'] = expected_batches[::-1] data['address'] = address post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] - + for batch in post_batch_list: try: response = post_batch(batch) @@ -135,9 +130,29 @@ def invalid_batch(): response = json.loads(error.fp.read().decode('utf-8')) LOGGER.info(response['error']['title']) LOGGER.info(response['error']['message']) - + return data + +@pytest.fixture(scope="function") +def setup_empty_trxs_batch(): + signer = get_signer() + + header = BatchHeader( + signer_public_key=signer.get_public_key().as_hex(), + transaction_ids=[]) + + header_bytes = header.SerializeToString() + + signature = signer.sign(header_bytes) + + batch = Batch( + header=header_bytes, + transactions=[], + header_signature=signature) + + return batch + @pytest.fixture(scope="function") def setup_valinv_txns(request): """Setup method for posting batches and returning the @@ -146,7 +161,8 @@ def setup_valinv_txns(request): Txns=Transactions(invalidtype="addr") data = Txns.get_batch_valinv_txns() return data - + + @pytest.fixture(scope="function") def setup_invval_txns(request): """Setup method for posting batches and returning the @@ -156,6 +172,7 @@ def setup_invval_txns(request): data = Txns.get_batch_invval_txns() return data + @pytest.fixture(scope="function") def setup_invalid_txns(request): """Setup method for posting batches and returning the @@ -165,6 +182,7 @@ def setup_invalid_txns(request): data = Txns.get_batch_invalid_txns() return data + @pytest.fixture(scope="function") def setup_invalid_invaddr(request): """Setup method for posting batches and returning the @@ -173,7 +191,8 @@ def setup_invalid_invaddr(request): Txns=Transactions(invalidtype="invaddr") data = Txns.get_batch_invalid_txns() return data - + 
+ @pytest.fixture(scope="function") def setup_same_txns(request): """Setup method for posting batches and returning the @@ -218,44 +237,8 @@ def setup_invalid_txns_fn(request): Txns=Transactions(invalidtype="fn") data = Txns.get_batch_invalid_txns_fam_name() return data - -@pytest.fixture(scope="function") -def setup_batch_multiple_transaction(): - data = {} - signer = get_signer() - transactions= [] - expected_trxns = [] - expected_batches = [] - initial_state_length = len(get_state_list()) - LOGGER.info("Creating intkey transactions with set operations") - for val in range(15): - txns = create_intkey_transaction("set", [] , 50 , signer) - transactions.append(txns) - - - for txn in transactions: - data = MessageToDict( - txn, - including_default_value_fields=True, - preserving_proto_field_name=True) - trxn_id = data['header_signature'] - expected_trxns.append(trxn_id) - - - batch_s= create_batch(transactions, signer) - post_batch_list = BatchList(batches=[batch_s]).SerializeToString() - - LOGGER.info("Submitting batches to the handlers") - - try: - response = post_batch(post_batch_list) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is not reachable") - data = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(data['error']['title']) - LOGGER.info(data['error']['message']) + - return expected_trxn From 7e1911cf13d76b2388daf5b1e1112f03b0ad07e5 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Wed, 24 Oct 2018 15:53:46 +0530 Subject: [PATCH 40/64] Create requirements.txt --- rest_api/tests/api_test/requirements.txt | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 rest_api/tests/api_test/requirements.txt diff --git a/rest_api/tests/api_test/requirements.txt b/rest_api/tests/api_test/requirements.txt new file mode 100644 index 0000000000..830bd0e1be --- /dev/null +++ b/rest_api/tests/api_test/requirements.txt @@ -0,0 +1,10 @@ +[pytest] +addopts = -s -v --json-report --json-report-file=report.json -p no:warnings 
+python_files = test_rest*.py +log_cli_date_format = %Y-%m-%d %H:%M:%S +log_cli_format = %(asctime)s %(levelname)s %(message)s +log_date_format = %Y-%m-%d %H:%M:%S +log_file = pytest-logs.txt +log_file_date_format = %Y-%m-%d %H:%M:%S +log_file_format = %(asctime)s %(levelname)s %(message)s +log_format = %(asctime)s %(levelname)s %(message)s From aa6e4ff69e68bfe938b5dc0af4ede8f2151bab51 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Wed, 24 Oct 2018 15:55:48 +0530 Subject: [PATCH 41/64] Update base.py --- rest_api/tests/api_test/base.py | 1 - 1 file changed, 1 deletion(-) diff --git a/rest_api/tests/api_test/base.py b/rest_api/tests/api_test/base.py index 6eb41a667e..4cbf190e54 100644 --- a/rest_api/tests/api_test/base.py +++ b/rest_api/tests/api_test/base.py @@ -88,7 +88,6 @@ def assert_trace(self, response): """Asserts whether the response has trace parameter """ assert 'trace' in response -# assert bool(response['trace']) assert response['trace'] == TRACE def assert_check_consensus(self, response): From 1aa1d739cc7402faf75ced76f575ad752c62d8c4 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Wed, 24 Oct 2018 15:56:21 +0530 Subject: [PATCH 42/64] Update conftest.py --- rest_api/tests/api_test/conftest.py | 205 +++++++++++++++------------- 1 file changed, 112 insertions(+), 93 deletions(-) diff --git a/rest_api/tests/api_test/conftest.py b/rest_api/tests/api_test/conftest.py index 1fc1edc9cd..ebf10dc847 100644 --- a/rest_api/tests/api_test/conftest.py +++ b/rest_api/tests/api_test/conftest.py @@ -1,4 +1,3 @@ - # Copyright 2018 Intel Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# ------------------------------------------------------------------------------ + import pytest import sys import platform @@ -21,6 +21,7 @@ import urllib import json import os +import time from sawtooth_signing import create_context from sawtooth_signing import CryptoFactory @@ -44,7 +45,7 @@ get_state_list , _delete_genesis , _start_validator, \ _stop_validator , _create_genesis , _get_client_address, \ _stop_settings_tp, _start_settings_tp, _get_client_address, batch_count, transaction_count,\ - get_batch_statuses + get_batch_statuses, state_count from payload import get_signer, create_intkey_transaction , create_batch @@ -54,7 +55,115 @@ LIMIT = 100 -BATCH_SIZE = 15 +BATCH_SIZE = 1 + +def _create_transaction(): + txns = [create_intkey_transaction("set", [] , 50 , signer) for i in range(BATCH_SIZE)] + return txns + + +def _create_batch(): + batches = [create_batch([txn], signer) for txn in txns] + return batches + +@pytest.fixture(scope="session") +def setup(request): + """Setup method for posting batches and returning the + response + """ + LOGGER.info("Starting setup method for test cases") + data = {} + signer = get_signer() + expected_trxns = {} + expected_batches = [] + transaction_list = [] + initial_state_length = state_count() + initial_batch_length = batch_count() + initial_transaction_length = transaction_count() + address = _get_client_address() + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [create_intkey_transaction("set", [] , 50 , signer) for i in range(BATCH_SIZE)] + + for txn in txns: + dict = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + if 'trxn_id' not in expected_trxns: + expected_trxns['trxn_id'] = [] + if 'payload' not in expected_trxns: + expected_trxns['payload'] =[] + + expected_trxns['trxn_id'].append(dict['header_signature']) + expected_trxns['payload'].append(dict['payload']) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + 
batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + dict = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = dict['header_signature'] + expected_batches.append(batch_id) + + length_batches = len(expected_batches) + length_transactions = len(expected_trxns) + + data['expected_batch_length'] = initial_batch_length + length_batches + data['expected_trn_length'] = initial_transaction_length + length_transactions + data['expected_txns'] = expected_trxns['trxn_id'][::-1] + data['payload'] = expected_trxns['payload'][::-1] + data['expected_batches'] = expected_batches[::-1] + data['signer_key'] = signer.get_public_key().as_hex() + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + + for batch in post_batch_list: + try: + response = post_batch(batch) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + + for batch in expected_batches: + response = get_batch_statuses([batch]) + status = response['data'][0]['status'] + + batch_list = get_batches() + data['batch_ids'] = [batch['header_signature'] for batch in batch_list['data']] + data['batch_list'] = batch_list + transaction_list = get_transactions() + data['transaction_list'] = transaction_list + transaction_ids = [trans['header_signature'] for trans in transaction_list['data']] + data['transaction_ids'] = transaction_ids + block_list = get_blocks() + data['block_list'] = block_list + block_ids = [block['header_signature'] for block in block_list['data']] + data['block_ids'] = block_ids[:-1] + batch_ids = [block['header']['batch_ids'][0] for block in block_list['data']] + expected_head = block_ids[0] + data['expected_head'] = expected_head + state_addresses = 
[state['address'] for state in get_state_list()['data']] + data['state_address'] = state_addresses + state_head_list = [get_state_address(address)['head'] for address in state_addresses] + data['state_head'] = state_head_list + data['address'] = address + data['limit'] = LIMIT + data['start'] = expected_batches[::-1][0] + return data def pytest_addoption(parser): @@ -102,7 +211,6 @@ def pytest_addoption(parser): def pytest_collection_modifyitems(config, items): - """Filters tests based on markers when parameters passed through the cli """ @@ -147,92 +255,3 @@ def pytest_collection_modifyitems(config, items): selected_items = items[:num] items[:] = selected_items return items - -@pytest.fixture(scope="session", autouse=True) -def setup(request): - """Setup method for posting batches and returning the - response - """ - data = {} - signer = get_signer() - expected_trxns = {} - expected_batches = [] - transaction_list = [] - initial_state_length = len(get_state_list()) - initial_batch_length = batch_count() - initial_transaction_length = transaction_count() - address = _get_client_address() - - LOGGER.info("Creating intkey transactions with set operations") - - txns = [create_intkey_transaction("set", [] , 50 , signer) for i in range(BATCH_SIZE)] - - for txn in txns: - dict = MessageToDict( - txn, - including_default_value_fields=True, - preserving_proto_field_name=True) - - expected_trxns['trxn_id'] = [dict['header_signature']] - expected_trxns['payload'] = [dict['payload']] - - LOGGER.info("Creating batches for transactions 1trn/batch") - - batches = [create_batch([txn], signer) for txn in txns] - - for batch in batches: - dict = MessageToDict( - batch, - including_default_value_fields=True, - preserving_proto_field_name=True) - - batch_id = dict['header_signature'] - expected_batches.append(batch_id) - - length_batches = len(expected_batches) - length_transactions = len(expected_trxns) - - data['expected_batch_length'] = initial_batch_length + length_batches - 
data['expected_trn_length'] = initial_transaction_length + length_transactions - data['expected_txns'] = expected_trxns['trxn_id'][::-1] - data['payload'] = expected_trxns['payload'][::-1] - data['expected_batches'] = expected_batches[::-1] - data['signer_key'] = signer.get_public_key().as_hex() - - post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] - - LOGGER.info("Submitting batches to the handlers") - - for batch in post_batch_list: - try: - response = post_batch(batch) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is not reachable") - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) - - - block_list = get_blocks() - data['block_list'] = block_list - batch_list = get_batches() - data['batch_list'] = batch_list - transaction_list = get_transactions() - data['transaction_list'] = transaction_list - transaction_ids = [trans['header_signature'] for trans in transaction_list['data']] - data['transaction_ids'] = transaction_ids - block_ids = [block['header_signature'] for block in block_list['data']] - data['block_ids'] = block_ids[:-1] - batch_ids = [block['header']['batch_ids'][0] for block in block_list['data']] - data['batch_ids'] = batch_ids - expected_head = block_ids[0] - data['expected_head'] = expected_head - state_addresses = [state['address'] for state in get_state_list()['data']] - data['state_address'] = state_addresses - state_head_list = [get_state_address(address)['head'] for address in state_addresses] - data['state_head'] = state_head_list - data['address'] = address - data['limit'] = LIMIT - data['start'] = expected_batches[::-1][0] - data['family_name']=[block['batches'][0]['transactions'][0]['header']['family_name'] for block in block_list['data']] - return data From 87d7055bc356eba79131adf789d480013f087b00 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Fri, 26 Oct 2018 14:55:43 +0530 Subject: 
[PATCH 43/64] Update requirements.txt --- rest_api/tests/api_test/requirements.txt | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/rest_api/tests/api_test/requirements.txt b/rest_api/tests/api_test/requirements.txt index 830bd0e1be..c34f99b62c 100644 --- a/rest_api/tests/api_test/requirements.txt +++ b/rest_api/tests/api_test/requirements.txt @@ -1,10 +1,7 @@ -[pytest] -addopts = -s -v --json-report --json-report-file=report.json -p no:warnings -python_files = test_rest*.py -log_cli_date_format = %Y-%m-%d %H:%M:%S -log_cli_format = %(asctime)s %(levelname)s %(message)s -log_date_format = %Y-%m-%d %H:%M:%S -log_file = pytest-logs.txt -log_file_date_format = %Y-%m-%d %H:%M:%S -log_file_format = %(asctime)s %(levelname)s %(message)s -log_format = %(asctime)s %(levelname)s %(message)s +pytest==3.9.1 +pytest-aiohttp==0.3.0 +pytest-json-report==0.7.0 +pytest-metadata==1.7.0 +pytest-ordering==0.5 + + From fe8332c2aba54d6212bc706be239d97c9b3fe0e9 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Fri, 26 Oct 2018 14:56:09 +0530 Subject: [PATCH 44/64] Update requirements.txt --- rest_api/tests/api_test/requirements.txt | 2 -- 1 file changed, 2 deletions(-) diff --git a/rest_api/tests/api_test/requirements.txt b/rest_api/tests/api_test/requirements.txt index c34f99b62c..e6c1ecacce 100644 --- a/rest_api/tests/api_test/requirements.txt +++ b/rest_api/tests/api_test/requirements.txt @@ -3,5 +3,3 @@ pytest-aiohttp==0.3.0 pytest-json-report==0.7.0 pytest-metadata==1.7.0 pytest-ordering==0.5 - - From b0a6e43d9270c19ef9bece0b76c20ea39b39700f Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Tue, 30 Oct 2018 15:05:29 +0530 Subject: [PATCH 45/64] Update base.py --- rest_api/tests/api_test/base.py | 66 +++++++++++++++++++++++++-------- 1 file changed, 50 insertions(+), 16 deletions(-) diff --git a/rest_api/tests/api_test/base.py b/rest_api/tests/api_test/base.py index 4cbf190e54..3211ddb88a 100644 --- a/rest_api/tests/api_test/base.py +++ 
b/rest_api/tests/api_test/base.py @@ -13,20 +13,29 @@ # limitations under the License. # ------------------------------------------------------------------------------ import aiohttp +import logging from base64 import b64decode +from utils import _get_node_list + + CONSENSUS_ALGO = b'Devmode' FAMILY_NAME = 'intkey' FAMILY_VERSION = '1.0' DEFAULT_LIMIT = 100 TRACE = False NONCE = '' +TRIES=5 + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) class RestApiBaseTest(object): """Base class for Rest Api tests that simplifies making assertions for the test cases - """ + """ + def assert_status(self, response, status): for data in response['data']: assert data['status'] == status @@ -88,13 +97,13 @@ def assert_trace(self, response): """Asserts whether the response has trace parameter """ assert 'trace' in response +# assert bool(response['trace']) assert response['trace'] == TRACE def assert_check_consensus(self, response): """Asserts response has consensus as parameter """ assert 'consensus' in response - assert response['consensus'] == CONSENSUS_ALGO def assert_state_root_hash(self, response): """Asserts the response has state root hash @@ -130,6 +139,8 @@ def assert_valid_link(self, response, expected_link): """Asserts a response has a link url string with an expected ending """ + print(response['link']) + print(expected_link) assert 'link' in response assert response['link'] == expected_link self.assert_valid_url(response['link'], expected_link) @@ -187,31 +198,42 @@ def assert_valid_data(self, response): assert isinstance(data, list) self.assert_items(data, dict) - def assert_valid_data_list(self, response, expected_length): + def assert_valid_data_length(self, response, expected_length): """Asserts a response has a data list of dicts of an expected length. 
""" + LOGGER.info(len(response)) + LOGGER.info(expected_length) assert len(response) == expected_length - def assert_check_block_seq(self, blocks, expected_batches, expected_txns): + def assert_check_block_seq(self, blocks, expected_batches, expected_txns, payload, signer_key): """Asserts block is constructed properly after submitting batches """ if not isinstance(blocks, list): blocks = [blocks] - consensus_algo = CONSENSUS_ALGO + if not isinstance(expected_batches, list): + expected_batches = [expected_batches] + + if not isinstance(expected_batches, list): + expected_batches = [expected_batches] + + if not isinstance(expected_txns, list): + expected_txns = [expected_txns] + + if not isinstance(payload, list): + payload = [payload] + - ep = list(zip(blocks, expected_batches, expected_txns)) + ep = list(zip(blocks, expected_batches, expected_txns, payload)) - for block, expected_batch, expected_txn in ep: + for block, expected_batch, expected_txn, payload in ep: assert isinstance(block, dict) assert isinstance(block['header'], dict) - assert consensus_algo == b64decode(block['header']['consensus']) batches = block['batches'] assert isinstance(batches, list) assert len(batches) == 1 - assert isinstance(batches, dict) - self.assert_check_batch_seq(batches, expected_batch, expected_txn) + self.assert_check_batch_seq(batches, expected_batch, expected_txn, payload, signer_key) def assert_check_batch_seq(self, batches, expected_batches, expected_txns, payload, signer_key): @@ -226,8 +248,11 @@ def assert_check_batch_seq(self, batches, expected_batches, expected_txns, if not isinstance(expected_txns, list): expected_txns = [expected_txns] + + if not isinstance(payload, list): + payload = [payload] - for batch, expected_batch , expected_txn in zip(batches, expected_batches , expected_txns): + for batch, expected_batch , expected_txn, payload in zip(batches, expected_batches , expected_txns, payload): assert expected_batch == batch['header_signature'] assert 
isinstance(batch['header'], dict) txns = batch['transactions'] @@ -238,7 +263,7 @@ def assert_check_batch_seq(self, batches, expected_batches, expected_txns, self.assert_signer_public_key(batch, signer_key) self.assert_trace(batch) self.assert_check_transaction_seq(txns, expected_txn, - payload[0], signer_key) + payload, signer_key) def assert_check_transaction_seq(self, txns, expected_ids, @@ -250,8 +275,12 @@ def assert_check_transaction_seq(self, txns, expected_ids, if not isinstance(expected_ids, list): expected_ids = [expected_ids] + + if not isinstance(payload, list): + payload = [payload] + - for txn, expected_id in zip(txns, expected_ids): + for txn, expected_id, payload in zip(txns, expected_ids, payload): assert expected_id == txn['header_signature'] assert isinstance(txn['header'], dict) self.assert_payload(txn, payload) @@ -262,10 +291,15 @@ def assert_check_transaction_seq(self, txns, expected_ids, self.assert_signer_public_key(txn, signer_key) self.assert_batcher_public_key(txn, signer_key) - def assert_check_state_seq(self, state, expected): + def assert_check_state_seq(self, response, expected): """Asserts state is updated properly """ - pass + self.assertEqual(len(proto_entries), len(json_entries)) + for pb_leaf, js_leaf in zip(proto_entries, json_entries): + self.assertIn('address', js_leaf) + self.assertIn('data', js_leaf) + self.assertEqual(pb_leaf.address, js_leaf['address']) + self.assertEqual(pb_leaf.data, b64decode(js_leaf['data'])) def wait_until_status(url, status_code=200, tries=5): """Pause the program until the given url returns the required status. @@ -303,7 +337,7 @@ def wait_until_status(url, status_code=200, tries=5): raise AssertionError( "{} is not available within {} attempts".format(url, tries)) - def wait_for_rest_apis(endpoints, tries=5): + def wait_for_rest_apis(endpoints, tries=TRIES): """Pause the program until all the given REST API endpoints are available. 
Args: From 52b1348274bcd3dc53348152a21ef1d195ca2f50 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Mon, 12 Nov 2018 10:21:46 +0530 Subject: [PATCH 46/64] Update payload.py --- rest_api/tests/api_test/payload.py | 191 +++++++++++++++++++++++++++-- 1 file changed, 180 insertions(+), 11 deletions(-) diff --git a/rest_api/tests/api_test/payload.py b/rest_api/tests/api_test/payload.py index 6241a692ff..de477b5922 100644 --- a/rest_api/tests/api_test/payload.py +++ b/rest_api/tests/api_test/payload.py @@ -45,15 +45,164 @@ from google.protobuf.message import DecodeError from google.protobuf.json_format import MessageToDict -from utils import batch_count, transaction_count, get_batch_statuses, post_batch, get_reciepts,get_transactions, get_state_list + +from utils import get_batches, get_transactions, get_state_address, post_batch, get_blocks,\ + get_state_list, _get_client_address, \ + batch_count, transaction_count,\ + get_batch_statuses, state_count INTKEY_ADDRESS_PREFIX = hashlib.sha512( 'intkey'.encode('utf-8')).hexdigest()[0:6] LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.INFO) - + +LIMIT = 100 WAIT = 300 +BATCH_SIZE = 1 +WORD_COUNT=50 + +class Setup: + def __init__(self): + self.data = {} + self.signer= get_signer() + self.address = _get_client_address() + self.url='{}/batches'.format(self.address) + + def _create_transactions(self): + LOGGER.info("Creating intkey transactions with set operations") + txns = [create_intkey_transaction("set", [] , WORD_COUNT , self.signer) for i in range(BATCH_SIZE)] + return txns + + + def _create_batches(self,txns): + LOGGER.info("Creating batches for transactions 1trn/batch") + batches = [create_batch([txn], self.signer) for txn in txns] + return batches + + def _create_batch_list(self,batches): + batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + return batch_list + + + def _batch_statuses(self,expected_batches): + LOGGER.info("Batch statuses for the created batches") + for 
batch in expected_batches: + response = get_batch_statuses([batch]) + status = response['data'][0]['status'] + LOGGER.info(status) + + + def _expected_batch_ids(self,batches): + LOGGER.info("Expected batch ids") + expected_batches = [] + for batch in batches: + dict = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = dict['header_signature'] + expected_batches.append(batch_id) + return expected_batches + + + def _expected_txn_ids(self,txns): + LOGGER.info("Expected transaction ids") + expected_txns = {} + for txn in txns: + dict = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + if 'trxn_id' not in expected_txns: + expected_txns['trxn_id'] = [] + if 'payload' not in expected_txns: + expected_txns['payload'] =[] + + expected_txns['trxn_id'].append(dict['header_signature']) + expected_txns['payload'].append(dict['payload']) + return expected_txns + + + def _submit_batches(self,batch_list): + print("Submitting batches to the route handlers") + import time + start_time = time.time() + for batch in batch_list: + try: + response = post_batch(batch) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + print(time.time()-start_time) + return response + + + def _initial_count(self): + LOGGER.info("Calculating the initial count of batches,transactions, state before submission of batches") + data = self.data + data['state_length'] = state_count() + data['transaction_length'] = transaction_count() + data['batch_length'] = batch_count() + return data + + + def _expected_count(self,txns,batches): + LOGGER.info("Calculating the expected count of batches, transactions, state") + data = self.data + self._initial_count() + expected_txns=self._expected_txn_ids(txns) + 
expected_batches=self._expected_batch_ids(batches) + length_batches = len(expected_batches) + length_transactions = len(expected_txns['trxn_id']) + data['expected_batch_length'] = data['batch_length'] + length_batches + data['expected_trn_length'] = data['transaction_length'] + length_transactions + return data + + + def _expected_data(self,txns,batches): + LOGGER.info("Gathering expected data before submission of batches") + data = self.data + self._expected_count(txns,batches) + expected_txns=self._expected_txn_ids(txns) + expected_batches=self._expected_batch_ids(batches) + + data['expected_txns'] = expected_txns['trxn_id'][::-1] + data['payload'] = expected_txns['payload'][::-1] + data['expected_batches'] = expected_batches[::-1] + data['signer_key'] = self.signer.get_public_key().as_hex() + return data + + def _post_data(self,txns,batches): + print("Gathering data post submission of batches") + import time + start_time = time.time() + data = self.data + expected_batches=self._expected_batch_ids(batches) + batch_list = get_batches() + data['batch_list'] = batch_list + data['batch_ids'] = [batch['header_signature'] for batch in batch_list['data']] + transaction_list = get_transactions() + data['transaction_list'] = transaction_list + data['transaction_ids'] = [trans['header_signature'] for trans in transaction_list['data']] + block_list = get_blocks() + data['block_list'] = block_list + block_ids = [block['header_signature'] for block in block_list['data']] + data['block_ids'] = block_ids[:-1] + expected_head = block_ids[0] + data['expected_head'] = expected_head + state_addresses = [state['address'] for state in get_state_list()['data']] + data['state_address'] = state_addresses + state_head_list = [get_state_address(address)['head'] for address in state_addresses] + data['state_head'] = state_head_list + data['address'] = self.address + data['limit'] = LIMIT + data['start'] = expected_batches[::-1][0] + return data class IntKeyPayload(object): @@ -82,8 +231,33 
@@ def sha512(self): self._sha512 = hashlib.sha512(self.to_cbor()).hexdigest() return self._sha512 + +class XOPayload(object): + def create_users(self,users): + for username in users: + _send_cmd('sawtooth keygen {} --force'.format(username)) + + def create_game(self, game, user,address): + cmd = 'xo create game-1 --username {}'.format(user) + _send_cmd(cmd) + + def take_game(self, game, user, position,address): + cmd = 'xo take game-1 {} --username {}'.format(position,user) + _send_cmd(cmd) + + def list_game(self): + cmd = 'xo list' + _send_cmd(cmd) + + def show_game(self,game,address): + cmd = 'xo show game-1'.format(game,address) + + def delete_game(self,game,address): + cmd = 'xo delete game-1 --username {}'.format(user) + _send_cmd(cmd) + + class Transactions: - def __init__(self, invalidtype): self.signer = get_signer() self.data = {} @@ -226,18 +400,15 @@ def get_txns_commit_data(self, txns, signer, data): batch_id = dict['header_signature'] expected_batches.append(batch_id) self.data['response'] = response['data'][0]['status'] + print(response) except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is not reachable") json_data = json.loads(error.fp.read().decode('utf-8')) - #print(json_data['error']['code']) - #print(json_data['error']['message']) LOGGER.info(json_data['error']['title']) LOGGER.info(json_data['error']['message']) LOGGER.info(json_data['error']['code']) self.data['code'] = json_data['error']['code'] - #receipts = get_reciepts(expected_trxns['trxn_id']) - #print(receipts) + self.state_addresses = [state['address'] for state in get_state_list()['data']] self.data['state_address'] = self.state_addresses self.data['initial_batch_length'] = initial_batch_length @@ -358,8 +529,6 @@ def get_txns_data(self, addr, deps, payload): return transaction - - def create_intkey_transaction(verb, deps, count, signer): words = random_word_list(count) @@ -491,4 +660,4 @@ def random_word_list(count): with open('/usr/share/dict/words', 'r') as fd: return 
[x.strip() for x in fd.readlines()[0:count]] else: - return [random_word() for _ in range(0, count)] \ No newline at end of file + return [random_word() for _ in range(0, count)] From 204f0583998c6be8c5904a219c75834dd1a8b096 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Mon, 12 Nov 2018 10:22:28 +0530 Subject: [PATCH 47/64] Update conftest.py --- rest_api/tests/api_test/conftest.py | 141 +++------------------------- 1 file changed, 11 insertions(+), 130 deletions(-) diff --git a/rest_api/tests/api_test/conftest.py b/rest_api/tests/api_test/conftest.py index ebf10dc847..1f2e409364 100644 --- a/rest_api/tests/api_test/conftest.py +++ b/rest_api/tests/api_test/conftest.py @@ -21,148 +21,29 @@ import urllib import json import os -import time -from sawtooth_signing import create_context -from sawtooth_signing import CryptoFactory -from sawtooth_signing import ParseError -from sawtooth_signing.secp256k1 import Secp256k1PrivateKey - -from sawtooth_rest_api.protobuf.validator_pb2 import Message -from sawtooth_rest_api.protobuf import client_batch_submit_pb2 -from sawtooth_rest_api.protobuf import client_batch_pb2 -from sawtooth_rest_api.protobuf import client_list_control_pb2 - -from sawtooth_rest_api.protobuf.batch_pb2 import Batch -from sawtooth_rest_api.protobuf.batch_pb2 import BatchList -from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader -from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader -from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction - -from google.protobuf.json_format import MessageToDict - -from utils import get_batches, get_transactions, get_state_address, post_batch, get_blocks,\ - get_state_list , _delete_genesis , _start_validator, \ - _stop_validator , _create_genesis , _get_client_address, \ - _stop_settings_tp, _start_settings_tp, _get_client_address, batch_count, transaction_count,\ - get_batch_statuses, state_count - -from payload import get_signer, create_intkey_transaction , create_batch +from 
payload import Setup - LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.INFO) -LIMIT = 100 -BATCH_SIZE = 1 - -def _create_transaction(): - txns = [create_intkey_transaction("set", [] , 50 , signer) for i in range(BATCH_SIZE)] - return txns - - -def _create_batch(): - batches = [create_batch([txn], signer) for txn in txns] - return batches - @pytest.fixture(scope="session") def setup(request): """Setup method for posting batches and returning the response """ - LOGGER.info("Starting setup method for test cases") + LOGGER.info("Starting Setup method for posting batches using intkey as payload") data = {} - signer = get_signer() - expected_trxns = {} - expected_batches = [] - transaction_list = [] - initial_state_length = state_count() - initial_batch_length = batch_count() - initial_transaction_length = transaction_count() - address = _get_client_address() - - LOGGER.info("Creating intkey transactions with set operations") - - txns = [create_intkey_transaction("set", [] , 50 , signer) for i in range(BATCH_SIZE)] - - for txn in txns: - dict = MessageToDict( - txn, - including_default_value_fields=True, - preserving_proto_field_name=True) - - if 'trxn_id' not in expected_trxns: - expected_trxns['trxn_id'] = [] - if 'payload' not in expected_trxns: - expected_trxns['payload'] =[] - - expected_trxns['trxn_id'].append(dict['header_signature']) - expected_trxns['payload'].append(dict['payload']) - - LOGGER.info("Creating batches for transactions 1trn/batch") - - batches = [create_batch([txn], signer) for txn in txns] - - for batch in batches: - dict = MessageToDict( - batch, - including_default_value_fields=True, - preserving_proto_field_name=True) - - batch_id = dict['header_signature'] - expected_batches.append(batch_id) - - length_batches = len(expected_batches) - length_transactions = len(expected_trxns) - - data['expected_batch_length'] = initial_batch_length + length_batches - data['expected_trn_length'] = initial_transaction_length + length_transactions 
- data['expected_txns'] = expected_trxns['trxn_id'][::-1] - data['payload'] = expected_trxns['payload'][::-1] - data['expected_batches'] = expected_batches[::-1] - data['signer_key'] = signer.get_public_key().as_hex() - - post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] - - LOGGER.info("Submitting batches to the handlers") - - - for batch in post_batch_list: - try: - response = post_batch(batch) - except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is not reachable") - response = json.loads(error.fp.read().decode('utf-8')) - LOGGER.info(response['error']['title']) - LOGGER.info(response['error']['message']) - - - for batch in expected_batches: - response = get_batch_statuses([batch]) - status = response['data'][0]['status'] - - batch_list = get_batches() - data['batch_ids'] = [batch['header_signature'] for batch in batch_list['data']] - data['batch_list'] = batch_list - transaction_list = get_transactions() - data['transaction_list'] = transaction_list - transaction_ids = [trans['header_signature'] for trans in transaction_list['data']] - data['transaction_ids'] = transaction_ids - block_list = get_blocks() - data['block_list'] = block_list - block_ids = [block['header_signature'] for block in block_list['data']] - data['block_ids'] = block_ids[:-1] - batch_ids = [block['header']['batch_ids'][0] for block in block_list['data']] - expected_head = block_ids[0] - data['expected_head'] = expected_head - state_addresses = [state['address'] for state in get_state_list()['data']] - data['state_address'] = state_addresses - state_head_list = [get_state_address(address)['head'] for address in state_addresses] - data['state_head'] = state_head_list - data['address'] = address - data['limit'] = LIMIT - data['start'] = expected_batches[::-1][0] + ctx = Setup() + tasks=[] + txns = ctx._create_transactions() + batches = ctx._create_batches(txns) + expected_data = ctx._expected_data(txns,batches) + post_batch_list = 
ctx._create_batch_list(batches) + ctx._submit_batches(post_batch_list) + data = ctx._post_data(txns,batches) + data.update(expected_data) return data From 190e41edb09e5379ac8da2dd9c294b792ae354b8 Mon Sep 17 00:00:00 2001 From: adityasingh177 Date: Tue, 13 Nov 2018 16:35:04 +0530 Subject: [PATCH 48/64] Update test_rest_api_post.py --- rest_api/tests/api_test/post/test_rest_api_post.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rest_api/tests/api_test/post/test_rest_api_post.py b/rest_api/tests/api_test/post/test_rest_api_post.py index 52bbbc2623..a8b528d399 100644 --- a/rest_api/tests/api_test/post/test_rest_api_post.py +++ b/rest_api/tests/api_test/post/test_rest_api_post.py @@ -175,7 +175,7 @@ async def test_rest_api_post_batch(self): message = response['data'][0]['invalid_transactions'][0]['message'] LOGGER.info(message) - for batch in batch_ids: + for batch in expected_batch_ids: if batch not in block_batch_ids: LOGGER.info("Block is not created for the respective batch") From d7aebdb1760c7ac4129863058b075c29463bee73 Mon Sep 17 00:00:00 2001 From: sandeeplandt Date: Mon, 3 Dec 2018 13:04:16 +0530 Subject: [PATCH 49/64] Added few intkey dep txns test cases Signed-off-by: sandeeplandt --- rest_api/tests/api_test/base.py | 35 + .../tests/api_test/dep_txns/pytest-logs.txt | 95 ++ rest_api/tests/api_test/dep_txns/report.json | 1 + .../dep_txns/test_rest_api_dep_txns.py | 830 ++++++++++++++++++ rest_api/tests/api_test/payload.py | 33 + rest_api/tests/api_test/utils.py | 8 +- 6 files changed, 1000 insertions(+), 2 deletions(-) create mode 100644 rest_api/tests/api_test/dep_txns/pytest-logs.txt create mode 100644 rest_api/tests/api_test/dep_txns/report.json create mode 100644 rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py diff --git a/rest_api/tests/api_test/base.py b/rest_api/tests/api_test/base.py index 3211ddb88a..b381dcbd1c 100644 --- a/rest_api/tests/api_test/base.py +++ b/rest_api/tests/api_test/base.py @@ -352,3 +352,38 @@ def 
wait_for_rest_apis(endpoints, tries=TRIES): '{}/blocks'.format(url), status_code=200, tries=tries) + def assert_check_txn_dependency_commit(self, response): + """Asserts transaction dependencies in response + and verifying if the txn_id is listed under dependncies + """ + if response['data'][0]['status'] == 'COMMITTED': + assert response['data'][0]['status'] == 'COMMITTED' + + + def assert_check_txn_dependency_invalid(self, response): + """Asserts transaction dependencies in response + and verifying if the txn_id is listed under dependncies + """ + + + if response['data'][0]['status'] == 'INVALID': + assert response['data'][0]['status'] == 'INVALID' + + def assert_check_txn_dependency_unknown(self, response): + """Asserts transaction dependencies in response + and verifying if the txn_id is listed under dependncies + """ + + + if response['data'][0]['status'] == 'UNKNOWN': + assert response['data'][0]['status'] == 'UNKNOWN' + + + def assert_check_txn_dependency(self, response, txn_ids): + """Asserts transaction dependencies in response + and verifying if the txn_id is listed under dependncies + """ + + if 'dependencies' in response['data']['header']: + dep_txn = response['data']['header']['dependencies'] + return any(txn in dep_txn for txn in txn_ids) \ No newline at end of file diff --git a/rest_api/tests/api_test/dep_txns/pytest-logs.txt b/rest_api/tests/api_test/dep_txns/pytest-logs.txt new file mode 100644 index 0000000000..a37d2ee5ec --- /dev/null +++ b/rest_api/tests/api_test/dep_txns/pytest-logs.txt @@ -0,0 +1,95 @@ +2018-12-03 12:54:05 INFO Starting Setup method for posting batches using intkey as payload +2018-12-03 12:54:05 INFO Creating intkey transactions with set operations +2018-12-03 12:54:05 INFO Creating batches for transactions 1trn/batch +2018-12-03 12:54:05 INFO Gathering expected data before submission of batches +2018-12-03 12:54:05 INFO Calculating the expected count of batches, transactions, state +2018-12-03 12:54:05 INFO Calculating the 
initial count of batches,transactions, state before submission of batches +2018-12-03 12:54:28 INFO Expected transaction ids +2018-12-03 12:54:28 INFO Expected batch ids +2018-12-03 12:54:28 INFO Expected transaction ids +2018-12-03 12:54:28 INFO Expected batch ids +2018-12-03 12:54:29 INFO Expected batch ids +2018-12-03 12:54:31 INFO Starting test for batch post +2018-12-03 12:54:31 INFO Creating intkey transactions with set operations +2018-12-03 12:54:31 INFO Creating intkey transactions with inc operations with dependent transactions as first transaction +2018-12-03 12:54:31 INFO Creating batches for transactions 1trn/batch +2018-12-03 12:54:31 INFO Submitting batches to the handlers +2018-12-03 12:54:34 INFO Verifying the responses status +2018-12-03 12:54:34 INFO Batch with id 1966fa42a905f16c77e0ee9be1de1427c568118a301e917ae3d945ea2ee05ca26485f9854d773123597d222c1ac5a9366bd8a737c24ad8a531383eb1a54d5c0d is successfully got committed +2018-12-03 12:54:34 INFO Batch with id 950ea7bd67cf674da99d5404f77ed871d6ec5f3d55c2c257d6017f45340ededb25b7f758a12946f044fcd705adcf6f9735668f9353079cd70bd6052bd9868dbc is successfully got committed +2018-12-03 12:54:34 INFO Verifying the txn details listed under the dependencies +2018-12-03 12:54:34 INFO Successfully got the dependencies for transaction id 63a943c0f60dc30a52ae4b3047dc33d718c032cff62d9f9fe630436a36adc1ec6bf4f8667ec161974cebb51447761fcf07f4b12caeaac38900361244ffe1b67c +2018-12-03 12:54:34 INFO The dependencies for transaction id is blank0efffa0d8e69a3c82198674a59b3e758424d43f16d7fa0b06f8b44fd930cfdd70f7aec164a96464c0e3ca96437719598748933d699c48143ef40121877c678bd +2018-12-03 12:54:35 INFO Checking Consensus on block number 1 +2018-12-03 12:54:35 INFO Starting test for batch post +2018-12-03 12:54:35 INFO Creating intkey transactions with set operations +2018-12-03 12:54:35 INFO Creating intkey transactions with inc operations with dependent transactions as first transaction +2018-12-03 12:54:35 INFO Creating intkey 
transactions with inc operations with dependent transactions as first and second transaction +2018-12-03 12:54:35 INFO Creating batches for transactions 1trn/batch +2018-12-03 12:54:35 INFO Submitting batches to the handlers +2018-12-03 12:54:41 INFO Verifying the responses status +2018-12-03 12:54:41 INFO Batch with id b9ebf1fcee5cb0f528a125ecf1dcb111768d11a2d369dd36c0ba95db1d00ed880e14e58e9fca6d89b27eac32c86055ff06268f4f66542d3db2aaba0aba86f4fb is successfully got committed +2018-12-03 12:54:41 INFO Batch with id 80141ee13cc8dd00bcfbd7b56ff755e8e5d5293349b0acc95046ed6e34b04a1935b44b5de974fda094c3768d9fd018f201047648a428575b1ba62e6c97248a09 is successfully got committed +2018-12-03 12:54:41 INFO Batch with id efc85981ba5b7258fcd098c6b58fe249b4d601ce9062a0c8a87553d49be1761a6155767950e706ba75af4e84d036ebcdadf369b64f9d87655f75b27c98bff958 is successfully got committed +2018-12-03 12:54:41 INFO Verifying the txn details listed under the dependencies +2018-12-03 12:54:41 INFO Successfully got the dependencies for transaction id ffe4cd9297945ce45b5796f27daa669768c76c862d93d4bbe52379ec276249a30c220267a536747ef94793462009cebb28a22b834de82a43043986d40de9de80 +2018-12-03 12:54:41 INFO The dependencies for transaction id is blank89bf5d216e142fe29b67a790b06ea7cbfcaf08064054437305bec0f18ea164003bcb9dd36fbc84a67817690cd7445912763e78da8c1bbc866dd6e0b60237cb80 +2018-12-03 12:54:41 INFO Successfully got the dependencies for transaction id 3a20d74799ad6facd6ffbd63feb816e289d453abaad8563521012f901f093098151cb403c1558b03e8d485e83055402fbcc602deffe091ab80477677a4237f9b +2018-12-03 12:54:42 INFO Checking Consensus on block number 1 +2018-12-03 12:54:42 INFO Starting test for batch post +2018-12-03 12:54:42 INFO Creating intkey transactions with set operations +2018-12-03 12:54:42 INFO Creating intkey transactions with inc operations with dependent transactions as first transaction +2018-12-03 12:54:42 INFO Creating batches for transactions 1trn/batch +2018-12-03 12:54:42 INFO 
Submitting batches to the handlers +2018-12-03 12:54:42 INFO Verifying the responses status +2018-12-03 12:54:42 INFO Batch with id 12fd98d7ac23ae15d22336148c76c4684f483551da8cb07d42fbe95aefb8055f2dd6ffe941db3758c76ee0f17cf8f88d98ea04b2da7c904899860a779a443ad2 is successfully got committed +2018-12-03 12:54:42 INFO Batch with id cf788196417f0342de8b504362c0e8994dbf011675990b1b1d497b9b365684da3a0ba15aa99d38796c3de3276b947d190f0d32feb6859be2362df7cc6cc380f8 is successfully got committed +2018-12-03 12:54:42 INFO Verifying the txn details listed under the dependencies +2018-12-03 12:54:42 INFO Successfully got the dependencies for transaction id 3f5411c7c74f985910ff0c3c144cf9bbb921b71ad2e49aa17197440b88fa85742344ce33944a56d33d8861c746b8a23ea9a4bc7a4f67c1078847ea60a0e81a6a +2018-12-03 12:54:42 INFO The dependencies for transaction id is blankade2fad6cabaa598b5bea4656a172119e6c788cde37eafad2b9c69916e67245b677f143c2d22935935d49f55615a6be467d264b61ac4f14e02da88094b3d582c +2018-12-03 12:54:43 INFO Checking Consensus on block number 1 +2018-12-03 12:54:43 INFO Starting test for batch post +2018-12-03 12:54:43 INFO Creating intkey transactions with set operations +2018-12-03 12:54:43 INFO Creating intkey transactions with inc operations with dependent transactions as first transaction +2018-12-03 12:54:43 INFO Creating batches for transactions 1trn/batch +2018-12-03 12:54:43 INFO Submitting batches to the handlers +2018-12-03 12:54:46 INFO Verifying the responses status +2018-12-03 12:54:46 INFO Batch with id ec4bb017adbc1327520947ab44f0e31c4b3e8a53acc642946746fc970097b9aa0dcf3f63f90fc94ab7a0cba686a0c07bfb2fb6084a96bfa964bb0f808ff47fd6 is successfully got committed +2018-12-03 12:54:46 INFO Batch with id ec4bb017adbc1327520947ab44f0e31c4b3e8a53acc642946746fc970097b9aa0dcf3f63f90fc94ab7a0cba686a0c07bfb2fb6084a96bfa964bb0f808ff47fd6 is not committed. 
Status is INVALID +2018-12-03 12:54:46 INFO Verifying the txn details listed under the dependencies +2018-12-03 12:54:47 INFO Checking Consensus on block number 1 +2018-12-03 12:54:47 INFO Starting test for batch post +2018-12-03 12:54:47 INFO Creating intkey transactions with set operations +2018-12-03 12:54:47 INFO Creating intkey transactions with inc operations with dependent transactions as first transaction +2018-12-03 12:54:47 INFO Creating intkey transactions with inc operations with dependent transactions as second transaction +2018-12-03 12:54:47 INFO Creating batches for transactions 1trn/batch +2018-12-03 12:54:47 INFO Submitting batches to the handlers +2018-12-03 12:54:52 INFO Verifying the responses status +2018-12-03 12:54:52 INFO Batch with id 387e6eba49bcfeb449f5058f1c56455191ed38dea8ef104ad952bb37239044014dfa1fee27eb78d10773cedf833763d72d921b6a098fa273879b6e5b638ec926 is successfully got committed +2018-12-03 12:54:52 INFO Batch with id 4a716cedbf703e5348ff8355e5fe726851f35f8d07b01b433330c2889717edac146962b520cfd0b286fb4042d057c2374490dba922624a73f7a404da6e5ad177 is successfully got committed +2018-12-03 12:54:52 INFO Batch with id c85bad47724d9f00f360291fa854034f6d4dbcb8e2ad682a75b20ba3f38a592a75ba6d4dc60708956f334618ce3105fbe4ffea341c98404ab8c3603d81298ea8 is successfully got committed +2018-12-03 12:54:52 INFO Verifying the txn details listed under the dependencies +2018-12-03 12:54:52 INFO Successfully got the dependencies for transaction id 699d7d97a2928f33ef1b21184b9d432baa22b506b38521f435d5872092667f784e4d7d637d21d5e9b3bb5d62622fba76876d85417661a54457889f45de8d46f1 +2018-12-03 12:54:52 INFO Successfully got the dependencies for transaction id 19613c531a09892e9ffc29d27347ca06343bc1f7ac6e9faa9db96f4e6ee489b426e9c3c35ca2b4c8da39b97602b963db367f411b5050ccc67010f84b011dd2ca +2018-12-03 12:54:52 INFO The dependencies for transaction id is 
blank07b20a8fe18b1a6f95b676de6529f70a7c640b07296e64bb38417d952550a1e060122d602a93794b614c577e2500e7ed0946687f24f0c432ba2ae9b7efc38311 +2018-12-03 12:54:53 INFO Checking Consensus on block number 1 +2018-12-03 12:54:53 INFO Starting test for batch post +2018-12-03 12:54:53 INFO Creating intkey transactions with set operations +2018-12-03 12:54:53 INFO Creating intkey transactions with inc operations with dependent transactions as first transaction +2018-12-03 12:54:53 INFO Creating batches for transactions 1trn/batch +2018-12-03 12:54:53 INFO Submitting batches to the handlers +2018-12-03 12:54:55 INFO Verifying the responses status +2018-12-03 12:54:55 INFO Batch with id 755e812e3b09a8b4cafab088bc93e44bc50f00a1b44905a5a6ee09e75e80e0173948b4cf6ad8efc5ff118427dfe25b8219ba38d5833321d30e0826b6fad519e1 is successfully got committed +2018-12-03 12:54:55 INFO Batch with id 755e812e3b09a8b4cafab088bc93e44bc50f00a1b44905a5a6ee09e75e80e0173948b4cf6ad8efc5ff118427dfe25b8219ba38d5833321d30e0826b6fad519e1 is successfully got committed +2018-12-03 12:54:55 INFO Verifying the txn details listed under the dependencies +2018-12-03 12:54:55 INFO Checking Consensus on block number 1 +2018-12-03 12:54:55 INFO Starting test for batch post +2018-12-03 12:54:55 INFO Creating intkey transactions with set operations +2018-12-03 12:54:55 INFO Creating intkey transactions with inc operations with dependent transactions as first transaction +2018-12-03 12:54:55 INFO Creating batches for transactions 1trn/batch +2018-12-03 12:54:55 INFO Submitting batches to the handlers +2018-12-03 12:55:01 INFO Verifying the responses status +2018-12-03 12:55:01 INFO Batch with id 5f0d9ec090f07be2ebd13bbd8d0086970812a3d0b7863dd0dec3c2708d5eb4e650ebcd55471240453a37bf3c6e87359e2394992d8cc209d97bd087a34ff3fd49 is successfully got committed +2018-12-03 12:55:01 INFO Batch with id 5f0d9ec090f07be2ebd13bbd8d0086970812a3d0b7863dd0dec3c2708d5eb4e650ebcd55471240453a37bf3c6e87359e2394992d8cc209d97bd087a34ff3fd49 is 
not committed. Status is UNKNOWN +2018-12-03 12:55:01 INFO Verifying the txn details listed under the dependencies +2018-12-03 12:55:01 INFO Checking Consensus on block number 1 diff --git a/rest_api/tests/api_test/dep_txns/report.json b/rest_api/tests/api_test/dep_txns/report.json new file mode 100644 index 0000000000..6db2ba4661 --- /dev/null +++ b/rest_api/tests/api_test/dep_txns/report.json @@ -0,0 +1 @@ +{"tests": [{"lineno": 80, "outcome": "passed", "setup": {"outcome": "passed", "duration": 26.179437160491943, "log": [{"lineno": 36, "filename": "conftest.py", "created": 1543821845.3672607, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "conftest", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/conftest.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 367.2606945037842, "funcName": "setup", "exc_text": null, "relativeCreated": 610.3627681732178, "module": "conftest", "msg": "Starting Setup method for posting batches using intkey as payload", "asctime": "2018-12-03 12:54:05"}, {"lineno": 73, "filename": "payload.py", "created": 1543821845.3732347, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 373.23474884033203, "funcName": "_create_transactions", "exc_text": null, "relativeCreated": 616.3368225097656, "module": "payload", "msg": "Creating intkey transactions with set operations", "asctime": "2018-12-03 12:54:05"}, {"lineno": 79, "filename": "payload.py", "created": 1543821845.374849, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": 
"/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 374.8490810394287, "funcName": "_create_batches", "exc_text": null, "relativeCreated": 617.9511547088623, "module": "payload", "msg": "Creating batches for transactions 1trn/batch", "asctime": "2018-12-03 12:54:05"}, {"lineno": 168, "filename": "payload.py", "created": 1543821845.3753247, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 375.3247261047363, "funcName": "_expected_data", "exc_text": null, "relativeCreated": 618.4267997741699, "module": "payload", "msg": "Gathering expected data before submission of batches", "asctime": "2018-12-03 12:54:05"}, {"lineno": 155, "filename": "payload.py", "created": 1543821845.3754852, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 375.4851818084717, "funcName": "_expected_count", "exc_text": null, "relativeCreated": 618.5872554779053, "module": "payload", "msg": "Calculating the expected count of batches, transactions, state", "asctime": "2018-12-03 12:54:05"}, {"lineno": 146, "filename": "payload.py", "created": 1543821845.3756492, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 375.64921379089355, "funcName": 
"_initial_count", "exc_text": null, "relativeCreated": 618.7512874603271, "module": "payload", "msg": "Calculating the initial count of batches,transactions, state before submission of batches", "asctime": "2018-12-03 12:54:05"}, {"lineno": 111, "filename": "payload.py", "created": 1543821868.8435173, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 843.5173034667969, "funcName": "_expected_txn_ids", "exc_text": null, "relativeCreated": 24086.61937713623, "module": "payload", "msg": "Expected transaction ids", "asctime": "2018-12-03 12:54:28"}, {"lineno": 97, "filename": "payload.py", "created": 1543821868.8442633, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 844.2633152008057, "funcName": "_expected_batch_ids", "exc_text": null, "relativeCreated": 24087.36538887024, "module": "payload", "msg": "Expected batch ids", "asctime": "2018-12-03 12:54:28"}, {"lineno": 111, "filename": "payload.py", "created": 1543821868.844543, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 844.5429801940918, "funcName": "_expected_txn_ids", "exc_text": null, "relativeCreated": 24087.645053863525, "module": "payload", "msg": "Expected transaction ids", "asctime": "2018-12-03 12:54:28"}, {"lineno": 97, "filename": "payload.py", "created": 
1543821868.844708, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 844.7079658508301, "funcName": "_expected_batch_ids", "exc_text": null, "relativeCreated": 24087.810039520264, "module": "payload", "msg": "Expected batch ids", "asctime": "2018-12-03 12:54:28"}, {"lineno": 97, "filename": "payload.py", "created": 1543821869.4468331, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 446.83313369750977, "funcName": "_expected_batch_ids", "exc_text": null, "relativeCreated": 24689.935207366943, "module": "payload", "msg": "Expected batch ids", "asctime": "2018-12-03 12:54:29"}]}, "call": {"outcome": "passed", "duration": 3.5685501098632812, "log": [{"lineno": 88, "filename": "test_rest_api_dep_txns.py", "created": 1543821871.5489635, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 548.9635467529297, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 26792.065620422363, "module": "test_rest_api_dep_txns", "msg": "Starting test for batch post", "asctime": "2018-12-03 12:54:31"}, {"lineno": 99, "filename": "test_rest_api_dep_txns.py", "created": 1543821871.55388, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, 
"name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 553.879976272583, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 26796.982049942017, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with set operations", "asctime": "2018-12-03 12:54:31"}, {"lineno": 113, "filename": "test_rest_api_dep_txns.py", "created": 1543821871.555057, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 555.0570487976074, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 26798.15912246704, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations with dependent transactions as first transaction", "asctime": "2018-12-03 12:54:31"}, {"lineno": 125, "filename": "test_rest_api_dep_txns.py", "created": 1543821871.5554729, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 555.4728507995605, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 26798.574924468994, "module": "test_rest_api_dep_txns", "msg": "Creating batches for transactions 1trn/batch", "asctime": "2018-12-03 12:54:31"}, {"lineno": 140, "filename": "test_rest_api_dep_txns.py", "created": 1543821871.5561776, "processName": "MainProcess", "threadName": "MainThread", 
"exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 556.1776161193848, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 26799.27968978882, "module": "test_rest_api_dep_txns", "msg": "Submitting batches to the handlers", "asctime": "2018-12-03 12:54:31"}, {"lineno": 151, "filename": "test_rest_api_dep_txns.py", "created": 1543821874.5480816, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 548.081636428833, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 29791.183710098267, "module": "test_rest_api_dep_txns", "msg": "Verifying the responses status", "asctime": "2018-12-03 12:54:34"}, {"lineno": 157, "filename": "test_rest_api_dep_txns.py", "created": 1543821874.5482955, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 548.2954978942871, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 29791.39757156372, "module": "test_rest_api_dep_txns", "msg": "Batch with id 1966fa42a905f16c77e0ee9be1de1427c568118a301e917ae3d945ea2ee05ca26485f9854d773123597d222c1ac5a9366bd8a737c24ad8a531383eb1a54d5c0d is successfully got committed", "asctime": "2018-12-03 12:54:34"}, {"lineno": 157, "filename": 
"test_rest_api_dep_txns.py", "created": 1543821874.5484228, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 548.4228134155273, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 29791.52488708496, "module": "test_rest_api_dep_txns", "msg": "Batch with id 950ea7bd67cf674da99d5404f77ed871d6ec5f3d55c2c257d6017f45340ededb25b7f758a12946f044fcd705adcf6f9735668f9353079cd70bd6052bd9868dbc is successfully got committed", "asctime": "2018-12-03 12:54:34"}, {"lineno": 165, "filename": "test_rest_api_dep_txns.py", "created": 1543821874.5485833, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 548.5832691192627, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 29791.685342788696, "module": "test_rest_api_dep_txns", "msg": "Verifying the txn details listed under the dependencies", "asctime": "2018-12-03 12:54:34"}, {"lineno": 171, "filename": "test_rest_api_dep_txns.py", "created": 1543821874.5602443, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 560.2443218231201, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 29803.346395492554, "module": 
"test_rest_api_dep_txns", "msg": "Successfully got the dependencies for transaction id 63a943c0f60dc30a52ae4b3047dc33d718c032cff62d9f9fe630436a36adc1ec6bf4f8667ec161974cebb51447761fcf07f4b12caeaac38900361244ffe1b67c", "asctime": "2018-12-03 12:54:34"}, {"lineno": 173, "filename": "test_rest_api_dep_txns.py", "created": 1543821874.5738556, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 573.8556385040283, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 29816.957712173462, "module": "test_rest_api_dep_txns", "msg": "The dependencies for transaction id is blank0efffa0d8e69a3c82198674a59b3e758424d43f16d7fa0b06f8b44fd930cfdd70f7aec164a96464c0e3ca96437719598748933d699c48143ef40121877c678bd", "asctime": "2018-12-03 12:54:34"}, {"lineno": 241, "filename": "utils.py", "created": 1543821875.0944118, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "utils", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/utils.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 94.41184997558594, "funcName": "check_for_consensus", "exc_text": null, "relativeCreated": 30337.51392364502, "module": "utils", "msg": "Checking Consensus on block number 1", "asctime": "2018-12-03 12:54:35"}]}, "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_set_inc_txn_dep", "path": "dep_txns/test_rest_api_dep_txns.py", "keywords": ["dependent", "test_set_inc_txn_dep", "sixth", "dep_txns/test_rest_api_dep_txns.py", "run", "api_test", "()", "TestPostTansactionDependencies"], "domain": "TestPostTansactionDependencies.test_set_inc_txn_dep", 
"teardown": {"outcome": "passed", "duration": 0.0003333091735839844, "log": []}}, {"lineno": 178, "outcome": "passed", "setup": {"outcome": "passed", "duration": 0.00044083595275878906, "log": []}, "call": {"outcome": "passed", "duration": 7.127191543579102, "log": [{"lineno": 187, "filename": "test_rest_api_dep_txns.py", "created": 1543821875.119221, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 119.22097206115723, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 30362.32304573059, "module": "test_rest_api_dep_txns", "msg": "Starting test for batch post", "asctime": "2018-12-03 12:54:35"}, {"lineno": 197, "filename": "test_rest_api_dep_txns.py", "created": 1543821875.1255586, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 125.55861473083496, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 30368.66068840027, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with set operations", "asctime": "2018-12-03 12:54:35"}, {"lineno": 211, "filename": "test_rest_api_dep_txns.py", "created": 1543821875.1272697, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 
140494495176448, "process": 23580, "msecs": 127.26974487304688, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 30370.37181854248, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations with dependent transactions as first transaction", "asctime": "2018-12-03 12:54:35"}, {"lineno": 223, "filename": "test_rest_api_dep_txns.py", "created": 1543821875.1277328, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 127.73275375366211, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 30370.834827423096, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations with dependent transactions as first and second transaction", "asctime": "2018-12-03 12:54:35"}, {"lineno": 235, "filename": "test_rest_api_dep_txns.py", "created": 1543821875.1281967, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 128.19671630859375, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 30371.298789978027, "module": "test_rest_api_dep_txns", "msg": "Creating batches for transactions 1trn/batch", "asctime": "2018-12-03 12:54:35"}, {"lineno": 250, "filename": "test_rest_api_dep_txns.py", "created": 1543821875.1291454, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", 
"pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 129.14538383483887, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 30372.247457504272, "module": "test_rest_api_dep_txns", "msg": "Submitting batches to the handlers", "asctime": "2018-12-03 12:54:35"}, {"lineno": 261, "filename": "test_rest_api_dep_txns.py", "created": 1543821881.6695542, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 669.5542335510254, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 36912.65630722046, "module": "test_rest_api_dep_txns", "msg": "Verifying the responses status", "asctime": "2018-12-03 12:54:41"}, {"lineno": 267, "filename": "test_rest_api_dep_txns.py", "created": 1543821881.6697662, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 669.7661876678467, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 36912.86826133728, "module": "test_rest_api_dep_txns", "msg": "Batch with id b9ebf1fcee5cb0f528a125ecf1dcb111768d11a2d369dd36c0ba95db1d00ed880e14e58e9fca6d89b27eac32c86055ff06268f4f66542d3db2aaba0aba86f4fb is successfully got committed", "asctime": "2018-12-03 12:54:41"}, {"lineno": 267, "filename": "test_rest_api_dep_txns.py", "created": 1543821881.6698918, "processName": 
"MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 669.8918342590332, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 36912.99390792847, "module": "test_rest_api_dep_txns", "msg": "Batch with id 80141ee13cc8dd00bcfbd7b56ff755e8e5d5293349b0acc95046ed6e34b04a1935b44b5de974fda094c3768d9fd018f201047648a428575b1ba62e6c97248a09 is successfully got committed", "asctime": "2018-12-03 12:54:41"}, {"lineno": 267, "filename": "test_rest_api_dep_txns.py", "created": 1543821881.670008, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 670.0079441070557, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 36913.11001777649, "module": "test_rest_api_dep_txns", "msg": "Batch with id efc85981ba5b7258fcd098c6b58fe249b4d601ce9062a0c8a87553d49be1761a6155767950e706ba75af4e84d036ebcdadf369b64f9d87655f75b27c98bff958 is successfully got committed", "asctime": "2018-12-03 12:54:41"}, {"lineno": 275, "filename": "test_rest_api_dep_txns.py", "created": 1543821881.6701179, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 670.1178550720215, "funcName": "test_rest_api_double_dep_txns", 
"exc_text": null, "relativeCreated": 36913.219928741455, "module": "test_rest_api_dep_txns", "msg": "Verifying the txn details listed under the dependencies", "asctime": "2018-12-03 12:54:41"}, {"lineno": 281, "filename": "test_rest_api_dep_txns.py", "created": 1543821881.6857107, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 685.7106685638428, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 36928.81274223328, "module": "test_rest_api_dep_txns", "msg": "Successfully got the dependencies for transaction id ffe4cd9297945ce45b5796f27daa669768c76c862d93d4bbe52379ec276249a30c220267a536747ef94793462009cebb28a22b834de82a43043986d40de9de80", "asctime": "2018-12-03 12:54:41"}, {"lineno": 283, "filename": "test_rest_api_dep_txns.py", "created": 1543821881.6968331, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 696.8331336975098, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 36939.93520736694, "module": "test_rest_api_dep_txns", "msg": "The dependencies for transaction id is blank89bf5d216e142fe29b67a790b06ea7cbfcaf08064054437305bec0f18ea164003bcb9dd36fbc84a67817690cd7445912763e78da8c1bbc866dd6e0b60237cb80", "asctime": "2018-12-03 12:54:41"}, {"lineno": 281, "filename": "test_rest_api_dep_txns.py", "created": 1543821881.707487, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": 
null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 707.4871063232422, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 36950.589179992676, "module": "test_rest_api_dep_txns", "msg": "Successfully got the dependencies for transaction id 3a20d74799ad6facd6ffbd63feb816e289d453abaad8563521012f901f093098151cb403c1558b03e8d485e83055402fbcc602deffe091ab80477677a4237f9b", "asctime": "2018-12-03 12:54:41"}, {"lineno": 241, "filename": "utils.py", "created": 1543821882.2245405, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "utils", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/utils.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 224.54047203063965, "funcName": "check_for_consensus", "exc_text": null, "relativeCreated": 37467.64254570007, "module": "utils", "msg": "Checking Consensus on block number 1", "asctime": "2018-12-03 12:54:42"}]}, "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_double_dep_txns", "path": "dep_txns/test_rest_api_dep_txns.py", "keywords": ["dependent", "test_rest_api_double_dep_txns", "sixth", "dep_txns/test_rest_api_dep_txns.py", "run", "api_test", "()", "TestPostTansactionDependencies"], "domain": "TestPostTansactionDependencies.test_rest_api_double_dep_txns", "teardown": {"outcome": "passed", "duration": 0.0003056526184082031, "log": []}}, {"lineno": 288, "outcome": "passed", "setup": {"outcome": "passed", "duration": 0.0003654956817626953, "log": []}, "call": {"outcome": "passed", "duration": 1.0723152160644531, "log": [{"lineno": 296, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.2486768, "processName": "MainProcess", "threadName": 
"MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 248.67677688598633, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 37491.77885055542, "module": "test_rest_api_dep_txns", "msg": "Starting test for batch post", "asctime": "2018-12-03 12:54:42"}, {"lineno": 312, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.256097, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 256.09707832336426, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 37499.1991519928, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with set operations", "asctime": "2018-12-03 12:54:42"}, {"lineno": 326, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.257406, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 257.40599632263184, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 37500.508069992065, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations with dependent transactions as first transaction", "asctime": "2018-12-03 12:54:42"}, {"lineno": 341, "filename": "test_rest_api_dep_txns.py", "created": 
1543821882.2585185, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 258.5184574127197, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 37501.62053108215, "module": "test_rest_api_dep_txns", "msg": "Creating batches for transactions 1trn/batch", "asctime": "2018-12-03 12:54:42"}, {"lineno": 356, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.259265, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 259.2649459838867, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 37502.36701965332, "module": "test_rest_api_dep_txns", "msg": "Submitting batches to the handlers", "asctime": "2018-12-03 12:54:42"}, {"lineno": 367, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.7597337, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 759.7336769104004, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 38002.835750579834, "module": "test_rest_api_dep_txns", "msg": "Verifying the responses status", "asctime": "2018-12-03 12:54:42"}, {"lineno": 375, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.7599473, 
"processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 759.9472999572754, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 38003.04937362671, "module": "test_rest_api_dep_txns", "msg": "Batch with id 12fd98d7ac23ae15d22336148c76c4684f483551da8cb07d42fbe95aefb8055f2dd6ffe941db3758c76ee0f17cf8f88d98ea04b2da7c904899860a779a443ad2 is successfully got committed", "asctime": "2018-12-03 12:54:42"}, {"lineno": 375, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.760076, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 760.0760459899902, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 38003.178119659424, "module": "test_rest_api_dep_txns", "msg": "Batch with id cf788196417f0342de8b504362c0e8994dbf011675990b1b1d497b9b365684da3a0ba15aa99d38796c3de3276b947d190f0d32feb6859be2362df7cc6cc380f8 is successfully got committed", "asctime": "2018-12-03 12:54:42"}, {"lineno": 386, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.7601895, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 760.1895332336426, "funcName": "test_single_set_dep_txns", 
"exc_text": null, "relativeCreated": 38003.291606903076, "module": "test_rest_api_dep_txns", "msg": "Verifying the txn details listed under the dependencies", "asctime": "2018-12-03 12:54:42"}, {"lineno": 392, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.7722795, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 772.2795009613037, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 38015.38157463074, "module": "test_rest_api_dep_txns", "msg": "Successfully got the dependencies for transaction id 3f5411c7c74f985910ff0c3c144cf9bbb921b71ad2e49aa17197440b88fa85742344ce33944a56d33d8861c746b8a23ea9a4bc7a4f67c1078847ea60a0e81a6a", "asctime": "2018-12-03 12:54:42"}, {"lineno": 394, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.788773, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 788.7730598449707, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 38031.875133514404, "module": "test_rest_api_dep_txns", "msg": "The dependencies for transaction id is blankade2fad6cabaa598b5bea4656a172119e6c788cde37eafad2b9c69916e67245b677f143c2d22935935d49f55615a6be467d264b61ac4f14e02da88094b3d582c", "asctime": "2018-12-03 12:54:42"}, {"lineno": 241, "filename": "utils.py", "created": 1543821883.3003898, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "utils", 
"pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/utils.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 300.38976669311523, "funcName": "check_for_consensus", "exc_text": null, "relativeCreated": 38543.49184036255, "module": "utils", "msg": "Checking Consensus on block number 1", "asctime": "2018-12-03 12:54:43"}]}, "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_single_set_dep_txns", "path": "dep_txns/test_rest_api_dep_txns.py", "keywords": ["dependent", "test_single_set_dep_txns", "sixth", "dep_txns/test_rest_api_dep_txns.py", "run", "api_test", "()", "TestPostTansactionDependencies"], "domain": "TestPostTansactionDependencies.test_single_set_dep_txns", "teardown": {"outcome": "passed", "duration": 0.00029778480529785156, "log": []}}, {"lineno": 399, "outcome": "passed", "setup": {"outcome": "passed", "duration": 0.0003311634063720703, "log": []}, "call": {"outcome": "passed", "duration": 4.070549011230469, "log": [{"lineno": 407, "filename": "test_rest_api_dep_txns.py", "created": 1543821883.3232832, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 323.28319549560547, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 38566.38526916504, "module": "test_rest_api_dep_txns", "msg": "Starting test for batch post", "asctime": "2018-12-03 12:54:43"}, {"lineno": 423, "filename": "test_rest_api_dep_txns.py", "created": 1543821883.3293188, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": 
"/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 329.3187618255615, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 38572.420835494995, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with set operations", "asctime": "2018-12-03 12:54:43"}, {"lineno": 437, "filename": "test_rest_api_dep_txns.py", "created": 1543821883.3304896, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 330.4896354675293, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 38573.59170913696, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations with dependent transactions as first transaction", "asctime": "2018-12-03 12:54:43"}, {"lineno": 450, "filename": "test_rest_api_dep_txns.py", "created": 1543821883.3309386, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 330.93857765197754, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 38574.04065132141, "module": "test_rest_api_dep_txns", "msg": "Creating batches for transactions 1trn/batch", "asctime": "2018-12-03 12:54:43"}, {"lineno": 465, "filename": "test_rest_api_dep_txns.py", "created": 1543821883.3317018, "processName": "MainProcess", "threadName": "MainThread", 
"exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 331.70175552368164, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 38574.803829193115, "module": "test_rest_api_dep_txns", "msg": "Submitting batches to the handlers", "asctime": "2018-12-03 12:54:43"}, {"lineno": 476, "filename": "test_rest_api_dep_txns.py", "created": 1543821886.8563724, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 856.3723564147949, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 42099.47443008423, "module": "test_rest_api_dep_txns", "msg": "Verifying the responses status", "asctime": "2018-12-03 12:54:46"}, {"lineno": 484, "filename": "test_rest_api_dep_txns.py", "created": 1543821886.8566306, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 856.6305637359619, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 42099.732637405396, "module": "test_rest_api_dep_txns", "msg": "Batch with id ec4bb017adbc1327520947ab44f0e31c4b3e8a53acc642946746fc970097b9aa0dcf3f63f90fc94ab7a0cba686a0c07bfb2fb6084a96bfa964bb0f808ff47fd6 is successfully got committed", "asctime": "2018-12-03 12:54:46"}, 
{"lineno": 489, "filename": "test_rest_api_dep_txns.py", "created": 1543821886.8567727, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 856.7726612091064, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 42099.87473487854, "module": "test_rest_api_dep_txns", "msg": "Batch with id ec4bb017adbc1327520947ab44f0e31c4b3e8a53acc642946746fc970097b9aa0dcf3f63f90fc94ab7a0cba686a0c07bfb2fb6084a96bfa964bb0f808ff47fd6 is not committed. Status is INVALID", "asctime": "2018-12-03 12:54:46"}, {"lineno": 494, "filename": "test_rest_api_dep_txns.py", "created": 1543821886.856888, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 856.8880558013916, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 42099.990129470825, "module": "test_rest_api_dep_txns", "msg": "Verifying the txn details listed under the dependencies", "asctime": "2018-12-03 12:54:46"}, {"lineno": 241, "filename": "utils.py", "created": 1543821887.372987, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "utils", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/utils.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 372.9870319366455, "funcName": "check_for_consensus", "exc_text": null, "relativeCreated": 42616.08910560608, "module": "utils", 
"msg": "Checking Consensus on block number 1", "asctime": "2018-12-03 12:54:47"}]}, "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_single_set_dec_txns", "path": "dep_txns/test_rest_api_dep_txns.py", "keywords": ["dependent", "sixth", "test_rest_api_single_set_dec_txns", "dep_txns/test_rest_api_dep_txns.py", "run", "api_test", "()", "TestPostTansactionDependencies"], "domain": "TestPostTansactionDependencies.test_rest_api_single_set_dec_txns", "teardown": {"outcome": "passed", "duration": 0.00030612945556640625, "log": []}}, {"lineno": 500, "outcome": "passed", "setup": {"outcome": "passed", "duration": 0.00034546852111816406, "log": []}, "call": {"outcome": "passed", "duration": 6.141465425491333, "log": [{"lineno": 509, "filename": "test_rest_api_dep_txns.py", "created": 1543821887.3962643, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 396.26431465148926, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 42639.36638832092, "module": "test_rest_api_dep_txns", "msg": "Starting test for batch post", "asctime": "2018-12-03 12:54:47"}, {"lineno": 519, "filename": "test_rest_api_dep_txns.py", "created": 1543821887.4013183, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 401.3183116912842, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 42644.42038536072, "module": 
"test_rest_api_dep_txns", "msg": "Creating intkey transactions with set operations", "asctime": "2018-12-03 12:54:47"}, {"lineno": 533, "filename": "test_rest_api_dep_txns.py", "created": 1543821887.4025242, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 402.5242328643799, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 42645.62630653381, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations with dependent transactions as first transaction", "asctime": "2018-12-03 12:54:47"}, {"lineno": 545, "filename": "test_rest_api_dep_txns.py", "created": 1543821887.4029722, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 402.9722213745117, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 42646.074295043945, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations with dependent transactions as second transaction", "asctime": "2018-12-03 12:54:47"}, {"lineno": 557, "filename": "test_rest_api_dep_txns.py", "created": 1543821887.4034097, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 
23580, "msecs": 403.4097194671631, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 42646.5117931366, "module": "test_rest_api_dep_txns", "msg": "Creating batches for transactions 1trn/batch", "asctime": "2018-12-03 12:54:47"}, {"lineno": 572, "filename": "test_rest_api_dep_txns.py", "created": 1543821887.4043865, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 404.3865203857422, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 42647.488594055176, "module": "test_rest_api_dep_txns", "msg": "Submitting batches to the handlers", "asctime": "2018-12-03 12:54:47"}, {"lineno": 583, "filename": "test_rest_api_dep_txns.py", "created": 1543821892.9626763, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 962.6762866973877, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 48205.77836036682, "module": "test_rest_api_dep_txns", "msg": "Verifying the responses status", "asctime": "2018-12-03 12:54:52"}, {"lineno": 589, "filename": "test_rest_api_dep_txns.py", "created": 1543821892.9628952, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, 
"process": 23580, "msecs": 962.8951549530029, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 48205.99722862244, "module": "test_rest_api_dep_txns", "msg": "Batch with id 387e6eba49bcfeb449f5058f1c56455191ed38dea8ef104ad952bb37239044014dfa1fee27eb78d10773cedf833763d72d921b6a098fa273879b6e5b638ec926 is successfully got committed", "asctime": "2018-12-03 12:54:52"}, {"lineno": 589, "filename": "test_rest_api_dep_txns.py", "created": 1543821892.9630222, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 963.0222320556641, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 48206.1243057251, "module": "test_rest_api_dep_txns", "msg": "Batch with id 4a716cedbf703e5348ff8355e5fe726851f35f8d07b01b433330c2889717edac146962b520cfd0b286fb4042d057c2374490dba922624a73f7a404da6e5ad177 is successfully got committed", "asctime": "2018-12-03 12:54:52"}, {"lineno": 589, "filename": "test_rest_api_dep_txns.py", "created": 1543821892.963141, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 963.1409645080566, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 48206.24303817749, "module": "test_rest_api_dep_txns", "msg": "Batch with id c85bad47724d9f00f360291fa854034f6d4dbcb8e2ad682a75b20ba3f38a592a75ba6d4dc60708956f334618ce3105fbe4ffea341c98404ab8c3603d81298ea8 is successfully got committed", "asctime": 
"2018-12-03 12:54:52"}, {"lineno": 597, "filename": "test_rest_api_dep_txns.py", "created": 1543821892.9632616, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 963.261604309082, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 48206.363677978516, "module": "test_rest_api_dep_txns", "msg": "Verifying the txn details listed under the dependencies", "asctime": "2018-12-03 12:54:52"}, {"lineno": 603, "filename": "test_rest_api_dep_txns.py", "created": 1543821892.9750278, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 975.0277996063232, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 48218.12987327576, "module": "test_rest_api_dep_txns", "msg": "Successfully got the dependencies for transaction id 699d7d97a2928f33ef1b21184b9d432baa22b506b38521f435d5872092667f784e4d7d637d21d5e9b3bb5d62622fba76876d85417661a54457889f45de8d46f1", "asctime": "2018-12-03 12:54:52"}, {"lineno": 603, "filename": "test_rest_api_dep_txns.py", "created": 1543821892.986784, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 986.7839813232422, "funcName": 
"test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 48229.886054992676, "module": "test_rest_api_dep_txns", "msg": "Successfully got the dependencies for transaction id 19613c531a09892e9ffc29d27347ca06343bc1f7ac6e9faa9db96f4e6ee489b426e9c3c35ca2b4c8da39b97602b963db367f411b5050ccc67010f84b011dd2ca", "asctime": "2018-12-03 12:54:52"}, {"lineno": 605, "filename": "test_rest_api_dep_txns.py", "created": 1543821892.9994009, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 999.4008541107178, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 48242.50292778015, "module": "test_rest_api_dep_txns", "msg": "The dependencies for transaction id is blank07b20a8fe18b1a6f95b676de6529f70a7c640b07296e64bb38417d952550a1e060122d602a93794b614c577e2500e7ed0946687f24f0c432ba2ae9b7efc38311", "asctime": "2018-12-03 12:54:52"}, {"lineno": 241, "filename": "utils.py", "created": 1543821893.5171175, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "utils", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/utils.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 517.1175003051758, "funcName": "check_for_consensus", "exc_text": null, "relativeCreated": 48760.21957397461, "module": "utils", "msg": "Checking Consensus on block number 1", "asctime": "2018-12-03 12:54:53"}]}, "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_set_inc_inc_Txns_Dep", "path": "dep_txns/test_rest_api_dep_txns.py", "keywords": ["dependent", "sixth", "dep_txns/test_rest_api_dep_txns.py", "run", 
"api_test", "()", "test_rest_api_set_inc_inc_Txns_Dep", "TestPostTansactionDependencies"], "domain": "TestPostTansactionDependencies.test_rest_api_set_inc_inc_Txns_Dep", "teardown": {"outcome": "passed", "duration": 0.00029659271240234375, "log": []}}, {"lineno": 610, "outcome": "passed", "setup": {"outcome": "passed", "duration": 0.0003414154052734375, "log": []}, "call": {"outcome": "passed", "duration": 2.0709407329559326, "log": [{"lineno": 618, "filename": "test_rest_api_dep_txns.py", "created": 1543821893.5401227, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 540.1227474212646, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": null, "relativeCreated": 48783.2248210907, "module": "test_rest_api_dep_txns", "msg": "Starting test for batch post", "asctime": "2018-12-03 12:54:53"}, {"lineno": 634, "filename": "test_rest_api_dep_txns.py", "created": 1543821893.5467072, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 546.7071533203125, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": null, "relativeCreated": 48789.809226989746, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with set operations", "asctime": "2018-12-03 12:54:53"}, {"lineno": 648, "filename": "test_rest_api_dep_txns.py", "created": 1543821893.547874, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": 
"test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 547.8739738464355, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": null, "relativeCreated": 48790.97604751587, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations with dependent transactions as first transaction", "asctime": "2018-12-03 12:54:53"}, {"lineno": 661, "filename": "test_rest_api_dep_txns.py", "created": 1543821893.5483227, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 548.3226776123047, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": null, "relativeCreated": 48791.42475128174, "module": "test_rest_api_dep_txns", "msg": "Creating batches for transactions 1trn/batch", "asctime": "2018-12-03 12:54:53"}, {"lineno": 676, "filename": "test_rest_api_dep_txns.py", "created": 1543821893.549048, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 549.0479469299316, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": null, "relativeCreated": 48792.150020599365, "module": "test_rest_api_dep_txns", "msg": "Submitting batches to the handlers", "asctime": "2018-12-03 12:54:53"}, {"lineno": 687, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.0764737, "processName": 
"MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 76.47371292114258, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": null, "relativeCreated": 50319.575786590576, "module": "test_rest_api_dep_txns", "msg": "Verifying the responses status", "asctime": "2018-12-03 12:54:55"}, {"lineno": 695, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.076688, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 76.68805122375488, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": null, "relativeCreated": 50319.79012489319, "module": "test_rest_api_dep_txns", "msg": "Batch with id 755e812e3b09a8b4cafab088bc93e44bc50f00a1b44905a5a6ee09e75e80e0173948b4cf6ad8efc5ff118427dfe25b8219ba38d5833321d30e0826b6fad519e1 is successfully got committed", "asctime": "2018-12-03 12:54:55"}, {"lineno": 695, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.0768154, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 76.81536674499512, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": null, "relativeCreated": 50319.91744041443, "module": "test_rest_api_dep_txns", "msg": "Batch with id 
755e812e3b09a8b4cafab088bc93e44bc50f00a1b44905a5a6ee09e75e80e0173948b4cf6ad8efc5ff118427dfe25b8219ba38d5833321d30e0826b6fad519e1 is successfully got committed", "asctime": "2018-12-03 12:54:55"}, {"lineno": 705, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.077211, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 77.21090316772461, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": null, "relativeCreated": 50320.31297683716, "module": "test_rest_api_dep_txns", "msg": "Verifying the txn details listed under the dependencies", "asctime": "2018-12-03 12:54:55"}, {"lineno": 241, "filename": "utils.py", "created": 1543821895.589809, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "utils", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/utils.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 589.8089408874512, "funcName": "check_for_consensus", "exc_text": null, "relativeCreated": 50832.911014556885, "module": "utils", "msg": "Checking Consensus on block number 1", "asctime": "2018-12-03 12:54:55"}]}, "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_single_set_dec_same_txns", "path": "dep_txns/test_rest_api_dep_txns.py", "keywords": ["dependent", "sixth", "dep_txns/test_rest_api_dep_txns.py", "run", "test_rest_api_single_set_dec_same_txns", "api_test", "()", "TestPostTansactionDependencies"], "domain": "TestPostTansactionDependencies.test_rest_api_single_set_dec_same_txns", "teardown": {"outcome": "passed", "duration": 0.00031948089599609375, "log": []}}, {"lineno": 719, 
"outcome": "passed", "setup": {"outcome": "passed", "duration": 0.0003497600555419922, "log": []}, "call": {"outcome": "passed", "duration": 6.094623565673828, "log": [{"lineno": 727, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.613445, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 613.4450435638428, "funcName": "test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 50856.54711723328, "module": "test_rest_api_dep_txns", "msg": "Starting test for batch post", "asctime": "2018-12-03 12:54:55"}, {"lineno": 743, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.6196325, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 619.6324825286865, "funcName": "test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 50862.73455619812, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with set operations", "asctime": "2018-12-03 12:54:55"}, {"lineno": 757, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.6207347, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 620.734691619873, "funcName": 
"test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 50863.83676528931, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations with dependent transactions as first transaction", "asctime": "2018-12-03 12:54:55"}, {"lineno": 770, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.6211808, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 621.1807727813721, "funcName": "test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 50864.282846450806, "module": "test_rest_api_dep_txns", "msg": "Creating batches for transactions 1trn/batch", "asctime": "2018-12-03 12:54:55"}, {"lineno": 785, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.6218703, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 621.8702793121338, "funcName": "test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 50864.97235298157, "module": "test_rest_api_dep_txns", "msg": "Submitting batches to the handlers", "asctime": "2018-12-03 12:54:55"}, {"lineno": 796, "filename": "test_rest_api_dep_txns.py", "created": 1543821901.1709635, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": 
"INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 170.96352577209473, "funcName": "test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 56414.06559944153, "module": "test_rest_api_dep_txns", "msg": "Verifying the responses status", "asctime": "2018-12-03 12:55:01"}, {"lineno": 804, "filename": "test_rest_api_dep_txns.py", "created": 1543821901.171194, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 171.19407653808594, "funcName": "test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 56414.29615020752, "module": "test_rest_api_dep_txns", "msg": "Batch with id 5f0d9ec090f07be2ebd13bbd8d0086970812a3d0b7863dd0dec3c2708d5eb4e650ebcd55471240453a37bf3c6e87359e2394992d8cc209d97bd087a34ff3fd49 is successfully got committed", "asctime": "2018-12-03 12:55:01"}, {"lineno": 813, "filename": "test_rest_api_dep_txns.py", "created": 1543821901.1713386, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 171.33855819702148, "funcName": "test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 56414.440631866455, "module": "test_rest_api_dep_txns", "msg": "Batch with id 5f0d9ec090f07be2ebd13bbd8d0086970812a3d0b7863dd0dec3c2708d5eb4e650ebcd55471240453a37bf3c6e87359e2394992d8cc209d97bd087a34ff3fd49 is not committed. 
Status is UNKNOWN", "asctime": "2018-12-03 12:55:01"}, {"lineno": 814, "filename": "test_rest_api_dep_txns.py", "created": 1543821901.171468, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 171.46801948547363, "funcName": "test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 56414.57009315491, "module": "test_rest_api_dep_txns", "msg": "Verifying the txn details listed under the dependencies", "asctime": "2018-12-03 12:55:01"}, {"lineno": 241, "filename": "utils.py", "created": 1543821901.6870008, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "utils", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/utils.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 687.0007514953613, "funcName": "check_for_consensus", "exc_text": null, "relativeCreated": 56930.102825164795, "module": "utils", "msg": "Checking Consensus on block number 1", "asctime": "2018-12-03 12:55:01"}]}, "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_single_set_dec_invalid_txns_id", "path": "dep_txns/test_rest_api_dep_txns.py", "keywords": ["dependent", "sixth", "dep_txns/test_rest_api_dep_txns.py", "run", "api_test", "()", "test_rest_api_single_set_dec_invalid_txns_id", "TestPostTansactionDependencies"], "domain": "TestPostTansactionDependencies.test_rest_api_single_set_dec_invalid_txns_id", "teardown": {"outcome": "passed", "duration": 0.0005271434783935547, "log": []}}], "duration": 56.4007306098938, "exitcode": 0, "summary": {"passed": 7, "total": 7}, "created": 1543821901.7112274, "root": 
"/home/test/NOV30/sawtooth-core/rest_api/tests/api_test", "collectors": [{"outcome": "passed", "children": [{"type": "Module", "nodeid": "dep_txns/test_rest_api_dep_txns.py"}], "nodeid": ""}, {"outcome": "passed", "children": [{"lineno": 80, "path": "dep_txns/test_rest_api_dep_txns.py", "domain": "TestPostTansactionDependencies.test_set_inc_txn_dep", "type": "Function", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_set_inc_txn_dep"}, {"lineno": 178, "path": "dep_txns/test_rest_api_dep_txns.py", "domain": "TestPostTansactionDependencies.test_rest_api_double_dep_txns", "type": "Function", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_double_dep_txns"}, {"lineno": 288, "path": "dep_txns/test_rest_api_dep_txns.py", "domain": "TestPostTansactionDependencies.test_single_set_dep_txns", "type": "Function", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_single_set_dep_txns"}, {"lineno": 399, "path": "dep_txns/test_rest_api_dep_txns.py", "domain": "TestPostTansactionDependencies.test_rest_api_single_set_dec_txns", "type": "Function", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_single_set_dec_txns"}, {"lineno": 500, "path": "dep_txns/test_rest_api_dep_txns.py", "domain": "TestPostTansactionDependencies.test_rest_api_set_inc_inc_Txns_Dep", "type": "Function", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_set_inc_inc_Txns_Dep"}, {"lineno": 610, "path": "dep_txns/test_rest_api_dep_txns.py", "domain": "TestPostTansactionDependencies.test_rest_api_single_set_dec_same_txns", "type": "Function", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_single_set_dec_same_txns"}, {"lineno": 719, "path": "dep_txns/test_rest_api_dep_txns.py", "domain": 
"TestPostTansactionDependencies.test_rest_api_single_set_dec_invalid_txns_id", "type": "Function", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_single_set_dec_invalid_txns_id"}], "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()"}, {"outcome": "passed", "children": [{"type": "Instance", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()"}], "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies"}, {"outcome": "passed", "children": [{"type": "Class", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies"}], "nodeid": "dep_txns/test_rest_api_dep_txns.py"}], "environment": {"Python": "3.5.2", "Plugins": {"ordering": "0.6", "json-report": "0.7.0", "aiohttp": "0.3.0", "metadata": "1.7.0"}, "Platform": "Linux-4.4.0-116-generic-x86_64-with-Ubuntu-16.04-xenial", "Packages": {"pluggy": "0.8.0", "pytest": "3.10.0", "py": "1.7.0"}}} \ No newline at end of file diff --git a/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py new file mode 100644 index 0000000000..cdb93d0fa8 --- /dev/null +++ b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py @@ -0,0 +1,830 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ +import pytest +import logging +import json +import aiohttp +import asyncio +import datetime +import random + +from google.protobuf.json_format import MessageToDict + +from sawtooth_rest_api.protobuf.batch_pb2 import BatchList + +from utils import post_batch, get_state_list , get_blocks , get_transactions, \ + get_batches , get_state_address, check_for_consensus,\ + _get_node_list, _get_node_chains, post_batch_no_endpoint,\ + get_reciepts, _get_client_address, state_count, get_batch_id, get_transaction_id + +from utils import _get_client_address + +from payload import get_signer, create_intkey_transaction, create_batch,\ + create_intkey_same_transaction, \ + create_intkey_transaction_dep, random_word_list + +from base import RestApiBaseTest + +from fixtures import setup_empty_trxs_batch, setup_invalid_txns,setup_invalid_txns_min,\ + setup_invalid_txns_max, setup_valinv_txns, setup_invval_txns, \ + setup_same_txns, setup_valid_txns, setup_invalid_txns_fn,\ + setup_invalid_invaddr + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +WAIT=300 + +BLOCK_TO_CHECK_CONSENSUS = 1 + +pytestmark = [pytest.mark.dependent,pytest.mark.sixth] + +async def async_fetch_url(url, session,params=None): + try: + async with session.get(url) as response: + return await response.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + +async def async_post_batch(url, session, data, params=None,headers=None): + if headers: + headers=headers + else: + headers = {'Content-Type': 'application/octet-stream'} + try: + async with session.post(url,data=data,headers=headers) as response: + data = await response.json() + if 'link' in data: + link = data['link'] + return await async_fetch_url('{}&wait={}'.format(link, WAIT),session) + else: + return data + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + +#testing the 
# Tests for transaction dependencies submitted through POST /batches.
class TestPostTansactionDependencies(RestApiBaseTest):
    """End-to-end tests for intkey transactions linked via the
    ``dependencies`` field of the transaction header.

    Each test builds a short chain of dependent transactions, wraps them
    one-transaction-per-batch, submits them through POST /batches, logs the
    commit status of every batch, optionally verifies the dependency lists
    via GET /transactions/{id}, and finally asserts that all validators
    reached consensus on block ``BLOCK_TO_CHECK_CONSENSUS``.
    """

    @staticmethod
    def _record_txn_id(txn, id_list):
        """Append *txn*'s header signature (its transaction id) to *id_list*.

        Unlike the original copy-paste loops this records each transaction
        exactly once, so dependency lists contain no duplicate ids.
        """
        data = MessageToDict(
            txn,
            including_default_value_fields=True,
            preserving_proto_field_name=True)
        id_list.append(data['header_signature'])

    @staticmethod
    def _make_batches(txns, signer, batch_id_list):
        """Wrap each txn in its own batch (1 txn/batch), record the batch
        ids in *batch_id_list*, and return the serialized BatchList payloads
        ready for POST /batches."""
        batches = [create_batch([txn], signer) for txn in txns]
        for batch in batches:
            data = MessageToDict(
                batch,
                including_default_value_fields=True,
                preserving_proto_field_name=True)
            batch_id_list.append(data['header_signature'])
        return [BatchList(batches=[batch]).SerializeToString()
                for batch in batches]

    @staticmethod
    async def _submit(url, post_batch_list):
        """POST every serialized batch concurrently; return the responses.

        Returns an empty list (instead of leaving the name unbound, as the
        original code did) when the REST API is unreachable.
        """
        responses = []
        try:
            async with aiohttp.ClientSession() as session:
                tasks = [
                    asyncio.ensure_future(
                        async_post_batch(url, session, data=batch))
                    for batch in post_batch_list]
                responses = await asyncio.gather(*tasks)
        except aiohttp.client_exceptions.ClientResponseError:
            LOGGER.info("Rest Api is Unreachable")
        return responses

    @staticmethod
    def _log_statuses(responses):
        """Log the commit status the REST API reported for each batch."""
        LOGGER.info("Verifying the responses status")
        for response in responses:
            batch_id = response['data'][0]['id']
            status = response['data'][0]['status']
            if status == 'COMMITTED':
                LOGGER.info(
                    'Batch with id %s is successfully got committed',
                    batch_id)
            elif status == 'INVALID':
                LOGGER.info(
                    'Batch with id %s is not committed. Status is INVALID',
                    batch_id)
            elif status == 'UNKNOWN':
                LOGGER.info(
                    'Batch with id %s is not committed. Status is UNKNOWN',
                    batch_id)

    def _verify_dependencies(self, expected_trxn_ids):
        """Fetch every txn and log whether its dependency list is populated.

        Uses ``assert_check_txn_dependency`` from the base class.
        """
        LOGGER.info(
            "Verifying the txn details listed under the dependencies")
        trxn_ids = list(set(expected_trxn_ids))
        for txn_id in trxn_ids:
            txn_details = get_transaction_id(txn_id)
            if self.assert_check_txn_dependency(txn_details, trxn_ids):
                LOGGER.info(
                    "Successfully got the dependencies for transaction id %s",
                    txn_id)
            else:
                LOGGER.info(
                    "The dependencies for transaction id is blank %s", txn_id)

    @staticmethod
    def _assert_consensus():
        """Assert every validator agrees on block BLOCK_TO_CHECK_CONSENSUS."""
        node_list = _get_node_list()
        chains = _get_node_chains(node_list)
        assert check_for_consensus(chains, BLOCK_TO_CHECK_CONSENSUS)

    async def test_set_inc_txn_dep(self, setup):
        """1. Create a 'set' transaction.
        2. Create an 'inc' transaction that depends on the first.
        3. Batch (1 txn/batch) and POST /batches.
        4. Verify dependencies and consensus.
        """
        LOGGER.info('Starting test for batch post')
        signer = get_signer()
        expected_trxn_ids = []
        expected_batch_ids = []
        url = '{}/batches'.format(_get_client_address())
        name = random.choice("abcdefghijklmnopqrstuv")

        LOGGER.info("Creating intkey transactions with set operations")
        txns = [create_intkey_transaction_dep("set", [], name, 50, signer)]
        self._record_txn_id(txns[-1], expected_trxn_ids)

        LOGGER.info("Creating intkey transactions with inc operations "
                    "with dependent transactions as first transaction")
        txns.append(create_intkey_transaction_dep(
            "inc", list(expected_trxn_ids), name, 20, signer))
        self._record_txn_id(txns[-1], expected_trxn_ids)

        LOGGER.info("Creating batches for transactions 1trn/batch")
        post_batch_list = self._make_batches(txns, signer, expected_batch_ids)

        LOGGER.info("Submitting batches to the handlers")
        responses = await self._submit(url, post_batch_list)

        self._log_statuses(responses)
        self._verify_dependencies(expected_trxn_ids)
        self._assert_consensus()

    async def test_rest_api_double_dep_txns(self, setup):
        """1. Create a 'set' transaction.
        2. Create an 'inc' transaction depending on the first.
        3. Create a 'dec' transaction depending on the first and second.
        4. Batch and POST /batches; verify dependencies and consensus.
        """
        LOGGER.info('Starting test for batch post')
        signer = get_signer()
        expected_trxn_ids = []
        expected_batch_ids = []
        url = '{}/batches'.format(_get_client_address())
        name = random.choice("abcdefghijklmnopqrstuv")

        LOGGER.info("Creating intkey transactions with set operations")
        txns = [create_intkey_transaction_dep("set", [], name, 50, signer)]
        self._record_txn_id(txns[-1], expected_trxn_ids)

        LOGGER.info("Creating intkey transactions with inc operations "
                    "with dependent transactions as first transaction")
        txns.append(create_intkey_transaction_dep(
            "inc", list(expected_trxn_ids), name, 20, signer))
        self._record_txn_id(txns[-1], expected_trxn_ids)

        LOGGER.info("Creating intkey transactions with dec operations with "
                    "dependent transactions as first and second transaction")
        txns.append(create_intkey_transaction_dep(
            "dec", list(expected_trxn_ids), name, 50, signer))
        self._record_txn_id(txns[-1], expected_trxn_ids)

        LOGGER.info("Creating batches for transactions 1trn/batch")
        post_batch_list = self._make_batches(txns, signer, expected_batch_ids)

        LOGGER.info("Submitting batches to the handlers")
        responses = await self._submit(url, post_batch_list)

        self._log_statuses(responses)
        self._verify_dependencies(expected_trxn_ids)
        self._assert_consensus()

    async def test_single_set_dep_txns(self, setup):
        """1. Create a 'set' transaction on a random-word key.
        2. Create a second 'set' transaction (different key) depending on
           the first.
        3. Batch and POST /batches; verify dependencies and consensus.
        """
        LOGGER.info('Starting test for batch post')
        signer = get_signer()
        expected_trxn_ids = []
        expected_batch_ids = []
        url = '{}/batches'.format(_get_client_address())
        name = random.choice(random_word_list(100))

        LOGGER.info("Creating intkey transactions with set operations")
        txns = [create_intkey_transaction_dep("set", [], name, 50, signer)]
        self._record_txn_id(txns[-1], expected_trxn_ids)

        LOGGER.info("Creating intkey transactions with set operations "
                    "with dependent transactions as first transaction")
        # The second txn targets a fresh random key, as in the original test.
        name = random.choice(random_word_list(100))
        txns.append(create_intkey_transaction_dep(
            "set", list(expected_trxn_ids), name, 20, signer))
        self._record_txn_id(txns[-1], expected_trxn_ids)

        LOGGER.info("Creating batches for transactions 1trn/batch")
        post_batch_list = self._make_batches(txns, signer, expected_batch_ids)

        LOGGER.info("Submitting batches to the handlers")
        responses = await self._submit(url, post_batch_list)

        self._log_statuses(responses)
        self._verify_dependencies(expected_trxn_ids)
        self._assert_consensus()

    async def test_rest_api_single_set_dec_txns(self, setup):
        """1. Create a 'set' transaction (value 50).
        2. Create a 'dec' transaction (value 60, i.e. more than was set)
           depending on the first.
        3. Batch and POST /batches; log statuses and verify consensus.
        """
        LOGGER.info('Starting test for batch post')
        signer = get_signer()
        expected_trxn_ids = []
        expected_batch_ids = []
        url = '{}/batches'.format(_get_client_address())
        name = random.choice(random_word_list(100))

        LOGGER.info("Creating intkey transactions with set operations")
        txns = [create_intkey_transaction_dep("set", [], name, 50, signer)]
        self._record_txn_id(txns[-1], expected_trxn_ids)

        LOGGER.info("Creating intkey transactions with dec operations "
                    "with dependent transactions as first transaction")
        txns.append(create_intkey_transaction_dep(
            "dec", list(expected_trxn_ids), name, 60, signer))
        self._record_txn_id(txns[-1], expected_trxn_ids)

        LOGGER.info("Creating batches for transactions 1trn/batch")
        post_batch_list = self._make_batches(txns, signer, expected_batch_ids)

        LOGGER.info("Submitting batches to the handlers")
        responses = await self._submit(url, post_batch_list)

        self._log_statuses(responses)
        # Dependency verification was disabled in the original test;
        # only consensus is asserted here.
        self._assert_consensus()

    async def test_rest_api_set_inc_inc_Txns_Dep(self, setup):
        """1. Create a 'set' transaction.
        2. Create an 'inc' transaction depending on the first.
        3. Create another 'inc' transaction depending on both.
        4. Batch and POST /batches; verify dependencies and consensus.
        """
        LOGGER.info('Starting test for batch post')
        signer = get_signer()
        expected_trxn_ids = []
        expected_batch_ids = []
        url = '{}/batches'.format(_get_client_address())
        name = random.choice("abcdefghijklmnopqrstuv")

        LOGGER.info("Creating intkey transactions with set operations")
        txns = [create_intkey_transaction_dep("set", [], name, 50, signer)]
        self._record_txn_id(txns[-1], expected_trxn_ids)

        LOGGER.info("Creating intkey transactions with inc operations "
                    "with dependent transactions as first transaction")
        txns.append(create_intkey_transaction_dep(
            "inc", list(expected_trxn_ids), name, 20, signer))
        self._record_txn_id(txns[-1], expected_trxn_ids)

        LOGGER.info("Creating intkey transactions with inc operations "
                    "with dependent transactions as first and second "
                    "transaction")
        # NOTE: the original built this dependency list from list(set(...)),
        # which has nondeterministic order; a plain ordered copy is used
        # instead so the txn header is reproducible.
        txns.append(create_intkey_transaction_dep(
            "inc", list(expected_trxn_ids), name, 50, signer))
        self._record_txn_id(txns[-1], expected_trxn_ids)

        LOGGER.info("Creating batches for transactions 1trn/batch")
        post_batch_list = self._make_batches(txns, signer, expected_batch_ids)

        LOGGER.info("Submitting batches to the handlers")
        responses = await self._submit(url, post_batch_list)

        self._log_statuses(responses)
        self._verify_dependencies(expected_trxn_ids)
        self._assert_consensus()

    async def test_rest_api_single_set_dec_same_txns(self, setup):
        """1. Create a 'set' transaction (value 50).
        2. Create a 'dec' transaction for the same value depending on the
           first.
        3. Batch and POST /batches; log statuses and verify consensus.
        """
        LOGGER.info('Starting test for batch post')
        signer = get_signer()
        expected_trxn_ids = []
        expected_batch_ids = []
        url = '{}/batches'.format(_get_client_address())
        name = random.choice(random_word_list(100))

        LOGGER.info("Creating intkey transactions with set operations")
        txns = [create_intkey_transaction_dep("set", [], name, 50, signer)]
        self._record_txn_id(txns[-1], expected_trxn_ids)

        LOGGER.info("Creating intkey transactions with dec operations "
                    "with dependent transactions as first transaction")
        txns.append(create_intkey_transaction_dep(
            "dec", list(expected_trxn_ids), name, 50, signer))
        self._record_txn_id(txns[-1], expected_trxn_ids)

        LOGGER.info("Creating batches for transactions 1trn/batch")
        post_batch_list = self._make_batches(txns, signer, expected_batch_ids)

        LOGGER.info("Submitting batches to the handlers")
        responses = await self._submit(url, post_batch_list)

        self._log_statuses(responses)
        # Dependency verification was commented out in the original test;
        # only consensus is asserted here.
        self._assert_consensus()

    async def test_rest_api_single_set_dec_invalid_txns_id(self, setup):
        """1. Create a 'set' transaction.
        2. Create a 'dec' transaction whose dependency list contains an
           invalid transaction id ('bbbbbb').
        3. Batch and POST /batches; log statuses and verify consensus.
        """
        LOGGER.info('Starting test for batch post')
        signer = get_signer()
        expected_trxn_ids = []
        expected_batch_ids = []
        url = '{}/batches'.format(_get_client_address())
        name = random.choice(random_word_list(100))

        LOGGER.info("Creating intkey transactions with set operations")
        txns = [create_intkey_transaction_dep("set", [], name, 50, signer)]
        self._record_txn_id(txns[-1], expected_trxn_ids)

        LOGGER.info("Creating intkey transactions with dec operations "
                    "with an invalid dependency id")
        txns.append(create_intkey_transaction_dep(
            "dec", [u'bbbbbb'], name, 50, signer))
        self._record_txn_id(txns[-1], expected_trxn_ids)

        LOGGER.info("Creating batches for transactions 1trn/batch")
        post_batch_list = self._make_batches(txns, signer, expected_batch_ids)

        LOGGER.info("Submitting batches to the handlers")
        responses = await self._submit(url, post_batch_list)

        self._log_statuses(responses)
        # Dependency verification was commented out in the original test;
        # only consensus is asserted here.
        self._assert_consensus()
def create_intkey_transaction_dep(verb, deps, name, value, signer):
    """Build an intkey Transaction whose header lists *deps* as dependencies.

    Args:
        verb: intkey operation ('set', 'inc' or 'dec').
        deps: list of transaction-id hex strings this txn depends on
            (empty list for no dependencies).
        name: intkey key to operate on.
        value: integer operand for the verb.
        signer: signer used for both the transaction and batcher keys.

    Returns:
        A signed ``Transaction`` protobuf message.
    """
    payload = IntKeyPayload(verb=verb, name=name, value=value)
    addr = make_intkey_address(name)
    return get_txns_data(addr, deps, payload, signer)


def get_txns_data(addr, deps, payload, signer):
    """Assemble and sign an intkey Transaction for *payload* at *addr*.

    The header declares *addr* as both input and output, carries *deps* as
    the dependency list, and uses the signer's public key as the batcher
    key (so the same signer must create the enclosing batch).
    """
    header = TransactionHeader(
        signer_public_key=signer.get_public_key().as_hex(),
        family_name='intkey',
        family_version='1.0',
        inputs=[addr],
        outputs=[addr],
        dependencies=deps,
        payload_sha512=payload.sha512(),
        batcher_public_key=signer.get_public_key().as_hex())

    header_bytes = header.SerializeToString()
    signature = signer.sign(header_bytes)

    return Transaction(
        header=header_bytes,
        payload=payload.to_cbor(),
        header_signature=signature)


def _get_client_address():
    """Return the REST API base URL built from the loopback interface's IP.

    Parses ``ifconfig lo`` output for the inet address and appends the
    default REST API port (8008). Assumes a Linux host where ``ifconfig``
    prints ``inet addr:`` — TODO confirm on the target test hosts, since
    modern distributions may emit ``inet`` without ``addr:``.
    """
    command = ("ifconfig lo | grep 'inet addr' | cut -d ':' -f 2 "
               "| cut -d ' ' -f 1")
    node_ip = subprocess.check_output(
        command, shell=True).decode().strip().replace("'", '"')
    return 'http://' + node_ip + ':8008'
deleted file mode 100644 index a37d2ee5ec..0000000000 --- a/rest_api/tests/api_test/dep_txns/pytest-logs.txt +++ /dev/null @@ -1,95 +0,0 @@ -2018-12-03 12:54:05 INFO Starting Setup method for posting batches using intkey as payload -2018-12-03 12:54:05 INFO Creating intkey transactions with set operations -2018-12-03 12:54:05 INFO Creating batches for transactions 1trn/batch -2018-12-03 12:54:05 INFO Gathering expected data before submission of batches -2018-12-03 12:54:05 INFO Calculating the expected count of batches, transactions, state -2018-12-03 12:54:05 INFO Calculating the initial count of batches,transactions, state before submission of batches -2018-12-03 12:54:28 INFO Expected transaction ids -2018-12-03 12:54:28 INFO Expected batch ids -2018-12-03 12:54:28 INFO Expected transaction ids -2018-12-03 12:54:28 INFO Expected batch ids -2018-12-03 12:54:29 INFO Expected batch ids -2018-12-03 12:54:31 INFO Starting test for batch post -2018-12-03 12:54:31 INFO Creating intkey transactions with set operations -2018-12-03 12:54:31 INFO Creating intkey transactions with inc operations with dependent transactions as first transaction -2018-12-03 12:54:31 INFO Creating batches for transactions 1trn/batch -2018-12-03 12:54:31 INFO Submitting batches to the handlers -2018-12-03 12:54:34 INFO Verifying the responses status -2018-12-03 12:54:34 INFO Batch with id 1966fa42a905f16c77e0ee9be1de1427c568118a301e917ae3d945ea2ee05ca26485f9854d773123597d222c1ac5a9366bd8a737c24ad8a531383eb1a54d5c0d is successfully got committed -2018-12-03 12:54:34 INFO Batch with id 950ea7bd67cf674da99d5404f77ed871d6ec5f3d55c2c257d6017f45340ededb25b7f758a12946f044fcd705adcf6f9735668f9353079cd70bd6052bd9868dbc is successfully got committed -2018-12-03 12:54:34 INFO Verifying the txn details listed under the dependencies -2018-12-03 12:54:34 INFO Successfully got the dependencies for transaction id 
63a943c0f60dc30a52ae4b3047dc33d718c032cff62d9f9fe630436a36adc1ec6bf4f8667ec161974cebb51447761fcf07f4b12caeaac38900361244ffe1b67c -2018-12-03 12:54:34 INFO The dependencies for transaction id is blank0efffa0d8e69a3c82198674a59b3e758424d43f16d7fa0b06f8b44fd930cfdd70f7aec164a96464c0e3ca96437719598748933d699c48143ef40121877c678bd -2018-12-03 12:54:35 INFO Checking Consensus on block number 1 -2018-12-03 12:54:35 INFO Starting test for batch post -2018-12-03 12:54:35 INFO Creating intkey transactions with set operations -2018-12-03 12:54:35 INFO Creating intkey transactions with inc operations with dependent transactions as first transaction -2018-12-03 12:54:35 INFO Creating intkey transactions with inc operations with dependent transactions as first and second transaction -2018-12-03 12:54:35 INFO Creating batches for transactions 1trn/batch -2018-12-03 12:54:35 INFO Submitting batches to the handlers -2018-12-03 12:54:41 INFO Verifying the responses status -2018-12-03 12:54:41 INFO Batch with id b9ebf1fcee5cb0f528a125ecf1dcb111768d11a2d369dd36c0ba95db1d00ed880e14e58e9fca6d89b27eac32c86055ff06268f4f66542d3db2aaba0aba86f4fb is successfully got committed -2018-12-03 12:54:41 INFO Batch with id 80141ee13cc8dd00bcfbd7b56ff755e8e5d5293349b0acc95046ed6e34b04a1935b44b5de974fda094c3768d9fd018f201047648a428575b1ba62e6c97248a09 is successfully got committed -2018-12-03 12:54:41 INFO Batch with id efc85981ba5b7258fcd098c6b58fe249b4d601ce9062a0c8a87553d49be1761a6155767950e706ba75af4e84d036ebcdadf369b64f9d87655f75b27c98bff958 is successfully got committed -2018-12-03 12:54:41 INFO Verifying the txn details listed under the dependencies -2018-12-03 12:54:41 INFO Successfully got the dependencies for transaction id ffe4cd9297945ce45b5796f27daa669768c76c862d93d4bbe52379ec276249a30c220267a536747ef94793462009cebb28a22b834de82a43043986d40de9de80 -2018-12-03 12:54:41 INFO The dependencies for transaction id is 
blank89bf5d216e142fe29b67a790b06ea7cbfcaf08064054437305bec0f18ea164003bcb9dd36fbc84a67817690cd7445912763e78da8c1bbc866dd6e0b60237cb80 -2018-12-03 12:54:41 INFO Successfully got the dependencies for transaction id 3a20d74799ad6facd6ffbd63feb816e289d453abaad8563521012f901f093098151cb403c1558b03e8d485e83055402fbcc602deffe091ab80477677a4237f9b -2018-12-03 12:54:42 INFO Checking Consensus on block number 1 -2018-12-03 12:54:42 INFO Starting test for batch post -2018-12-03 12:54:42 INFO Creating intkey transactions with set operations -2018-12-03 12:54:42 INFO Creating intkey transactions with inc operations with dependent transactions as first transaction -2018-12-03 12:54:42 INFO Creating batches for transactions 1trn/batch -2018-12-03 12:54:42 INFO Submitting batches to the handlers -2018-12-03 12:54:42 INFO Verifying the responses status -2018-12-03 12:54:42 INFO Batch with id 12fd98d7ac23ae15d22336148c76c4684f483551da8cb07d42fbe95aefb8055f2dd6ffe941db3758c76ee0f17cf8f88d98ea04b2da7c904899860a779a443ad2 is successfully got committed -2018-12-03 12:54:42 INFO Batch with id cf788196417f0342de8b504362c0e8994dbf011675990b1b1d497b9b365684da3a0ba15aa99d38796c3de3276b947d190f0d32feb6859be2362df7cc6cc380f8 is successfully got committed -2018-12-03 12:54:42 INFO Verifying the txn details listed under the dependencies -2018-12-03 12:54:42 INFO Successfully got the dependencies for transaction id 3f5411c7c74f985910ff0c3c144cf9bbb921b71ad2e49aa17197440b88fa85742344ce33944a56d33d8861c746b8a23ea9a4bc7a4f67c1078847ea60a0e81a6a -2018-12-03 12:54:42 INFO The dependencies for transaction id is blankade2fad6cabaa598b5bea4656a172119e6c788cde37eafad2b9c69916e67245b677f143c2d22935935d49f55615a6be467d264b61ac4f14e02da88094b3d582c -2018-12-03 12:54:43 INFO Checking Consensus on block number 1 -2018-12-03 12:54:43 INFO Starting test for batch post -2018-12-03 12:54:43 INFO Creating intkey transactions with set operations -2018-12-03 12:54:43 INFO Creating intkey transactions with inc 
operations with dependent transactions as first transaction -2018-12-03 12:54:43 INFO Creating batches for transactions 1trn/batch -2018-12-03 12:54:43 INFO Submitting batches to the handlers -2018-12-03 12:54:46 INFO Verifying the responses status -2018-12-03 12:54:46 INFO Batch with id ec4bb017adbc1327520947ab44f0e31c4b3e8a53acc642946746fc970097b9aa0dcf3f63f90fc94ab7a0cba686a0c07bfb2fb6084a96bfa964bb0f808ff47fd6 is successfully got committed -2018-12-03 12:54:46 INFO Batch with id ec4bb017adbc1327520947ab44f0e31c4b3e8a53acc642946746fc970097b9aa0dcf3f63f90fc94ab7a0cba686a0c07bfb2fb6084a96bfa964bb0f808ff47fd6 is not committed. Status is INVALID -2018-12-03 12:54:46 INFO Verifying the txn details listed under the dependencies -2018-12-03 12:54:47 INFO Checking Consensus on block number 1 -2018-12-03 12:54:47 INFO Starting test for batch post -2018-12-03 12:54:47 INFO Creating intkey transactions with set operations -2018-12-03 12:54:47 INFO Creating intkey transactions with inc operations with dependent transactions as first transaction -2018-12-03 12:54:47 INFO Creating intkey transactions with inc operations with dependent transactions as second transaction -2018-12-03 12:54:47 INFO Creating batches for transactions 1trn/batch -2018-12-03 12:54:47 INFO Submitting batches to the handlers -2018-12-03 12:54:52 INFO Verifying the responses status -2018-12-03 12:54:52 INFO Batch with id 387e6eba49bcfeb449f5058f1c56455191ed38dea8ef104ad952bb37239044014dfa1fee27eb78d10773cedf833763d72d921b6a098fa273879b6e5b638ec926 is successfully got committed -2018-12-03 12:54:52 INFO Batch with id 4a716cedbf703e5348ff8355e5fe726851f35f8d07b01b433330c2889717edac146962b520cfd0b286fb4042d057c2374490dba922624a73f7a404da6e5ad177 is successfully got committed -2018-12-03 12:54:52 INFO Batch with id c85bad47724d9f00f360291fa854034f6d4dbcb8e2ad682a75b20ba3f38a592a75ba6d4dc60708956f334618ce3105fbe4ffea341c98404ab8c3603d81298ea8 is successfully got committed -2018-12-03 12:54:52 INFO Verifying 
the txn details listed under the dependencies -2018-12-03 12:54:52 INFO Successfully got the dependencies for transaction id 699d7d97a2928f33ef1b21184b9d432baa22b506b38521f435d5872092667f784e4d7d637d21d5e9b3bb5d62622fba76876d85417661a54457889f45de8d46f1 -2018-12-03 12:54:52 INFO Successfully got the dependencies for transaction id 19613c531a09892e9ffc29d27347ca06343bc1f7ac6e9faa9db96f4e6ee489b426e9c3c35ca2b4c8da39b97602b963db367f411b5050ccc67010f84b011dd2ca -2018-12-03 12:54:52 INFO The dependencies for transaction id is blank07b20a8fe18b1a6f95b676de6529f70a7c640b07296e64bb38417d952550a1e060122d602a93794b614c577e2500e7ed0946687f24f0c432ba2ae9b7efc38311 -2018-12-03 12:54:53 INFO Checking Consensus on block number 1 -2018-12-03 12:54:53 INFO Starting test for batch post -2018-12-03 12:54:53 INFO Creating intkey transactions with set operations -2018-12-03 12:54:53 INFO Creating intkey transactions with inc operations with dependent transactions as first transaction -2018-12-03 12:54:53 INFO Creating batches for transactions 1trn/batch -2018-12-03 12:54:53 INFO Submitting batches to the handlers -2018-12-03 12:54:55 INFO Verifying the responses status -2018-12-03 12:54:55 INFO Batch with id 755e812e3b09a8b4cafab088bc93e44bc50f00a1b44905a5a6ee09e75e80e0173948b4cf6ad8efc5ff118427dfe25b8219ba38d5833321d30e0826b6fad519e1 is successfully got committed -2018-12-03 12:54:55 INFO Batch with id 755e812e3b09a8b4cafab088bc93e44bc50f00a1b44905a5a6ee09e75e80e0173948b4cf6ad8efc5ff118427dfe25b8219ba38d5833321d30e0826b6fad519e1 is successfully got committed -2018-12-03 12:54:55 INFO Verifying the txn details listed under the dependencies -2018-12-03 12:54:55 INFO Checking Consensus on block number 1 -2018-12-03 12:54:55 INFO Starting test for batch post -2018-12-03 12:54:55 INFO Creating intkey transactions with set operations -2018-12-03 12:54:55 INFO Creating intkey transactions with inc operations with dependent transactions as first transaction -2018-12-03 12:54:55 INFO Creating 
batches for transactions 1trn/batch -2018-12-03 12:54:55 INFO Submitting batches to the handlers -2018-12-03 12:55:01 INFO Verifying the responses status -2018-12-03 12:55:01 INFO Batch with id 5f0d9ec090f07be2ebd13bbd8d0086970812a3d0b7863dd0dec3c2708d5eb4e650ebcd55471240453a37bf3c6e87359e2394992d8cc209d97bd087a34ff3fd49 is successfully got committed -2018-12-03 12:55:01 INFO Batch with id 5f0d9ec090f07be2ebd13bbd8d0086970812a3d0b7863dd0dec3c2708d5eb4e650ebcd55471240453a37bf3c6e87359e2394992d8cc209d97bd087a34ff3fd49 is not committed. Status is UNKNOWN -2018-12-03 12:55:01 INFO Verifying the txn details listed under the dependencies -2018-12-03 12:55:01 INFO Checking Consensus on block number 1 From 370cab06b8ca9beca390edff531ba4a564dfea95 Mon Sep 17 00:00:00 2001 From: sandeeplandt <37058842+sandeeplandt@users.noreply.github.com> Date: Mon, 3 Dec 2018 13:10:21 +0530 Subject: [PATCH 51/64] Delete report.json --- rest_api/tests/api_test/dep_txns/report.json | 1 - 1 file changed, 1 deletion(-) delete mode 100644 rest_api/tests/api_test/dep_txns/report.json diff --git a/rest_api/tests/api_test/dep_txns/report.json b/rest_api/tests/api_test/dep_txns/report.json deleted file mode 100644 index 6db2ba4661..0000000000 --- a/rest_api/tests/api_test/dep_txns/report.json +++ /dev/null @@ -1 +0,0 @@ -{"tests": [{"lineno": 80, "outcome": "passed", "setup": {"outcome": "passed", "duration": 26.179437160491943, "log": [{"lineno": 36, "filename": "conftest.py", "created": 1543821845.3672607, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "conftest", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/conftest.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 367.2606945037842, "funcName": "setup", "exc_text": null, "relativeCreated": 610.3627681732178, "module": "conftest", "msg": "Starting Setup method for posting batches using intkey as payload", 
"asctime": "2018-12-03 12:54:05"}, {"lineno": 73, "filename": "payload.py", "created": 1543821845.3732347, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 373.23474884033203, "funcName": "_create_transactions", "exc_text": null, "relativeCreated": 616.3368225097656, "module": "payload", "msg": "Creating intkey transactions with set operations", "asctime": "2018-12-03 12:54:05"}, {"lineno": 79, "filename": "payload.py", "created": 1543821845.374849, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 374.8490810394287, "funcName": "_create_batches", "exc_text": null, "relativeCreated": 617.9511547088623, "module": "payload", "msg": "Creating batches for transactions 1trn/batch", "asctime": "2018-12-03 12:54:05"}, {"lineno": 168, "filename": "payload.py", "created": 1543821845.3753247, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 375.3247261047363, "funcName": "_expected_data", "exc_text": null, "relativeCreated": 618.4267997741699, "module": "payload", "msg": "Gathering expected data before submission of batches", "asctime": "2018-12-03 12:54:05"}, {"lineno": 155, "filename": "payload.py", "created": 1543821845.3754852, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": 
null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 375.4851818084717, "funcName": "_expected_count", "exc_text": null, "relativeCreated": 618.5872554779053, "module": "payload", "msg": "Calculating the expected count of batches, transactions, state", "asctime": "2018-12-03 12:54:05"}, {"lineno": 146, "filename": "payload.py", "created": 1543821845.3756492, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 375.64921379089355, "funcName": "_initial_count", "exc_text": null, "relativeCreated": 618.7512874603271, "module": "payload", "msg": "Calculating the initial count of batches,transactions, state before submission of batches", "asctime": "2018-12-03 12:54:05"}, {"lineno": 111, "filename": "payload.py", "created": 1543821868.8435173, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 843.5173034667969, "funcName": "_expected_txn_ids", "exc_text": null, "relativeCreated": 24086.61937713623, "module": "payload", "msg": "Expected transaction ids", "asctime": "2018-12-03 12:54:28"}, {"lineno": 97, "filename": "payload.py", "created": 1543821868.8442633, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 
23580, "msecs": 844.2633152008057, "funcName": "_expected_batch_ids", "exc_text": null, "relativeCreated": 24087.36538887024, "module": "payload", "msg": "Expected batch ids", "asctime": "2018-12-03 12:54:28"}, {"lineno": 111, "filename": "payload.py", "created": 1543821868.844543, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 844.5429801940918, "funcName": "_expected_txn_ids", "exc_text": null, "relativeCreated": 24087.645053863525, "module": "payload", "msg": "Expected transaction ids", "asctime": "2018-12-03 12:54:28"}, {"lineno": 97, "filename": "payload.py", "created": 1543821868.844708, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 844.7079658508301, "funcName": "_expected_batch_ids", "exc_text": null, "relativeCreated": 24087.810039520264, "module": "payload", "msg": "Expected batch ids", "asctime": "2018-12-03 12:54:28"}, {"lineno": 97, "filename": "payload.py", "created": 1543821869.4468331, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "payload", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/payload.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 446.83313369750977, "funcName": "_expected_batch_ids", "exc_text": null, "relativeCreated": 24689.935207366943, "module": "payload", "msg": "Expected batch ids", "asctime": "2018-12-03 12:54:29"}]}, "call": {"outcome": "passed", "duration": 3.5685501098632812, "log": 
[{"lineno": 88, "filename": "test_rest_api_dep_txns.py", "created": 1543821871.5489635, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 548.9635467529297, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 26792.065620422363, "module": "test_rest_api_dep_txns", "msg": "Starting test for batch post", "asctime": "2018-12-03 12:54:31"}, {"lineno": 99, "filename": "test_rest_api_dep_txns.py", "created": 1543821871.55388, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 553.879976272583, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 26796.982049942017, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with set operations", "asctime": "2018-12-03 12:54:31"}, {"lineno": 113, "filename": "test_rest_api_dep_txns.py", "created": 1543821871.555057, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 555.0570487976074, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 26798.15912246704, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations with dependent transactions as first transaction", 
"asctime": "2018-12-03 12:54:31"}, {"lineno": 125, "filename": "test_rest_api_dep_txns.py", "created": 1543821871.5554729, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 555.4728507995605, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 26798.574924468994, "module": "test_rest_api_dep_txns", "msg": "Creating batches for transactions 1trn/batch", "asctime": "2018-12-03 12:54:31"}, {"lineno": 140, "filename": "test_rest_api_dep_txns.py", "created": 1543821871.5561776, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 556.1776161193848, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 26799.27968978882, "module": "test_rest_api_dep_txns", "msg": "Submitting batches to the handlers", "asctime": "2018-12-03 12:54:31"}, {"lineno": 151, "filename": "test_rest_api_dep_txns.py", "created": 1543821874.5480816, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 548.081636428833, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 29791.183710098267, "module": "test_rest_api_dep_txns", "msg": "Verifying the responses status", "asctime": "2018-12-03 
12:54:34"}, {"lineno": 157, "filename": "test_rest_api_dep_txns.py", "created": 1543821874.5482955, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 548.2954978942871, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 29791.39757156372, "module": "test_rest_api_dep_txns", "msg": "Batch with id 1966fa42a905f16c77e0ee9be1de1427c568118a301e917ae3d945ea2ee05ca26485f9854d773123597d222c1ac5a9366bd8a737c24ad8a531383eb1a54d5c0d is successfully got committed", "asctime": "2018-12-03 12:54:34"}, {"lineno": 157, "filename": "test_rest_api_dep_txns.py", "created": 1543821874.5484228, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 548.4228134155273, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 29791.52488708496, "module": "test_rest_api_dep_txns", "msg": "Batch with id 950ea7bd67cf674da99d5404f77ed871d6ec5f3d55c2c257d6017f45340ededb25b7f758a12946f044fcd705adcf6f9735668f9353079cd70bd6052bd9868dbc is successfully got committed", "asctime": "2018-12-03 12:54:34"}, {"lineno": 165, "filename": "test_rest_api_dep_txns.py", "created": 1543821874.5485833, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, 
"process": 23580, "msecs": 548.5832691192627, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 29791.685342788696, "module": "test_rest_api_dep_txns", "msg": "Verifying the txn details listed under the dependencies", "asctime": "2018-12-03 12:54:34"}, {"lineno": 171, "filename": "test_rest_api_dep_txns.py", "created": 1543821874.5602443, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 560.2443218231201, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 29803.346395492554, "module": "test_rest_api_dep_txns", "msg": "Successfully got the dependencies for transaction id 63a943c0f60dc30a52ae4b3047dc33d718c032cff62d9f9fe630436a36adc1ec6bf4f8667ec161974cebb51447761fcf07f4b12caeaac38900361244ffe1b67c", "asctime": "2018-12-03 12:54:34"}, {"lineno": 173, "filename": "test_rest_api_dep_txns.py", "created": 1543821874.5738556, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 573.8556385040283, "funcName": "test_set_inc_txn_dep", "exc_text": null, "relativeCreated": 29816.957712173462, "module": "test_rest_api_dep_txns", "msg": "The dependencies for transaction id is blank0efffa0d8e69a3c82198674a59b3e758424d43f16d7fa0b06f8b44fd930cfdd70f7aec164a96464c0e3ca96437719598748933d699c48143ef40121877c678bd", "asctime": "2018-12-03 12:54:34"}, {"lineno": 241, "filename": "utils.py", "created": 1543821875.0944118, "processName": "MainProcess", "threadName": "MainThread", 
"exc_info": null, "stack_info": null, "args": null, "name": "utils", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/utils.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 94.41184997558594, "funcName": "check_for_consensus", "exc_text": null, "relativeCreated": 30337.51392364502, "module": "utils", "msg": "Checking Consensus on block number 1", "asctime": "2018-12-03 12:54:35"}]}, "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_set_inc_txn_dep", "path": "dep_txns/test_rest_api_dep_txns.py", "keywords": ["dependent", "test_set_inc_txn_dep", "sixth", "dep_txns/test_rest_api_dep_txns.py", "run", "api_test", "()", "TestPostTansactionDependencies"], "domain": "TestPostTansactionDependencies.test_set_inc_txn_dep", "teardown": {"outcome": "passed", "duration": 0.0003333091735839844, "log": []}}, {"lineno": 178, "outcome": "passed", "setup": {"outcome": "passed", "duration": 0.00044083595275878906, "log": []}, "call": {"outcome": "passed", "duration": 7.127191543579102, "log": [{"lineno": 187, "filename": "test_rest_api_dep_txns.py", "created": 1543821875.119221, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 119.22097206115723, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 30362.32304573059, "module": "test_rest_api_dep_txns", "msg": "Starting test for batch post", "asctime": "2018-12-03 12:54:35"}, {"lineno": 197, "filename": "test_rest_api_dep_txns.py", "created": 1543821875.1255586, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": 
"/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 125.55861473083496, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 30368.66068840027, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with set operations", "asctime": "2018-12-03 12:54:35"}, {"lineno": 211, "filename": "test_rest_api_dep_txns.py", "created": 1543821875.1272697, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 127.26974487304688, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 30370.37181854248, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations with dependent transactions as first transaction", "asctime": "2018-12-03 12:54:35"}, {"lineno": 223, "filename": "test_rest_api_dep_txns.py", "created": 1543821875.1277328, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 127.73275375366211, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 30370.834827423096, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations with dependent transactions as first and second transaction", "asctime": "2018-12-03 12:54:35"}, {"lineno": 235, "filename": "test_rest_api_dep_txns.py", "created": 1543821875.1281967, 
"processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 128.19671630859375, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 30371.298789978027, "module": "test_rest_api_dep_txns", "msg": "Creating batches for transactions 1trn/batch", "asctime": "2018-12-03 12:54:35"}, {"lineno": 250, "filename": "test_rest_api_dep_txns.py", "created": 1543821875.1291454, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 129.14538383483887, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 30372.247457504272, "module": "test_rest_api_dep_txns", "msg": "Submitting batches to the handlers", "asctime": "2018-12-03 12:54:35"}, {"lineno": 261, "filename": "test_rest_api_dep_txns.py", "created": 1543821881.6695542, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 669.5542335510254, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 36912.65630722046, "module": "test_rest_api_dep_txns", "msg": "Verifying the responses status", "asctime": "2018-12-03 12:54:41"}, {"lineno": 267, "filename": "test_rest_api_dep_txns.py", "created": 1543821881.6697662, 
"processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 669.7661876678467, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 36912.86826133728, "module": "test_rest_api_dep_txns", "msg": "Batch with id b9ebf1fcee5cb0f528a125ecf1dcb111768d11a2d369dd36c0ba95db1d00ed880e14e58e9fca6d89b27eac32c86055ff06268f4f66542d3db2aaba0aba86f4fb is successfully got committed", "asctime": "2018-12-03 12:54:41"}, {"lineno": 267, "filename": "test_rest_api_dep_txns.py", "created": 1543821881.6698918, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 669.8918342590332, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 36912.99390792847, "module": "test_rest_api_dep_txns", "msg": "Batch with id 80141ee13cc8dd00bcfbd7b56ff755e8e5d5293349b0acc95046ed6e34b04a1935b44b5de974fda094c3768d9fd018f201047648a428575b1ba62e6c97248a09 is successfully got committed", "asctime": "2018-12-03 12:54:41"}, {"lineno": 267, "filename": "test_rest_api_dep_txns.py", "created": 1543821881.670008, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 670.0079441070557, "funcName": 
"test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 36913.11001777649, "module": "test_rest_api_dep_txns", "msg": "Batch with id efc85981ba5b7258fcd098c6b58fe249b4d601ce9062a0c8a87553d49be1761a6155767950e706ba75af4e84d036ebcdadf369b64f9d87655f75b27c98bff958 is successfully got committed", "asctime": "2018-12-03 12:54:41"}, {"lineno": 275, "filename": "test_rest_api_dep_txns.py", "created": 1543821881.6701179, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 670.1178550720215, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 36913.219928741455, "module": "test_rest_api_dep_txns", "msg": "Verifying the txn details listed under the dependencies", "asctime": "2018-12-03 12:54:41"}, {"lineno": 281, "filename": "test_rest_api_dep_txns.py", "created": 1543821881.6857107, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 685.7106685638428, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 36928.81274223328, "module": "test_rest_api_dep_txns", "msg": "Successfully got the dependencies for transaction id ffe4cd9297945ce45b5796f27daa669768c76c862d93d4bbe52379ec276249a30c220267a536747ef94793462009cebb28a22b834de82a43043986d40de9de80", "asctime": "2018-12-03 12:54:41"}, {"lineno": 283, "filename": "test_rest_api_dep_txns.py", "created": 1543821881.6968331, "processName": "MainProcess", "threadName": "MainThread", "exc_info": 
null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 696.8331336975098, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 36939.93520736694, "module": "test_rest_api_dep_txns", "msg": "The dependencies for transaction id is blank89bf5d216e142fe29b67a790b06ea7cbfcaf08064054437305bec0f18ea164003bcb9dd36fbc84a67817690cd7445912763e78da8c1bbc866dd6e0b60237cb80", "asctime": "2018-12-03 12:54:41"}, {"lineno": 281, "filename": "test_rest_api_dep_txns.py", "created": 1543821881.707487, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 707.4871063232422, "funcName": "test_rest_api_double_dep_txns", "exc_text": null, "relativeCreated": 36950.589179992676, "module": "test_rest_api_dep_txns", "msg": "Successfully got the dependencies for transaction id 3a20d74799ad6facd6ffbd63feb816e289d453abaad8563521012f901f093098151cb403c1558b03e8d485e83055402fbcc602deffe091ab80477677a4237f9b", "asctime": "2018-12-03 12:54:41"}, {"lineno": 241, "filename": "utils.py", "created": 1543821882.2245405, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "utils", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/utils.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 224.54047203063965, "funcName": "check_for_consensus", "exc_text": null, "relativeCreated": 37467.64254570007, "module": "utils", "msg": "Checking Consensus on block number 1", 
"asctime": "2018-12-03 12:54:42"}]}, "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_double_dep_txns", "path": "dep_txns/test_rest_api_dep_txns.py", "keywords": ["dependent", "test_rest_api_double_dep_txns", "sixth", "dep_txns/test_rest_api_dep_txns.py", "run", "api_test", "()", "TestPostTansactionDependencies"], "domain": "TestPostTansactionDependencies.test_rest_api_double_dep_txns", "teardown": {"outcome": "passed", "duration": 0.0003056526184082031, "log": []}}, {"lineno": 288, "outcome": "passed", "setup": {"outcome": "passed", "duration": 0.0003654956817626953, "log": []}, "call": {"outcome": "passed", "duration": 1.0723152160644531, "log": [{"lineno": 296, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.2486768, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 248.67677688598633, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 37491.77885055542, "module": "test_rest_api_dep_txns", "msg": "Starting test for batch post", "asctime": "2018-12-03 12:54:42"}, {"lineno": 312, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.256097, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 256.09707832336426, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 37499.1991519928, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with set operations", 
"asctime": "2018-12-03 12:54:42"}, {"lineno": 326, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.257406, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 257.40599632263184, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 37500.508069992065, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations with dependent transactions as first transaction", "asctime": "2018-12-03 12:54:42"}, {"lineno": 341, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.2585185, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 258.5184574127197, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 37501.62053108215, "module": "test_rest_api_dep_txns", "msg": "Creating batches for transactions 1trn/batch", "asctime": "2018-12-03 12:54:42"}, {"lineno": 356, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.259265, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 259.2649459838867, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 37502.36701965332, "module": 
"test_rest_api_dep_txns", "msg": "Submitting batches to the handlers", "asctime": "2018-12-03 12:54:42"}, {"lineno": 367, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.7597337, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 759.7336769104004, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 38002.835750579834, "module": "test_rest_api_dep_txns", "msg": "Verifying the responses status", "asctime": "2018-12-03 12:54:42"}, {"lineno": 375, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.7599473, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 759.9472999572754, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 38003.04937362671, "module": "test_rest_api_dep_txns", "msg": "Batch with id 12fd98d7ac23ae15d22336148c76c4684f483551da8cb07d42fbe95aefb8055f2dd6ffe941db3758c76ee0f17cf8f88d98ea04b2da7c904899860a779a443ad2 is successfully got committed", "asctime": "2018-12-03 12:54:42"}, {"lineno": 375, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.760076, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 
760.0760459899902, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 38003.178119659424, "module": "test_rest_api_dep_txns", "msg": "Batch with id cf788196417f0342de8b504362c0e8994dbf011675990b1b1d497b9b365684da3a0ba15aa99d38796c3de3276b947d190f0d32feb6859be2362df7cc6cc380f8 is successfully got committed", "asctime": "2018-12-03 12:54:42"}, {"lineno": 386, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.7601895, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 760.1895332336426, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 38003.291606903076, "module": "test_rest_api_dep_txns", "msg": "Verifying the txn details listed under the dependencies", "asctime": "2018-12-03 12:54:42"}, {"lineno": 392, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.7722795, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 772.2795009613037, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 38015.38157463074, "module": "test_rest_api_dep_txns", "msg": "Successfully got the dependencies for transaction id 3f5411c7c74f985910ff0c3c144cf9bbb921b71ad2e49aa17197440b88fa85742344ce33944a56d33d8861c746b8a23ea9a4bc7a4f67c1078847ea60a0e81a6a", "asctime": "2018-12-03 12:54:42"}, {"lineno": 394, "filename": "test_rest_api_dep_txns.py", "created": 1543821882.788773, "processName": "MainProcess", "threadName": "MainThread", 
"exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 788.7730598449707, "funcName": "test_single_set_dep_txns", "exc_text": null, "relativeCreated": 38031.875133514404, "module": "test_rest_api_dep_txns", "msg": "The dependencies for transaction id is blankade2fad6cabaa598b5bea4656a172119e6c788cde37eafad2b9c69916e67245b677f143c2d22935935d49f55615a6be467d264b61ac4f14e02da88094b3d582c", "asctime": "2018-12-03 12:54:42"}, {"lineno": 241, "filename": "utils.py", "created": 1543821883.3003898, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "utils", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/utils.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 300.38976669311523, "funcName": "check_for_consensus", "exc_text": null, "relativeCreated": 38543.49184036255, "module": "utils", "msg": "Checking Consensus on block number 1", "asctime": "2018-12-03 12:54:43"}]}, "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_single_set_dep_txns", "path": "dep_txns/test_rest_api_dep_txns.py", "keywords": ["dependent", "test_single_set_dep_txns", "sixth", "dep_txns/test_rest_api_dep_txns.py", "run", "api_test", "()", "TestPostTansactionDependencies"], "domain": "TestPostTansactionDependencies.test_single_set_dep_txns", "teardown": {"outcome": "passed", "duration": 0.00029778480529785156, "log": []}}, {"lineno": 399, "outcome": "passed", "setup": {"outcome": "passed", "duration": 0.0003311634063720703, "log": []}, "call": {"outcome": "passed", "duration": 4.070549011230469, "log": [{"lineno": 407, "filename": "test_rest_api_dep_txns.py", "created": 1543821883.3232832, "processName": 
"MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 323.28319549560547, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 38566.38526916504, "module": "test_rest_api_dep_txns", "msg": "Starting test for batch post", "asctime": "2018-12-03 12:54:43"}, {"lineno": 423, "filename": "test_rest_api_dep_txns.py", "created": 1543821883.3293188, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 329.3187618255615, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 38572.420835494995, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with set operations", "asctime": "2018-12-03 12:54:43"}, {"lineno": 437, "filename": "test_rest_api_dep_txns.py", "created": 1543821883.3304896, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 330.4896354675293, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 38573.59170913696, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations with dependent transactions as first transaction", "asctime": "2018-12-03 12:54:43"}, {"lineno": 450, "filename": 
"test_rest_api_dep_txns.py", "created": 1543821883.3309386, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 330.93857765197754, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 38574.04065132141, "module": "test_rest_api_dep_txns", "msg": "Creating batches for transactions 1trn/batch", "asctime": "2018-12-03 12:54:43"}, {"lineno": 465, "filename": "test_rest_api_dep_txns.py", "created": 1543821883.3317018, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 331.70175552368164, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 38574.803829193115, "module": "test_rest_api_dep_txns", "msg": "Submitting batches to the handlers", "asctime": "2018-12-03 12:54:43"}, {"lineno": 476, "filename": "test_rest_api_dep_txns.py", "created": 1543821886.8563724, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 856.3723564147949, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 42099.47443008423, "module": "test_rest_api_dep_txns", "msg": "Verifying the responses status", "asctime": "2018-12-03 12:54:46"}, {"lineno": 484, 
"filename": "test_rest_api_dep_txns.py", "created": 1543821886.8566306, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 856.6305637359619, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 42099.732637405396, "module": "test_rest_api_dep_txns", "msg": "Batch with id ec4bb017adbc1327520947ab44f0e31c4b3e8a53acc642946746fc970097b9aa0dcf3f63f90fc94ab7a0cba686a0c07bfb2fb6084a96bfa964bb0f808ff47fd6 is successfully got committed", "asctime": "2018-12-03 12:54:46"}, {"lineno": 489, "filename": "test_rest_api_dep_txns.py", "created": 1543821886.8567727, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 856.7726612091064, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 42099.87473487854, "module": "test_rest_api_dep_txns", "msg": "Batch with id ec4bb017adbc1327520947ab44f0e31c4b3e8a53acc642946746fc970097b9aa0dcf3f63f90fc94ab7a0cba686a0c07bfb2fb6084a96bfa964bb0f808ff47fd6 is not committed. 
Status is INVALID", "asctime": "2018-12-03 12:54:46"}, {"lineno": 494, "filename": "test_rest_api_dep_txns.py", "created": 1543821886.856888, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 856.8880558013916, "funcName": "test_rest_api_single_set_dec_txns", "exc_text": null, "relativeCreated": 42099.990129470825, "module": "test_rest_api_dep_txns", "msg": "Verifying the txn details listed under the dependencies", "asctime": "2018-12-03 12:54:46"}, {"lineno": 241, "filename": "utils.py", "created": 1543821887.372987, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "utils", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/utils.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 372.9870319366455, "funcName": "check_for_consensus", "exc_text": null, "relativeCreated": 42616.08910560608, "module": "utils", "msg": "Checking Consensus on block number 1", "asctime": "2018-12-03 12:54:47"}]}, "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_single_set_dec_txns", "path": "dep_txns/test_rest_api_dep_txns.py", "keywords": ["dependent", "sixth", "test_rest_api_single_set_dec_txns", "dep_txns/test_rest_api_dep_txns.py", "run", "api_test", "()", "TestPostTansactionDependencies"], "domain": "TestPostTansactionDependencies.test_rest_api_single_set_dec_txns", "teardown": {"outcome": "passed", "duration": 0.00030612945556640625, "log": []}}, {"lineno": 500, "outcome": "passed", "setup": {"outcome": "passed", "duration": 0.00034546852111816406, "log": []}, "call": {"outcome": "passed", "duration": 6.141465425491333, 
"log": [{"lineno": 509, "filename": "test_rest_api_dep_txns.py", "created": 1543821887.3962643, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 396.26431465148926, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 42639.36638832092, "module": "test_rest_api_dep_txns", "msg": "Starting test for batch post", "asctime": "2018-12-03 12:54:47"}, {"lineno": 519, "filename": "test_rest_api_dep_txns.py", "created": 1543821887.4013183, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 401.3183116912842, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 42644.42038536072, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with set operations", "asctime": "2018-12-03 12:54:47"}, {"lineno": 533, "filename": "test_rest_api_dep_txns.py", "created": 1543821887.4025242, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 402.5242328643799, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 42645.62630653381, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations 
with dependent transactions as first transaction", "asctime": "2018-12-03 12:54:47"}, {"lineno": 545, "filename": "test_rest_api_dep_txns.py", "created": 1543821887.4029722, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 402.9722213745117, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 42646.074295043945, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations with dependent transactions as second transaction", "asctime": "2018-12-03 12:54:47"}, {"lineno": 557, "filename": "test_rest_api_dep_txns.py", "created": 1543821887.4034097, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 403.4097194671631, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 42646.5117931366, "module": "test_rest_api_dep_txns", "msg": "Creating batches for transactions 1trn/batch", "asctime": "2018-12-03 12:54:47"}, {"lineno": 572, "filename": "test_rest_api_dep_txns.py", "created": 1543821887.4043865, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 404.3865203857422, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", 
"exc_text": null, "relativeCreated": 42647.488594055176, "module": "test_rest_api_dep_txns", "msg": "Submitting batches to the handlers", "asctime": "2018-12-03 12:54:47"}, {"lineno": 583, "filename": "test_rest_api_dep_txns.py", "created": 1543821892.9626763, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 962.6762866973877, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 48205.77836036682, "module": "test_rest_api_dep_txns", "msg": "Verifying the responses status", "asctime": "2018-12-03 12:54:52"}, {"lineno": 589, "filename": "test_rest_api_dep_txns.py", "created": 1543821892.9628952, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 962.8951549530029, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 48205.99722862244, "module": "test_rest_api_dep_txns", "msg": "Batch with id 387e6eba49bcfeb449f5058f1c56455191ed38dea8ef104ad952bb37239044014dfa1fee27eb78d10773cedf833763d72d921b6a098fa273879b6e5b638ec926 is successfully got committed", "asctime": "2018-12-03 12:54:52"}, {"lineno": 589, "filename": "test_rest_api_dep_txns.py", "created": 1543821892.9630222, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": 
"INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 963.0222320556641, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 48206.1243057251, "module": "test_rest_api_dep_txns", "msg": "Batch with id 4a716cedbf703e5348ff8355e5fe726851f35f8d07b01b433330c2889717edac146962b520cfd0b286fb4042d057c2374490dba922624a73f7a404da6e5ad177 is successfully got committed", "asctime": "2018-12-03 12:54:52"}, {"lineno": 589, "filename": "test_rest_api_dep_txns.py", "created": 1543821892.963141, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 963.1409645080566, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 48206.24303817749, "module": "test_rest_api_dep_txns", "msg": "Batch with id c85bad47724d9f00f360291fa854034f6d4dbcb8e2ad682a75b20ba3f38a592a75ba6d4dc60708956f334618ce3105fbe4ffea341c98404ab8c3603d81298ea8 is successfully got committed", "asctime": "2018-12-03 12:54:52"}, {"lineno": 597, "filename": "test_rest_api_dep_txns.py", "created": 1543821892.9632616, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 963.261604309082, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 48206.363677978516, "module": "test_rest_api_dep_txns", "msg": "Verifying the txn details listed under the dependencies", "asctime": "2018-12-03 12:54:52"}, {"lineno": 603, "filename": 
"test_rest_api_dep_txns.py", "created": 1543821892.9750278, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 975.0277996063232, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 48218.12987327576, "module": "test_rest_api_dep_txns", "msg": "Successfully got the dependencies for transaction id 699d7d97a2928f33ef1b21184b9d432baa22b506b38521f435d5872092667f784e4d7d637d21d5e9b3bb5d62622fba76876d85417661a54457889f45de8d46f1", "asctime": "2018-12-03 12:54:52"}, {"lineno": 603, "filename": "test_rest_api_dep_txns.py", "created": 1543821892.986784, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 986.7839813232422, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 48229.886054992676, "module": "test_rest_api_dep_txns", "msg": "Successfully got the dependencies for transaction id 19613c531a09892e9ffc29d27347ca06343bc1f7ac6e9faa9db96f4e6ee489b426e9c3c35ca2b4c8da39b97602b963db367f411b5050ccc67010f84b011dd2ca", "asctime": "2018-12-03 12:54:52"}, {"lineno": 605, "filename": "test_rest_api_dep_txns.py", "created": 1543821892.9994009, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 
140494495176448, "process": 23580, "msecs": 999.4008541107178, "funcName": "test_rest_api_set_inc_inc_Txns_Dep", "exc_text": null, "relativeCreated": 48242.50292778015, "module": "test_rest_api_dep_txns", "msg": "The dependencies for transaction id is blank07b20a8fe18b1a6f95b676de6529f70a7c640b07296e64bb38417d952550a1e060122d602a93794b614c577e2500e7ed0946687f24f0c432ba2ae9b7efc38311", "asctime": "2018-12-03 12:54:52"}, {"lineno": 241, "filename": "utils.py", "created": 1543821893.5171175, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "utils", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/utils.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 517.1175003051758, "funcName": "check_for_consensus", "exc_text": null, "relativeCreated": 48760.21957397461, "module": "utils", "msg": "Checking Consensus on block number 1", "asctime": "2018-12-03 12:54:53"}]}, "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_set_inc_inc_Txns_Dep", "path": "dep_txns/test_rest_api_dep_txns.py", "keywords": ["dependent", "sixth", "dep_txns/test_rest_api_dep_txns.py", "run", "api_test", "()", "test_rest_api_set_inc_inc_Txns_Dep", "TestPostTansactionDependencies"], "domain": "TestPostTansactionDependencies.test_rest_api_set_inc_inc_Txns_Dep", "teardown": {"outcome": "passed", "duration": 0.00029659271240234375, "log": []}}, {"lineno": 610, "outcome": "passed", "setup": {"outcome": "passed", "duration": 0.0003414154052734375, "log": []}, "call": {"outcome": "passed", "duration": 2.0709407329559326, "log": [{"lineno": 618, "filename": "test_rest_api_dep_txns.py", "created": 1543821893.5401227, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": 
"/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 540.1227474212646, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": null, "relativeCreated": 48783.2248210907, "module": "test_rest_api_dep_txns", "msg": "Starting test for batch post", "asctime": "2018-12-03 12:54:53"}, {"lineno": 634, "filename": "test_rest_api_dep_txns.py", "created": 1543821893.5467072, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 546.7071533203125, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": null, "relativeCreated": 48789.809226989746, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with set operations", "asctime": "2018-12-03 12:54:53"}, {"lineno": 648, "filename": "test_rest_api_dep_txns.py", "created": 1543821893.547874, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 547.8739738464355, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": null, "relativeCreated": 48790.97604751587, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations with dependent transactions as first transaction", "asctime": "2018-12-03 12:54:53"}, {"lineno": 661, "filename": "test_rest_api_dep_txns.py", "created": 1543821893.5483227, "processName": "MainProcess", "threadName": "MainThread", "exc_info": 
null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 548.3226776123047, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": null, "relativeCreated": 48791.42475128174, "module": "test_rest_api_dep_txns", "msg": "Creating batches for transactions 1trn/batch", "asctime": "2018-12-03 12:54:53"}, {"lineno": 676, "filename": "test_rest_api_dep_txns.py", "created": 1543821893.549048, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 549.0479469299316, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": null, "relativeCreated": 48792.150020599365, "module": "test_rest_api_dep_txns", "msg": "Submitting batches to the handlers", "asctime": "2018-12-03 12:54:53"}, {"lineno": 687, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.0764737, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 76.47371292114258, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": null, "relativeCreated": 50319.575786590576, "module": "test_rest_api_dep_txns", "msg": "Verifying the responses status", "asctime": "2018-12-03 12:54:55"}, {"lineno": 695, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.076688, "processName": "MainProcess", "threadName": 
"MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 76.68805122375488, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": null, "relativeCreated": 50319.79012489319, "module": "test_rest_api_dep_txns", "msg": "Batch with id 755e812e3b09a8b4cafab088bc93e44bc50f00a1b44905a5a6ee09e75e80e0173948b4cf6ad8efc5ff118427dfe25b8219ba38d5833321d30e0826b6fad519e1 is successfully got committed", "asctime": "2018-12-03 12:54:55"}, {"lineno": 695, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.0768154, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 76.81536674499512, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": null, "relativeCreated": 50319.91744041443, "module": "test_rest_api_dep_txns", "msg": "Batch with id 755e812e3b09a8b4cafab088bc93e44bc50f00a1b44905a5a6ee09e75e80e0173948b4cf6ad8efc5ff118427dfe25b8219ba38d5833321d30e0826b6fad519e1 is successfully got committed", "asctime": "2018-12-03 12:54:55"}, {"lineno": 705, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.077211, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 77.21090316772461, "funcName": "test_rest_api_single_set_dec_same_txns", "exc_text": 
null, "relativeCreated": 50320.31297683716, "module": "test_rest_api_dep_txns", "msg": "Verifying the txn details listed under the dependencies", "asctime": "2018-12-03 12:54:55"}, {"lineno": 241, "filename": "utils.py", "created": 1543821895.589809, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "utils", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/utils.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 589.8089408874512, "funcName": "check_for_consensus", "exc_text": null, "relativeCreated": 50832.911014556885, "module": "utils", "msg": "Checking Consensus on block number 1", "asctime": "2018-12-03 12:54:55"}]}, "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_single_set_dec_same_txns", "path": "dep_txns/test_rest_api_dep_txns.py", "keywords": ["dependent", "sixth", "dep_txns/test_rest_api_dep_txns.py", "run", "test_rest_api_single_set_dec_same_txns", "api_test", "()", "TestPostTansactionDependencies"], "domain": "TestPostTansactionDependencies.test_rest_api_single_set_dec_same_txns", "teardown": {"outcome": "passed", "duration": 0.00031948089599609375, "log": []}}, {"lineno": 719, "outcome": "passed", "setup": {"outcome": "passed", "duration": 0.0003497600555419922, "log": []}, "call": {"outcome": "passed", "duration": 6.094623565673828, "log": [{"lineno": 727, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.613445, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 613.4450435638428, "funcName": "test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 
50856.54711723328, "module": "test_rest_api_dep_txns", "msg": "Starting test for batch post", "asctime": "2018-12-03 12:54:55"}, {"lineno": 743, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.6196325, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 619.6324825286865, "funcName": "test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 50862.73455619812, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with set operations", "asctime": "2018-12-03 12:54:55"}, {"lineno": 757, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.6207347, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 620.734691619873, "funcName": "test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 50863.83676528931, "module": "test_rest_api_dep_txns", "msg": "Creating intkey transactions with inc operations with dependent transactions as first transaction", "asctime": "2018-12-03 12:54:55"}, {"lineno": 770, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.6211808, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 
621.1807727813721, "funcName": "test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 50864.282846450806, "module": "test_rest_api_dep_txns", "msg": "Creating batches for transactions 1trn/batch", "asctime": "2018-12-03 12:54:55"}, {"lineno": 785, "filename": "test_rest_api_dep_txns.py", "created": 1543821895.6218703, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 621.8702793121338, "funcName": "test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 50864.97235298157, "module": "test_rest_api_dep_txns", "msg": "Submitting batches to the handlers", "asctime": "2018-12-03 12:54:55"}, {"lineno": 796, "filename": "test_rest_api_dep_txns.py", "created": 1543821901.1709635, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 170.96352577209473, "funcName": "test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 56414.06559944153, "module": "test_rest_api_dep_txns", "msg": "Verifying the responses status", "asctime": "2018-12-03 12:55:01"}, {"lineno": 804, "filename": "test_rest_api_dep_txns.py", "created": 1543821901.171194, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 
140494495176448, "process": 23580, "msecs": 171.19407653808594, "funcName": "test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 56414.29615020752, "module": "test_rest_api_dep_txns", "msg": "Batch with id 5f0d9ec090f07be2ebd13bbd8d0086970812a3d0b7863dd0dec3c2708d5eb4e650ebcd55471240453a37bf3c6e87359e2394992d8cc209d97bd087a34ff3fd49 is successfully got committed", "asctime": "2018-12-03 12:55:01"}, {"lineno": 813, "filename": "test_rest_api_dep_txns.py", "created": 1543821901.1713386, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 171.33855819702148, "funcName": "test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 56414.440631866455, "module": "test_rest_api_dep_txns", "msg": "Batch with id 5f0d9ec090f07be2ebd13bbd8d0086970812a3d0b7863dd0dec3c2708d5eb4e650ebcd55471240453a37bf3c6e87359e2394992d8cc209d97bd087a34ff3fd49 is not committed. 
Status is UNKNOWN", "asctime": "2018-12-03 12:55:01"}, {"lineno": 814, "filename": "test_rest_api_dep_txns.py", "created": 1543821901.171468, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "test_rest_api_dep_txns", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 171.46801948547363, "funcName": "test_rest_api_single_set_dec_invalid_txns_id", "exc_text": null, "relativeCreated": 56414.57009315491, "module": "test_rest_api_dep_txns", "msg": "Verifying the txn details listed under the dependencies", "asctime": "2018-12-03 12:55:01"}, {"lineno": 241, "filename": "utils.py", "created": 1543821901.6870008, "processName": "MainProcess", "threadName": "MainThread", "exc_info": null, "stack_info": null, "args": null, "name": "utils", "pathname": "/home/test/NOV30/sawtooth-core/rest_api/tests/api_test/utils.py", "levelname": "INFO", "levelno": 20, "thread": 140494495176448, "process": 23580, "msecs": 687.0007514953613, "funcName": "check_for_consensus", "exc_text": null, "relativeCreated": 56930.102825164795, "module": "utils", "msg": "Checking Consensus on block number 1", "asctime": "2018-12-03 12:55:01"}]}, "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_single_set_dec_invalid_txns_id", "path": "dep_txns/test_rest_api_dep_txns.py", "keywords": ["dependent", "sixth", "dep_txns/test_rest_api_dep_txns.py", "run", "api_test", "()", "test_rest_api_single_set_dec_invalid_txns_id", "TestPostTansactionDependencies"], "domain": "TestPostTansactionDependencies.test_rest_api_single_set_dec_invalid_txns_id", "teardown": {"outcome": "passed", "duration": 0.0005271434783935547, "log": []}}], "duration": 56.4007306098938, "exitcode": 0, "summary": {"passed": 7, "total": 7}, "created": 1543821901.7112274, "root": 
"/home/test/NOV30/sawtooth-core/rest_api/tests/api_test", "collectors": [{"outcome": "passed", "children": [{"type": "Module", "nodeid": "dep_txns/test_rest_api_dep_txns.py"}], "nodeid": ""}, {"outcome": "passed", "children": [{"lineno": 80, "path": "dep_txns/test_rest_api_dep_txns.py", "domain": "TestPostTansactionDependencies.test_set_inc_txn_dep", "type": "Function", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_set_inc_txn_dep"}, {"lineno": 178, "path": "dep_txns/test_rest_api_dep_txns.py", "domain": "TestPostTansactionDependencies.test_rest_api_double_dep_txns", "type": "Function", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_double_dep_txns"}, {"lineno": 288, "path": "dep_txns/test_rest_api_dep_txns.py", "domain": "TestPostTansactionDependencies.test_single_set_dep_txns", "type": "Function", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_single_set_dep_txns"}, {"lineno": 399, "path": "dep_txns/test_rest_api_dep_txns.py", "domain": "TestPostTansactionDependencies.test_rest_api_single_set_dec_txns", "type": "Function", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_single_set_dec_txns"}, {"lineno": 500, "path": "dep_txns/test_rest_api_dep_txns.py", "domain": "TestPostTansactionDependencies.test_rest_api_set_inc_inc_Txns_Dep", "type": "Function", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_set_inc_inc_Txns_Dep"}, {"lineno": 610, "path": "dep_txns/test_rest_api_dep_txns.py", "domain": "TestPostTansactionDependencies.test_rest_api_single_set_dec_same_txns", "type": "Function", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_single_set_dec_same_txns"}, {"lineno": 719, "path": "dep_txns/test_rest_api_dep_txns.py", "domain": 
"TestPostTansactionDependencies.test_rest_api_single_set_dec_invalid_txns_id", "type": "Function", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()::test_rest_api_single_set_dec_invalid_txns_id"}], "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()"}, {"outcome": "passed", "children": [{"type": "Instance", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies::()"}], "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies"}, {"outcome": "passed", "children": [{"type": "Class", "nodeid": "dep_txns/test_rest_api_dep_txns.py::TestPostTansactionDependencies"}], "nodeid": "dep_txns/test_rest_api_dep_txns.py"}], "environment": {"Python": "3.5.2", "Plugins": {"ordering": "0.6", "json-report": "0.7.0", "aiohttp": "0.3.0", "metadata": "1.7.0"}, "Platform": "Linux-4.4.0-116-generic-x86_64-with-Ubuntu-16.04-xenial", "Packages": {"pluggy": "0.8.0", "pytest": "3.10.0", "py": "1.7.0"}}} \ No newline at end of file From 84357fed63b5c741cf873613f90bd54940400cd0 Mon Sep 17 00:00:00 2001 From: sandeeplandt <37058842+sandeeplandt@users.noreply.github.com> Date: Mon, 3 Dec 2018 17:41:52 +0530 Subject: [PATCH 52/64] Update utils.py --- rest_api/tests/api_test/utils.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/rest_api/tests/api_test/utils.py b/rest_api/tests/api_test/utils.py index 3f0df838a5..a5a993a301 100644 --- a/rest_api/tests/api_test/utils.py +++ b/rest_api/tests/api_test/utils.py @@ -274,14 +274,10 @@ def _make_http_address(node_number): return node_number def _get_client_address(): - command = "ifconfig lo | grep 'inet addr' | cut -d ':' -f 2 | cut -d ' ' -f 1" - node_ip = subprocess.check_output(command , shell=True).decode().strip().replace("'", '"') - return 'http://' + node_ip + ':8008' - ''' command = "hostname -I | awk '{print $1}'" node_ip = subprocess.check_output(command , shell=True).decode().strip().replace("'", '"') return 
'http://' + node_ip + ':8008' - ''' + def _start_validator(): LOGGER.info('Starting the validator') cmd = "sudo -u sawtooth sawtooth-validator -vv" From 73f76fb445a96a05d11488a59cc8b5bf734ccac7 Mon Sep 17 00:00:00 2001 From: sandeeplandt <37058842+sandeeplandt@users.noreply.github.com> Date: Wed, 5 Dec 2018 16:59:06 +0530 Subject: [PATCH 53/64] Updated with 3 more dep txns test cases Updated with 3 more dep txns test cases --- .../dep_txns/test_rest_api_dep_txns.py | 268 +++++++++++++++++- 1 file changed, 267 insertions(+), 1 deletion(-) diff --git a/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py index cdb93d0fa8..52f3dc6037 100644 --- a/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py +++ b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py @@ -824,7 +824,273 @@ async def test_rest_api_single_set_dec_invalid_txns_id(self, setup): node_list = _get_node_list() chains = _get_node_chains(node_list) assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_single_set_dep_reverse(self, setup): + """"1. Create first Transaction for set + 2. Create second Transaction for increment with first Transaction as dependecies + 3. Create Batch + 4. 
Call POST /batches " + Verify the transactions + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + t = datetime.datetime.now() + date = t.strftime('%H%M%S') + words = random_word_list(100) + name=random.choice(words) + + #name=random.choice('123456734558909877') + + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 5, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + + LOGGER.info("Creating intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + words = random_word_list(100) + name=random.choice(words) + #name=random.choice('123456734558909877') + txns.append(create_intkey_transaction_dep("set",trxn_ids, name, 2, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns[::-1]] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + response = await asyncio.gather(task) + 
#print(response) + responses = await asyncio.gather(*tasks) + + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + for response in responses: + batch_id = response['data'][0]['id'] + + if response['data'][0]['status'] == 'COMMITTED': + assert response['data'][0]['status'] == 'COMMITTED' + + LOGGER.info('Batch with id {} is successfully got committed'.format(batch_id)) + + elif response['data'][0]['status'] == 'INVALID': + assert response['data'][0]['status'] == 'INVALID' + + LOGGER.info('Batch with id {} is not committed. Status is INVALID'.format(batch_id)) + + elif response['data'][0]['status'] == 'UNKNOWN': + assert response['data'][0]['status'] == 'UNKNOWN' + LOGGER.info('Batch with id {} is not committed. Status is UNKNOWN'.format(batch_id)) + + LOGGER.info("Verifying the txn details listed under the dependencies") + trxn_ids = list(set(expected_trxn_ids)) + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_valid_set_invalid_inc_txn_dep(self, setup): + """1. Create first Transaction for set + 2. Create second invalid Transaction for increment with first Transaction as dependecies + 3. Create Batch + 4. Call POST /batches + Verify the transactions. This shoud be an invalid transaction. 
The trird txn will be in PENDING state + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 50, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating invalid intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + txns.append(create_intkey_transaction_dep("inc", trxn_ids , name, -1, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + time.sleep(60) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + assert 'COMMITTED' == 
responses[0]['data'][0]['status'] + assert 'INVALID' == responses[1]['data'][0]['status'] + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_valid_set_invalid_inc_DiffKey_txn_dep(self, setup): + """1. Create first Transaction for set + 2. Create second invalid Transaction for increment with first Transaction as dependecies with different key + 3. Create Batch + 4. Call POST /batches + Verify the transactions. This shoud be an invalid transaction. The trird txn will be in PENDING state + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 50, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating invalid intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + + name = random.choice("abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz") + txns.append(create_intkey_transaction_dep("inc", trxn_ids , name, -1, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + 
batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + time.sleep(60) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + assert 'COMMITTED' == responses[0]['data'][0]['status'] + assert 'INVALID' == responses[1]['data'][0]['status'] + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True - \ No newline at end of file From 70856e89065acc408bdb56abb09b9d6717b7c6bf Mon Sep 17 00:00:00 2001 From: sandeeplandt <37058842+sandeeplandt@users.noreply.github.com> Date: Thu, 6 Dec 2018 15:26:50 +0530 Subject: [PATCH 54/64] Updated text_rest_api_dep_txns.py --- rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py index 52f3dc6037..05bd7cf6f8 100644 --- a/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py +++ b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py @@ -996,7 +996,6 @@ async def test_valid_set_invalid_inc_txn_dep(self, setup): for batch in post_batch_list: task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) tasks.append(task) - time.sleep(60) responses = await asyncio.gather(*tasks) except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Rest Api is Unreachable") @@ -1079,7 +1078,6 @@ async def test_valid_set_invalid_inc_DiffKey_txn_dep(self, setup): for 
batch in post_batch_list: task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) tasks.append(task) - time.sleep(60) responses = await asyncio.gather(*tasks) except aiohttp.client_exceptions.ClientResponseError as error: LOGGER.info("Rest Api is Unreachable") From 2649b316f7ddd319ed8dd66fb191900194bcbf78 Mon Sep 17 00:00:00 2001 From: sandeeplandt <37058842+sandeeplandt@users.noreply.github.com> Date: Fri, 7 Dec 2018 13:03:01 +0530 Subject: [PATCH 55/64] Updated test_rest_api_dep_txns.py Updated 2 more tests in test_rest_api_dep_txns.py --- .../dep_txns/test_rest_api_dep_txns.py | 160 ++++++++++++++++++ 1 file changed, 160 insertions(+) diff --git a/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py index 05bd7cf6f8..262bc16d66 100644 --- a/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py +++ b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py @@ -1090,5 +1090,165 @@ async def test_valid_set_invalid_inc_DiffKey_txn_dep(self, setup): node_list = _get_node_list() chains = _get_node_chains(node_list) assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_set_Max_txn_dep(self, setup): + """1. Create first Transaction for set with max value + 2. Create second Transaction for increment with first Transaction as dependency + 3. Create Batch + 4. Call POST /batches + Verify the transactions. The first one shoud be an invalid transaction. 
The second txn will be with error code 17 and Validator Timed Out + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 8888888888888888888888888, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating invalid intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + txns.append(create_intkey_transaction_dep("inc", trxn_ids , name, 2, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses 
status") + + assert 'INVALID' == responses[0]['data'][0]['status'] + assert 'Validator Timed Out' == responses[1]['error']['title'] + assert 17 == responses[1]['error']['code'] + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + + async def test_invalid_set_txn_dep(self, setup): + """1. Create first invalid Transaction for set with negative value + 2. Create second Transaction for increment with first invalid Transaction as dependency + 3. Create Batch + 4. Call POST /batches + Verify the transactions. The first one shoud be an invalid transaction. The second txn will be with error code 17 and Validator Timed Out + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, -1, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating invalid intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + txns.append(create_intkey_transaction_dep("inc", trxn_ids , name, 2, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + 
preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + assert 'INVALID' == responses[0]['data'][0]['status'] + assert 'Validator Timed Out' == responses[1]['error']['title'] + assert 17 == responses[1]['error']['code'] + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + From 77d4bffc4e7e180e9b3b4f1bee3c695fea8d9433 Mon Sep 17 00:00:00 2001 From: jyotsna Date: Fri, 14 Dec 2018 14:50:26 +0530 Subject: [PATCH 56/64] Dependent transactions some intkey tcs commit --- .../dep_txns/test_rest_api_dep_txns.py | 303 +++++++++++++++++- rest_api/tests/api_test/fixtures.py | 35 ++ rest_api/tests/api_test/payload.py | 13 + 3 files changed, 349 insertions(+), 2 deletions(-) diff --git a/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py index 262bc16d66..2664f0fff7 100644 --- a/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py +++ b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py @@ -19,6 +19,7 @@ import asyncio import datetime import random +import time from google.protobuf.json_format import MessageToDict @@ -33,14 +34,14 @@ from payload import get_signer, create_intkey_transaction, create_batch,\ create_intkey_same_transaction, \ - create_intkey_transaction_dep, random_word_list + create_intkey_transaction_dep, 
random_word_list, create_invalid_Address_intkey_dep_txn from base import RestApiBaseTest from fixtures import setup_empty_trxs_batch, setup_invalid_txns,setup_invalid_txns_min,\ setup_invalid_txns_max, setup_valinv_txns, setup_invval_txns, \ setup_same_txns, setup_valid_txns, setup_invalid_txns_fn,\ - setup_invalid_invaddr + setup_invalid_invaddr, post_batch_txn, validate_Response_Status_txn LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.INFO) @@ -1249,6 +1250,304 @@ async def test_invalid_set_txn_dep(self, setup): node_list = _get_node_list() chains = _get_node_chains(node_list) assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_invalid_Address_txn_dep(self, setup): + """1. Create first Transaction for set + 2. Create second dependent Transaction for increment and make the address invalid with first Transaction as dependency + 3. Create batch ,post batch and check the response status + 4. The second transaction will be an invalid transaction + 5. Create the third transaction for decrement with first and second as dependency + 6. Create a batch and post batch + Verify the transaction responses. The first one will be COMMITTED and second one shoud be an invalid transaction. 
The third txn will be with error code 17 and Validator Timed Out + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 50, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating invalid intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + txns.append(create_invalid_Address_intkey_dep_txn("inc", trxn_ids , name, 40, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + post_batch_list = post_batch_txn(txns, expected_batch_ids, signer) + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + assert 'COMMITTED' == responses[0]['data'][0]['status'] + assert 'INVALID' == responses[1]['data'][0]['status'] + + LOGGER.info("Creating valid intkey transactions with dec operations with dependent transactions as first and second transaction") + trxn_ids = list(set(expected_trxn_ids)) + txns = [] 
+ responses = [] + expected_batch_ids = [] + post_batch_list = [] + tasks = [] + txns.append(create_intkey_transaction_dep("dec", trxn_ids , name, 20, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + post_batch_list = post_batch_txn(txns, expected_batch_ids, signer) + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + assert 'Validator Timed Out' == responses[0]['error']['title'] + assert 17 == responses[0]['error']['code'] + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_Multiple_Indep_Txn_txn_dep(self, setup): + """1.Create 5 independent Transactions for set + 2.Create second dependent transaction for set with 5 independent transactions as dependency + 3.Create third dependent Transaction for increment with second dependent Transaction as dependency + 4.Create a batch for all the dependent transaction and post batch + 5.Check for the status + 6.Now create the batch for independent transactions and post batch + 7. 
Check for the response status of both independent and dependent transactions + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_trxn_ids_indep = [] + expected_batch_ids = [] + expected_batch_ids_indep = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks_indep=[] + tasks_dep=[] + batch_ids_dep = [] + batch_ids_indep = [] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations") + + txns_Indep = [ + create_intkey_transaction("set", [] , 50, signer), + create_intkey_transaction("set", [] , 50, signer), + create_intkey_transaction("set", [] , 50, signer), + create_intkey_transaction("set", [] , 50, signer), + create_intkey_transaction("set", [] , 50, signer),] + + for txn in txns_Indep: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids_indep.append(trxn_id) + + txns = [ + create_intkey_transaction_dep("set", expected_trxn_ids_indep , name, 50, signer),] + + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating invalid intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + txns.append(create_intkey_transaction_dep("inc", trxn_ids , name, 40, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + post_batch_list = post_batch_txn(txns, expected_batch_ids, signer) + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = 
asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks_dep.append(task) + responses_dep = await asyncio.gather(*tasks_dep) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status of dependent txns before committing the independent txns") + + validate_Response_Status_txn(responses_dep) + + post_batch_list = [] + tasks = [] + post_batch_list = post_batch_txn(txns_Indep, expected_batch_ids_indep, signer) + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks_indep.append(task) + responses_indep = await asyncio.gather(*tasks_indep) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses of the independent txns") + + validate_Response_Status_txn(responses_indep) + + time.sleep(300) + LOGGER.info("Waiting time to get the dependent txns to be committed") + LOGGER.info("Verifying the responses status of dependent txns after committing the independent txns") + validate_Response_Status_txn(responses_dep) + + async def test_inc_first_txn_dep(self, setup): + """ + 1.Create a dependent transactions for increment + 2.Create a dependent transaction for set for the same key with first transaction as dependent + 3.Create batch and post the first transaction(increment) first and check the response status + 4.Post the second transaction(set) and check the response status + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks_inc=[] + tasks_set = [] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with inc 
operations") + + txns_inc = [ + create_intkey_transaction_dep("inc", [] , name, 10, signer),] + for txn in txns_inc: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + + LOGGER.info("Creating batches for transactions 1trn/batch") + + post_batch_list = post_batch_txn(txns_inc, expected_trxn_ids, signer) + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks_inc.append(task) + responses_inc = await asyncio.gather(*tasks_inc) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status for first transaction") + + validate_Response_Status_txn(responses_inc) + + expected_trxn_ids = [] + expected_batch_ids = [] + + LOGGER.info("Creating invalid intkey transactions with set operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + txns_set = [ + create_intkey_transaction_dep("set", trxn_ids , name, 20, signer),] + for txn in txns_set: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + post_batch_list = post_batch_txn(txns_set, expected_trxn_ids, signer) + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks_set.append(task) + responses_set = await asyncio.gather(*tasks_set) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + 
LOGGER.info("Verifying the responses status for 2nd transaction") + validate_Response_Status_txn(responses_set) + + time.sleep(50) + + LOGGER.info("Verifying the responses status for first transaction again") + validate_Response_Status_txn(responses_inc) \ No newline at end of file diff --git a/rest_api/tests/api_test/fixtures.py b/rest_api/tests/api_test/fixtures.py index 4a06e42956..5e4837b6da 100644 --- a/rest_api/tests/api_test/fixtures.py +++ b/rest_api/tests/api_test/fixtures.py @@ -238,7 +238,42 @@ def setup_invalid_txns_fn(request): data = Txns.get_batch_invalid_txns_fam_name() return data +@pytest.fixture(scope="function") +def post_batch_txn(txns, expected_batch_ids, signer): + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + LOGGER.info("Submitting batches to the handlers") + return post_batch_list + +@pytest.fixture(scope="function") +def validate_Response_Status_txn(responses): + for response in responses: + batch_id = response['data'][0]['id'] + + if response['data'][0]['status'] == 'COMMITTED': + assert response['data'][0]['status'] == 'COMMITTED' + + LOGGER.info('Batch with id {} is successfully got committed'.format(batch_id)) + + elif response['data'][0]['status'] == 'INVALID': + assert response['data'][0]['status'] == 'INVALID' + + LOGGER.info('Batch with id {} is not committed. Status is INVALID'.format(batch_id)) + elif response['data'][0]['status'] == 'UNKNOWN': + assert response['data'][0]['status'] == 'UNKNOWN' + LOGGER.info('Batch with id {} is not committed. 
Status is UNKNOWN'.format(batch_id)) \ No newline at end of file diff --git a/rest_api/tests/api_test/payload.py b/rest_api/tests/api_test/payload.py index 93f2a0a327..b84589420e 100644 --- a/rest_api/tests/api_test/payload.py +++ b/rest_api/tests/api_test/payload.py @@ -694,3 +694,16 @@ def get_txns_data(addr, deps, payload, signer): header_signature=signature) return transaction +def create_invalid_Address_intkey_dep_txn(verb, deps, name, value, signer): + + payload = IntKeyPayload( + verb=verb,name=name,value=1) + + INVALID_INTKEY_ADDRESS_PREFIX = hashlib.sha512( + 'invalid'.encode('utf-8')).hexdigest()[0:6] + + addr = INVALID_INTKEY_ADDRESS_PREFIX + hashlib.sha512( + name.encode('utf-8')).hexdigest()[-64:] + + data = get_txns_data(addr,deps, payload, signer) + return data \ No newline at end of file From c53866f77e2b7f1f1b3862bf3990be100d590d31 Mon Sep 17 00:00:00 2001 From: shresthichauhan Date: Tue, 18 Dec 2018 12:34:41 +0530 Subject: [PATCH 57/64] Modify the trace to make it more verbose Signed-off-by: shresthichauhan --- rest_api/tests/api_test/get/test_rest_api_get_state.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_state.py b/rest_api/tests/api_test/get/test_rest_api_get_state.py index 516de086e5..c7b5b8e22d 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_state.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_state.py @@ -112,7 +112,7 @@ async def test_api_get_state_list_invalid_batch(self, invalid_batch): async with session.get(url='{}/state'.format(address)) as data: response = await data.json() except aiohttp.client_exceptions.ClientResponseError as error: - LOGGER.info("Rest Api is Unreachable") + LOGGER.info("Rest API state list unable to verify invalid batch") async def test_api_get_state_list_bad_head(self, setup): """Tests that GET /state is unreachable with bad head parameter From c64aee0422520d574f9ed63f1facbb231ba248e1 Mon Sep 17 00:00:00 2001 From: 
shresthichauhan Date: Tue, 18 Dec 2018 14:45:12 +0530 Subject: [PATCH 58/64] Make debug logs verbose to understand Signed-off-by: shresthichauhan --- rest_api/tests/api_test/get/test_rest_api_get_state.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_state.py b/rest_api/tests/api_test/get/test_rest_api_get_state.py index c7b5b8e22d..3a8e38533a 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_state.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_state.py @@ -71,7 +71,7 @@ async def test_api_get_state_list(self, setup): async with session.get(url='{}/state'.format(address), raise_for_status=True) as data: response = await data.json() except urllib.error.HTTPError as error: - LOGGER.info("Rest Api is Unreachable") + LOGGER.info("Rest Api unable to get state list") state_list = response['data'][::-1] From bcad95917c42b37155c877afccbab8c757430c44 Mon Sep 17 00:00:00 2001 From: jyotsna Date: Wed, 19 Dec 2018 15:03:57 +0530 Subject: [PATCH 59/64] Dependent transactions adding two intkey tcs --- .../dep_txns/test_rest_api_dep_txns.py | 272 +++++++++++++++++- rest_api/tests/api_test/fixtures.py | 38 ++- 2 files changed, 294 insertions(+), 16 deletions(-) diff --git a/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py index 2664f0fff7..fb9be2b4db 100644 --- a/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py +++ b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py @@ -1550,4 +1550,274 @@ async def test_inc_first_txn_dep(self, setup): time.sleep(50) LOGGER.info("Verifying the responses status for first transaction again") - validate_Response_Status_txn(responses_inc) \ No newline at end of file + validate_Response_Status_txn(responses_inc) + + async def test_Multiple_dep_Txn_Consecutive_dep(self, setup): + """1.Create 5 dependent transactions for set and second one is depend on first, third is depend on 
second, fourth is depend on third and fifth is depend on fourth + 2. Create a batch and post the fourth and fifth transactions. + 3. Check the response status. It should not be COMMITTED. + 4. Create batch and post first, second and third transactions and check the response status. It should be COMMITTED. + 5. Now check the response for the fourth and fifth transaction. It should be COMMITTED. + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids_first = [] + expected_batch_ids_second = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 50, signer),] + + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating intkey transactions with set operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + name=random.choice(words) + txns.append(create_intkey_transaction_dep("set", [trxn_id] , name, 40, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating intkey transactions with set operations with dependent transactions as first transaction") + name=random.choice(words) + txns.append(create_intkey_transaction_dep("set", [trxn_id] , name, 30, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating intkey transactions with set 
operations with dependent transactions as first transaction") + name=random.choice(words) + txns.append(create_intkey_transaction_dep("set", [trxn_id] , name, 80, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating intkey transactions with set operations with dependent transactions as first transaction") + name=random.choice(words) + txns.append(create_intkey_transaction_dep("set", [trxn_id] , name, 40, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + responses_last = [] + icounter = 3 + for txn in txns[3:5]: + + post_batch_list = post_batch_txn([txn], expected_batch_ids_first, signer) + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + validate_Response_Status_txn(responses) + responses_last.append(responses) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + responses_first = [] + post_batch_list = [] + expected_batch_ids = [] + icounter = 0 + for txn in txns[0:3]: + + post_batch_list = post_batch_txn([txn], expected_batch_ids_second, signer) + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + validate_Response_Status_txn(responses) + 
responses_first.append(responses) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + for responses in responses_first: + validate_Response_Status_txn(responses) + for responses in responses_last: + validate_Response_Status_txn(responses) + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_Multiple_invalid_dep_Txn_Consecutive_dep(self, setup): + """1.Create 5 dependent transactions for set and second one is depend on first, third is depend on second, + fourth is depend on third and fifth is depend on fourth. Fourth one will be an invalid txn + 2. Create a batch and post the fourth and fifth transactions. + 3. Check the response status. It should not be COMMITTED. + 4. Create batch and post first, second and third transactions and check the response status. It should be COMMITTED. + 5. Now check the response for the fourth and fifth transaction. It should be INVALID. 
+ """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids_first = [] + expected_batch_ids_second = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + txns_last = [] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations") + + txns_first = [ + create_intkey_transaction_dep("set", [] , name, 50, signer),] + + for txn in txns_first: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating intkey transactions with set operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + name=random.choice(words) + txns_first.append(create_intkey_transaction_dep("set", [trxn_id] , name, 40, signer)) + for txn in txns_first: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating intkey transactions with set operations with dependent transactions as second transaction") + name=random.choice(words) + txns_first.append(create_intkey_transaction_dep("set", [trxn_id] , name, 30, signer)) + for txn in txns_first: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating invalid intkey transactions with set operations with dependent transactions as third transaction") + name=random.choice(words) + txns_last.append(create_intkey_transaction_dep("set", [trxn_id] , name, -10, signer)) + for txn in txns_last: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = 
data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating intkey transactions with set operations with dependent transactions as fourth invalid transaction") + name=random.choice(words) + txns_last.append(create_intkey_transaction_dep("set", [trxn_id] , name, 40, signer)) + for txn in txns_last: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + responses_last = [] + icounter = 3 + for txn in txns_last: + + post_batch_list = post_batch_txn([txn], expected_batch_ids_first, signer) + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + responses_last.append(responses) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + responses_first = [] + post_batch_list = [] + expected_batch_ids = [] + icounter = 0 + for txn in txns_first: + post_batch_list = post_batch_txn([txn], expected_batch_ids_second, signer) + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + validate_Response_Status_txn(responses) + responses_first.append(responses) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + for responses in responses_first: + validate_Response_Status_txn(responses) + for responses in responses_last: + validate_Response_Status_txn(responses) + + node_list = 
_get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True \ No newline at end of file diff --git a/rest_api/tests/api_test/fixtures.py b/rest_api/tests/api_test/fixtures.py index 5e4837b6da..ee6bbf06f2 100644 --- a/rest_api/tests/api_test/fixtures.py +++ b/rest_api/tests/api_test/fixtures.py @@ -18,6 +18,7 @@ import urllib import json import os +import requests from sawtooth_signing import create_context from sawtooth_signing import CryptoFactory @@ -262,18 +263,25 @@ def post_batch_txn(txns, expected_batch_ids, signer): @pytest.fixture(scope="function") def validate_Response_Status_txn(responses): for response in responses: - batch_id = response['data'][0]['id'] - - if response['data'][0]['status'] == 'COMMITTED': - assert response['data'][0]['status'] == 'COMMITTED' - - LOGGER.info('Batch with id {} is successfully got committed'.format(batch_id)) - - elif response['data'][0]['status'] == 'INVALID': - assert response['data'][0]['status'] == 'INVALID' - - LOGGER.info('Batch with id {} is not committed. Status is INVALID'.format(batch_id)) - - elif response['data'][0]['status'] == 'UNKNOWN': - assert response['data'][0]['status'] == 'UNKNOWN' - LOGGER.info('Batch with id {} is not committed. Status is UNKNOWN'.format(batch_id)) \ No newline at end of file + + req = requests.get(response['link']) + response = req.json() + if 'error' in response: + assert 'Validator Timed Out' == response['error']['title'] + assert 17 == response['error']['code'] + LOGGER.info('Batch with id {} is not committed. 
Status is Validator Timed Out Error'.format(batch_id)) + else: + batch_id = response['data'][0]['id'] + if response['data'][0]['status'] == 'COMMITTED': + assert response['data'][0]['status'] == 'COMMITTED' + + LOGGER.info('Batch with id {} is successfully got committed'.format(batch_id)) + + elif response['data'][0]['status'] == 'INVALID': + assert response['data'][0]['status'] == 'INVALID' + + LOGGER.info('Batch with id {} is not committed. Status is INVALID'.format(batch_id)) + + elif response['data'][0]['status'] == 'UNKNOWN': + assert response['data'][0]['status'] == 'UNKNOWN' + LOGGER.info('Batch with id {} is not committed. Status is UNKNOWN'.format(batch_id)) \ No newline at end of file From 2fec0635d6926185d7585a837881194125358a6a Mon Sep 17 00:00:00 2001 From: jyotsna Date: Thu, 20 Dec 2018 11:39:23 +0530 Subject: [PATCH 60/64] Dependent transactions two intkey tcs with modified --- .../dep_txns/test_rest_api_dep_txns.py | 143 +++++------------- rest_api/tests/api_test/fixtures.py | 2 +- 2 files changed, 41 insertions(+), 104 deletions(-) diff --git a/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py index fb9be2b4db..25f19b26ad 100644 --- a/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py +++ b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py @@ -1586,54 +1586,22 @@ async def test_Multiple_dep_Txn_Consecutive_dep(self, setup): expected_trxn_ids.append(trxn_id) LOGGER.info("Creating intkey transactions with set operations with dependent transactions as first transaction") - trxn_ids = expected_trxn_ids - name=random.choice(words) - txns.append(create_intkey_transaction_dep("set", [trxn_id] , name, 40, signer)) - for txn in txns: - data = MessageToDict( - txn, - including_default_value_fields=True, - preserving_proto_field_name=True) - - trxn_id = data['header_signature'] - expected_trxn_ids.append(trxn_id) - - LOGGER.info("Creating intkey transactions with set 
operations with dependent transactions as first transaction") - name=random.choice(words) - txns.append(create_intkey_transaction_dep("set", [trxn_id] , name, 30, signer)) - for txn in txns: - data = MessageToDict( - txn, - including_default_value_fields=True, - preserving_proto_field_name=True) - - trxn_id = data['header_signature'] - expected_trxn_ids.append(trxn_id) - - LOGGER.info("Creating intkey transactions with set operations with dependent transactions as first transaction") - name=random.choice(words) - txns.append(create_intkey_transaction_dep("set", [trxn_id] , name, 80, signer)) - for txn in txns: - data = MessageToDict( - txn, - including_default_value_fields=True, - preserving_proto_field_name=True) - - trxn_id = data['header_signature'] - expected_trxn_ids.append(trxn_id) - - LOGGER.info("Creating intkey transactions with set operations with dependent transactions as first transaction") - name=random.choice(words) - txns.append(create_intkey_transaction_dep("set", [trxn_id] , name, 40, signer)) - for txn in txns: - data = MessageToDict( - txn, - including_default_value_fields=True, - preserving_proto_field_name=True) - - trxn_id = data['header_signature'] - expected_trxn_ids.append(trxn_id) - + value = 20 + for i in range(4): + trxn_ids = expected_trxn_ids + name=random.choice(words) + + txns.append(create_intkey_transaction_dep("set", [trxn_id] , name, 40, signer)) + for txn in [txns[-1]]: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + value += 10 + responses_last = [] icounter = 3 for txn in txns[3:5]: @@ -1662,7 +1630,7 @@ async def test_Multiple_dep_Txn_Consecutive_dep(self, setup): for txn in txns[0:3]: post_batch_list = post_batch_txn([txn], expected_batch_ids_second, signer) - + LOGGER.info("Submitting batches to the handlers") try: @@ -1686,7 +1654,7 @@ async def test_Multiple_dep_Txn_Consecutive_dep(self, 
setup): node_list = _get_node_list() chains = _get_node_chains(node_list) assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True - + async def test_Multiple_invalid_dep_Txn_Consecutive_dep(self, setup): """1.Create 5 dependent transactions for set and second one is depend on first, third is depend on second, fourth is depend on third and fifth is depend on fourth. Fourth one will be an invalid txn @@ -1704,16 +1672,15 @@ async def test_Multiple_invalid_dep_Txn_Consecutive_dep(self, setup): address = _get_client_address() url='{}/batches'.format(address) tasks=[] - txns_last = [] words = random_word_list(200) name=random.choice(words) LOGGER.info("Creating intkey transactions with set operations") - txns_first = [ + txns = [ create_intkey_transaction_dep("set", [] , name, 50, signer),] - for txn in txns_first: + for txn in txns: data = MessageToDict( txn, including_default_value_fields=True, @@ -1723,57 +1690,28 @@ async def test_Multiple_invalid_dep_Txn_Consecutive_dep(self, setup): expected_trxn_ids.append(trxn_id) LOGGER.info("Creating intkey transactions with set operations with dependent transactions as first transaction") - trxn_ids = expected_trxn_ids - name=random.choice(words) - txns_first.append(create_intkey_transaction_dep("set", [trxn_id] , name, 40, signer)) - for txn in txns_first: - data = MessageToDict( - txn, - including_default_value_fields=True, - preserving_proto_field_name=True) - - trxn_id = data['header_signature'] - expected_trxn_ids.append(trxn_id) + value = 30 + invalidValue = -20 + for i in range(4): + trxn_ids = expected_trxn_ids + name=random.choice(words) + if i == 2: + txns.append(create_intkey_transaction_dep("set", [trxn_id] , name, invalidValue, signer)) + else: + txns.append(create_intkey_transaction_dep("set", [trxn_id] , name, value, signer)) - LOGGER.info("Creating intkey transactions with set operations with dependent transactions as second transaction") - name=random.choice(words) - 
txns_first.append(create_intkey_transaction_dep("set", [trxn_id] , name, 30, signer)) - for txn in txns_first: - data = MessageToDict( - txn, - including_default_value_fields=True, - preserving_proto_field_name=True) - - trxn_id = data['header_signature'] - expected_trxn_ids.append(trxn_id) - - LOGGER.info("Creating invalid intkey transactions with set operations with dependent transactions as third transaction") - name=random.choice(words) - txns_last.append(create_intkey_transaction_dep("set", [trxn_id] , name, -10, signer)) - for txn in txns_last: - data = MessageToDict( - txn, - including_default_value_fields=True, - preserving_proto_field_name=True) - - trxn_id = data['header_signature'] - expected_trxn_ids.append(trxn_id) - - LOGGER.info("Creating intkey transactions with set operations with dependent transactions as fourth invalid transaction") - name=random.choice(words) - txns_last.append(create_intkey_transaction_dep("set", [trxn_id] , name, 40, signer)) - for txn in txns_last: - data = MessageToDict( - txn, - including_default_value_fields=True, - preserving_proto_field_name=True) - - trxn_id = data['header_signature'] - expected_trxn_ids.append(trxn_id) - + for txn in [txns[-1]]: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + responses_last = [] icounter = 3 - for txn in txns_last: + for txn in txns[3:5]: post_batch_list = post_batch_txn([txn], expected_batch_ids_first, signer) @@ -1793,9 +1731,8 @@ async def test_Multiple_invalid_dep_Txn_Consecutive_dep(self, setup): responses_first = [] post_batch_list = [] - expected_batch_ids = [] icounter = 0 - for txn in txns_first: + for txn in txns[0:3]: post_batch_list = post_batch_txn([txn], expected_batch_ids_second, signer) LOGGER.info("Submitting batches to the handlers") diff --git a/rest_api/tests/api_test/fixtures.py b/rest_api/tests/api_test/fixtures.py index 
ee6bbf06f2..e2c60eb5e9 100644 --- a/rest_api/tests/api_test/fixtures.py +++ b/rest_api/tests/api_test/fixtures.py @@ -268,7 +268,7 @@ def validate_Response_Status_txn(responses): response = req.json() if 'error' in response: assert 'Validator Timed Out' == response['error']['title'] - assert 17 == response['error']['code'] + assert response['error']['code'] == 17 LOGGER.info('Batch with id {} is not committed. Status is Validator Timed Out Error'.format(batch_id)) else: batch_id = response['data'][0]['id'] From 439b004909f6fd1c6eae2b7311f40df940785b12 Mon Sep 17 00:00:00 2001 From: Jyotsnaranix <44489964+Jyotsnaranix@users.noreply.github.com> Date: Thu, 20 Dec 2018 11:59:29 +0530 Subject: [PATCH 61/64] modified the parameter 40 to variable name --- rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py index 25f19b26ad..094e618b5f 100644 --- a/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py +++ b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py @@ -1591,7 +1591,7 @@ async def test_Multiple_dep_Txn_Consecutive_dep(self, setup): trxn_ids = expected_trxn_ids name=random.choice(words) - txns.append(create_intkey_transaction_dep("set", [trxn_id] , name, 40, signer)) + txns.append(create_intkey_transaction_dep("set", [trxn_id] , name, value, signer)) for txn in [txns[-1]]: data = MessageToDict( txn, @@ -1757,4 +1757,4 @@ async def test_Multiple_invalid_dep_Txn_Consecutive_dep(self, setup): node_list = _get_node_list() chains = _get_node_chains(node_list) - assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True \ No newline at end of file + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True From 0ed981f6375cacad49fc8f249f9b17debacdcb2d Mon Sep 17 00:00:00 2001 From: chiranjeevix Date: Thu, 20 Dec 2018 22:57:13 -0800 Subject: 
[PATCH 62/64] Separate batch dependent transaction --- .../dep_txns/test_rest_api_dep_txns.py | 125 ++++++++++++++++++ 1 file changed, 125 insertions(+) diff --git a/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py index 094e618b5f..051c9a2518 100644 --- a/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py +++ b/rest_api/tests/api_test/dep_txns/test_rest_api_dep_txns.py @@ -1758,3 +1758,128 @@ async def test_Multiple_invalid_dep_Txn_Consecutive_dep(self, setup): node_list = _get_node_list() chains = _get_node_chains(node_list) assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_separate_batch_txn_dep(self, setup): + """ + 1.Create first batch having 3 transactions. + 2.Make third transaction invalid from first batch. + 3.Create second batch having 2 transactions + 4.Make transaction from second batch dependent on transaction from first batch. + 5.post batch and check for the status. + 6.Create third batch having 2 transaction. + 7.Make transaction from third batch dependent on transaction from second batch. + 8.post batch and check for the status. 
+ """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_trxn_ids_new = [] + expected_batch_ids = [] + expected_batch_ids_new = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks_new=[] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations for first batch having one invalid transaction") + + txns_batch_first = [ + create_intkey_transaction_dep("set", [] ,name, 10, signer), + create_intkey_transaction_dep("set", [] ,name, 20, signer), + create_intkey_transaction_dep("set", [] ,name,-40, signer), + ] + + for txn in txns_batch_first: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids_new.append(trxn_id) + + LOGGER.info("Creating first batch to the handlers") + post_batch_list = post_batch_txn(txns_batch_first, expected_batch_ids_new, signer) + LOGGER.info("Submitting first batch to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks_new.append(task) + responses_batch_first = await asyncio.gather(*tasks_new) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses of the txns in first batch ") + validate_Response_Status_txn(responses_batch_first) + + LOGGER.info("Creating intkey transactions with set operations for second batch") + + txns_batch_second = [ + create_intkey_transaction_dep("set",expected_trxn_ids_new[2],name, 50, signer), + ] + + for txn in txns_batch_second: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids_new.append(trxn_id) + + trxn_ids = 
expected_trxn_ids + txns_batch_second.append(create_intkey_transaction_dep("inc", trxn_ids , name, 60, signer)) + LOGGER.info("Creating second batch to the handlers") + post_batch_list = post_batch_txn(txns_batch_second, expected_batch_ids_new, signer) + LOGGER.info("Submitting second batch to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks_new.append(task) + responses_batch_second = await asyncio.gather(*tasks_new) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses of the txns in second batch") + validate_Response_Status_txn(responses_batch_second) + + LOGGER.info("Creating intkey transactions with set operations for third batch") + + txns_batch_third = [ + create_intkey_transaction_dep("set",expected_trxn_ids_new[1] ,name, 50, signer), + ] + + for txn in txns_batch_third: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids_new.append(trxn_id) + + trxn_ids = expected_trxn_ids + txns_batch_third.append(create_intkey_transaction_dep("inc", trxn_ids , name, 60, signer)) + LOGGER.info("Creating third batch to the handlers") + post_batch_list = post_batch_txn(txns_batch_third, expected_batch_ids_new, signer) + LOGGER.info("Submitting third batch to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks_new.append(task) + responses_batch_third = await asyncio.gather(*tasks_new) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses of the txns in third batch") + 
validate_Response_Status_txn(responses_batch_third) + + From eb360304340e8cdab757f645e17141132f9e1ef6 Mon Sep 17 00:00:00 2001 From: shresthichauhan Date: Fri, 11 Jan 2019 14:28:41 +0530 Subject: [PATCH 63/64] State for deleted block in nodes Signed-off-by: shresthichauhan --- .../api_test/get/test_rest_api_get_state.py | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_state.py b/rest_api/tests/api_test/get/test_rest_api_get_state.py index 3a8e38533a..0049a12d56 100644 --- a/rest_api/tests/api_test/get/test_rest_api_get_state.py +++ b/rest_api/tests/api_test/get/test_rest_api_get_state.py @@ -576,3 +576,50 @@ async def test_api_get_bad_address(self, setup): LOGGER.info(error) self.assert_valid_error(response, INVALID_STATE_ADDRESS) + +class TestStateDeleteRoot(RestApiBaseTest): + async def test_api_get_state_delete_root(self, setup): + """Tests/ validate the state of deleted block at root node + """ + address = setup['address'] + count = 0 + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), + raise_for_status=True) as data: + response = await data.json() + + state_list = response['data'] + for _ in enumerate(state_list): + count = count+1 + if count == 1: + LOGGER.info("Currently selected state is root/ genesis node") + address = setup['address'] + if address == "": + LOGGER.info("Merkle tree root state deleted") + + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("State count not able to collect or not root/ genesis node") + + async def test_api_get_state_delete_not_root_node(self, setup): + """Tests/ validate the state of deleted block at root node + """ + address = setup['address'] + count = 0 + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/state'.format(address), + raise_for_status=True) as data: + response = await data.json() + + state_list = response['data'] + for 
_ in enumerate(state_list): + count = count+1 + if count > 1: + LOGGER.info("Currently selected state is not root node") + address = setup['address'] + if address == "": + LOGGER.info("Merkle tree not root node state deleted") + + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("State count not able to collect or not root/ genesis node") From 0c3268c396f2d6ee1110ae1f00082910b1fc7cee Mon Sep 17 00:00:00 2001 From: shresthichauhan <35533098+shresthichauhan@users.noreply.github.com> Date: Mon, 25 Feb 2019 15:53:43 +0530 Subject: [PATCH 64/64] Raft scenarios --- Raft scenarios.xlsx | Bin 0 -> 13977 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 Raft scenarios.xlsx diff --git a/Raft scenarios.xlsx b/Raft scenarios.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..17b569ff967d9c7a91e00dc56f12e65e54e4ef44 GIT binary patch literal 13977 zcmeHuWmp|swk__i!QI{6U4sU9ciFhR2X}(ILvVMuKyW8OaCd*4(|tRq)91bK{l8s5 z%684Mt5%J<)|zvUQjh@!Lj!^Uf&u~pA_DS^A13ku1_JU22LeI`f&$SJv9onDv31f@ zakn=C=+L{_SQF-ffl%fEfxKV;-{pUC3sfYI+4eFbiQOeVBE~hLGdU@$qWg;xjA1}O z0+VNvVuahcL%{fMgu9oNxlsnltn4=X<2JU9|28$#6hA4R?F{nUa z)^7O5x72|pd(2vfO`pTVG*nQz>s?*WkF2W4wcAcLl*m$^StZaI<~y3)2uMz$2agD~ zc`emAMezI0r~@k~7xRB051~SGFr?g3H1)%76LQrk#Krn-V8Y99e@Fp!GI>zp;-L4; z6EGbstCI6^&i>RBP8-1HLjwS#-v;M?e0R#XH&7sje{)$>-A)sbcbDCG z_ej`xm(_DLu?8^E|FQnBtNt(c&%Zo+MZBC0C=*i1S->07cn8l4HSvr!gV?^D@e43h z+KWC%T5Lh<%bmE8Q*I-^$Cql?Ecdh<)35n%`AD8FD9K{b0bfkNw|(lBd1-EjL{4xN z|9mXj>rZT5c2s`D@|k)!ckzZlqT)wh5IxzO-v?)jPXhsbE!q~G>Pe0tG_R!*%gD5o zcJV?suoWg)2PP$}JjI53g~xbF?zFv$OhR zockXS0)8Lt-rehe_fnxGC)3Y})B*b%%IKQmf{wi8#6WzkdV~lyQctx)PQ>DKy@5mB zs-r$H!vJa*;(j(UazXWRL*_Uswe*uZWp&) z2EUAr&nv`u$!0Grh2yJB>Xf=)wOa-J9d3$x-&8~xAFd_|AsPHXDuA> zbV&sR0z!Ph;+-;oB};|Mi0#UI`(W-1Ahq*o5}P2?f`@PC0Y?x^)@gfTDU(Dpij|S7 z@=!jW;Yq0;v8SM&h-sVNbDnh|?_oZP*D{!AvU2ZDD-L5>8c3INaQQa-a`0@AyMH62 z%xBu0+P@dOxSjAaJ9e=dPe%50;i~Sit0T0}8-!SmvrtqnU~UBdDANY5ZG|kYM)D{Q 
z?(iWC3|?|VB51r5qE96W&CN!1A?KDPObk40)V0BSEoeS$p_@&=Nrwlu$O77sF6nv- zl+x_;ur2$tx;jl>N6ZX#S`kAI4poIi(WL|!dX!k_CQh&r+ULYbcQSPT=&vm1D>2`$ znREZ7ZTehJR4LHK@=3!*@X-V+n;Re zrrh{b&q7WpG#UvYi@rKV?V#yxWo5^eoe$AFiA~+tPRRN|v8C@22KVMsGVG94q^KHtRDJ{Va^Uf_wARx4Va13B>;Amp3?Br-+YX?iQy;dVg*itJ_h$mAd62B^yglk~ceY zGGnw+B#oE5eD>Yr;eEN~4_~SrZcsVUFk8fNN|h8rA8~S@f7p7Q=&A3k-?Hd8N{XnP z4E&a9x99u%y>vcgtF4s(s3K}a{?Qw+O0?JcqY3dqM(`-A?YULw@ewU1?+gu*EXtik zV2`4}7V}As0(vRe5w5`saS{QH!zsS3Y34&|@|7}67slcNgK%ieIGnuuGRM_-{tm#I z-o_Paq}Y`MxiAUw?WcWW_Lb@>HR5&yp^9Fd2{(H6PhhqE&?FEB4ppRaA)P)H6A|Hy zmFOZcad+&PV-C_v1L2cyQ0PLR=q|OU)AWjR^-CFDsk;o6Sd@ALwH(Hk(4loY3gskT zmnxY9guJ6znuroCK42Q7zU7RKOBWb(w7BJDk0B0PmaajZMsec2JmJ5Adz&AVwCyq- z^B!+DDGV%vZOGeOK9Tz7D{b>TP8wnD#Pc_8hDQ;sZX@&UuYhX>y@u*`qSq9#3kV~a z3n-UjR5)ZYcqY3O*Z-6O-r7zbkX~b?bFfALMmJ7;EZ`Q})XLh#Kh8R2=xnq2tRmqvggLa9*&uS_C zwp+=xQF+gI4MzH-!E*7)kj4un24XS$K_Y8_Z=}17axuJRCh{IDVL^ zp?Tnue6RGU?LlQXD?QLgZ^+G@&8%eVz+mi0;9-Xrsy=-GdG%4UC}w;joW^$aD4e1= z^PK>U#8RQnt5k4A*~nRlV`=>@!A*epj~!ApT&LV0>TBVvVqQip6K7FMrgN}VE=oH4 z_z(Kkn#b8LwUV@)jyPhbM3g;*b>zA|M_>=G8TE^wk+764R(eM7m)r$QU_h*4H%uIX zfeEq5@@ag3f^u6Fo)vygqs4?|5A6lU?$yOgs#L}+6iW%HEF3#NSh$cVp=-)qTv1W8 z-Kc5-wmP(JQt&8@;53w5%F#1^cBHX$Nb8>b!Jp6)hTYp$G%6hKoq5Ct+Z@8t^XdFV zlI$`+EDOIDo6cBgY_S?`>E|aM0=#P>)ejIno3O4lSXEvo4!T(pZHJL7ke19KW_di+sBGcg@C1m8B) z>4dr9MV|L9Ezs@#gk;k4vh1aDGoUbXVqrO3Weej_dWO%k}UI?hED943yD< z*P1z#9}gUOa|FUotIw6o=c^RIozFb_n-YC=;KsmM=CrE?4q9-7!49ZCGRt=vi|9>(=&T!WF2S2=i%4zM!Ag>vmMe$eFA$WlvNm16)Cz=Jdp%T=ZI;C$ept zJAgK`7-Cx%Al0e$E1>*q*hgC#ETwk#OpN6}NEvO9N;vRQ>i{k#RDALl3{pyvK7}UH zwfeQ1t0Q_3O!&PTQ?f`KO8TKg87_S8J|f29?oj40zQr0T=@;Pnk;ttDnkd*|2rsv6 zS(8bP#pW?;Hl{ccYTk!;Dv}u&c|YBW(}dq9E)NJ;s5fUS96K%4Os9UZA!o@L)grB` z1s7}OF*}R6eQ<>`y~MayB=e(Z*5X$6K&ldKOtd$^$v6&a?iLE^R5{F&CXKO{P_`A$ zdd&qYBb#!B1NQ;LG=P;33)%Kp0Oiy0EnKZckVuvA^y=V^ZJQ}Fc;pP7}gJlAO zixFU;JWmgbM5!uG=Pn3D4;ZPF2O1qtN_(fBx8VSlzG(LN|gH8G- zly2a(b^&SpQih*NCG=~UDyUQ*cS3KD5wbH~un`fjUj{4)GYw3}AbHTxwX|unvAu*H 
zJBFQDo-K54glDNu+qrx6^yFvWGe*?2#T`bjOasW9I>^iXUI_0E;`sU}c^w?&&0rg7 zpf5#Q4a09klX{`vrn;`q*BBaN_$n`p)pt(C)eJ{R+1YPCMz)=b2XFRgQprSKw%c`65Uh zypZ`}pIpdJ6~O_Ey^9csPks|FAh!DISZwq54W+|&GrnCs;iCYr)YNR>YB|LtgIjR34PgC z=Q`N4J$RnwC|S0M{>K&jOGMwkZ$F-`O9-g-uZaw(cvNC!=2H1&@)Yhj(DG|5-&6PS^6~OOuBT9HnEoXfpfIv{C1+eOP{2^Le}4I;X1cgS&iW&-gRs3ZPATi`si+ z;yY_3J?3Isl+{3934wnPTmiZ-kxc7kL{Ya!7^e4+BuA*^P=SuU2 z4uI+eA-G&f3kJC`;U^|5(2g2%^K7nd?4AqVKq0vE;qCkgsX8sQyXAc#N3}mtYl5w-Ml@l$x9bjFIf|jtFvMFO4_AM97yJh z2t-8ED5W_xNUU&xyD)qI+#ySH zhh9V-j|l7Yr_NHnQb@b@Xiw^6erBAa)^=CI=E1$tef=prFUQdJXz|$0#rekYw{Z-Q z4YN$o`!B-pAuY+TI0j&DV&Vk&XB7PB0n@)?;GmeEh+alCu{+RL5jW3-`CvE^We(zI z)jd$d$7P6>l-N787tcCelkVMJ`>9>_j3?ew98-ii+Syi+;2k4O^O|$dWx6h*TF%2& zG-x{BScC8Mo#~+R^3vf$vFhlswZV#{FCY4R`j={>p-W@=e=bRdyEBeFMB;p_D6_P= z)Pt;NkZ(An-SJin+9M(u16Olr^QLaLIvMx-=jlwbn%0AhPRim7R z@^IHTiit&%%3a%ZTS7XGKIZ&~>+6FKj*SFXOop}3rGOx1dG#5Wa!l%~zyxHR1mk5F zI|RtWq9c3%c&0qs9vfHx7Eg<(mpl4AI^?YnJl_Piv`S9Ig~uC5P51*$`x$Dm?K7b0al~2IQi^J) zMGS^QRl_%7@n>eagzHawIgMO+`(1;N6#Bt=&D+p#_L7?zlBYkdheyDBH~4_wq}+}1 zL?K#PjyrkOV9#E8G3-|y(*U6#HT&cfTn8rcfeYp%kF##)tz5TN+D=W?VpF!&FP^e_ z+?4q*94{u_XIB7nkASSp9-dau0Bg}eccj58iGy4^w5Y*PFP1ck=r%cr6G zb900Y+1_uLOQ?Ihbwd=0Sy{aI2RmDR36KJ&jZx7&lxc@}dR|XU*?PXm&lfuxm@%C- z+bz*=H=!SW9v%QZNaR{>TMsEQzMhZw4{F)3uWKZ*?Fo6Hc>JVHKPOeaj#u@W#^v05 zjH3}}eCLfm5E)(zA}=>FN+#_Bq!cv};j8MZvqd`wVTi5)=Z2Xi4DJ(wIlY243zsO<9A0Ons4+=Pm!t%D)fBj>M+6n%(P+G-a;gFCXF2!_ zOjsn}v8NMq#;{7PNXCRte`@q`Z6BlX#rS9%XJ*tyxu?lIJ7xPED+V-fl2Y&i}FoQy7!l#h*AyO{o<&WX5Z}PKsWloy2 zJXE!vnmok?UZ|%6l$(64AFGVq%;sSid0u>qH%{MlyEr<=_NteI6#_#r%bgosVmRp6 zD1v6ZYjfOE%IA1OFIn*g^OrxBbnIHva71o0Z%}?{wun$6pfLGCLY(&kHK8A_r;0O8 zH6{14g~bT>(`%WAP51@Xisc()%-tgAO6{j|N7wXlNQpf2>icq+MVl}!MiIc1r;~cP79{N)$>&&rFi^+fD2dnNwK?QJPSVRXsD;%F3CnOK z1SN+cV{h~F+~g-*MLCy4l?r`qHGE)4W5u4}K(?YHF0S_{Z&mu%&t=;tv=;ik7CF6P z4f^dPxNj(Hxd zs6+X@Akw;~BVBA#(`Q_;H8|!h*>lN#g9Xik;yNvbds&)_kE(YIkYica2wUqhgF}6p`baIKPQ;{Q74jO(H~sP7?Nzd6tGwW-KVS>$QV%8IBO&(%jBWYAnKl8!`zCq|%_SSLHrG{R{hB9w 
z*LE|hji(I>tSK^*+RK@m*#=9)m?5)7-iMtl35Dy7l~=H*9)r z6bV~Qaawwo8~?(JuyRDkx#gdTj0nKfEts>h!- z`y5(`c>*qt&StCdMmSen&k&PxR`O zuD>0{%d>@&DBd%XQkXzM@c$UY08Z}KCjUqgsoTUXb0WR^%)EpSZ`4Woos$czVi2ny zsupX>w^oxobeHMn^&bm=yxXulCXJ=DJ4m+CDD{#W#1in~Wx+>e@o3r__aN1fFNBJ9 z-6M_+lN?l`1--uGt$!LGMaN(=Rf+|&k&0gb?E7YuSWZWRNF$sGjzo<3O}f-+pVPgb z7{`Mx)-Cjsh)f>FPe4S!d!LSkIMUu4n^O^TdF#^1y*6$?2vL3kUVrFFRElMG|A zwH92{wI7pd=@6B*93pqRM9)vnBi=ym{orchE@bOQZ7@nWovZIpR;KCspZ0Z_Z@)Ok zYh@27+@$QNd)YW`r=u+KI@Yk5Snu65x-h~;Ng#Pg7!!l_P;cz5psmtu=AhX!dtlb5 zV6A)5bv*=WGhVLF-Bg3qBAYUatf|R3sob}GZ_!3K^{90)qRK; zL|*~zW(XN15k~NV3yK)+cK_1E+N@~Y;Y_@(dQel(>Z6-M_${X9;)r_mk4i? z+PPD%Haxd$6{iI&w71Hw>8}%d(gC%wOxgx6Uuy~46R!%Z;rR()MQqm^wwP+=uvgNv zEKz?_H$7F$jG@cSiHehoJW4_V6FsjX9_s}}ur+7qRJ_$#m377u?2%MtUOD7D*HY?0 zy$Sn>MFvP_1fROcR3{+oW#R&y;Y;1^#Kj!jRoec&$;MgK#wku6LqRkLI-=cYyEE|< zSQp-OUpuz!gQNRJJTD|f0lL6UL#XYkkLvk6jjhUvwo5XYN2OnZS|zKsF}@UdCG~EC zykWMya7TUQdiA?-=F)`;%9b6Q9Z-N?7(Fo?xUI5obgH8g%yvqNp9^rREs|AqDEJ03 z?pDXJrn9D{*t%^V@wi5<@0UvIHZkjU$0Yc4Cm>4#h9X&dL*bn`DhOu&_NwTr!~1>S ze`eDN!56S)QOSax<7<|%uFnmt<+71^eIlQ6f7_icYtsb~NR#jLUvWHpD`ykz%73v~ zu2=ZznQ1VYBbA<%K{q0BJOrYG3iDX>1Xh2v3Rzk8w0@_}W8IducJQNf?X1z8=elFQ zekE=E`CGNao!;OY0B?%MV?4cGK;_%3k%~%Z8LtWfIEBBPDXRP~wAltI!P|TJ;y>#f zsx#OD3-7byECL{)5C0Jt{E7%FG-P5{IN>@OYTkOzI~<-z`y`bJqa#lY{0CeJ%wqCM z2m2lLJ-IV|yAu03Q3{=gDj70E3Mk{r9~5T}x0?=+y-eh^=2IGuJ{DE=(TjC3Fbp+3 z-RWiX(`px=T#oSzEMPXeY^WdktUf=E9X8lzt)LoZf@0I3Eb}?9v0bz`a@JDYQQRcy z_yI~XsLwXd0N2Y_eU}~TDw!Sn+?npGy1Lq>L>BA$`TV$}0Vjt)3Jp`_sVx?{&bnW3 zRdiP}8q3^bTB+@_^r{FVT3Z7miZ8WY?1W2?RKzYH+)6W^58!-!ukzcQ4=@DaZd5f` zowW#Bp-Zj`a*L*9><;{kCM@*=AbF4F`w*;Lk0>-a0UZ<4VEfte9gU-Jne@Joy^|ID zi};-hN9Xqfb2luQ&u|4)dWTq60^#>>)100z!+Miz**8aa6}(y83MhdBtChxYjYc-W z7hr4&gg?_G9^qE{y5v`cx(92JM)P|;m+@pP*v{Yj-5hkyiLD4ghKg7p3lFv;#&OGZGHZkMZHUScsop!P*|V> zypxE-c+9z57fF2aYw-0`Yxs839lB$YU>ceUT3ji8Een~K4lxT^fNn3SuSan=%Av}v zZ!*8w25L!0q&7+O2k5;pol@ff=Z+_$3pwc$;D|6gC6LR~1*WDa2;w5Uv*%+DyJmzf zcn0xTXl1Fmxe;g{sc-yLwd&GgRe9h9ATQss2AuaHq$3P-NW)Yx=#As`mswa&po>94 
z2c7qCB_j-DNK1e-;{2qbzjr@C0dI3Vfs&cfYZl$-#n<}+K+_QW^Y z9v2Fz2x^!5fv@A8n$XmJWS#@jj0K5Dv2>@Ea*~67TQ{f&(}_m1s}gDct`=KBd65i; ztechtBUCpO+7brgWgh}Vravv~L^5!PzOim37BOkb_|)@2SL2*20&+fc zD(PA`E-Q^#?J8pU*qRwEgE?co!_B#x){W~~izs%)PAo1Sk#4i@=NulaDD{aobAj<6 zSC~6h)B17YER%4(ZpeIL?Lup@{0!*?B8b%x3gHCtzCrCrj79n85!5PN(eC=Fvai!7 zeI@}@9o^XplJ9~KOrD+O+xd#jtjYsFj>aRzRq8T`R-`c8Pqj}+vKX#Vwrrtax){EQ z6`Pfu8#^$`1T-fhs(Qjoh#rh4D;5D=)(Nf}nvxolS3p%@2+IjyB4u7LQYu8ll#iFL zfW46sxz0(e&b>;5D(fw!Bm);*kHVs)29rjG90gn}9FFiPsdU^9ZQ&Bnko1l&4{MGP zk^&^8ETuzQ>XH|0N)e2tZCVRRG$QHEu`DK~QdY4S>08N3A{wBr?G7-!%0&%TnE8lH zN=W6XYK<^V*jhIn1a|P=EO-a?D6q7Y?5i}QOk55~k;=8f{0>229WLxx%W*c$cMt`h zCOz$2xoTEjxo|A}Wg8 zT;(dRI^sCTN8qm(gBcN)DF`nYi>frxNky1)@*U(7%cx<48U^ATJ>}!YtFXv@_#B0u z!i>Q(#OVh?ZcZ#^lcgkSqZa2-db_WV9qpecEaiK8chEzyWJxbExQ5kabYhueHh7XI z_OoXgCHjvB${LeYiU*@g-*TEiG`RQSUPxJCdk;*(spPHbh7t1My%W&T?I z47OO>aB~^#P_s3G|#TRm!}ult9|UH8a$bC40X(O-`M~x_qxN8`*n=*c)EHc1tG32)+J%3Wlm&! zQ5|fn_{w{f#7@b^dA?`HrCy=guc3%C`abMj_HMFDG!&o+*)4Qd-D9(dCJ)7sD zW&LU4dDv$v!^qttlZ7X2Na2Gs_JbyjnvK1Ab@g+2qQEBK9A3M}x=-e&K}Ju;xD$xn z4~2!b6G+bC8y|zUvANO=6nu>Vt()bJdAWrh6Ug^EDDNhF_T5*|TZW^Ix4)fUW3uJa zr@tr2s6l~%klytn#&$*uj&}9{1_OKhe@Y<#s|tB;@lKiyF?7_vx$Y`*t_LOHKaLLfZ+J|Ncb!Li zQ}P9{OGLq=XCq*=#{(3Z)-aBhU7ThM>1*8H(T8ky+xKk-$1 z)SvvG|{@?di|8c*Nx3UnC8E+lNlYOD$NTR`SfG>DM;C-Mi_CPWe%NI>PExk zYO^CiphCf>gz-q=pL^CZrM(KZe?g6bLp=++>N~%0!~pp|l=5YJ;bEX_U44i?&3;x7 z#GhfFsW^2glhQqYRt*#Xg8$@4B@ySu!`G6=$`OSt%Slg8%vrn`&Zi#Zwe*_M$Jr~T z4^=-5xV<~PnkY9C+s`%RM)pV+eE*iub9Q*?rtf?%c^9xC|CP^1c8(_hCG>ln{d0+m zSNs^rh!(sd_l#IDCG2l9h^!x;%Ex6yJUD>)z1}q+i>~0r_}LTIdiV#vSHa$D48RtL zAtukMQ^-Ph1PnYY1~<0eQ2P|+I;(mWa>rbRl*ELLNQln7Ue?aeP*b8_>Ab8AJ8>%q zPbQu_&p80b=n%?C4i-t&bDpTOxiXP((5DQq3VJO8Z}Z#TSuK#r2PQCA!6y9h{UBo} z(RoPKTOe_!L_$IA&!7z5UAmys`{ke=%nmTN%gZj@1DRj3@cO5{XT4b2<+j=C_(LWL zHYXk*Dyt72mrf!;q>aQ$%KTEL3pIogz~fw_>Cw!U&*h>h9Ggb@>g5Vk7t}=FM~lo}AaK9Mb7O0BCd43gnoBe6xy3i2S9M;N6}jFDcWUpv<<(f1ER_*$M0dd(lmh`P+NPg-|W zt|R%Lb`F<@3hU-C5a%l{@Tpz3VahL)YSkXND~m71&R?OQ#_3%aK~>BSzHXG*Zstu< 
z_Tk?t!$G}b9@sNvA+L}R@r&==Jp^%obpHB<4J*%&k`N{gOM^4V;!cZq z8r;5*x$j>d{2FtOoB>XDHvjFV{@F#nZ`u=ttlkT9LuY~Bq4A#9*cq16<>uvmV@MUK zE?Biin&$B{q8wOHr;*n3c*kG2qPO^7yYzG~^IjuMzTnw8l^VmfV}Kx}v}h(d;sv_1 zhv;mY_w7WeN?(3BZh?4NKapd#QJKI|c05^xDmAKMDnigv+ryzxwO#sJpCrr{faPgD zUqv0kyjMEf6d<#07zDzxVVMtB(e$7(0vwktbS8p3<&uO#!b*dmLH6}MX zsrVu=ZhLFnG4cz^$O%&!;nkG;S7Rz^YQ-vRzztN&Mkx_8?D zMY;dG;lC@E|7!U2J!$iQ*DwDE&Tlf-Ur1Q*#nRtIt-l-pu21}B><#;;@qefle@FPe z`uq!F>%I5?=imRoYSF&~{$9-e1-OmzC*bd;-QNLz&!qnXD1O&yzTeTWy!!8^zo*Q8 znLZHx{xH81XuqTUd#v#b0|*F$3JB=;$m4hGf1la?)q0-hFV_Dv&->l{_p$dE0D%6_ uNBI94iGK(E_n!P0BoL4X<6rOTU;Vm*4EQ^V|EOfZ1DU=10WQlQNB;*M_(ymE literal 0 HcmV?d00001