# Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
import logging
import time
from base64 import b64decode
# urllib imports were missing in the original even though
# wait_until_status uses urlopen/HTTPError/URLError.
from urllib.request import urlopen
from urllib.error import HTTPError, URLError

import aiohttp

from utils import _get_node_list


CONSENSUS_ALGO = b'Devmode'
FAMILY_NAME = 'intkey'
FAMILY_VERSION = '1.0'
DEFAULT_LIMIT = 100
TRACE = False
NONCE = ''
TRIES = 5

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)


class RestApiBaseTest(object):
    """Base class for REST API tests that simplifies making assertions
    for the test cases.

    All assertion helpers take the already-decoded JSON response (a dict)
    returned by the Sawtooth REST API.
    """

    def assert_status(self, response, status):
        """Asserts every batch entry in the response data has *status*."""
        for data in response['data']:
            assert data['status'] == status

    def assert_equal(self, response, data):
        """Asserts the response equals the expected data."""
        assert response == data

    def assert_check_nonce(self, response):
        """Asserts the header carries the expected (empty) nonce."""
        assert 'nonce' in response['header']
        assert response['header']['nonce'] == NONCE

    def assert_check_family(self, response):
        """Asserts family name and version in the transaction header."""
        assert 'family_name' in response['header']
        assert 'family_version' in response['header']
        assert response['header']['family_name'] == FAMILY_NAME
        assert response['header']['family_version'] == FAMILY_VERSION

    def assert_check_dependency(self, response):
        """Asserts the transaction header has a dependencies field."""
        assert 'dependencies' in response['header']

    def assert_content(self, response):
        """Asserts the header has inputs and outputs address lists."""
        assert 'inputs' in response['header']
        assert 'outputs' in response['header']

    def assert_payload_algo(self, response):
        """Asserts the payload hash was created with SHA-512."""
        assert 'payload_sha512' in response['header']

    def assert_payload(self, txn, payload):
        """Asserts the transaction payload matches the expected bytes."""
        assert 'payload' in txn
        assert payload == txn['payload']
        self.assert_payload_algo(txn)

    def assert_batcher_public_key(self, response, public_key):
        """Asserts the batcher public key in the header."""
        assert 'signer_public_key' in response['header']
        assert public_key == response['header']['signer_public_key']

    def assert_signer_public_key(self, response, public_key):
        """Asserts the signer public key in the header."""
        assert 'signer_public_key' in response['header']
        assert public_key == response['header']['signer_public_key']

    def assert_trace(self, response):
        """Asserts the response trace flag matches the expected value."""
        assert 'trace' in response
        assert response['trace'] == TRACE

    def assert_check_consensus(self, response):
        """Asserts the response has a consensus parameter."""
        assert 'consensus' in response

    def assert_state_root_hash(self, response):
        """Asserts the response has a state root hash."""
        assert 'state_root_hash' in response

    def assert_previous_block_id(self, response):
        """Asserts the response has a previous block id."""
        assert 'previous_block_id' in response

    def assert_block_num(self, response):
        """Asserts the response has a block number."""
        assert 'block_num' in response

    def assert_items(self, items, cls):
        """Asserts that all items in a collection are instances of *cls*."""
        for item in items:
            assert isinstance(item, cls)

    def assert_valid_head(self, response, expected):
        """Asserts the response has a head string with the expected value."""
        assert 'head' in response
        head = response['head']
        assert isinstance(head, str)
        assert head == expected

    def assert_valid_link(self, response, expected_link):
        """Asserts the response has a link url matching *expected_link*."""
        # Debug prints replaced with lazy logging.
        LOGGER.debug('link=%s expected=%s', response.get('link'), expected_link)
        assert 'link' in response
        assert response['link'] == expected_link
        self.assert_valid_url(response['link'], expected_link)

    def assert_valid_url(self, url, expected_link):
        """Asserts a url is valid and ends with the expected value."""
        assert isinstance(url, str)
        assert url.startswith('http')
        assert url.endswith(expected_link)

    def assert_transaction_ids(self, response, expected):
        """Asserts the first transaction id in the batch header."""
        assert 'transaction_ids' in response['header']
        assert response['header']['transaction_ids'][0] == expected

    def assert_valid_paging(self, response, expected_link):
        """Asserts the response has a paging dict with expected values.

        When a 'next' page is advertised and a link is expected, the link
        must be a valid url; otherwise start/limit must be unset.
        """
        assert 'paging' in response
        paging = response['paging']

        if 'next' in paging and expected_link is not None:
            assert 'next_position' in paging
            self.assert_valid_url(response['link'], expected_link)
        else:
            assert 'next' not in paging
            assert paging['start'] is None
            assert paging['limit'] is None

    def assert_valid_error(self, response, expected_code):
        """Asserts the response is only an error dict with *expected_code*."""
        assert 'error' in response
        assert len(response) == 1

        error = response['error']
        assert 'code' in error
        assert error['code'] == expected_code
        assert 'title' in error
        assert isinstance(error['title'], str)
        assert 'message' in error
        assert isinstance(error['message'], str)

    def assert_valid_data(self, response):
        """Asserts the response has a data list of dicts."""
        assert 'data' in response
        data = response['data']
        assert isinstance(data, list)
        self.assert_items(data, dict)

    def assert_valid_data_length(self, response, expected_length):
        """Asserts the response data list has the expected length."""
        LOGGER.info(len(response))
        LOGGER.info(expected_length)
        assert len(response) == expected_length

    def assert_check_block_seq(self, blocks, expected_batches, expected_txns,
                               payload, signer_key):
        """Asserts blocks are constructed properly after submitting batches.

        Scalar arguments are normalized to single-element lists so the
        helper works for both single and multiple submissions.
        """
        if not isinstance(blocks, list):
            blocks = [blocks]
        if not isinstance(expected_batches, list):
            expected_batches = [expected_batches]
        if not isinstance(expected_txns, list):
            expected_txns = [expected_txns]
        if not isinstance(payload, list):
            payload = [payload]

        seq = zip(blocks, expected_batches, expected_txns, payload)
        for block, expected_batch, expected_txn, block_payload in seq:
            assert isinstance(block, dict)
            assert isinstance(block['header'], dict)
            batches = block['batches']
            assert isinstance(batches, list)
            # Tests submit one batch per block.
            assert len(batches) == 1
            self.assert_check_batch_seq(batches, expected_batch,
                                        expected_txn, block_payload,
                                        signer_key)

    def assert_check_batch_seq(self, batches, expected_batches, expected_txns,
                               payload, signer_key):
        """Asserts batches are constructed properly (one txn per batch)."""
        if not isinstance(batches, list):
            batches = [batches]
        if not isinstance(expected_batches, list):
            expected_batches = [expected_batches]
        if not isinstance(expected_txns, list):
            expected_txns = [expected_txns]
        if not isinstance(payload, list):
            payload = [payload]

        seq = zip(batches, expected_batches, expected_txns, payload)
        for batch, expected_batch, expected_txn, batch_payload in seq:
            assert expected_batch == batch['header_signature']
            assert isinstance(batch['header'], dict)
            txns = batch['transactions']
            assert isinstance(txns, list)
            assert len(txns) == 1
            self.assert_items(txns, dict)
            self.assert_transaction_ids(batch, expected_txn)
            self.assert_signer_public_key(batch, signer_key)
            self.assert_trace(batch)
            self.assert_check_transaction_seq(txns, expected_txn,
                                              batch_payload, signer_key)

    def assert_check_transaction_seq(self, txns, expected_ids,
                                     payload, signer_key):
        """Asserts transactions are constructed properly."""
        if not isinstance(txns, list):
            txns = [txns]
        if not isinstance(expected_ids, list):
            expected_ids = [expected_ids]
        if not isinstance(payload, list):
            payload = [payload]

        for txn, expected_id, txn_payload in zip(txns, expected_ids, payload):
            assert expected_id == txn['header_signature']
            assert isinstance(txn['header'], dict)
            self.assert_payload(txn, txn_payload)
            self.assert_check_family(txn)
            self.assert_check_nonce(txn)
            self.assert_check_dependency(txn)
            self.assert_content(txn)
            self.assert_signer_public_key(txn, signer_key)
            self.assert_batcher_public_key(txn, signer_key)

    def assert_check_state_seq(self, response, expected):
        """Asserts state entries in *response* match the expected protobuf
        leaves.

        Args:
            response (list of dict): JSON state entries with 'address' and
                base64-encoded 'data'.
            expected (list): protobuf leaf entries with .address and .data.

        NOTE(review): the original referenced undefined names
        (proto_entries/json_entries) and unittest-style self.assertEqual;
        rewritten against the declared parameters with plain asserts.
        """
        assert len(expected) == len(response)
        for pb_leaf, js_leaf in zip(expected, response):
            assert 'address' in js_leaf
            assert 'data' in js_leaf
            assert pb_leaf.address == js_leaf['address']
            assert pb_leaf.data == b64decode(js_leaf['data'])

    @staticmethod
    def wait_until_status(url, status_code=200, tries=5):
        """Pause the program until *url* returns the required status.

        Args:
            url (str): The url to query.
            status_code (int, optional): The required status code.
                Defaults to 200.
            tries (int, optional): The number of attempts to request the
                url for the given status. Defaults to 5.
        Raises:
            AssertionError: If the status is not received in the given
                number of tries.
        """
        attempts = tries
        while attempts > 0:
            try:
                response = urlopen(url)
                if response.getcode() == status_code:
                    return
            except HTTPError as err:
                # Some expected statuses (e.g. 4xx) arrive as HTTPError.
                if err.code == status_code:
                    return
                LOGGER.debug('failed to read url: %s', str(err))
            except URLError as err:
                LOGGER.debug('failed to read url: %s', str(err))

            # Back off a little longer on each retry.
            sleep_time = (tries - attempts + 1) * 2
            LOGGER.debug('Retrying in %s secs', sleep_time)
            time.sleep(sleep_time)

            attempts -= 1

        raise AssertionError(
            "{} is not available within {} attempts".format(url, tries))

    @staticmethod
    def wait_for_rest_apis(endpoints, tries=TRIES):
        """Pause the program until all given REST API endpoints are
        available.

        Args:
            endpoints (list of str): A list of host:port strings.
            tries (int, optional): The number of attempts to request the
                url for availability.
        """
        for endpoint in endpoints:
            http = 'http://'
            url = endpoint if endpoint.startswith(http) else http + endpoint
            RestApiBaseTest.wait_until_status(
                '{}/blocks'.format(url),
                status_code=200,
                tries=tries)

    def assert_check_txn_dependency_commit(self, response):
        """Asserts the first batch status in the response is COMMITTED.

        NOTE(review): the original wrapped the assert in an identical
        'if', making it a tautology that could never fail.
        """
        assert response['data'][0]['status'] == 'COMMITTED'

    def assert_check_txn_dependency_invalid(self, response):
        """Asserts the first batch status in the response is INVALID."""
        assert response['data'][0]['status'] == 'INVALID'

    def assert_check_txn_dependency_unknown(self, response):
        """Asserts the first batch status in the response is UNKNOWN."""
        assert response['data'][0]['status'] == 'UNKNOWN'

    def assert_check_txn_dependency(self, response, txn_ids):
        """Returns True when any id in *txn_ids* is listed under the
        transaction's dependencies, False otherwise.

        Despite the assert_ prefix this helper returns a bool (callers
        branch on the result rather than expecting an AssertionError).
        """
        header = response['data']['header']
        if 'dependencies' in header:
            dep_txn = header['dependencies']
            return any(txn in dep_txn for txn in txn_ids)
        return False
# ------------------------------------------------------------------------------

import pytest
import sys
import platform
import inspect
import logging
import urllib
import json
import os

from payload import Setup

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)


@pytest.fixture(scope="session")
def setup(request):
    """Session-scoped fixture that posts a set of intkey batches once.

    Creates and signs transactions/batches via Setup, submits them, and
    returns a dict combining the posted data with the expected response
    data, shared by every test in the session.
    """
    LOGGER.info("Starting Setup method for posting batches using intkey as payload")
    ctx = Setup()
    txns = ctx._create_transactions()
    batches = ctx._create_batches(txns)
    expected_data = ctx._expected_data(txns, batches)
    post_batch_list = ctx._create_batch_list(batches)
    ctx._submit_batches(post_batch_list)
    data = ctx._post_data(txns, batches)
    data.update(expected_data)
    return data


def pytest_addoption(parser):
    """Registers the custom pytest command line options."""
    parser.addoption(
        "--get", action="store_true", default=False, help="run get tests"
    )

    parser.addoption(
        "--post", action="store_true", default=False, help="run post tests"
    )

    parser.addoption(
        "--sn", action="store_true", default=False, help="run scenario based tests"
    )

    parser.addoption("--batch", action="store", metavar="NAME",
                     help="only run batch tests."
                     )

    parser.addoption("--transaction", action="store", metavar="NAME",
                     help="only run transaction tests."
                     )

    parser.addoption("--state", action="store", metavar="NAME",
                     help="only run state tests."
                     )

    # NOTE(review): help text was copy-pasted from --state in the original.
    parser.addoption("--block", action="store", metavar="NAME",
                     help="only run block tests."
                     )

    parser.addoption("-E", action="store", metavar="NAME",
                     help="only run tests matching the environment NAME."
                     )

    parser.addoption("-N", action="store", metavar="NAME",
                     help="only run tests matching the Number."
                     )

    parser.addoption("-O", action="store", metavar="NAME",
                     help="only run tests matching the OS release version."
                     )


def _select_by_marker(items, marker_name):
    """Splits *items* into (selected, deselected) by pytest marker name."""
    selected = []
    deselected = []
    for item in items:
        if any(marker.name == marker_name for marker in item.iter_markers()):
            selected.append(item)
        else:
            deselected.append(item)
    return selected, deselected


def pytest_collection_modifyitems(config, items):
    """Filters collected tests based on the cli marker/limit options.

    --get/--post/--sn select tests carrying the matching marker; -N
    limits how many of the selected tests run.
    """
    try:
        num = int(config.getoption("-N"))
    except (TypeError, ValueError):
        # -N was not supplied or was not an integer; run everything.
        num = None

    if config.getoption("--get"):
        marker_name = 'get'
    elif config.getoption("--post"):
        marker_name = 'post'
    elif config.getoption("--sn"):
        marker_name = 'scenario'
    else:
        items[:] = items[:num]
        return items

    selected_items, deselected_items = _select_by_marker(items, marker_name)
    if deselected_items:
        # Report the filtered-out tests to pytest instead of dropping
        # them silently (they show up as "deselected" in the summary).
        config.hook.pytest_deselected(items=deselected_items)
    items[:] = selected_items[:num]
    return items
+# ------------------------------------------------------------------------------ +import pytest +import logging +import json +import aiohttp +import asyncio +import datetime +import random +import time + +from google.protobuf.json_format import MessageToDict + +from sawtooth_rest_api.protobuf.batch_pb2 import BatchList + +from utils import post_batch, get_state_list , get_blocks , get_transactions, \ + get_batches , get_state_address, check_for_consensus,\ + _get_node_list, _get_node_chains, post_batch_no_endpoint,\ + get_reciepts, _get_client_address, state_count, get_batch_id, get_transaction_id + +from utils import _get_client_address + +from payload import get_signer, create_intkey_transaction, create_batch,\ + create_intkey_same_transaction, \ + create_intkey_transaction_dep, random_word_list, create_invalid_Address_intkey_dep_txn + +from base import RestApiBaseTest + +from fixtures import setup_empty_trxs_batch, setup_invalid_txns,setup_invalid_txns_min,\ + setup_invalid_txns_max, setup_valinv_txns, setup_invval_txns, \ + setup_same_txns, setup_valid_txns, setup_invalid_txns_fn,\ + setup_invalid_invaddr, post_batch_txn, validate_Response_Status_txn + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +WAIT=300 + +BLOCK_TO_CHECK_CONSENSUS = 1 + +pytestmark = [pytest.mark.dependent,pytest.mark.sixth] + +async def async_fetch_url(url, session,params=None): + try: + async with session.get(url) as response: + return await response.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + +async def async_post_batch(url, session, data, params=None,headers=None): + if headers: + headers=headers + else: + headers = {'Content-Type': 'application/octet-stream'} + try: + async with session.post(url,data=data,headers=headers) as response: + data = await response.json() + if 'link' in data: + link = data['link'] + return await async_fetch_url('{}&wait={}'.format(link, WAIT),session) + else: + return data + except 
aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + +#testing the Transaction dependencies +class TestPostTansactionDependencies(RestApiBaseTest): + + async def test_set_inc_txn_dep(self, setup): + """"1. Create first Transaction for set + 2. Create second Transaction for increment with first Transaction as dependecies + 3. Create Batch + 4. Call POST /batches " + Verify the transactions + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + + name = random.choice("abcdefghijklmnopqrstuv") + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 50, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + + LOGGER.info("Creating intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + txns.append(create_intkey_transaction_dep("inc", trxn_ids , name, 20, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + 
for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + for response in responses: + batch_id = response['data'][0]['id'] + + if response['data'][0]['status'] == 'COMMITTED': + LOGGER.info('Batch with id {} is successfully got committed'.format(batch_id)) + + elif response['data'][0]['status'] == 'INVALID': + LOGGER.info('Batch with id {} is not committed. Status is INVALID'.format(batch_id)) + + elif response['data'][0]['status'] == 'UNKNOWN': + LOGGER.info('Batch with id {} is not committed. Status is UNKNOWN'.format(batch_id)) + + LOGGER.info("Verifying the txn details listed under the dependencies") + trxn_ids = list(set(expected_trxn_ids)) + + for txn_id in trxn_ids: + txn_details = get_transaction_id(txn_id) + if (self.assert_check_txn_dependency(txn_details, trxn_ids)): + LOGGER.info("Successfully got the dependencies for transaction id "+ txn_id) + else: + LOGGER.info("The dependencies for transaction id is blank"+ txn_id) + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_rest_api_double_dep_txns(self, setup): + """1. Create first Transaction for set + 2. Create second Transaction for increment with first Transaction as dependecies + 3. Create third Transaction for decrement with first and second Transaction as dependecies + 4. Create Batch + 5. 
Call POST /batches + Verify the transactions + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + name = random.choice("abcdefghijklmnopqrstuv") + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 50, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + + LOGGER.info("Creating intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + txns.append(create_intkey_transaction_dep("inc", trxn_ids , name, 20, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating intkey transactions with inc operations with dependent transactions as first and second transaction") + trxn_ids = expected_trxn_ids + txns.append(create_intkey_transaction_dep("dec", trxn_ids , name, 50, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the 
handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + for response in responses: + batch_id = response['data'][0]['id'] + + if response['data'][0]['status'] == 'COMMITTED': + LOGGER.info('Batch with id {} is successfully got committed'.format(batch_id)) + + elif response['data'][0]['status'] == 'INVALID': + LOGGER.info('Batch with id {} is not committed. Status is INVALID'.format(batch_id)) + + elif response['data'][0]['status'] == 'UNKNOWN': + LOGGER.info('Batch with id {} is not committed. Status is UNKNOWN'.format(batch_id)) + + LOGGER.info("Verifying the txn details listed under the dependencies") + trxn_ids = list(set(expected_trxn_ids)) + + for txn_id in trxn_ids: + txn_details = get_transaction_id(txn_id) + if (self.assert_check_txn_dependency(txn_details, trxn_ids)): + LOGGER.info("Successfully got the dependencies for transaction id "+ txn_id) + else: + LOGGER.info("The dependencies for transaction id is blank"+ txn_id) + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_single_set_dep_txns(self, setup): + """"1. Create first Transaction for set + 2. Create second Transaction for increment with first Transaction as dependecies + 3. Create Batch + 4. 
Call POST /batches " + Verify the transactions + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + t = datetime.datetime.now() + date = t.strftime('%H%M%S') + words = random_word_list(100) + name=random.choice(words) + + #name=random.choice('123456734558909877') + + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 50, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + + LOGGER.info("Creating intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + words = random_word_list(100) + name=random.choice(words) + #name=random.choice('123456734558909877') + txns.append(create_intkey_transaction_dep("set",trxn_ids, name, 20, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await 
asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + for response in responses: + batch_id = response['data'][0]['id'] + + if response['data'][0]['status'] == 'COMMITTED': + assert response['data'][0]['status'] == 'COMMITTED' + + LOGGER.info('Batch with id {} is successfully got committed'.format(batch_id)) + + elif response['data'][0]['status'] == 'INVALID': + assert response['data'][0]['status'] == 'INVALID' + + LOGGER.info('Batch with id {} is not committed. Status is INVALID'.format(batch_id)) + + elif response['data'][0]['status'] == 'UNKNOWN': + assert response['data'][0]['status'] == 'UNKNOWN' + LOGGER.info('Batch with id {} is not committed. Status is UNKNOWN'.format(batch_id)) + + LOGGER.info("Verifying the txn details listed under the dependencies") + trxn_ids = list(set(expected_trxn_ids)) + + for txn_id in trxn_ids: + txn_details = get_transaction_id(txn_id) + if (self.assert_check_txn_dependency(txn_details, trxn_ids)): + LOGGER.info("Successfully got the dependencies for transaction id "+ txn_id) + else: + LOGGER.info("The dependencies for transaction id is blank"+ txn_id) + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_rest_api_single_set_dec_txns(self, setup): + """"1. Create first Transaction for set + 2. Create second Transaction for increment with first Transaction as dependecies + 3. Create Batch + 4. 
Call POST /batches " + Verify the transactions + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + t = datetime.datetime.now() + date = t.strftime('%H%M%S') + words = random_word_list(100) + name=random.choice(words) + + #name=random.choice('123456734558909877yuyiipp879798788') + + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 50, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + + LOGGER.info("Creating intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + #name=random.choice('123456734558909877') + txns.append(create_intkey_transaction_dep("dec",trxn_ids , name, 60, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except 
aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + for response in responses: + #batch_id = response['data'][0]['id'] + + if response['data'][0]['status'] == 'COMMITTED': + assert response['data'][0]['status'] == 'COMMITTED' + + LOGGER.info('Batch with id {} is successfully got committed'.format(batch_id)) + + elif response['data'][0]['status'] == 'INVALID': + assert response['data'][0]['status'] == 'INVALID' + + LOGGER.info('Batch with id {} is not committed. Status is INVALID'.format(batch_id)) + + elif response['data'][0]['status'] == 'UNKNOWN': + assert response['data'][0]['status'] == 'UNKNOWN' + LOGGER.info('Batch with id {} is not committed. Status is UNKNOWN'.format(batch_id)) + LOGGER.info("Verifying the txn details listed under the dependencies") + trxn_ids = list(set(expected_trxn_ids)) + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_rest_api_set_inc_inc_Txns_Dep(self, setup): + """1. Create first Transaction for set + 2. Create second Transaction for increment with first Transaction as dependecies + 3. Create third Transaction for increment with first and second Transaction as dependecies + 3. Create Batch + 4. 
Call POST /batches + Verify the transactions + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + name = random.choice("abcdefghijklmnopqrstuv") + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 50, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + + LOGGER.info("Creating intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + txns.append(create_intkey_transaction_dep("inc", trxn_ids , name, 20, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating intkey transactions with inc operations with dependent transactions as second transaction") + trxn_ids = list(set(expected_trxn_ids)) + txns.append(create_intkey_transaction_dep("inc", trxn_ids , name, 50, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the 
handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + for response in responses: + batch_id = response['data'][0]['id'] + + if response['data'][0]['status'] == 'COMMITTED': + LOGGER.info('Batch with id {} is successfully got committed'.format(batch_id)) + + elif response['data'][0]['status'] == 'INVALID': + LOGGER.info('Batch with id {} is not committed. Status is INVALID'.format(batch_id)) + + elif response['data'][0]['status'] == 'UNKNOWN': + LOGGER.info('Batch with id {} is not committed. Status is UNKNOWN'.format(batch_id)) + + LOGGER.info("Verifying the txn details listed under the dependencies") + trxn_ids = list(set(expected_trxn_ids)) + + for txn_id in trxn_ids: + txn_details = get_transaction_id(txn_id) + if (self.assert_check_txn_dependency(txn_details, trxn_ids)): + LOGGER.info("Successfully got the dependencies for transaction id "+ txn_id) + else: + LOGGER.info("The dependencies for transaction id is blank"+ txn_id) + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_rest_api_single_set_dec_same_txns(self, setup): + """"1. Create first Transaction for set + 2. Create second Transaction for increment with first Transaction as dependecies + 3. Create Batch + 4. 
Call POST /batches " + Verify the transactions + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + t = datetime.datetime.now() + date = t.strftime('%H%M%S') + words = random_word_list(100) + name=random.choice(words) + + #name=random.choice('123456734558909877yuyiipp879798788') + + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 50, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + + LOGGER.info("Creating intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + #name=random.choice('123456734558909877') + txns.append(create_intkey_transaction_dep("dec",trxn_ids , name, 50, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except 
aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + for response in responses: + #batch_id = response['data'][0]['id'] + + if response['data'][0]['status'] == 'COMMITTED': + assert response['data'][0]['status'] == 'COMMITTED' + + LOGGER.info('Batch with id {} is successfully got committed'.format(batch_id)) + + elif response['data'][0]['status'] == 'INVALID': + assert response['data'][0]['status'] == 'INVALID' + + LOGGER.info('Batch with id {} is not committed. Status is INVALID'.format(batch_id)) + + elif response['data'][0]['status'] == 'UNKNOWN': + assert response['data'][0]['status'] == 'UNKNOWN' + LOGGER.info('Batch with id {} is not committed. Status is UNKNOWN'.format(batch_id)) + LOGGER.info("Verifying the txn details listed under the dependencies") + trxn_ids = list(set(expected_trxn_ids)) + + ''' + for txn_id in trxn_ids: + #txn_details = get_transaction_id(txn_id) + if (self.assert_check_txn_dependency(txn_details, trxn_ids)): + LOGGER.info("Successfully got the dependencies for transaction id "+ txn_id) + else: + LOGGER.info("The dependencies for transaction id is blank"+ txn_id) + ''' + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_rest_api_single_set_dec_invalid_txns_id(self, setup): + """"1. Create first Transaction for set + 2. Create second Transaction for increment with first Transaction as dependecies + 3. Create Batch + 4. 
Call POST /batches " + Verify the transactions + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + t = datetime.datetime.now() + date = t.strftime('%H%M%S') + words = random_word_list(100) + name=random.choice(words) + + #name=random.choice('123456734558909877yuyiipp879798788') + + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 50, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + + LOGGER.info("Creating intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + #name=random.choice('123456734558909877') + txns.append(create_intkey_transaction_dep("dec",[u'bbbbbb'] , name, 50, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except 
aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + for response in responses: + #batch_id = response['data'][0]['id'] + + if response['data'][0]['status'] == 'COMMITTED': + assert response['data'][0]['status'] == 'COMMITTED' + + LOGGER.info('Batch with id {} is successfully got committed'.format(batch_id)) + + elif response['data'][0]['status'] == 'INVALID': + assert response['data'][0]['status'] == 'INVALID' + + LOGGER.info('Batch with id {} is not committed. Status is INVALID'.format(batch_id)) + + elif response['data'][0]['status'] == 'UNKNOWN': + assert response['data'][0]['status'] == 'UNKNOWN' + LOGGER.info('Batch with id {} is not committed. Status is UNKNOWN'.format(batch_id)) + LOGGER.info("Verifying the txn details listed under the dependencies") + trxn_ids = list(set(expected_trxn_ids)) + ''' + for txn_id in trxn_ids: + #txn_details = get_transaction_id(txn_id) + if (self.assert_check_txn_dependency(txn_details, trxn_ids)): + LOGGER.info("Successfully got the dependencies for transaction id "+ txn_id) + else: + LOGGER.info("The dependencies for transaction id is blank"+ txn_id) + ''' + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_single_set_dep_reverse(self, setup): + """"1. Create first Transaction for set + 2. Create second Transaction for increment with first Transaction as dependecies + 3. Create Batch + 4. 
Call POST /batches " + Verify the transactions + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + t = datetime.datetime.now() + date = t.strftime('%H%M%S') + words = random_word_list(100) + name=random.choice(words) + + #name=random.choice('123456734558909877') + + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 5, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + + LOGGER.info("Creating intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + words = random_word_list(100) + name=random.choice(words) + #name=random.choice('123456734558909877') + txns.append(create_intkey_transaction_dep("set",trxn_ids, name, 2, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns[::-1]] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + response = await asyncio.gather(task) + 
#print(response) + responses = await asyncio.gather(*tasks) + + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + for response in responses: + batch_id = response['data'][0]['id'] + + if response['data'][0]['status'] == 'COMMITTED': + assert response['data'][0]['status'] == 'COMMITTED' + + LOGGER.info('Batch with id {} is successfully got committed'.format(batch_id)) + + elif response['data'][0]['status'] == 'INVALID': + assert response['data'][0]['status'] == 'INVALID' + + LOGGER.info('Batch with id {} is not committed. Status is INVALID'.format(batch_id)) + + elif response['data'][0]['status'] == 'UNKNOWN': + assert response['data'][0]['status'] == 'UNKNOWN' + LOGGER.info('Batch with id {} is not committed. Status is UNKNOWN'.format(batch_id)) + + LOGGER.info("Verifying the txn details listed under the dependencies") + trxn_ids = list(set(expected_trxn_ids)) + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_valid_set_invalid_inc_txn_dep(self, setup): + """1. Create first Transaction for set + 2. Create second invalid Transaction for increment with first Transaction as dependecies + 3. Create Batch + 4. Call POST /batches + Verify the transactions. This shoud be an invalid transaction. 
The trird txn will be in PENDING state + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 50, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating invalid intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + txns.append(create_intkey_transaction_dep("inc", trxn_ids , name, -1, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + assert 'COMMITTED' == 
responses[0]['data'][0]['status'] + assert 'INVALID' == responses[1]['data'][0]['status'] + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_valid_set_invalid_inc_DiffKey_txn_dep(self, setup): + """1. Create first Transaction for set + 2. Create second invalid Transaction for increment with first Transaction as dependecies with different key + 3. Create Batch + 4. Call POST /batches + Verify the transactions. This shoud be an invalid transaction. The trird txn will be in PENDING state + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 50, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating invalid intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + + name = random.choice("abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz") + txns.append(create_intkey_transaction_dep("inc", trxn_ids , name, -1, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + 
batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + assert 'COMMITTED' == responses[0]['data'][0]['status'] + assert 'INVALID' == responses[1]['data'][0]['status'] + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_set_Max_txn_dep(self, setup): + """1. Create first Transaction for set with max value + 2. Create second Transaction for increment with first Transaction as dependency + 3. Create Batch + 4. Call POST /batches + Verify the transactions. The first one shoud be an invalid transaction. 
The second txn will be with error code 17 and Validator Timed Out + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 8888888888888888888888888, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating invalid intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + txns.append(create_intkey_transaction_dep("inc", trxn_ids , name, 2, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses 
status") + + assert 'INVALID' == responses[0]['data'][0]['status'] + assert 'Validator Timed Out' == responses[1]['error']['title'] + assert 17 == responses[1]['error']['code'] + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + + async def test_invalid_set_txn_dep(self, setup): + """1. Create first invalid Transaction for set with negative value + 2. Create second Transaction for increment with first invalid Transaction as dependency + 3. Create Batch + 4. Call POST /batches + Verify the transactions. The first one shoud be an invalid transaction. The second txn will be with error code 17 and Validator Timed Out + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, -1, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating invalid intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + txns.append(create_intkey_transaction_dep("inc", trxn_ids , name, 2, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + 
preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + assert 'INVALID' == responses[0]['data'][0]['status'] + assert 'Validator Timed Out' == responses[1]['error']['title'] + assert 17 == responses[1]['error']['code'] + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_invalid_Address_txn_dep(self, setup): + """1. Create first Transaction for set + 2. Create second dependent Transaction for increment and make the address invalid with first Transaction as dependency + 3. Create batch ,post batch and check the response status + 4. The second transaction will be an invalid transaction + 5. Create the third transaction for decrement with first and second as dependency + 6. Create a batch and post batch + Verify the transaction responses. The first one will be COMMITTED and second one shoud be an invalid transaction. 
The third txn will be with error code 17 and Validator Timed Out + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 50, signer),] + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating invalid intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + txns.append(create_invalid_Address_intkey_dep_txn("inc", trxn_ids , name, 40, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + + post_batch_list = post_batch_txn(txns, expected_batch_ids, signer) + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + assert 'COMMITTED' == responses[0]['data'][0]['status'] + assert 'INVALID' == responses[1]['data'][0]['status'] + + LOGGER.info("Creating valid intkey transactions with dec operations with dependent transactions as first and second transaction") + trxn_ids = list(set(expected_trxn_ids)) + txns = [] 
+ responses = [] + expected_batch_ids = [] + post_batch_list = [] + tasks = [] + txns.append(create_intkey_transaction_dep("dec", trxn_ids , name, 20, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + post_batch_list = post_batch_txn(txns, expected_batch_ids, signer) + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + assert 'Validator Timed Out' == responses[0]['error']['title'] + assert 17 == responses[0]['error']['code'] + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_Multiple_Indep_Txn_txn_dep(self, setup): + """1.Create 5 independent Transactions for set + 2.Create second dependent transaction for set with 5 independent transactions as dependency + 3.Create third dependent Transaction for increment with second dependent Transaction as dependency + 4.Create a batch for all the dependent transaction and post batch + 5.Check for the status + 6.Now create the batch for independent transactions and post batch + 7. 
Check for the response status of both independent and dependent transactions + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_trxn_ids_indep = [] + expected_batch_ids = [] + expected_batch_ids_indep = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks_indep=[] + tasks_dep=[] + batch_ids_dep = [] + batch_ids_indep = [] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations") + + txns_Indep = [ + create_intkey_transaction("set", [] , 50, signer), + create_intkey_transaction("set", [] , 50, signer), + create_intkey_transaction("set", [] , 50, signer), + create_intkey_transaction("set", [] , 50, signer), + create_intkey_transaction("set", [] , 50, signer),] + + for txn in txns_Indep: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids_indep.append(trxn_id) + + txns = [ + create_intkey_transaction_dep("set", expected_trxn_ids_indep , name, 50, signer),] + + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating invalid intkey transactions with inc operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + txns.append(create_intkey_transaction_dep("inc", trxn_ids , name, 40, signer)) + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + post_batch_list = post_batch_txn(txns, expected_batch_ids, signer) + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task 
= asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks_dep.append(task) + responses_dep = await asyncio.gather(*tasks_dep) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status of dependent txns before committing the independent txns") + + validate_Response_Status_txn(responses_dep) + + post_batch_list = [] + tasks = [] + post_batch_list = post_batch_txn(txns_Indep, expected_batch_ids_indep, signer) + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks_indep.append(task) + responses_indep = await asyncio.gather(*tasks_indep) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses of the independent txns") + + validate_Response_Status_txn(responses_indep) + + time.sleep(300) + LOGGER.info("Waiting time to get the dependent txns to be committed") + LOGGER.info("Verifying the responses status of dependent txns after committing the independent txns") + validate_Response_Status_txn(responses_dep) + + async def test_inc_first_txn_dep(self, setup): + """ + 1.Create a dependent transactions for increment + 2.Create a dependent transaction for set for the same key with first transaction as dependent + 3.Create batch and post the first transaction(increment) first and check the response status + 4.Post the second transaction(set) and check the response status + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks_inc=[] + tasks_set = [] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with inc 
operations") + + txns_inc = [ + create_intkey_transaction_dep("inc", [] , name, 10, signer),] + for txn in txns_inc: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + + LOGGER.info("Creating batches for transactions 1trn/batch") + + post_batch_list = post_batch_txn(txns_inc, expected_trxn_ids, signer) + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks_inc.append(task) + responses_inc = await asyncio.gather(*tasks_inc) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status for first transaction") + + validate_Response_Status_txn(responses_inc) + + expected_trxn_ids = [] + expected_batch_ids = [] + + LOGGER.info("Creating invalid intkey transactions with set operations with dependent transactions as first transaction") + trxn_ids = expected_trxn_ids + txns_set = [ + create_intkey_transaction_dep("set", trxn_ids , name, 20, signer),] + for txn in txns_set: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating batches for transactions 1trn/batch") + post_batch_list = post_batch_txn(txns_set, expected_trxn_ids, signer) + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks_set.append(task) + responses_set = await asyncio.gather(*tasks_set) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + 
LOGGER.info("Verifying the responses status for 2nd transaction") + validate_Response_Status_txn(responses_set) + + time.sleep(50) + + LOGGER.info("Verifying the responses status for first transaction again") + validate_Response_Status_txn(responses_inc) + + async def test_Multiple_dep_Txn_Consecutive_dep(self, setup): + """1.Create 5 dependent transactions for set and second one is depend on first, third is depend on second, fourth is depend on third and fifth is depend on fourth + 2. Create a batch and post the fourth and fifth transactions. + 3. Check the response status. It should not be COMMITTED. + 4. Create batch and post first, second and third transactions and check the response status. It should be COMMITTED. + 5. Now check the response for the fourth and fifth transaction. It should be COMMITTED. + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids_first = [] + expected_batch_ids_second = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 50, signer),] + + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating intkey transactions with set operations with dependent transactions as first transaction") + value = 20 + for i in range(4): + trxn_ids = expected_trxn_ids + name=random.choice(words) + + txns.append(create_intkey_transaction_dep("set", [trxn_id] , name, value, signer)) + for txn in [txns[-1]]: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + value += 10 + + 
responses_last = [] + icounter = 3 + for txn in txns[3:5]: + + post_batch_list = post_batch_txn([txn], expected_batch_ids_first, signer) + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + validate_Response_Status_txn(responses) + responses_last.append(responses) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + responses_first = [] + post_batch_list = [] + expected_batch_ids = [] + icounter = 0 + for txn in txns[0:3]: + + post_batch_list = post_batch_txn([txn], expected_batch_ids_second, signer) + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + validate_Response_Status_txn(responses) + responses_first.append(responses) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + for responses in responses_first: + validate_Response_Status_txn(responses) + for responses in responses_last: + validate_Response_Status_txn(responses) + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_Multiple_invalid_dep_Txn_Consecutive_dep(self, setup): + """1.Create 5 dependent transactions for set and second one is depend on first, third is depend on second, + fourth is depend on third and fifth is depend on fourth. Fourth one will be an invalid txn + 2. Create a batch and post the fourth and fifth transactions. 
+ 3. Check the response status. It should not be COMMITTED. + 4. Create batch and post first, second and third transactions and check the response status. It should be COMMITTED. + 5. Now check the response for the fourth and fifth transaction. It should be INVALID. + """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_batch_ids_first = [] + expected_batch_ids_second = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks=[] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_intkey_transaction_dep("set", [] , name, 50, signer),] + + for txn in txns: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + LOGGER.info("Creating intkey transactions with set operations with dependent transactions as first transaction") + value = 30 + invalidValue = -20 + for i in range(4): + trxn_ids = expected_trxn_ids + name=random.choice(words) + if i == 2: + txns.append(create_intkey_transaction_dep("set", [trxn_id] , name, invalidValue, signer)) + else: + txns.append(create_intkey_transaction_dep("set", [trxn_id] , name, value, signer)) + + for txn in [txns[-1]]: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids.append(trxn_id) + + responses_last = [] + icounter = 3 + for txn in txns[3:5]: + + post_batch_list = post_batch_txn([txn], expected_batch_ids_first, signer) + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + 
responses_last.append(responses) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + responses_first = [] + post_batch_list = [] + icounter = 0 + for txn in txns[0:3]: + post_batch_list = post_batch_txn([txn], expected_batch_ids_second, signer) + + LOGGER.info("Submitting batches to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks.append(task) + responses = await asyncio.gather(*tasks) + validate_Response_Status_txn(responses) + responses_first.append(responses) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses status") + + for responses in responses_first: + validate_Response_Status_txn(responses) + for responses in responses_last: + validate_Response_Status_txn(responses) + + node_list = _get_node_list() + chains = _get_node_chains(node_list) + assert check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) == True + + async def test_separate_batch_txn_dep(self, setup): + """ + 1.Create first batch having 3 transactions. + 2.Make third transaction invalid from first batch. + 3.Create second batch having 2 transactions + 4.Make transaction from second batch dependent on transaction from first batch. + 5.post batch and check for the status. + 6.Create third batch having 2 transaction. + 7.Make transaction from third batch dependent on transaction from second batch. + 8.post batch and check for the status. 
+ """ + LOGGER.info('Starting test for batch post') + + signer = get_signer() + expected_trxn_ids = [] + expected_trxn_ids_new = [] + expected_batch_ids = [] + expected_batch_ids_new = [] + address = _get_client_address() + url='{}/batches'.format(address) + tasks_new=[] + words = random_word_list(200) + name=random.choice(words) + + LOGGER.info("Creating intkey transactions with set operations for first batch having one invalid transaction") + + txns_batch_first = [ + create_intkey_transaction_dep("set", [] ,name, 10, signer), + create_intkey_transaction_dep("set", [] ,name, 20, signer), + create_intkey_transaction_dep("set", [] ,name,-40, signer), + ] + + for txn in txns_batch_first: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids_new.append(trxn_id) + + LOGGER.info("Creating first batch to the handlers") + post_batch_list = post_batch_txn(txns_batch_first, expected_batch_ids_new, signer) + LOGGER.info("Submitting first batch to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks_new.append(task) + responses_batch_first = await asyncio.gather(*tasks_new) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses of the txns in first batch ") + validate_Response_Status_txn(responses_batch_first) + + LOGGER.info("Creating intkey transactions with set operations for second batch") + + txns_batch_second = [ + create_intkey_transaction_dep("set",expected_trxn_ids_new[2],name, 50, signer), + ] + + for txn in txns_batch_second: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids_new.append(trxn_id) + + trxn_ids = 
expected_trxn_ids + txns_batch_second.append(create_intkey_transaction_dep("inc", trxn_ids , name, 60, signer)) + LOGGER.info("Creating second batch to the handlers") + post_batch_list = post_batch_txn(txns_batch_second, expected_batch_ids_new, signer) + LOGGER.info("Submitting second batch to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks_new.append(task) + responses_batch_second = await asyncio.gather(*tasks_new) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses of the txns in second batch") + validate_Response_Status_txn(responses_batch_second) + + LOGGER.info("Creating intkey transactions with set operations for third batch") + + txns_batch_third = [ + create_intkey_transaction_dep("set",expected_trxn_ids_new[1] ,name, 50, signer), + ] + + for txn in txns_batch_third: + data = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = data['header_signature'] + expected_trxn_ids_new.append(trxn_id) + + trxn_ids = expected_trxn_ids + txns_batch_third.append(create_intkey_transaction_dep("inc", trxn_ids , name, 60, signer)) + LOGGER.info("Creating third batch to the handlers") + post_batch_list = post_batch_txn(txns_batch_third, expected_batch_ids_new, signer) + LOGGER.info("Submitting third batch to the handlers") + + try: + async with aiohttp.ClientSession() as session: + for batch in post_batch_list: + task = asyncio.ensure_future(async_post_batch(url,session,data=batch)) + tasks_new.append(task) + responses_batch_third = await asyncio.gather(*tasks_new) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + LOGGER.info("Verifying the responses of the txns in third batch") + 
validate_Response_Status_txn(responses_batch_third) + + diff --git a/rest_api/tests/api_test/exceptions.py b/rest_api/tests/api_test/exceptions.py new file mode 100644 index 0000000000..af131f05c1 --- /dev/null +++ b/rest_api/tests/api_test/exceptions.py @@ -0,0 +1,17 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ + +class RestApiError(Exception): + pass \ No newline at end of file diff --git a/rest_api/tests/api_test/fixtures.py b/rest_api/tests/api_test/fixtures.py new file mode 100644 index 0000000000..e2c60eb5e9 --- /dev/null +++ b/rest_api/tests/api_test/fixtures.py @@ -0,0 +1,287 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ + +import pytest +import logging +import urllib +import json +import os +import requests + +from sawtooth_signing import create_context +from sawtooth_signing import CryptoFactory +from sawtooth_signing import ParseError +from sawtooth_signing.secp256k1 import Secp256k1PrivateKey + +from sawtooth_rest_api.protobuf.validator_pb2 import Message +from sawtooth_rest_api.protobuf import client_batch_submit_pb2 +from sawtooth_rest_api.protobuf import client_batch_pb2 +from sawtooth_rest_api.protobuf import client_list_control_pb2 + +from sawtooth_rest_api.protobuf.batch_pb2 import Batch +from sawtooth_rest_api.protobuf.batch_pb2 import BatchList +from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction + +from google.protobuf.json_format import MessageToDict + + +from utils import get_batches, get_transactions, get_state_address, post_batch, get_blocks, \ + get_state_list , _delete_genesis , _start_validator, \ + _stop_validator , _create_genesis , _get_client_address, \ + _stop_settings_tp, _start_settings_tp + +from payload import get_signer, create_intkey_transaction , create_batch,\ + create_invalid_intkey_transaction, create_intkey_same_transaction, random_word_list, IntKeyPayload, \ + make_intkey_address, Transactions + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + + +@pytest.fixture(scope="function") +def break_genesis(request): + """Setup Function for deleting the genesis data + and restarting the validator with no genesis + + Waits for services to start again before + sending the request again + """ + _stop_validator() + LOGGER.info("Deleting the genesis data") + _delete_genesis() + _start_validator() + + +@pytest.fixture(scope="function") +def setup_settings_tp(request): + _stop_settings_tp() + print("settings 
tp is connected") + + def teardown(): + print("Connecting settings tp") + _start_settings_tp() + + request.addfinalizer(teardown) + + +@pytest.fixture(scope="function") +def invalid_batch(): + """Setup method for creating invalid batches + """ + signer = get_signer() + data = {} + expected_trxns = {} + expected_batches = [] + address = _get_client_address() + + LOGGER.info("Creating intkey transactions with set operations") + + txns = [ + create_invalid_intkey_transaction("set", [] , 50 , signer), + ] + + for txn in txns: + dict = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + expected_trxns['trxn_id'] = [dict['header_signature']] + + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + dict = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = dict['header_signature'] + expected_batches.append(batch_id) + + data['expected_txns'] = expected_trxns['trxn_id'][::-1] + data['expected_batches'] = expected_batches[::-1] + data['address'] = address + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + for batch in post_batch_list: + try: + response = post_batch(batch) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + return data + + +@pytest.fixture(scope="function") +def setup_empty_trxs_batch(): + signer = get_signer() + + header = BatchHeader( + signer_public_key=signer.get_public_key().as_hex(), + transaction_ids=[]) + + header_bytes = header.SerializeToString() + + signature = signer.sign(header_bytes) + + batch = Batch( + header=header_bytes, + transactions=[], + header_signature=signature) + + return batch + 
+@pytest.fixture(scope="function") +def setup_valinv_txns(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="addr") + data = Txns.get_batch_valinv_txns() + return data + + +@pytest.fixture(scope="function") +def setup_invval_txns(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="addr") + data = Txns.get_batch_invval_txns() + return data + + +@pytest.fixture(scope="function") +def setup_invalid_txns(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="addr") + data = Txns.get_batch_invalid_txns() + return data + + +@pytest.fixture(scope="function") +def setup_invalid_invaddr(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="invaddr") + data = Txns.get_batch_invalid_txns() + return data + + +@pytest.fixture(scope="function") +def setup_same_txns(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="addr") + data = Txns.get_batch_same_txns() + return data + +@pytest.fixture(scope="function") +def setup_invalid_txns_min(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="min") + data = Txns.get_batch_invalid_txns() + return data + +@pytest.fixture(scope="function") +def setup_invalid_txns_max(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="max") + data = Txns.get_batch_invalid_txns() + return data + +@pytest.fixture(scope="function") +def setup_valid_txns(request): + """Setup method for posting batches and returning the + response + """ + Txns=Transactions(invalidtype="addr") + data = Txns.get_batch_valid_txns() + return data + +@pytest.fixture(scope="function") +def setup_invalid_txns_fn(request): + """Setup method for 
posting batches and returning the + response + """ + Txns=Transactions(invalidtype="fn") + data = Txns.get_batch_invalid_txns_fam_name() + return data + +@pytest.fixture(scope="function") +def post_batch_txn(txns, expected_batch_ids, signer): + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + for batch in batches: + data = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = data['header_signature'] + expected_batch_ids.append(batch_id) + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + + LOGGER.info("Submitting batches to the handlers") + + return post_batch_list + +@pytest.fixture(scope="function") +def validate_Response_Status_txn(responses): + for response in responses: + + req = requests.get(response['link']) + response = req.json() + if 'error' in response: + assert 'Validator Timed Out' == response['error']['title'] + assert response['error']['code'] == 17 + LOGGER.info('Batch with id {} is not committed. Status is Validator Timed Out Error'.format(batch_id)) + else: + batch_id = response['data'][0]['id'] + if response['data'][0]['status'] == 'COMMITTED': + assert response['data'][0]['status'] == 'COMMITTED' + + LOGGER.info('Batch with id {} is successfully got committed'.format(batch_id)) + + elif response['data'][0]['status'] == 'INVALID': + assert response['data'][0]['status'] == 'INVALID' + + LOGGER.info('Batch with id {} is not committed. Status is INVALID'.format(batch_id)) + + elif response['data'][0]['status'] == 'UNKNOWN': + assert response['data'][0]['status'] == 'UNKNOWN' + LOGGER.info('Batch with id {} is not committed. 
Status is UNKNOWN'.format(batch_id)) \ No newline at end of file diff --git a/rest_api/tests/api_test/get/test_rest_api_get_batch.py b/rest_api/tests/api_test/get/test_rest_api_get_batch.py new file mode 100644 index 0000000000..4bee2de7f6 --- /dev/null +++ b/rest_api/tests/api_test/get/test_rest_api_get_batch.py @@ -0,0 +1,684 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error +import aiohttp +import asyncio + + +from fixtures import break_genesis, invalid_batch + +from utils import get_batches, get_batch_id, post_batch,\ + get_batch_statuses, post_batch_statuses,\ + _create_expected_link, _get_batch_list + +from base import RestApiBaseTest + +pytestmark = [pytest.mark.get , pytest.mark.batch, pytest.mark.second] + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + + +START = 1 +LIMIT = 1 +COUNT = 0 +BAD_HEAD = 'f' +BAD_ID = 'f' +INVALID_START = -1 +INVALID_LIMIT = 0 +INVALID_RESOURCE_ID = 60 +INVALID_PAGING_QUERY = 54 +INVALID_COUNT_QUERY = 53 +VALIDATOR_NOT_READY = 15 +BATCH_NOT_FOUND = 71 +STATUS_ID_QUERY_INVALID = 66 +STATUS_BODY_INVALID = 43 +STATUS_WRONG_CONTENT_TYPE = 46 +WAIT = 10 + +async def fetch(url, session,params=None): + async with session.get(url) as response: + return await response.json() + + +class 
TestBatchList(RestApiBaseTest): + """This class tests the batch list with different parameters + """ + async def test_api_get_batch_list(self, setup): + """Tests the batch list by submitting intkey batches + """ + LOGGER.info("Starting tests for batch list") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_batch_length'] + payload = setup['payload'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + url='{}/batches'.format(address) + tasks=[] + + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + paging_link = '{}/batches?head={}&start={}'.format(address,\ + expected_head, start) + + try: + async with aiohttp.ClientSession() as session: + task = asyncio.ensure_future(fetch(url, session)) + tasks.append(task) + response = await asyncio.gather(*tasks) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + + batches = _get_batch_list(response[0]) + + self.assert_valid_data(response[0]) + self.assert_valid_head(response[0], expected_head) + self.assert_valid_data_length(batches, expected_length) + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + self.assert_valid_link(response[0], expected_link) + self.assert_valid_paging(response[0], expected_link) + + async def test_api_get_batch_list_head(self, setup): + """Tests that GET /batches is reachable with head parameter + """ + LOGGER.info("Starting test for batch with head parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + expected_head = setup['expected_head'] + start = setup['start'] + limit = setup['limit'] + address = 
setup['address'] + + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + params={'head': expected_head} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + batches = response['data'][:-1] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) + + async def test_api_get_batch_list_bad_head(self, setup): + """Tests that GET /batches is unreachable with bad head parameter + """ + LOGGER.info("Starting test for batch with bad head parameter") + params={'head': BAD_HEAD} + address = setup['address'] + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + + async def test_api_get_batch_list_id(self, setup): + """Tests that GET /batches is reachable with id as parameter + """ + LOGGER.info("Starting test for batch with id parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + batch_ids = setup['batch_ids'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + + expected_id = batch_ids[0] + expected_length = len([expected_id]) + + expected_link = '{}/batches?head={}&start={}&limit={}&id={}'.format(address,\ + expected_head, start, limit, 
expected_id) + + params={'id': expected_id} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + + batches = response['data'][:-1] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + + async def test_api_get_batch_list_bad_id(self, setup): + """Tests that GET /batches is unreachable with bad id parameter + """ + LOGGER.info("Starting test for batch with bad id parameter") + address = setup['address'] + params={'head': BAD_ID} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + + async def test_api_get_batch_list_head_and_id(self, setup): + """Tests GET /batches is reachable with head and id as parameters + """ + LOGGER.info("Starting test for batch with head and id parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + batch_ids = setup['batch_ids'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + + expected_id = batch_ids[0] + expected_length = len([expected_id]) + + expected_link = '{}/batches?head={}&start={}&limit={}&id={}'.format(address,\ + expected_head, start, limit, expected_id) + + params={'head':expected_head,'id':expected_id} + + try: + async with aiohttp.ClientSession() as session: + async with 
session.get(url='{}/batches'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + + batches = response['data'][:-1] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + + async def test_api_get_paginated_batch_list(self, setup): + """Tests GET /batches is reachable using paging parameters + """ + LOGGER.info("Starting test for batch with paging parameters") + batch_ids = setup['batch_ids'] + address = setup['address'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + + params={'limit':1, 'start':1} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) + + async def test_api_get_batch_list_limit(self, setup): + """Tests GET /batches is reachable using paging parameters + """ + LOGGER.info("Starting test for batch with paging parameters") + signer_key = setup['signer_key'] + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + expected_id = batch_ids[0] + start = setup['start'] + address = setup['address'] + params={'limit':1} + + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ + expected_head, start, 1) + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches'.format(address), params=params, + raise_for_status=True) as data: + response = await 
data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + batches = response['data'][:-1] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) + + + async def test_api_get_batch_list_invalid_start(self, setup): + """Tests that GET /batches is unreachable with invalid start parameter + """ + LOGGER.info("Starting test for batch with invalid start parameter") + batch_ids = setup['batch_ids'] + address = setup['address'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + params={'start':-1} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) + + + async def test_api_get_batch_list_invalid_limit(self, setup): + """Tests that GET /batches is unreachable with bad limit parameter + """ + LOGGER.info("Starting test for batch with bad limit parameter") + batch_ids = setup['batch_ids'] + address = setup['address'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + params={'limit':0} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_COUNT_QUERY) + + async def test_api_get_batch_list_reversed(self, setup): + """verifies that GET /batches is unreachable with bad head parameter + """ + LOGGER.info("Starting test for batch list as reversed") + signer_key = 
setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_batch_length'] + payload = setup['payload'] + start = setup['batch_ids'][::-1][0] + print(setup['batch_ids']) + print(start) + limit = setup['limit'] + address = setup['address'] + + expected_link = '{}/batches?head={}&start={}&limit={}&reverse'.format(address,\ + expected_head, start, limit) + + params = 'reverse' + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + + batches = response['data'][::-1][:-1] + + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) + + + async def test_api_get_batch_key_params(self, setup): + """Tests/ validate the block key parameters with data, head, link and paging + """ + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches'.format(address), raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + assert 'link' in response + assert 'data' in response + assert 'paging' in response + assert 'head' in response + + async def test_api_get_batch_param_link_val(self, setup): + """Tests/ validate the batch parameters with batches, head, start and limit + """ + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches'.format(address), raise_for_status=True) as data: 
+ response = await data.json() + + for link in response: + if(link == 'link'): + assert 'head' in response['link'] + assert 'start' in response['link'] + assert 'limit' in response['link'] + assert 'batches' in response['link'] + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + assert response.code == 400 + LOGGER.info("Link is not proper for batch and parameters are missing") + + async def test_rest_api_check_batches_count(self, setup): + """Tests batches count from batch list + """ + address = setup['address'] + count =0 + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches'.format(address), raise_for_status=True) as data: + response = await data.json() + + for batch in enumerate(response['data']): + count = count+1 + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + +class TestBatchGet(RestApiBaseTest): + async def test_api_get_batch_id(self, setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_batch_length'] + batch_ids = setup['batch_ids'] + expected_id = batch_ids[0] + payload = setup['payload'] + address = setup['address'] + + expected_link = '{}/batches/{}'.format(address, expected_batches[0]) + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches/{}'.format(address,expected_id), + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + batches = response['data'] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + self.assert_valid_link(response, expected_link) + + async def test_api_get_bad_batch_id(self, setup): + """verifies that GET 
/batches/{bad_batch_id} + is unreachable with bad head parameter + """ + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches/{}'.format(address,BAD_ID)) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + +class TestBatchStatusesList(RestApiBaseTest): + """This class tests the batch status list with different parameters + """ + async def test_api_post_batch_status_15ids(self, setup): + """verifies that POST /batches_statuses with more than 15 ids + """ + LOGGER.info("Starting test for batch with bad head parameter") + batch_ids = setup['batch_ids'] + address = setup['address'] + data_str=json.dumps(batch_ids).encode() + headers = {'content-type': 'application/json'} + + try: + async with aiohttp.ClientSession() as session: + async with session.post(url='{}/batch_statuses'.format(address), + data=data_str,headers=headers) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + + async def test_api_post_batch_status_10ids(self,setup): + """verifies that POST /batches_status with less than 15 ids + """ + LOGGER.info("Starting test for post batch statuses with less than 15 ids") + batch_ids = setup['batch_ids'] + address = setup['address'] + data_str=json.dumps(batch_ids).encode() + headers = {'content-type': 'application/json'} + + try: + async with aiohttp.ClientSession() as session: + async with session.post(url='{}/batch_statuses'.format(address), + data=data_str,headers=headers) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + + async def test_api_get_batch_statuses(self,setup): + signer_key = setup['signer_key'] + address = setup['address'] + expected_head = setup['expected_head'] + expected_batches = 
setup['expected_batches'] + status = "COMMITTED" + expected_link = '{}/batch_statuses?id={}'.format(address, expected_batches[0]) + params = {'id': expected_batches[0]} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batch_statuses'.format(address), + params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + async def test_api_get_batch_statuses_many_ids(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + status = "COMMITTED" + batches = ",".join(expected_batches) + params = {'id': batches} + + expected_link = '{}/batch_statuses?id={}'.format(address, batches) + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batch_statuses'.format(address), + params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + async def test_api_get_batch_statuses_bad_id(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + params = {'id': BAD_ID} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batch_statuses'.format(address), + params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + + async def test_api_get_batch_statuses_invalid_query(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = 
setup['expected_batches'] + address = setup['address'] + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batch_statuses'.format(address)) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, STATUS_ID_QUERY_INVALID) + + async def test_api_get_batch_statuses_wait(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + status = "COMMITTED" + + expected_link = '{}/batch_statuses?id={}&wait={}'.format(address, expected_batches[0], WAIT) + + params = {'id': expected_batches[0], 'wait':WAIT} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batch_statuses'.format(address), + params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + + async def test_api_get_batch_statuses_invalid(self, invalid_batch): + expected_batches = invalid_batch['expected_batches'] + address = invalid_batch['address'] + status = "INVALID" + expected_link = '{}/batch_statuses?id={}'.format(address, expected_batches[0]) + params = {'id': expected_batches[0]} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batch_statuses'.format(address), + params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + + async def test_api_get_batch_statuses_unknown(self, setup): + address = setup['address'] + expected_batches = setup['expected_batches'] + batch = expected_batches[0] + unknown_batch = batch[:1] + "b" + batch[1+1:] + 
status = "UNKNOWN" + params = {'id': unknown_batch} + + expected_link = '{}/batch_statuses?id={}'.format(address, unknown_batch) + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batch_statuses'.format(address), + params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + async def test_api_get_batch_statuses_default_wait(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + status = "COMMITTED" + expected_link = '{}/batch_statuses?id={}&wait=300'.format(address, expected_batches[0]) + params = {'id': expected_batches[0], 'wait':300} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batch_statuses'.format(address), + params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_block.py b/rest_api/tests/api_test/get/test_rest_api_get_block.py new file mode 100644 index 0000000000..f48f2b5a6f --- /dev/null +++ b/rest_api/tests/api_test/get/test_rest_api_get_block.py @@ -0,0 +1,533 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error +import aiohttp + +from utils import get_blocks, get_block_id, get_batches, get_transactions + +from base import RestApiBaseTest + + +pytestmark = [pytest.mark.get , pytest.mark.block, pytest.mark.fourth] + + + +START = 1 +LIMIT = 1 +COUNT = 0 +BAD_HEAD = 'f' +BAD_ID = 'f' +INVALID_START = -1 +INVALID_LIMIT = 0 +INVALID_RESOURCE_ID = 60 +INVALID_PAGING_QUERY = 54 +INVALID_COUNT_QUERY = 53 +VALIDATOR_NOT_READY = 15 +BLOCK_NOT_FOUND = 70 +HEAD_LENGTH = 128 +MAX_BATCH_IN_BLOCK = 100 +FAMILY_NAME = 'xo' +TIMEOUT=5 + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + + +class TestBlockList(RestApiBaseTest): + """This class tests the blocks list with different parameters + """ + async def test_api_get_block_list(self, setup): + """Tests the block list by submitting intkey batches + """ + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + payload = setup['payload'] + + expected_link = '{}/blocks?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + paging_link = '{}/blocks?head={}&start={}'.format(address,\ + expected_head, start) + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + blocks = response['data'][:-1] + + self.assert_check_block_seq(blocks,expected_batches, + expected_txns,payload,signer_key) + 
self.assert_valid_head(response, expected_head) + + async def test_api_get_block_list_head(self, setup): + """Tests that GET /blocks is reachable with head parameter + """ + LOGGER.info("Starting test for blocks with head parameter") + address = setup['address'] + expected_head = setup['expected_head'] + params={'head': expected_head} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + blocks = response['data'][:-1] + self.assert_valid_head(response, expected_head) + + async def test_api_get_block_list_bad_head(self, setup): + """Tests that GET /blocks is unreachable with bad head parameter + """ + LOGGER.info("Starting test for blocks with bad head parameter") + address = setup['address'] + params={'head': BAD_HEAD} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + + async def test_api_get_block_list_id(self, setup): + """Tests that GET /blocks is reachable with id as parameter + """ + LOGGER.info("Starting test for blocks with id parameter") + address = setup['address'] + signer_key = setup['signer_key'] + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + address = setup['address'] + payload = setup['payload'] + + expected_link = '{}/blocks?head={}&start&limit=0&id={}'.format(address,\ + expected_head, expected_id) + + params={'id': expected_id} + + try: + async with aiohttp.ClientSession() as session: + async 
with session.get(url='{}/blocks'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + blocks = response['data'][:-1] + + self.assert_check_block_seq(blocks,expected_batches, + expected_txns,payload,signer_key) + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + + async def test_api_get_block_list_bad_id(self, setup): + """Tests that GET /blocks is unreachable with bad id parameter + """ + LOGGER.info("Starting test for blocks with bad id parameter") + address = setup['address'] + params={'head': BAD_ID} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + + + + async def test_api_get_paginated_block_list(self, setup): + """Tests GET /blocks is reachable using paging parameters + """ + LOGGER.info("Starting test for blocks with paging parameters") + address = setup['address'] + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + + params={'limit':1, 'start':1} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) + + async def test_api_get_block_list_limit(self, setup): + """Tests GET /batches is reachable using paging parameters + """ + LOGGER.info("Starting test for batch with paging parameters") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = 
setup['expected_batches'] + expected_txns = setup['expected_txns'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + payload = setup['payload'] + params={'limit':1} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + blocks = response['data'][:-1] + + self.assert_check_block_seq(blocks,expected_batches, + expected_txns,payload,signer_key) + self.assert_valid_head(response, expected_head) + + + async def test_api_get_block_list_invalid_start(self, setup): + """Tests that GET /blocks is unreachable with invalid start parameter + """ + LOGGER.info("Starting test for block with invalid start parameter") + address = setup['address'] + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + params={'start':-1} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) + + + async def test_api_get_block_list_invalid_limit(self, setup): + """Tests that GET /blocks is unreachable with bad limit parameter + """ + LOGGER.info("Starting test for block with bad limit parameter") + address = setup['address'] + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + params={'limit':0} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/batches'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + 
self.assert_valid_error(response, INVALID_COUNT_QUERY) + + + async def test_api_get_block_list_reversed(self, setup): + """verifies that GET /blocks is unreachable with bad head parameter + """ + LOGGER.info("Starting test for blocks with reversed list") + address = setup['address'] + block_ids = setup['block_ids'] + expected_head = setup['expected_head'] + expected_id = block_ids[0] + + params = 'reverse' + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + assert response['head'] == expected_head , "request is not correct" + assert response['paging']['start'] == None , "request is not correct" + assert response['paging']['limit'] == None , "request is not correct" + assert bool(response['data']) == True + + async def test_api_get_block_link_val(self, setup): + """Tests/ validate the block parameters with blocks, head, start and limit + """ + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for link in response: + if(link == 'link'): + assert 'head' in response['link'] + assert 'start' in response['link'] + assert 'limit' in response['link'] + assert 'blocks' in response['link'] + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + LOGGER.info("Link is not proper for state and parameters are missing") + + async def test_api_get_block_key_params(self, setup): + """Tests/ validate the block key parameters with data, head, link and paging + """ + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + 
response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + assert 'link' in response + assert 'data' in response + assert 'paging' in response + assert 'head' in response + + async def test_api_get_each_block_batch_id_length(self, setup): + """Tests the each batch id length should be 128 hex character long + """ + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for batch in response['data']: + expected_head = batch['header']['batch_ids'][0] + head_len = len(expected_head) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Batch id length is not 128 hex character long") + assert head_len == HEAD_LENGTH + + async def test_api_get_first_block_id_length(self, setup): + """Tests the first block id length should be 128 hex character long + """ + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for block_list in get_blocks(): + batch_list = get_batches() + for block in batch_list: + expected_head = batch_list['head'] + head_len = len(expected_head) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Block id length is not 128 hex character long") + assert head_len == HEAD_LENGTH + + async def test_rest_api_check_post_max_batches(self, setup): + """Tests that allow max post batches in block + Handled max 100 batches post in block and handle for extra batch + """ + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + + block_list = response['data'] + for batchcount, _ in 
enumerate(block_list, start=1): + if batchcount == MAX_BATCH_IN_BLOCK: + print("Max 100 Batches are present in Block") + + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + async def test_rest_api_check_head_signature(self, setup): + """Tests that head signature of each batch of the block + should be not none + """ + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + + block_list = response['data'] + head_signature = [block['batches'][0]['header_signature'] for block in block_list] + for i, _ in enumerate(block_list): + head_sig = json.dumps(head_signature[i]).encode('utf8') + assert head_signature[i] is not None, "Head signature is available for all batches in block" + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + async def test_rest_api_check_family_version(self, setup): + """Test batch transaction family version should be present + for each transaction header + """ + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + + block_list = response['data'] + family_version = [block['batches'][0]['transactions'][0]['header']['family_version'] for block in block_list] + for i, _ in enumerate(block_list): + assert family_version[i] is not None, "family version present for all batches in block" + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + async def test_rest_api_check_input_output_content(self,setup): + """Test batch input and output content should be same for + each batch and unique from other + """ + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + 
raise_for_status=True) as data: + response = await data.json() + + block_list = response['data'] + txn_input = [block['batches'][0]['transactions'][0]['header']['inputs'][0] for block in block_list] + txn_output = [block['batches'][0]['transactions'][0]['header']['outputs'][0] for block in block_list] + if(txn_input == txn_output): + return True + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + async def test_rest_api_check_signer_public_key(self, setup): + """Tests that signer public key is calculated for a block + properly + """ + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + + block_list = response['data'] + signer_public_key = [block['batches'][0]['header']['signer_public_key'] for block in block_list] + assert signer_public_key is not None, "signer public key is available" + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + async def test_rest_api_check_blocks_count(self, setup): + """Tests blocks count from block list + """ + address = setup['address'] + count =0 + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for block in enumerate(response['data']): + count = count+1 + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("BLock count not able to collect") + + async def test_rest_api_blk_content_head_signature(self, setup): + """Tests that head signature of each batch of the block + should be not none + """ + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks'.format(address), + raise_for_status=True) as data: + response = await data.json() + + for batch in response['data']: + batch_list = 
get_batches() + for batch in batch_list: + transaction_list = get_transactions() + for trans in transaction_list['data']: + head_signature = trans['header_signature'] + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Header signature is missing in some of the batches") + assert head_signature is not None, "Head signature is available for all batches in block" + +class TestBlockGet(RestApiBaseTest): + async def test_api_get_block_id(self, setup): + """Tests that GET /blocks/{block_id} is reachable + """ + LOGGER.info("Starting test for blocks/{block_id}") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_id = setup['block_ids'][0] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + address = setup['address'] + expected_link = '{}/blocks/{}'.format(address, expected_id) + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks/{}'.format(address,expected_id), + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + blocks = response['data'] + + self.assert_check_block_seq(blocks,expected_batches, + expected_txns,payload,signer_key) + + async def test_api_get_bad_block_id(self, setup): + """Tests that GET /blocks/{bad_block_id} is not reachable + with bad id + """ + LOGGER.info("Starting test for blocks/{bad_block_id}") + address = setup['address'] + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/blocks/{}'.format(address,BAD_ID)) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + diff --git a/rest_api/tests/api_test/get/test_rest_api_get_peers.py b/rest_api/tests/api_test/get/test_rest_api_get_peers.py new 
file mode 100644 index 0000000000..826e1c37dd --- /dev/null +++ b/rest_api/tests/api_test/get/test_rest_api_get_peers.py @@ -0,0 +1,51 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error +import aiohttp + +from utils import get_peers + +from base import RestApiBaseTest + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +pytestmark = [pytest.mark.get , pytest.mark.peers] + +PEER_LIST = [] + +class TestPeerList(RestApiBaseTest): + """This class tests the peer list with different parameters + """ + async def test_api_get_peer_list(self, setup): + """Tests the peer list + """ + address = setup['address'] + expected_link = '{}/peers'.format(address) + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/peers'.format(address), raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + self.assert_valid_link(response, expected_link) + diff --git a/rest_api/tests/api_test/get/test_rest_api_get_receipts.py b/rest_api/tests/api_test/get/test_rest_api_get_receipts.py new file mode 100644 index 0000000000..d7ae7f7de7 --- /dev/null +++ b/rest_api/tests/api_test/get/test_rest_api_get_receipts.py @@ -0,0 +1,149 
# Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
#
import pytest
import logging
import json
import urllib.request
import urllib.error
import aiohttp

from utils import get_state_list, get_reciepts, post_receipts
from base import RestApiBaseTest


LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

pytestmark = [pytest.mark.get, pytest.mark.receipts, pytest.mark.fifth]

RECEIPT_NOT_FOUND = 80
RECEIPT_WRONG_CONTENT_TYPE = 81
RECEIPT_BODY_INVALID = 82
RECEIPT_Id_QUERYINVALID = 83
INVALID_RESOURCE_ID = 60
TIMEOUT = 5


class TestReceiptsList(RestApiBaseTest):
    """Tests the ``GET /receipts`` and ``POST /receipts`` routes.

    ``setup`` is a pytest fixture that provides the REST API base
    address (``setup['address']``) and the ids of the transactions
    submitted during setup (``setup['expected_txns']``).
    """

    async def _get_receipts(self, address, params):
        """GET ``<address>/receipts`` with *params*, return the JSON body.

        Fails the calling test with AssertionError if the API is
        unreachable, instead of letting an unbound ``response``
        raise a NameError later (the original behavior).
        """
        try:
            async with aiohttp.ClientSession() as session:
                async with session.get(url='{}/receipts'.format(address),
                                       params=params) as data:
                    return await data.json()
        except aiohttp.ClientResponseError as error:
            LOGGER.info("Rest Api is Unreachable")
            raise AssertionError("Rest Api is unreachable") from error

    async def _post_receipts(self, address, payload):
        """POST *payload* (JSON-encoded) to ``/receipts``, return the JSON body."""
        headers = {'content-type': 'application/json'}
        try:
            async with aiohttp.ClientSession() as session:
                async with session.post(url='{}/receipts'.format(address),
                                        data=json.dumps(payload).encode(),
                                        headers=headers) as data:
                    return await data.json()
        except aiohttp.ClientResponseError as error:
            LOGGER.info(error)
            raise AssertionError("Rest Api is unreachable") from error

    async def test_api_get_reciept_invalid_id(self, setup):
        """An invalid transaction id must yield an INVALID_RESOURCE_ID error."""
        address = setup['address']
        response = await self._get_receipts(address, {'id': 's'})
        self.assert_valid_error(response, INVALID_RESOURCE_ID)

    async def test_api_get_reciepts_multiple_transactions(self, setup):
        """Receipts for several transactions come back in request order."""
        address = setup['address']
        expected_txns = setup['expected_txns']
        # The original hand-rolled loop prepended each id and stripped the
        # trailing comma; joining the reversed list yields the same string.
        trans_list = ','.join(reversed(expected_txns))
        response = await self._get_receipts(address, {'id': trans_list})

        for res, txn in zip(response['data'], reversed(expected_txns)):
            assert str(res['id']) == txn

    async def test_api_get_reciepts_single_transactions(self, setup):
        """``GET /receipts`` succeeds for a single transaction id."""
        address = setup['address']
        # expected_txns is a one-element list here; index it directly
        # rather than slicing its str() representation.
        transaction_id = setup['expected_txns'][0]
        await self._get_receipts(address, {'id': transaction_id})

    async def test_api_post_reciepts_single_transactions(self, setup):
        """``POST /receipts`` succeeds for a single transaction id."""
        await self._post_receipts(setup['address'], setup['expected_txns'])

    async def test_api_post_reciepts_invalid_transactions(self, setup):
        """``POST /receipts`` with a bogus id still returns a JSON body."""
        await self._post_receipts(setup['address'], "few")

    async def test_api_post_reciepts_multiple_transactions(self, setup):
        """``POST /receipts`` returns a receipt per submitted transaction id."""
        address = setup['address']
        expected_txns = setup['expected_txns']
        response = await self._post_receipts(address, expected_txns)

        for res, txn in zip(response['data'], expected_txns):
            assert str(res['id']) == txn
# ------------------------------------------------------------------------------

import pytest
import logging
import json
import urllib.request
import urllib.error
import aiohttp
import asyncio


from utils import get_state_list, get_state_address
from fixtures import invalid_batch


from base import RestApiBaseTest


LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

pytestmark = [pytest.mark.get, pytest.mark.state, pytest.mark.third]


START = 1
LIMIT = 1
COUNT = 0
BAD_HEAD = 'f'
BAD_ID = 'f'
BAD_ADDRESS = 'f'
INVALID_START = -1
INVALID_LIMIT = 0
INVALID_RESOURCE_ID = 60
INVALID_PAGING_QUERY = 54
INVALID_COUNT_QUERY = 53
VALIDATOR_NOT_READY = 15
STATE_ADDRESS_LENGTH = 70
STATE_NOT_FOUND = 75
INVALID_STATE_ADDRESS = 62
HEAD_LENGTH = 128
TIMEOUT = 5


async def _get_state(address, params=None, raise_for_status=False):
    """GET ``<address>/state`` and return the decoded JSON body.

    With ``raise_for_status=False`` (default) the REST API's error
    responses are returned as JSON bodies for the caller to assert on.
    """
    async with aiohttp.ClientSession() as session:
        async with session.get(url='{}/state'.format(address),
                               params=params,
                               raise_for_status=raise_for_status) as data:
            return await data.json()


async def _require_state(address, params=None):
    """Like :func:`_get_state` but fails the test if the API errors out.

    The original tests swallowed the exception and then read an unbound
    ``response`` local, which surfaced as a NameError; failing with
    AssertionError keeps the real cause visible.
    """
    try:
        return await _get_state(address, params, raise_for_status=True)
    except aiohttp.ClientResponseError as error:
        LOGGER.info("Rest Api is Unreachable")
        raise AssertionError("Rest Api is unreachable") from error


class TestStateList(RestApiBaseTest):
    """Tests the ``GET /state`` route with different query parameters."""

    async def test_api_get_state_list(self, setup):
        """The default state list reports the expected head and link."""
        address = setup['address']
        expected_head = setup['expected_head']
        expected_link = "{}/state?head={}&start={}&limit=100".format(
            address, expected_head, setup['state_address'][0])

        # NOTE(review): the original caught urllib.error.HTTPError here,
        # which aiohttp never raises — errors propagated anyway.
        response = await _require_state(address)
        self.assert_valid_head(response, expected_head)
        self.assert_valid_link(response, expected_link)

    async def test_api_get_state_list_head(self, setup):
        """The ``head`` query parameter pins the returned head and link."""
        address = setup['address']
        expected_head = setup['expected_head']
        expected_link = "{}/state?head={}&start={}&limit=100".format(
            address, expected_head, setup['state_address'][0])

        response = await _get_state(address, {'head': expected_head})
        self.assert_valid_head(response, expected_head)
        self.assert_valid_link(response, expected_link)

    async def test_api_get_state_list_invalid_batch(self, invalid_batch):
        """State must still be readable after an invalid batch was submitted."""
        # The GET succeeding is the check: an invalid batch must not
        # corrupt state.
        await _require_state(invalid_batch['address'])

    async def test_api_get_state_list_bad_head(self, setup):
        """A malformed ``head`` id yields an INVALID_RESOURCE_ID error."""
        response = await _get_state(setup['address'], {'head': BAD_HEAD})
        self.assert_valid_error(response, INVALID_RESOURCE_ID)

    async def test_api_get_state_list_address(self, setup):
        """Filtering by a known state address still reports the head."""
        response = await _get_state(setup['address'],
                                    {'address': setup['state_address'][0]})
        self.assert_valid_head(response, setup['expected_head'])

    async def test_api_get_state_list_bad_address(self, setup):
        """A malformed address filter yields an INVALID_STATE_ADDRESS error."""
        response = await _get_state(setup['address'], {'address': BAD_ADDRESS})
        self.assert_valid_error(response, INVALID_STATE_ADDRESS)

    async def test_api_get_paginated_state_list(self, setup):
        """A non-id ``start`` value yields an INVALID_PAGING_QUERY error."""
        response = await _get_state(setup['address'], {'limit': 1, 'start': 1})
        self.assert_valid_error(response, INVALID_PAGING_QUERY)

    async def test_api_get_paginated_state_list_limit(self, setup):
        """``limit=1`` is a valid paging request."""
        await _require_state(setup['address'], {'limit': 1})

    async def test_api_get_paginated_state_list_start(self, setup):
        """Paging reachability check.

        NOTE(review): despite the name this sends ``limit=1``, exactly as
        the original did — presumably it should exercise ``start``;
        confirm against the REST API spec before changing.
        """
        await _require_state(setup['address'], {'limit': 1})

    async def test_api_get_state_list_bad_paging(self, setup):
        """Negative paging values yield an INVALID_COUNT_QUERY error."""
        response = await _get_state(setup['address'],
                                    {'start': -1, 'limit': -1})
        self.assert_valid_error(response, INVALID_COUNT_QUERY)

    async def test_api_get_state_list_invalid_start(self, setup):
        """A negative ``start`` yields an INVALID_PAGING_QUERY error."""
        response = await _get_state(setup['address'], {'start': -1})
        self.assert_valid_error(response, INVALID_PAGING_QUERY)

    async def test_api_get_state_list_invalid_limit(self, setup):
        """A zero ``limit`` yields an INVALID_COUNT_QUERY error."""
        response = await _get_state(setup['address'], {'limit': 0})
        self.assert_valid_error(response, INVALID_COUNT_QUERY)

    async def test_api_get_state_list_reversed(self, setup):
        """The ``reverse`` flag returns data without paging markers."""
        response = await _require_state(setup['address'], 'reverse')
        assert response['paging']['start'] is None, "request is not correct"
        assert response['paging']['limit'] is None, "request is not correct"
        assert bool(response['data'])

    async def test_api_get_state_data_address_prefix_namespace(self, setup):
        """Each state entry is addressable via its 6-hex namespace prefix."""
        response = await _require_state(setup['address'])
        for state in response['data']:
            get_state_list(address=state['address'][:6])

    async def test_api_get_state_data_head_wildcard_character(self, setup):
        """Head lookup with a wildcard character. ***STL-1345***"""
        await _require_state(setup['address'])
        # Replace the third character of the truncated head with '?'.
        head = list(setup['expected_head'][:6])
        head[2] = '?'
        get_state_list(head_id=''.join(head))

    async def test_api_get_state_data_head_partial_character(self, setup):
        """Head lookup with a partial (6-hex) head id. ***STL-1345***"""
        await _require_state(setup['address'])
        get_state_list(head_id=setup['expected_head'][:6])

    async def test_api_get_state_data_address_partial_character(self, setup):
        """Address lookup with a partial head id. ***STL-1346***"""
        await _require_state(setup['address'])
        get_state_list(head_id=setup['expected_head'][:6])

    async def test_api_get_state_data_address_length(self, setup):
        """Every state address is exactly 70 hex characters long."""
        response = await _require_state(setup['address'])
        # The original asserted outside the try block, where a failed
        # request left the checked variable holding the base URL.
        for state in response['data']:
            assert len(state['address']) == STATE_ADDRESS_LENGTH

    async def test_api_get_state_data_address_with_odd_hex_value(self, setup):
        """State addresses must have an even number of hex characters."""
        response = await _require_state(setup['address'])
        # The original's `if even: pass` was a no-op; assert the intent.
        for state in response['data']:
            assert len(state['address']) % 2 == 0

    async def test_api_get_state_data_address_with_reduced_length(self, setup):
        """Querying with an even-length truncated address is accepted."""
        response = await _require_state(setup['address'])
        if response['data']:
            get_state_list(address=response['data'][0]['address'][:-4])

    async def test_api_get_state_data_address_64_Hex(self, setup):
        """A 64-hex address (namespace prefix stripped) returns no data."""
        response = await _require_state(setup['address'])
        if response['data']:
            nhex = response['data'][0]['address'][6:70]
            assert get_state_list(address=nhex)['data'] == []

    async def test_api_get_state_data_address_alter_bytes(self, setup):
        """Query with an altered address fragment.

        NOTE(review): the trailing list/join dance mirrors the original
        code; it operates on the *response dict's keys*, not on the
        address, and probably never did what was intended — confirm the
        intent before simplifying.
        """
        response = await _require_state(setup['address'])
        if response['data']:
            nhex = response['data'][0]['address'][6:8]
            naddress = get_state_list(address=nhex)
            altered = list(naddress)
            altered[2] = 'z'
            naddress = ''.join(altered)

    async def test_api_get_state_link_val(self, setup):
        """The ``link`` field carries state, head, start and limit."""
        response = await _require_state(setup['address'])
        if 'link' in response:
            for fragment in ('head', 'start', 'limit', 'state'):
                assert fragment in response['link']

    async def test_api_get_state_key_params(self, setup):
        """The response body carries link, data, paging and head keys."""
        response = await _require_state(setup['address'])
        for key in ('link', 'data', 'paging', 'head'):
            assert key in response

    async def test_api_get_each_state_head_length(self, setup):
        """The chain head id is 128 hex characters long."""
        await _require_state(setup['address'])
        # The original computed this inside a loop and asserted after the
        # except block, where the local could be unbound.
        assert len(setup['expected_head']) == HEAD_LENGTH

    async def test_rest_api_check_state_count(self, setup):
        """The state list entry count can be read from the response."""
        response = await _require_state(setup['address'])
        count = len(response['data'])
        LOGGER.info("state count: %s", count)


class TestStateGet(RestApiBaseTest):
    """Tests the ``GET /state/{address}`` route."""

    async def test_api_get_state_address(self, setup):
        """A known state address is retrievable."""
        address = setup['address']
        state_address = setup['state_address'][0]
        try:
            async with aiohttp.ClientSession() as session:
                async with session.get(
                        url='{}/state/{}'.format(address, state_address),
                        raise_for_status=True) as data:
                    await data.json()
        except aiohttp.ClientResponseError as error:
            LOGGER.info("Rest Api is Unreachable")
            raise AssertionError("Rest Api is unreachable") from error

    async def test_api_get_bad_address(self, setup):
        """A malformed state address yields an INVALID_STATE_ADDRESS error."""
        address = setup['address']
        LOGGER.info("Starting test for state/{bad_address}")
        async with aiohttp.ClientSession() as session:
            async with session.get(
                    url='{}/state/{}'.format(address, BAD_ADDRESS)) as data:
                response = await data.json()
        self.assert_valid_error(response, INVALID_STATE_ADDRESS)


class TestStateDeleteRoot(RestApiBaseTest):
    """Sanity checks around state at/below the merkle root node."""

    async def test_api_get_state_delete_root(self, setup):
        """Log whether the current state belongs to the root/genesis node."""
        address = setup['address']
        response = await _require_state(address)
        if len(response['data']) == 1:
            LOGGER.info("Currently selected state is root/ genesis node")
        if address == "":
            LOGGER.info("Merkle tree root state deleted")

    async def test_api_get_state_delete_not_root_node(self, setup):
        """Log whether the current state belongs to a non-root node."""
        address = setup['address']
        response = await _require_state(address)
        if len(response['data']) > 1:
            LOGGER.info("Currently selected state is not root node")
        if address == "":
            LOGGER.info("Merkle tree not root node state deleted")
+# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error +import aiohttp + +from fixtures import break_genesis + +from utils import get_transactions, get_transaction_id + +from base import RestApiBaseTest + +pytestmark = [pytest.mark.get , pytest.mark.transactions, pytest.mark.first] + + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +START = 1 +LIMIT = 1 +COUNT = 0 +BAD_HEAD = 'f' +BAD_ID = 'f' +INVALID_START = -1 +INVALID_LIMIT = 0 +INVALID_RESOURCE_ID = 60 +INVALID_PAGING_QUERY = 54 +INVALID_COUNT_QUERY = 53 +VALIDATOR_NOT_READY = 15 +TRANSACTION_NOT_FOUND = 72 +HEAD_LENGTH = 128 +TIMEOUT=5 + + +class TestTransactionList(RestApiBaseTest): + async def test_api_get_transaction_list(self, setup): + """Tests the transaction list after submitting intkey batches + """ + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_trn_length'] + payload = setup['payload'] + address = setup['address'] + start = setup['start'] + limit = setup['limit'] + start = expected_txns[0] + + expected_link = '{}/transactions?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + paging_link = '{}/transactions?head={}&start={}'.format(address,\ + expected_head, start) + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + txns = response['data'][:-1] + + self.assert_check_transaction_seq(txns, expected_txns, + payload, signer_key) + self.assert_valid_head(response , expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) + + + async 
def test_api_get_transaction_list_head(self, setup): + """Tests that GET /transactions is reachable with head parameter + """ + LOGGER.info("Starting test for transactions with head parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_trn_length'] + payload = setup['payload'][0] + address = setup['address'] + start = expected_txns[0] + limit = setup['limit'] + + expected_link = '{}/transactions?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + params={'head': expected_head} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + txns = response['data'][:-1] + + self.assert_check_transaction_seq(txns, expected_txns, + payload, signer_key) + self.assert_valid_head(response , expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) + + async def test_api_get_transaction_list_bad_head(self, setup): + """Tests that GET /transactions is unreachable with bad head parameter + """ + LOGGER.info("Starting test for transactions with bad head parameter") + address = setup['address'] + params={'head': BAD_HEAD} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + + async def test_api_get_transaction_list_id(self, setup): + """Tests that GET /transactions is reachable with id as parameter + """ + LOGGER.info("Starting test for transactions with id parameter") + + 
signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_trn_length'] + payload = setup['payload'][0] + address = setup['address'] + start = expected_txns[0] + transaction_ids = setup['transaction_ids'] + expected_id = transaction_ids[0] + expected_length = len([expected_id]) + limit = setup['limit'] + + expected_link = '{}/transactions?head={}&start={}&limit={}&id={}'.format(address,\ + expected_head, start, limit, expected_id) + + params={'id': expected_id} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + + txns = response['data'][:-1] + + self.assert_check_transaction_seq(txns, expected_txns, + payload, signer_key) + + async def test_api_get_transaction_list_bad_id(self, setup): + """Tests that GET /transactions is unreachable with bad id parameter + """ + LOGGER.info("Starting test for transactions with bad id parameter") + address = setup['address'] + params={'head': BAD_ID} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + + async def test_api_get_transaction_list_head_and_id(self, setup): + """Tests GET /transactions is reachable with head and id as parameters + """ + LOGGER.info("Starting test for transactions with head and id parameter") + + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_trn_length'] + payload = setup['payload'][0] + 
address = setup['address'] + start = expected_txns[0] + transaction_ids = setup['transaction_ids'] + expected_id = transaction_ids[0] + expected_length = len([expected_id]) + limit = setup['limit'] + + expected_link = '{}/transactions?head={}&start={}&limit={}&id={}'.format(address,\ + expected_head, start, limit, expected_id) + + params={'head':expected_head,'id':expected_id} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + + txns = response['data'][:-1] + + self.assert_check_transaction_seq(txns, expected_txns, + payload, signer_key) + self.assert_valid_head(response , expected_head) + + async def test_api_get_paginated_transaction_list(self, setup): + """Tests GET /transactions is reachable using paging parameters + """ + LOGGER.info("Starting test for transactions with paging parameters") + address = setup['address'] + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = 1 + limit = 1 + + params={'limit':1, 'start':1} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) + + async def test_api_get_transaction_list_limit(self, setup): + """Tests GET /transactions is reachable using the limit paging parameter + """ + LOGGER.info("Starting test for batch with paging parameters") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_trn_length'] + payload = setup['payload'][0] + address = 
setup['address'] + start = expected_txns[0] + transaction_ids = setup['transaction_ids'] + expected_id = transaction_ids[0] + expected_length = len([expected_id]) + limit = setup['limit'] + + params={'limit':1} + + expected_link = '{}/transactions?head={}&start={}&limit={}'.format(address,\ + expected_head, start, 1) + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + + txns = response['data'][:-1] + + self.assert_check_transaction_seq(txns, expected_txns, + payload, signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) + + async def test_api_get_transaction_bad_paging(self, setup): + """Tests GET /transactions is reachable using bad paging parameters + """ + LOGGER.info("Starting test for transactions with bad paging parameters") + address = setup['address'] + params = {'start':-1 , 'limit':-1} + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_COUNT_QUERY) + + async def test_api_get_transaction_list_invalid_start(self, setup): + """Tests that GET /transactions is unreachable with invalid start parameter + """ + LOGGER.info("Starting test for transactions with invalid start parameter") + address = setup['address'] + params = {'start':-1 } + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params) as data: + response = await data.json() + except 
aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_PAGING_QUERY) + + async def test_api_get_transaction_list_invalid_limit(self, setup): + """Tests that GET /transactions is unreachable with bad limit parameter + """ + LOGGER.info("Starting test for transactions with bad limit parameter") + address = setup['address'] + params = {'limit': 0 } + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, INVALID_COUNT_QUERY) + + + async def test_api_get_transaction_list_reversed(self, setup): + """verifies that GET /transactions with list reversed + """ + LOGGER.info("Starting test for transactions with list reversed") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_trn_length'] + payload = setup['payload'][0] + address = setup['address'] + start = expected_txns[::-1][0] + transaction_ids = setup['transaction_ids'] + expected_id = transaction_ids[0] + expected_length = len([expected_id]) + limit = setup['limit'] + expected_link = '{}/transactions?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + params = 'reverse' + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), params=params, + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + assert response['paging']['start'] == None , "request is not correct" + assert response['paging']['limit'] == None , "request is not correct" + assert bool(response['data']) == True + + async def 
test_api_get_transactions_link_val(self, setup): + """Tests/ validate the transactions parameters with transactions, head, start and limit + """ + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), raise_for_status=True) as data: + response = await data.json() + + for link in response: + if(link == 'link'): + assert 'head' in response['link'] + assert 'start' in response['link'] + assert 'limit' in response['link'] + assert 'transactions' in response['link'] + except urllib.error.HTTPError as error: + assert response.code == 400 + LOGGER.info("Link is not proper for transactions and parameters are missing") + + async def test_api_get_transactions_key_params(self, setup): + """Tests/ validate the state key parameters with data, head, link and paging + """ + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + assert 'link' in response + assert 'data' in response + assert 'paging' in response + assert 'head' in response + + async def test_api_get_transaction_id_length(self, setup): + """Tests the transaction id length should be 128 hex character long + """ + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), raise_for_status=True) as data: + response = await data.json() + + for trans in response['data']: + transaction_ids = trans['header_signature'] + head_len = len(transaction_ids) + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Transaction id length is not 128 hex character long") + assert head_len == HEAD_LENGTH + + async def test_rest_api_check_transactions_count(self, setup): + 
"""Tests transaction count from transaction list + """ + address = setup['address'] + count =0 + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions'.format(address), raise_for_status=True) as data: + response = await data.json() + + for trans in enumerate(response['data']): + count = count+1 + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Transaction count not able to collect") + +class TestTransactionGet(RestApiBaseTest): + async def test_api_get_transaction_id(self, setup): + """Tests that GET /transactions/{transaction_id} is reachable + """ + LOGGER.info("Starting test for transaction/{transaction_id}") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_txns = setup['expected_txns'] + expected_id = expected_txns[0] + address = setup['address'] + payload = setup['payload'] + expected_length = 1 + + expected_link = '{}/transactions/{}'.format(address,expected_id) + + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions/{}'.format(address,expected_id), + raise_for_status=True) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info("Rest Api is Unreachable") + + txns = response['data'] + + self.assert_check_transaction_seq(txns, expected_txns, + payload, signer_key) + self.assert_valid_link(response, expected_link) + + async def test_api_get_transaction_bad_id(self, setup): + """Tests that GET /transactions/{transaction_id} is not reachable + with bad id + """ + LOGGER.info("Starting test for transactions/{bad_id}") + address = setup['address'] + try: + async with aiohttp.ClientSession() as session: + async with session.get(url='{}/transactions/{}'.format(address,BAD_ID)) as data: + response = await data.json() + except aiohttp.client_exceptions.ClientResponseError as error: + LOGGER.info(error) + + self.assert_valid_error(response, 
INVALID_RESOURCE_ID) + diff --git a/rest_api/tests/api_test/payload.py b/rest_api/tests/api_test/payload.py new file mode 100644 index 0000000000..b84589420e --- /dev/null +++ b/rest_api/tests/api_test/payload.py @@ -0,0 +1,709 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import base64 +import argparse +import cbor +import hashlib +import os +import time +import random +import string +import urllib + + +from sawtooth_signing import create_context +from sawtooth_signing import CryptoFactory +from sawtooth_signing import ParseError +from sawtooth_signing.secp256k1 import Secp256k1PrivateKey + +from sawtooth_rest_api.protobuf.validator_pb2 import Message +from sawtooth_rest_api.protobuf import client_batch_submit_pb2 +from sawtooth_rest_api.protobuf import client_batch_pb2 +from sawtooth_rest_api.protobuf import client_list_control_pb2 + +from sawtooth_rest_api.protobuf.batch_pb2 import Batch +from sawtooth_rest_api.protobuf.batch_pb2 import BatchList +from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction + +from google.protobuf.message import DecodeError +from google.protobuf.json_format import MessageToDict + +from utils import get_batches, 
get_transactions, get_state_address, post_batch, get_blocks,\ + get_state_list, _get_client_address, \ + batch_count, transaction_count,\ + get_batch_statuses, state_count + +INTKEY_ADDRESS_PREFIX = hashlib.sha512( + 'intkey'.encode('utf-8')).hexdigest()[0:6] + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +LIMIT = 100 +WAIT = 300 +BATCH_SIZE = 1 +WORD_COUNT=50 + +class Setup: + def __init__(self): + self.data = {} + self.signer= get_signer() + self.address = _get_client_address() + self.url='{}/batches'.format(self.address) + + def _create_transactions(self): + LOGGER.info("Creating intkey transactions with set operations") + txns = [create_intkey_transaction("set", [] , WORD_COUNT , self.signer) for i in range(BATCH_SIZE)] + return txns + + + def _create_batches(self,txns): + LOGGER.info("Creating batches for transactions 1trn/batch") + batches = [create_batch([txn], self.signer) for txn in txns] + return batches + + def _create_batch_list(self,batches): + batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + return batch_list + + + def _batch_statuses(self,expected_batches): + LOGGER.info("Batch statuses for the created batches") + for batch in expected_batches: + response = get_batch_statuses([batch]) + status = response['data'][0]['status'] + LOGGER.info(status) + + + def _expected_batch_ids(self,batches): + LOGGER.info("Expected batch ids") + expected_batches = [] + for batch in batches: + dict = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = dict['header_signature'] + expected_batches.append(batch_id) + return expected_batches + + + def _expected_txn_ids(self,txns): + LOGGER.info("Expected transaction ids") + expected_txns = {} + for txn in txns: + dict = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + if 'trxn_id' not in expected_txns: + expected_txns['trxn_id'] = [] + if 'payload' 
not in expected_txns: + expected_txns['payload'] =[] + + expected_txns['trxn_id'].append(dict['header_signature']) + expected_txns['payload'].append(dict['payload']) + return expected_txns + + + def _submit_batches(self,batch_list): + print("Submitting batches to the route handlers") + import time + start_time = time.time() + for batch in batch_list: + try: + response = post_batch(batch) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + print(time.time()-start_time) + return response + + + def _initial_count(self): + LOGGER.info("Calculating the initial count of batches,transactions, state before submission of batches") + data = self.data + data['state_length'] = state_count() + data['transaction_length'] = transaction_count() + data['batch_length'] = batch_count() + return data + + + def _expected_count(self,txns,batches): + LOGGER.info("Calculating the expected count of batches, transactions, state") + data = self.data + self._initial_count() + expected_txns=self._expected_txn_ids(txns) + expected_batches=self._expected_batch_ids(batches) + length_batches = len(expected_batches) + length_transactions = len(expected_txns['trxn_id']) + data['expected_batch_length'] = data['batch_length'] + length_batches + data['expected_trn_length'] = data['transaction_length'] + length_transactions + return data + + + def _expected_data(self,txns,batches): + LOGGER.info("Gathering expected data before submission of batches") + data = self.data + self._expected_count(txns,batches) + expected_txns=self._expected_txn_ids(txns) + expected_batches=self._expected_batch_ids(batches) + + data['expected_txns'] = expected_txns['trxn_id'][::-1] + data['payload'] = expected_txns['payload'][::-1] + data['expected_batches'] = expected_batches[::-1] + data['signer_key'] = self.signer.get_public_key().as_hex() + return 
data + + def _post_data(self,txns,batches): + print("Gathering data post submission of batches") + import time + start_time = time.time() + data = self.data + expected_batches=self._expected_batch_ids(batches) + batch_list = get_batches() + data['batch_list'] = batch_list + data['batch_ids'] = [batch['header_signature'] for batch in batch_list['data']] + transaction_list = get_transactions() + data['transaction_list'] = transaction_list + data['transaction_ids'] = [trans['header_signature'] for trans in transaction_list['data']] + block_list = get_blocks() + data['block_list'] = block_list + block_ids = [block['header_signature'] for block in block_list['data']] + data['block_ids'] = block_ids[:-1] + expected_head = block_ids[0] + data['expected_head'] = expected_head + state_addresses = [state['address'] for state in get_state_list()['data']] + data['state_address'] = state_addresses + state_head_list = [get_state_address(address)['head'] for address in state_addresses] + data['state_head'] = state_head_list + data['address'] = self.address + data['limit'] = LIMIT + data['start'] = expected_batches[::-1][0] + return data + + +class IntKeyPayload(object): + def __init__(self, verb, name, value): + self._verb = verb + self._name = name + self._value = value + + self._cbor = None + self._sha512 = None + + def to_hash(self): + return { + 'Verb': self._verb, + 'Name': self._name, + 'Value': self._value + } + + def to_cbor(self): + if self._cbor is None: + self._cbor = cbor.dumps(self.to_hash(), sort_keys=True) + return self._cbor + + def sha512(self): + if self._sha512 is None: + self._sha512 = hashlib.sha512(self.to_cbor()).hexdigest() + return self._sha512 + + +class XOPayload(object): + def create_users(self,users): + for username in users: + _send_cmd('sawtooth keygen {} --force'.format(username)) + + def create_game(self, game, user,address): + cmd = 'xo create game-1 --username {}'.format(user) + _send_cmd(cmd) + + def take_game(self, game, user, 
position,address): + cmd = 'xo take game-1 {} --username {}'.format(position,user) + _send_cmd(cmd) + + def list_game(self): + cmd = 'xo list' + _send_cmd(cmd) + + def show_game(self,game,address): + cmd = 'xo show game-1'.format(game,address) + + def delete_game(self,game,address): + cmd = 'xo delete game-1 --username {}'.format(user) + _send_cmd(cmd) + + +class Transactions: + def __init__(self, invalidtype): + self.signer = get_signer() + self.data = {} + self.invalidtype = invalidtype + + def get_batch_valinv_txns(self): + """Setup method for posting batches and returning the + response + """ + txns = [ + self.create_intkey_transaction("set",[],30, self.signer), + self.create_intkey_transaction("set",[],30, self.signer), + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + ] + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_batch_invval_txns(self): + """Setup method for posting batches and returning the + response + """ + txns = [ + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + self.create_intkey_transaction("set",[],30, self.signer), + self.create_intkey_transaction("set",[],30, self.signer), + ] + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_batch_invalid_txns(self): + """Setup method for posting batches and returning the + response + """ + + txns = [ + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + ] + + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_batch_valid_one_txns(self): + """Setup method for posting batches and returning the + response + """ + txns = [ + self.create_intkey_transaction("set",[],30, self.signer), + ] + 
self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_batch_valid_txns(self): + """Setup method for posting batches and returning the + response + """ + txns = [ + self.create_intkey_transaction("set",[],30, self.signer), + self.create_intkey_transaction("set",[],30, self.signer), + self.create_intkey_transaction("set",[],30, self.signer), + ] + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_batch_same_txns(self): + """Setup method for posting batches and returning the + response + """ + txns = [ + self.create_intkey_same_transaction("set",[],30, self.signer), + self.create_intkey_same_transaction("set",[],30, self.signer), + self.create_intkey_same_transaction("set",[],30, self.signer), + ] + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_batch_invalid_txns_fam_name(self): + """Setup method for posting batches and returning the + response + """ + + txns = [ + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + self.create_invalid_intkey_transaction("set",[],30, self.signer, self.invalidtype), + ] + + self.data = self.get_txns_commit_data(txns,self.signer, self.data) + return self.data + + def get_txns_commit_data(self, txns, signer, data): + """Setup method for posting batches and returning the + response + """ + expected_trxn_ids = [] + expected_batch_ids = [] + expected_trxns = {} + expected_batches = [] + initial_batch_length = batch_count() + initial_transaction_length = transaction_count() + + LOGGER.info("Creating intkey transactions with set operations") + + for txn in txns: + dict = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + trxn_id = dict['header_signature'] + expected_trxn_ids.append(trxn_id) + + self.data['expected_trxn_ids'] = 
expected_trxn_ids + expected_trxns['trxn_id'] = [dict['header_signature']] + expected_trxns['payload'] = [dict['payload']] + #print(expected_trxns['trxn_id']) + print(expected_trxns['payload']) + + + LOGGER.info("Creating batches for transactions 3trn/batch") + + batches = [create_batch(txns, signer)] + for batch in batches: + dict = MessageToDict( + batch, + including_default_value_fields=True, + preserving_proto_field_name=True) + + batch_id = dict['header_signature'] + expected_batches.append(batch_id) + length_batches = len(expected_batches) + length_transactions = len(expected_trxn_ids) + data['expected_txns'] = expected_trxns['trxn_id'][::-1] + + post_batch_list = [BatchList(batches=[batch]).SerializeToString() for batch in batches] + try: + for batch in post_batch_list: + response = post_batch(batch) + batch_id = dict['header_signature'] + expected_batches.append(batch_id) + self.data['response'] = response['data'][0]['status'] + print(response) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + json_data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(json_data['error']['title']) + LOGGER.info(json_data['error']['message']) + LOGGER.info(json_data['error']['code']) + self.data['code'] = json_data['error']['code'] + + self.state_addresses = [state['address'] for state in get_state_list()['data']] + self.data['state_address'] = self.state_addresses + self.data['initial_batch_length'] = initial_batch_length + self.data['initial_trn_length'] = initial_transaction_length + self.data['expected_batch_length'] = initial_batch_length + length_batches + self.data['expected_trn_length'] = initial_transaction_length + length_transactions + return self.data + + def create_intkey_transaction(self, verb, deps, count, signer): + words = random_word_list(count) + name=random.choice(words) + payload = IntKeyPayload( + verb=verb,name=name,value=21) + + addr = make_intkey_address(name) + data = self.get_txns_data(addr,deps, 
payload) + return data + def create_intkey_same_transaction(self, verb, deps, count, signer): + name='a' + payload = IntKeyPayload( + verb=verb,name=name,value=1) + + addr = make_intkey_address(name) + data = self.get_txns_data(addr,deps, payload) + return data + + def create_invalid_intkey_transaction(self, verb, deps, count, signer, invalidtye): + words = random_word_list(count) + name=random.choice(words) + + if invalidtye=="addr": + payload = IntKeyPayload( + verb=verb,name=name,value=1) + + INVALID_INTKEY_ADDRESS_PREFIX = hashlib.sha512( + 'invalid'.encode('utf-8')).hexdigest()[0:6] + + addr = INVALID_INTKEY_ADDRESS_PREFIX + hashlib.sha512( + name.encode('utf-8')).hexdigest()[-64:] + + if invalidtye=="invaddr": + payload = IntKeyPayload( + verb=verb,name=name,value=1) + + INVALID_INTKEY_ADDRESS_PREFIX = hashlib.sha512( + 'invalid'.encode('utf-8')).hexdigest()[0:6] + + addr = INVALID_INTKEY_ADDRESS_PREFIX + hashlib.sha512( + name.encode('utf-8')).hexdigest()[-62:] + + elif invalidtye=="min": + payload = IntKeyPayload( + verb=verb,name=name,value=-1) + addr = make_intkey_address(name) + + elif invalidtye=="str": + payload = IntKeyPayload( + verb=verb,name=name,value="str") + addr = make_intkey_address(name) + + elif invalidtye=="max": + payload = IntKeyPayload( + verb=verb,name=name,value=4294967296) + addr = make_intkey_address(name) + + elif invalidtye=="attr": + payload = IntKeyPayload( + verb="verb",name=name,value=1) + addr = make_intkey_address(name) + + elif invalidtye=="fn": + payload = IntKeyPayload( + verb="verb",name=name,value=1) + addr = make_intkey_address(name) + header = TransactionHeader( + signer_public_key=self.signer.get_public_key().as_hex(), + family_name='abcd', + family_version='1.0', + inputs=[addr], + outputs=[addr], + dependencies=deps, + payload_sha512=payload.sha512(), + batcher_public_key=self.signer.get_public_key().as_hex()) + + header_bytes = header.SerializeToString() + + signature = self.signer.sign(header_bytes) + + 
def _signed_transaction(addr, deps, payload, signer):
    """Assemble, sign and return a Transaction bound to a single address.

    Shared by every create_* helper below; the original repeated this
    header/sign/Transaction boilerplate in five places.
    """
    public_key = signer.get_public_key().as_hex()
    header = TransactionHeader(
        signer_public_key=public_key,
        family_name='intkey',
        family_version='1.0',
        inputs=[addr],
        outputs=[addr],
        dependencies=deps,
        payload_sha512=payload.sha512(),
        batcher_public_key=public_key)
    header_bytes = header.SerializeToString()
    return Transaction(
        header=header_bytes,
        payload=payload.to_cbor(),
        header_signature=signer.sign(header_bytes))


def create_intkey_transaction(verb, deps, count, signer):
    """Build a signed intkey transaction for a randomly chosen name.

    verb   -- intkey operation (e.g. 'set')
    deps   -- transaction ids this transaction depends on
    count  -- size of the word pool the name is drawn from
    signer -- sawtooth_signing Signer (also used as batcher key)
    """
    name = random.choice(random_word_list(count))
    payload = IntKeyPayload(verb=verb, name=name, value=1)
    return _signed_transaction(make_intkey_address(name), deps, payload, signer)


def create_invalid_intkey_transaction(verb, deps, count, signer):
    """Build a transaction whose address has a wrong (non-intkey) prefix,
    so the validator is expected to reject it."""
    name = random.choice(random_word_list(count))
    payload = IntKeyPayload(verb=verb, name=name, value=1)
    invalid_prefix = hashlib.sha512('invalid'.encode('utf-8')).hexdigest()[0:6]
    addr = invalid_prefix + hashlib.sha512(name.encode('utf-8')).hexdigest()[-64:]
    return _signed_transaction(addr, deps, payload, signer)


def create_intkey_same_transaction(verb, deps, count, signer):
    """Build a transaction that always targets the fixed name 'a'.

    *count* is unused but kept for signature compatibility with
    create_intkey_transaction.
    """
    name = 'a'
    payload = IntKeyPayload(verb=verb, name=name, value=1)
    return _signed_transaction(make_intkey_address(name), deps, payload, signer)


def create_batch(transactions, signer):
    """Wrap *transactions* in a single signed Batch."""
    header = BatchHeader(
        signer_public_key=signer.get_public_key().as_hex(),
        transaction_ids=[t.header_signature for t in transactions])
    header_bytes = header.SerializeToString()
    return Batch(
        header=header_bytes,
        transactions=transactions,
        header_signature=signer.sign(header_bytes))


def get_signer():
    """Return a fresh secp256k1 signer with a random private key."""
    context = create_context('secp256k1')
    return CryptoFactory(context).new_signer(context.new_random_private_key())


def make_intkey_address(name):
    """Return the state address for the intkey entry *name*."""
    return INTKEY_ADDRESS_PREFIX + hashlib.sha512(
        name.encode('utf-8')).hexdigest()[-64:]


def random_word():
    """Return a random six-letter ASCII word."""
    return ''.join(random.choice(string.ascii_letters) for _ in range(6))


def random_word_list(count):
    """Return *count* words.

    Uses the first *count* entries of the system dictionary when
    /usr/share/dict/words exists (deterministic, as in the original),
    otherwise falls back to random six-letter words.
    """
    if os.path.isfile('/usr/share/dict/words'):
        with open('/usr/share/dict/words', 'r') as fd:
            return [line.strip() for line in fd.readlines()[0:count]]
    return [random_word() for _ in range(count)]


def create_intkey_transaction_dep(verb, deps, name, value, signer):
    """Build a signed intkey transaction for an explicit *name*/*value*
    carrying *deps* as explicit dependencies."""
    payload = IntKeyPayload(verb=verb, name=name, value=value)
    return get_txns_data(make_intkey_address(name), deps, payload, signer)


def get_txns_data(addr, deps, payload, signer):
    """Kept for backward compatibility; delegates to _signed_transaction."""
    return _signed_transaction(addr, deps, payload, signer)


def create_invalid_Address_intkey_dep_txn(verb, deps, name, value, signer):
    """Like create_intkey_transaction_dep, but with a bad address prefix so
    the validator is expected to reject the transaction."""
    # fix: the original hard-coded value=1 and silently ignored *value*
    payload = IntKeyPayload(verb=verb, name=name, value=value)
    invalid_prefix = hashlib.sha512('invalid'.encode('utf-8')).hexdigest()[0:6]
    addr = invalid_prefix + hashlib.sha512(name.encode('utf-8')).hexdigest()[-64:]
    return get_txns_data(addr, deps, payload, signer)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ +import pytest +import logging +import json +import urllib.request +import urllib.error +import base64 +import argparse +import cbor +import subprocess +import shlex +import requests +import hashlib +import aiohttp +import asyncio + +from google.protobuf.json_format import MessageToDict + + +from sawtooth_signing import create_context +from sawtooth_signing import CryptoFactory +from sawtooth_signing import ParseError +from sawtooth_signing.secp256k1 import Secp256k1PrivateKey + +from sawtooth_rest_api.protobuf.batch_pb2 import Batch +from sawtooth_rest_api.protobuf.batch_pb2 import BatchList +from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader +from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction + +from utils import post_batch, get_state_list , get_blocks , get_transactions, \ + get_batches , get_state_address, check_for_consensus,\ + _get_node_list, _get_node_chains, post_batch_no_endpoint,\ + get_reciepts, _get_client_address, state_count + + +from payload import get_signer, create_intkey_transaction, create_batch,\ + create_intkey_same_transaction + +from base import RestApiBaseTest + +from fixtures import setup_empty_trxs_batch, setup_invalid_txns,setup_invalid_txns_min,\ + setup_invalid_txns_max, setup_valinv_txns, setup_invval_txns, \ + setup_same_txns, setup_valid_txns, setup_invalid_txns_fn,\ + setup_invalid_invaddr + + +LOGGER = logging.getLogger(__name__) 
LOGGER.setLevel(logging.INFO)

# Raw payloads and REST API error codes used by the POST tests.
BAD_PROTOBUF = b'BAD_PROTOBUF'
EMPTY_BATCH = b''
NO_BATCHES_SUBMITTED = 34
BAD_PROTOBUF_SUBMITTED = 35
WRONG_HEADER_TYPE = 42
BATCH_QUEUE_FULL = 31
INVALID_BATCH = 30
WRONG_CONTENT_TYPE = 43
WAIT = 300
RECEIPT_NOT_FOUND = 80

BLOCK_TO_CHECK_CONSENSUS = 1

pytestmark = [pytest.mark.post, pytest.mark.last]


async def async_fetch_url(url, session, params=None):
    """GET *url* through *session* and return the decoded JSON body.

    Logs and returns None on an HTTP error response.
    """
    try:
        async with session.get(url) as response:
            return await response.json()
    except aiohttp.client_exceptions.ClientResponseError as error:
        LOGGER.info(error)


async def async_post_batch(url, session, data, params=None, headers=None):
    """POST serialized batch bytes to *url* and return the final status.

    When the submit response carries a 'link', the link is polled (with
    wait=WAIT) so the caller sees the terminal batch status instead of the
    initial PENDING one.
    """
    if not headers:
        # default to the content type the batch submit endpoint expects
        headers = {'Content-Type': 'application/octet-stream'}
    try:
        async with session.post(url, data=data, headers=headers) as response:
            body = await response.json()
        if 'link' in body:
            return await async_fetch_url(
                '{}&wait={}'.format(body['link'], WAIT), session)
        return body
    except aiohttp.client_exceptions.ClientResponseError as error:
        LOGGER.info(error)


def _header_signatures(messages):
    """Return the header_signature id of each protobuf txn/batch."""
    return [MessageToDict(message,
                          including_default_value_fields=True,
                          preserving_proto_field_name=True)['header_signature']
            for message in messages]


def _one_txn_batches(txns, signer):
    """Create one batch per transaction (1 txn/batch) and serialize each
    into BatchList bytes; return (batch_ids, payloads)."""
    batches = [create_batch([txn], signer) for txn in txns]
    payloads = [BatchList(batches=[batch]).SerializeToString()
                for batch in batches]
    return _header_signatures(batches), payloads


async def _post_payloads(url, payloads, headers=None):
    """POST every payload concurrently and return the list of responses.

    Fix: the original left the responses variable unbound when the session
    raised before gather, so the caller crashed with NameError instead of
    seeing an empty result.
    """
    responses = []
    try:
        async with aiohttp.ClientSession() as session:
            tasks = [asyncio.ensure_future(
                         async_post_batch(url, session, data=payload,
                                          headers=headers))
                     for payload in payloads]
            responses = await asyncio.gather(*tasks)
    except aiohttp.client_exceptions.ClientResponseError:
        LOGGER.info("Rest Api is Unreachable")
    return responses


class TestPostList(RestApiBaseTest):
    """POST /batches tests: happy path, malformed bodies and bad headers."""

    def _verify_batch_commits(self, responses, expected_batch_ids):
        """Log each batch's commit status, then assert network consensus."""
        block_batch_ids = [block['header']['batch_ids'][0]
                           for block in get_blocks()['data']]

        for response in responses:
            status = response['data'][0]['status']
            if status == 'COMMITTED':
                LOGGER.info('Batch is committed')
                for batch in expected_batch_ids:
                    if batch in block_batch_ids:
                        LOGGER.info("Block is created for the respective batch")
            elif status == 'INVALID':
                LOGGER.info('Batch is not committed')
                invalid = response['data'][0]['invalid_transactions'][0]
                if 'message' in invalid:
                    LOGGER.info(invalid['message'])
                for batch in expected_batch_ids:
                    if batch not in block_batch_ids:
                        LOGGER.info("Block is not created for the respective batch")

        chains = _get_node_chains(_get_node_list())
        assert check_for_consensus(chains, BLOCK_TO_CHECK_CONSENSUS)

    async def test_rest_api_post_batch(self):
        """Submit intkey 'set' batches and verify they commit into blocks."""
        LOGGER.info('Starting test for batch post')
        signer = get_signer()
        url = '{}/batches'.format(_get_client_address())

        LOGGER.info("Creating intkey transactions with set operations")
        txns = [create_intkey_transaction("set", [], 50, signer)]

        LOGGER.info("Creating batches for transactions 1trn/batch")
        expected_batch_ids, payloads = _one_txn_batches(txns, signer)

        LOGGER.info("Submitting batches to the handlers")
        responses = await _post_payloads(url, payloads)
        self._verify_batch_commits(responses, expected_batch_ids)

    async def test_rest_api_no_batches(self):
        """An empty POST body must be rejected with NO_BATCHES_SUBMITTED."""
        LOGGER.info("Starting test for post with no batches")
        url = '{}/batches'.format(_get_client_address())
        response = await _post_payloads(url, [EMPTY_BATCH])
        self.assert_valid_error(response[0], NO_BATCHES_SUBMITTED)

    async def test_rest_api_bad_protobuf(self):
        """Garbage bytes must be rejected with BAD_PROTOBUF_SUBMITTED."""
        LOGGER.info("Starting test for batch with bad protobuf")
        url = '{}/batches'.format(_get_client_address())
        response = await _post_payloads(url, [BAD_PROTOBUF])
        self.assert_valid_error(response[0], BAD_PROTOBUF_SUBMITTED)

    async def test_rest_api_post_wrong_header(self, setup):
        """Posting with a JSON content type must fail with WRONG_HEADER_TYPE."""
        LOGGER.info('Starting test for batch post with a wrong header')
        signer = get_signer()
        url = '{}/batches'.format(_get_client_address())
        headers = {'Content-Type': 'application/json'}

        txns = [create_intkey_transaction("set", [], 50, signer)]
        _, payloads = _one_txn_batches(txns, signer)

        responses = await _post_payloads(url, payloads, headers=headers)
        for response in responses:
            self.assert_valid_error(response, WRONG_HEADER_TYPE)

    async def test_rest_api_post_same_txns(self, setup):
        """Submit two transactions targeting the same key.

        NOTE(review): like the original, this only posts the batches; it
        makes no assertion on the duplicate-key outcome.
        """
        LOGGER.info('Starting test for batch post with identical keys')
        signer = get_signer()
        url = '{}/batches'.format(_get_client_address())

        txns = [
            create_intkey_same_transaction("set", [], 50, signer),
            create_intkey_same_transaction("set", [], 50, signer),
        ]
        _, payloads = _one_txn_batches(txns, signer)
        await _post_payloads(url, payloads)

    async def test_rest_api_multiple_txns_batches(self, setup):
        """Submit several one-transaction batches and verify they commit."""
        LOGGER.info('Starting test for multiple batches post')
        signer = get_signer()
        url = '{}/batches'.format(_get_client_address())

        txns = [
            create_intkey_transaction("set", [], 50, signer),
            create_intkey_transaction("set", [], 50, signer),
        ]
        expected_batch_ids, payloads = _one_txn_batches(txns, signer)

        LOGGER.info("Submitting batches to the handlers")
        responses = await _post_payloads(url, payloads)
        self._verify_batch_commits(responses, expected_batch_ids)

    async def test_api_post_empty_trxns_list(self, setup_empty_trxs_batch):
        """Submit a fixture-built batch carrying no transactions.

        NOTE(review): like the original, the response is not asserted.
        """
        url = '{}/batches'.format(_get_client_address())
        payloads = [BatchList(batches=[setup_empty_trxs_batch]).SerializeToString()]
        await _post_payloads(url, payloads)

    async def test_api_post_batch_different_signer(self, setup):
        """A batch signed by a different key than the transaction's batcher
        key must be rejected as INVALID_BATCH."""
        url = '{}/batches'.format(_get_client_address())
        signer_trans = get_signer()
        txn = create_intkey_transaction("set", [], 50, signer_trans)
        batch = create_batch([txn], get_signer())  # different batch signer
        payloads = [BatchList(batches=[batch]).SerializeToString()]

        response = await _post_payloads(url, payloads)
        self.assert_valid_error(response[0], INVALID_BATCH)

    async def test_rest_api_post_no_endpoint(self, setup):
        """POST to a bare '/' URL: the client must raise InvalidURL rather
        than reach any handler.  (The original's '/'.format(address) was a
        no-op format call; the URL really is just '/'.)"""
        url = '/'
        signer = get_signer()
        txn = create_intkey_transaction("set", [], 50, signer)
        batch = create_batch([txn], signer)
        payloads = [BatchList(batches=[batch]).SerializeToString()]
        try:
            await _post_payloads(url, payloads)
        except aiohttp.client_exceptions.InvalidURL:
            LOGGER.info("Url is not correct")


class TestPostInvalidTxns(RestApiBaseTest):
    """Checks over fixtures that submit invalid transaction batches."""

    @staticmethod
    def _assert_lengths_grew(result):
        """The batch and transaction lists must have grown after the fixture
        posted its batch, even when the batch itself is invalid."""
        assert result['initial_batch_length'] < result['expected_batch_length']
        assert result['initial_trn_length'] < result['expected_trn_length']

    def test_txn_invalid_addr(self, setup_invalid_txns):
        self._assert_lengths_grew(setup_invalid_txns)
        assert setup_invalid_txns['response'] == 'INVALID'

    def test_txn_invalid_min(self, setup_invalid_txns_min):
        self._assert_lengths_grew(setup_invalid_txns_min)
        assert setup_invalid_txns_min['response'] == 'INVALID'

    def test_txn_invalid_max(self, setup_invalid_txns_max):
        self._assert_lengths_grew(setup_invalid_txns_max)
        assert setup_invalid_txns_max['response'] == 'INVALID'

    def test_txn_valid_invalid_txns(self, setup_valinv_txns):
        self._assert_lengths_grew(setup_valinv_txns)
        assert setup_valinv_txns['response'] == 'INVALID'

    def test_txn_invalid_valid_txns(self, setup_invval_txns):
        self._assert_lengths_grew(setup_invval_txns)
        assert setup_invval_txns['response'] == 'INVALID'

    def test_txn_same_txns(self, setup_same_txns):
        self._assert_lengths_grew(setup_same_txns)
        # 30 == INVALID_BATCH; use the named constant for consistency
        assert setup_same_txns['code'] == INVALID_BATCH

    def test_api_sent_commit_txns(self, setup_valid_txns):
        """A committed transaction must have a SET state-change receipt;
        otherwise the API must answer with RECEIPT_NOT_FOUND."""
        expected_transaction = setup_valid_txns['expected_txns']
        # the fixture wraps the id like "['<id>']"; strip the wrapping
        transaction_id = str(expected_transaction)[2:-2]
        try:
            response = get_reciepts(transaction_id)
            assert transaction_id == response['data'][0]['id']
            assert response['data'][0]['state_changes'][0]['type'] == "SET"
        except urllib.error.HTTPError as error:
            response = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(response['error']['title'])
            LOGGER.info(response['error']['message'])
            assert response['error']['code'] == RECEIPT_NOT_FOUND

    def test_txn_invalid_bad_addr(self, setup_invalid_invaddr):
        self._assert_lengths_grew(setup_invalid_invaddr)

    def test_txn_invalid_family_name(self, setup_invalid_txns_fn):
        self._assert_lengths_grew(setup_invalid_txns_fn)
+log_file_format = %(asctime)s %(levelname)s %(message)s +log_format = %(asctime)s %(levelname)s %(message)s diff --git a/rest_api/tests/api_test/requirements.txt b/rest_api/tests/api_test/requirements.txt new file mode 100644 index 0000000000..e6c1ecacce --- /dev/null +++ b/rest_api/tests/api_test/requirements.txt @@ -0,0 +1,5 @@ +pytest==3.9.1 +pytest-aiohttp==0.3.0 +pytest-json-report==0.7.0 +pytest-metadata==1.7.0 +pytest-ordering==0.5 diff --git a/rest_api/tests/api_test/scenario/test_rest_api_scenario.py b/rest_api/tests/api_test/scenario/test_rest_api_scenario.py new file mode 100644 index 0000000000..56cd93df38 --- /dev/null +++ b/rest_api/tests/api_test/scenario/test_rest_api_scenario.py @@ -0,0 +1,131 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error +import base64 +import argparse +import cbor +import subprocess +import shlex +import requests +import time + +from google.protobuf.json_format import MessageToDict + +from payload import get_signer, create_intkey_transaction , create_batch +from utils import _get_client_address, _send_cmd, _get_node_list, \ + _get_node_chain, check_for_consensus + +from base import RestApiBaseTest + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) +WAIT = 300 + +WORKLOAD_TIME = 5 + +BLOCK_TO_CHECK_CONSENSUS = 1 + +INTKEY_PREFIX = '1cf126' +XO_PREFIX = '5b7349' + + +pytestmark = pytest.mark.scenario +''' +class TestScenario(RestApiBaseTest): + def test_rest_api_mul_val_intk_xo(self): + """Tests that transactions are submitted and committed for + each block that are created by submitting intkey and XO batches + """ + signer = get_signer() + expected_trxns = {} + expected_batches = [] + node_list = [{_get_client_address()}] + + LOGGER.info('Starting Test for Intkey and Xo as payload') + + LOGGER.info("Creating intkey batches") + + txns = [ + create_intkey_transaction("set", [] , 50 , signer), + create_intkey_transaction("set", [] , 50 , signer), + ] + + for txn in txns: + dict = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + expected_trxns['trxn_id'] = [dict['header_signature']] + expected_trxns['payload'] = [dict['payload']] + + LOGGER.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + LOGGER.info("Creating keys for xo users") + + for username in ('aditya', 'singh'): + _send_cmd('sawtooth keygen {} --force'.format(username)) + + + LOGGER.info("Submitting xo batches to the handlers") + + + xo_cmds = ( + 'xo create game-1 --username aditya', + 'xo take game-1 1 --username 
singh', + 'xo take game-1 4 --username aditya', + 'xo take game-1 2 --username singh', + ) + + for cmd in xo_cmds: + _send_cmd( + '{} --url {} --wait {}'.format( + cmd, + _get_client_address(), + WAIT)) + xo_cli_cmds = ( + 'xo list', + 'xo show game-1', + ) + + for cmd in xo_cli_cmds: + _send_cmd( + '{} --url {}'.format( + cmd, + _get_client_address())) + + xo_delete_cmds = ( + 'xo delete game-1 --username aditya', + ) + + for cmd in xo_delete_cmds: + _send_cmd( + '{} --url {} --wait {}'.format( + cmd, + _get_client_address(), + WAIT)) + + node_list = _get_node_list() + + chains = _get_node_chain(node_list) + check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) +''' diff --git a/rest_api/tests/api_test/ssh.py b/rest_api/tests/api_test/ssh.py new file mode 100644 index 0000000000..1f01284da4 --- /dev/null +++ b/rest_api/tests/api_test/ssh.py @@ -0,0 +1,36 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ + +import paramiko + + +class SSH(): + def do_ssh(self,hostname,port,username,password): + try: + ssh=paramiko.SSHClient() + ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + ssh.connect(hostname,port,username,password) + except paramiko.AuthenticationException: + print("Failed to connect to {} due to wrong username/password".format(hostname)) + exit(1) + except: + print("Failed to connect to {}".format(hostname)) + exit(2) + + command = 'ps aux | grep sawtooth' + stdin,stdout,stderr=ssh.exec_command(command) + outlines=stdout.readlines() + resp=''.join(outlines) + ssh.close() \ No newline at end of file diff --git a/rest_api/tests/api_test/thread.py b/rest_api/tests/api_test/thread.py new file mode 100644 index 0000000000..e58ed4a121 --- /dev/null +++ b/rest_api/tests/api_test/thread.py @@ -0,0 +1,116 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# ------------------------------------------------------------------------------
import queue
import threading
import os
import logging


from workload import Workload
from ssh import SSH
from utils import _get_node_chains

logging.basicConfig(level=logging.INFO,
                    format='(%(threadName)-10s) %(message)s',
                    )


def wait_for_event(e):
    """Block until threading.Event *e* is set."""
    logging.debug('wait_for_event starting')
    event_is_set = e.wait()
    logging.debug('event set: %s', event_is_set)


def wait_for_event_timeout(e, t):
    """Poll *e* every *t* seconds until it becomes set."""
    while not e.is_set():  # fix: isSet() is the deprecated camelCase alias
        logging.debug('wait_for_event_timeout starting')
        event_is_set = e.wait(t)
        logging.debug('event set: %s', event_is_set)
        if event_is_set:
            logging.debug('processing event')
        else:
            logging.debug('doing other work')


class Workload_thread(threading.Thread):
    """Runs the workload generator on its own thread."""

    def __init__(self):
        threading.Thread.__init__(self)
        self.shutdown_flag = threading.Event()

    def run(self):
        logging.info('Starting Workload')
        workload = Workload()
        workload.do_workload()

    def stop(self):
        # NOTE(review): shutdown_flag is never consulted by run(); stopping
        # is currently a no-op, as in the original.
        pass


class SSH_thread(threading.Thread):
    """Opens an SSH session to a validator host on a separate thread."""

    def __init__(self, hostname, port, username, password):
        threading.Thread.__init__(self)
        self.hostname = hostname
        self.port = port
        self.username = username
        self.password = password

    def run(self):
        logging.info('starting ssh thread')
        logging.info('Logging into Validation Network')
        self.ssh()
        logging.info('Exiting ssh thread')

    def ssh(self):
        """Create the SSH helper and connect with the stored credentials."""
        logging.info('creating ssh object')
        ssh = SSH()
        logging.info('performing ssh')
        ssh.do_ssh(self.hostname, self.port, self.username, self.password)

    def stop_validator(self):
        # fix: original called the undefined name 'loggin' (NameError)
        logging.info("stopping validator service")

    def start_validator(self):
        # fix: original called the undefined name 'loggin' (NameError)
        logging.info("starting validator service")


class Consensus_Thread(threading.Thread):
    """Collects each node's chain and compares them for consensus."""

    def __init__(self, nodes):
        threading.Thread.__init__(self)
        self.shutdown_flag = threading.Event()
        self.nodes = nodes

    def run(self):
        logging.info('starting consensus thread')
        logging.info('calculating block list from the nodes')
        chains = self.calculate_block_list()
        self.compare_chains(chains)

    def calculate_block_list(self):
        """Fetch the chain of every configured node."""
        logging.info('getting block list from the nodes')
        # fix: the original ignored self.nodes and hard-coded one node URL
        return _get_node_chains(self.nodes)

    def compare_chains(self, chains):
        # NOTE(review): comparison was not implemented in the original either
        logging.info('comparing chains for equality')

    def calculate_sync_time(self):
        pass
# ------------------------------------------------------------------------------

"""Helper utilities for driving the Sawtooth REST API from the api tests.

Every ``get_*`` / ``post_*`` helper returns the decoded JSON response as a
dict.  The REST API endpoint is discovered from the local host's IP address
(see ``_get_client_address``), so these helpers assume they run on a node.
"""

import json
import logging
import os
import shlex
import subprocess
import urllib.request

# NOTE(review): pytest, cbor, aiohttp and the unused urllib/base64/argparse/
# hashlib/time imports were removed -- nothing in this module used them, and
# the unconditional third-party imports made the module fail to load on hosts
# without those packages.  `requests` is imported lazily where it is needed.

__all__ = [
    'get_blocks', 'get_batches', 'get_batch_id', 'get_block_id',
    'get_transaction_id', 'get_peers', 'get_transactions', 'get_state_list',
    'get_state_address', 'post_batch', 'query_rest_api', 'submit_request',
    'check_for_consensus', 'post_batch_statuses', 'get_batch_statuses',
    'get_state_limit', 'get_reciepts', 'post_receipts', 'state_count',
    'batch_count', 'transaction_count', 'post_batch_no_endpoint',
    '_delete_genesis', '_get_node_chain', '_get_node_chains', '_get_node_list',
    '_get_peers_list', '_run_peer_command', '_send_cmd', '_make_http_address',
    '_get_client_address', '_start_validator', '_stop_validator',
    '_start_settings_tp', '_stop_settings_tp', '_create_genesis',
    '_create_genesis_batch', '_create_expected_link', '_get_batch_list',
    '_get_transaction_list', '_get_state_list',
]

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

# Seconds the REST API is asked to wait for a batch to commit before replying.
WAIT = 300


def _list_query(path, filter_param, filter_value,
                head_id=None, start=None, limit=None, reverse=None):
    """Build and run a list query against ``/<path>``.

    Exactly one URL form is chosen, using the same precedence order the four
    public wrappers historically used: head+filter, start+limit, limit,
    start, head, filter, reverse, then the bare listing.
    """
    if head_id is not None and filter_value is not None:
        return query_rest_api('/{}?head={}&{}={}'.format(
            path, head_id, filter_param, filter_value))
    if start is not None and limit is not None:
        return query_rest_api('/{}?start={}&limit={}'.format(path, start, limit))
    if limit is not None:
        return query_rest_api('/{}?limit={}'.format(path, limit))
    if start is not None:
        return query_rest_api('/{}?start={}'.format(path, start))
    if head_id is not None:
        return query_rest_api('/{}?head={}'.format(path, head_id))
    if filter_value is not None:
        return query_rest_api('/{}?{}={}'.format(path, filter_param, filter_value))
    if reverse:
        return query_rest_api('/{}?reverse'.format(path))
    return query_rest_api('/{}'.format(path))


def get_blocks(head_id=None, id=None, start=None, limit=None, reverse=None):
    """List blocks, optionally filtered/paged (see ``_list_query``)."""
    return _list_query('blocks', 'id', id, head_id, start, limit, reverse)


def get_batches(head_id=None, id=None, start=None, limit=None, reverse=None):
    """List batches, optionally filtered/paged (see ``_list_query``)."""
    return _list_query('batches', 'id', id, head_id, start, limit, reverse)


def get_transactions(head_id=None, id=None, start=None, limit=None, reverse=None):
    """List transactions, optionally filtered/paged (see ``_list_query``)."""
    return _list_query('transactions', 'id', id, head_id, start, limit, reverse)


def get_state_list(head_id=None, address=None, start=None, limit=None, reverse=None):
    """List state entries, optionally filtered/paged (see ``_list_query``)."""
    return _list_query('state', 'address', address, head_id, start, limit, reverse)


def get_batch_id(batch_id):
    """Fetch a single batch by its id."""
    return query_rest_api('/batches/%s' % batch_id)


def get_block_id(block_id):
    """Fetch a single block by its id."""
    return query_rest_api('/blocks/%s' % block_id)


def get_transaction_id(transaction_id):
    """Fetch a single transaction by its id."""
    return query_rest_api('/transactions/%s' % transaction_id)


def get_peers():
    """Fetch this node's peer list from the REST API."""
    return query_rest_api('/peers')


def get_state_address(address):
    """Fetch the state entry stored at ``address``."""
    return query_rest_api('/state/%s' % address)


def post_batch(batch, headers="None"):
    """Submit a batch and wait (up to WAIT seconds) for it to commit.

    ``headers`` keeps its historical string sentinel for backward
    compatibility: pass the string "True" for a JSON content type; any other
    value posts as octet-stream.
    """
    if headers == "True":
        headers = {'Content-Type': 'application/json'}
    else:
        headers = {'Content-Type': 'application/octet-stream'}
    response = query_rest_api('/batches', data=batch, headers=headers)
    # The returned 'link' already carries a query string, so append with '&'.
    return submit_request('{}&wait={}'.format(response['link'], WAIT))


def query_rest_api(suffix='', data=None, headers=None):
    """GET (or POST, when ``data`` is given) <rest-api><suffix>, decode JSON."""
    if headers is None:
        headers = {}
    url = _get_client_address() + suffix
    return submit_request(urllib.request.Request(url, data, headers))


def submit_request(request):
    """Execute a urllib ``Request`` (or plain URL string), decode JSON body.

    Uses a context manager so the HTTP connection is always closed (the
    original leaked the response object).
    """
    with urllib.request.urlopen(request) as conn:
        return json.loads(conn.read().decode('utf-8'))


def _delete_genesis():
    """Best-effort removal of every regular file under /var/lib/sawtooth."""
    folder = '/var/lib/sawtooth'
    for name in os.listdir(folder):
        file_path = os.path.join(folder, name)
        try:
            if os.path.isfile(file_path):
                os.unlink(file_path)
        except OSError as err:
            LOGGER.warning('Could not delete %s: %s', file_path, err)


def _get_node_chains(node_list):
    """Fetch each node's block list; unreachable nodes are skipped."""
    # Third-party dependency imported lazily so this module loads on hosts
    # that do not have `requests` installed.
    import requests
    chain_list = []
    for node in node_list:
        try:
            chain_list.append(requests.get(node + "/blocks").json()['data'])
        except Exception:  # network/JSON errors: skip node, keep going
            LOGGER.warning("Couldn't connect to %s REST API", node)
    return chain_list


def _get_node_chain(node_list):
    """Backward-compatible alias for ``_get_node_chains``."""
    return _get_node_chains(node_list)


def _get_node_list():
    """Return REST API endpoints for all peers plus this node."""
    client_address = _get_client_address()
    node_list = [_make_http_address(peer)
                 for peer in _get_peers_list(client_address)]
    node_list.append(client_address)
    return node_list


def _get_peers_list(rest_client, fmt='json'):
    """Run ``sawtooth peer list`` against ``rest_client``; return a set."""
    cmd_output = _run_peer_command(
        'sawtooth peer list --url {} --format {}'.format(rest_client, fmt))
    if fmt == 'json':
        parsed = json.loads(cmd_output)
    elif fmt == 'csv':
        parsed = cmd_output.split(',')
    else:
        # Original raised UnboundLocalError here; fail with a clear message.
        raise ValueError('unsupported peer-list format: {}'.format(fmt))
    return set(parsed)


def check_for_consensus(chains, block_num):
    """Return True iff every chain agrees on the block ``block_num`` deep.

    ``chains`` is a list of per-node block lists (newest first, as returned
    by /blocks); an entry of None means that node was unreachable and counts
    as no consensus.

    Fixes the original, which returned True after comparing only the first
    pair of chains and returned None when given a single chain.
    """
    LOGGER.info("Checking Consensus on block number %s", block_num)
    blocks = []
    for chain in chains:
        if chain is None:
            return False
        blocks.append(chain[-(block_num + 1)])
    reference = blocks[0]
    for block in blocks[1:]:
        if reference["header_signature"] != block["header_signature"]:
            LOGGER.error("Validators not in consensus on block %s", block_num)
            LOGGER.error("BLOCK DUMP: %s", blocks)
            return False
    LOGGER.info('Validators in Consensus on block number %s', block_num)
    return True


def _run_peer_command(command):
    """Run a CLI command and return its stdout, single-quotes normalized."""
    return subprocess.check_output(
        shlex.split(command)
    ).decode().strip().replace("'", '"')


def _send_cmd(cmd_str):
    """Run a CLI command, raising CalledProcessError on failure."""
    LOGGER.info('Sending %s', cmd_str)
    subprocess.run(shlex.split(cmd_str), check=True)


def _make_http_address(node_number):
    """Map a validator endpoint (tcp://...:8800) to its REST API URL."""
    return node_number.replace('tcp', 'http').replace('8800', '8008')


def _get_client_address():
    """Return this host's REST API endpoint, derived from ``hostname -I``."""
    # shell=True is acceptable here: the command string is a fixed literal.
    command = "hostname -I | awk '{print $1}'"
    node_ip = subprocess.check_output(
        command, shell=True).decode().strip().replace("'", '"')
    return 'http://' + node_ip + ':8008'


def _start_validator():
    """Launch sawtooth-validator in the background (requires sudo)."""
    LOGGER.info('Starting the validator')
    cmd = "sudo -u sawtooth sawtooth-validator -vv"
    subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)


def _stop_validator():
    """Kill any running sawtooth-validator processes (requires sudo)."""
    LOGGER.info('Stopping the validator')
    cmd = "sudo kill -9 $(ps aux | grep 'sawtooth-validator' | awk '{print $2}')"
    subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)


def _start_settings_tp():
    """Launch the settings transaction processor in the background."""
    LOGGER.info('Starting settings-tp')
    cmd = " sudo -u sawtooth settings-tp -vv "
    subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)


def _stop_settings_tp():
    """Kill any running settings-tp processes (requires sudo)."""
    LOGGER.info('Stopping the settings-tp')
    cmd = "sudo kill -9 $(ps aux | grep 'settings-tp' | awk '{print $2}')"
    subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)


def _create_genesis():
    """Create genesis data from config-genesis.batch.

    NOTE(review): the hard-coded /home/aditya working directory should be
    made configurable -- confirm with the test environment owner.
    """
    LOGGER.info("creating the genesis data")
    _create_genesis_batch()
    os.chdir("/home/aditya")
    cmd = "sawadm genesis config-genesis.batch"
    subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)


def _create_genesis_batch():
    """Create the config genesis batch (see NOTE in ``_create_genesis``)."""
    LOGGER.info("creating the config genesis batch")
    os.chdir("/home/aditya")
    cmd = "sawset genesis --force"
    subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)


def post_batch_statuses(batch):
    """POST a JSON body of batch ids to /batch_statuses."""
    headers = {'content-type': 'application/json'}
    return query_rest_api('/batch_statuses', data=batch, headers=headers)


def get_batch_statuses(batch_ids=None, wait=None):
    """Query commit status for the given batch ids.

    ``wait='default'`` appends a bare ``?wait`` flag; any other truthy wait
    is passed through as ``wait=<n>``; with no ids the full listing returns.
    """
    batches = ",".join(batch_ids) if batch_ids else None
    if batches:
        if wait == 'default':
            return query_rest_api('/batch_statuses?wait&id={}'.format(batches))
        if wait:
            return query_rest_api(
                '/batch_statuses?id={}&wait={}'.format(batches, wait))
        return query_rest_api('/batch_statuses?id=%s' % batches)
    return query_rest_api('/batch_statuses')


def get_state_limit(limit):
    """List state entries with an explicit page-size limit."""
    return query_rest_api('/state?limit=%s' % limit)


def get_reciepts(reciept_id):
    """Fetch transaction receipts by id.

    NOTE: the misspelled name ("reciepts") is kept for backward
    compatibility with existing callers.
    """
    return query_rest_api('/receipts?id=%s' % reciept_id)


def post_receipts(receipts):
    """POST a JSON body of transaction ids to /receipts."""
    headers = {'Content-Type': 'application/json'}
    return query_rest_api('/receipts', data=receipts, headers=headers)


def _paging_next(response):
    """Paging cursor from a list response, or None on the last page."""
    try:
        return response['paging']['next_position']
    except (KeyError, TypeError):
        return None


def _count_resources(fetch):
    """Walk every page returned by ``fetch`` and count the entries."""
    page = fetch()
    count = len(page['data'])
    next_position = _paging_next(page)
    while next_position:
        page = fetch(start=next_position)
        count += len(page['data'])
        next_position = _paging_next(page)
    return count


def state_count():
    """Total number of state entries across all pages."""
    return _count_resources(get_state_list)


def batch_count():
    """Total number of batches across all pages."""
    return _count_resources(get_batches)


def transaction_count():
    """Total number of transactions across all pages."""
    return _count_resources(get_transactions)


def _create_expected_link(expected_ids, address=None):
    """Build the expected /batch_statuses link for the given batch ids.

    Fixes the original, which referenced an undefined global ``address`` and
    used a three-placeholder format string with only two arguments
    (IndexError).  ``address`` defaults to this node's REST endpoint.
    """
    if address is None:
        address = _get_client_address()
    return '{}/batch_statuses?id={}'.format(address, ','.join(expected_ids))


def _collect_pages(response, fetch):
    """Concatenate the 'data' entries of every page, starting at response."""
    items = response['data']
    next_position = _paging_next(response)
    while next_position:
        page = fetch(start=next_position)
        next_position = _paging_next(page)
        items += page['data']
    return items


def _get_batch_list(response):
    """All batches, following paging from the given first response."""
    return _collect_pages(response, get_batches)


def _get_transaction_list(response):
    """All transactions, following paging from the given first response."""
    return _collect_pages(response, get_transactions)


def _get_state_list(response):
    """All state entries, following paging from the given first response."""
    return _collect_pages(response, get_state_list)


def post_batch_no_endpoint(batch, headers="None"):
    """Submit a batch to '/' (negative test: wrong endpoint) and wait.

    Same string-sentinel ``headers`` convention as ``post_batch``.
    """
    if headers == "True":
        headers = {'Content-Type': 'application/json'}
    else:
        headers = {'Content-Type': 'application/octet-stream'}
    response = query_rest_api('/', data=batch, headers=headers)
    return submit_request('{}&wait={}'.format(response['link'], WAIT))
# Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------

import subprocess
import logging

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)


class Workload():
    """Placeholder driver for generating intkey workload during tests.

    Currently only announces the workload start; the actual
    ``intkey workload`` subprocess invocation is intentionally disabled.
    """

    def do_workload(self):
        """Announce the start of the intkey workload.

        NOTE(review): the real command
        ``intkey workload --url 10.223.155.43:8008 --rate 1 -d 1``
        is intentionally not launched yet.
        """
        LOGGER.info('Starting Intkey Workload')

    def stop_workload(self):
        """Placeholder: stopping the workload is not implemented yet."""
        pass