# Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
"""Shared assertion helpers for the REST API test suite (base.py)."""

import logging
import time
from base64 import b64decode
from urllib.error import HTTPError, URLError
from urllib.request import urlopen

LOGGER = logging.getLogger(__name__)

# Expected values for a devmode network running the intkey transaction family.
CONSENSUS_ALGO = b'Devmode'
FAMILY_NAME = 'intkey'
FAMILY_VERSION = '1.0'
DEFAULT_LIMIT = 100
TRACE = False
NONCE = ''


class RestApiBaseTest(object):
    """Base class for Rest Api tests that simplifies making assertions
    for the test cases.
    """

    def assert_status(self, response, status):
        """Asserts every entry in the response's data list has the status."""
        for data in response['data']:
            assert data['status'] == status

    def assert_equal(self, response, data):
        """Asserts the response equals the expected data."""
        assert response == data

    def assert_check_nonce(self, response):
        """Asserts the header carries the expected (empty) nonce."""
        assert 'nonce' in response['header']
        assert response['header']['nonce'] == NONCE

    def assert_check_family(self, response):
        """Asserts family name and version in the transaction header."""
        assert 'family_name' in response['header']
        assert 'family_version' in response['header']
        assert response['header']['family_name'] == FAMILY_NAME
        assert response['header']['family_version'] == FAMILY_VERSION

    def assert_check_dependency(self, response):
        """Asserts the transaction header lists its dependencies."""
        assert 'dependencies' in response['header']

    def assert_content(self, response):
        """Asserts the header declares inputs and outputs."""
        assert 'inputs' in response['header']
        assert 'outputs' in response['header']

    def assert_payload_algo(self, response):
        """Asserts the payload hash field (SHA-512) is present."""
        assert 'payload_sha512' in response['header']

    def assert_payload(self, txn, payload):
        """Asserts the transaction carries the expected payload and hash."""
        assert 'payload' in txn
        assert payload == txn['payload']
        self.assert_payload_algo(txn)

    def assert_batcher_public_key(self, response, public_key):
        """Asserts batcher public key in response.

        NOTE(review): this checks 'signer_public_key'; a transaction header
        normally exposes a separate 'batcher_public_key' field — confirm
        which field the REST API returns before tightening this.
        """
        assert 'signer_public_key' in response['header']
        assert public_key == response['header']['signer_public_key']

    def assert_signer_public_key(self, response, public_key):
        """Asserts the header's signer public key matches."""
        assert 'signer_public_key' in response['header']
        assert public_key == response['header']['signer_public_key']

    def assert_trace(self, response):
        """Asserts the trace flag is present and disabled."""
        assert 'trace' in response
        assert response['trace'] == TRACE

    def assert_check_consensus(self, response):
        """Asserts the response carries the expected consensus value."""
        assert 'consensus' in response
        assert response['consensus'] == CONSENSUS_ALGO

    def assert_state_root_hash(self, response):
        """Asserts the response has a state root hash."""
        assert 'state_root_hash' in response

    def assert_previous_block_id(self, response):
        """Asserts the response has a previous block id."""
        assert 'previous_block_id' in response

    def assert_block_num(self, response):
        """Asserts the response has a block number."""
        assert 'block_num' in response

    def assert_items(self, items, cls):
        """Asserts every item in a collection is an instance of cls."""
        for item in items:
            assert isinstance(item, cls)

    def assert_valid_head(self, response, expected):
        """Asserts the response has a head string with the expected value."""
        assert 'head' in response
        head = response['head']
        assert isinstance(head, str)
        assert head == expected

    def assert_valid_link(self, response, expected_link):
        """Asserts the response has a link url equal to expected_link."""
        assert 'link' in response
        assert response['link'] == expected_link
        self.assert_valid_url(response['link'], expected_link)

    def assert_valid_url(self, url, expected_link):
        """Asserts a url is valid and ends with the expected value."""
        assert isinstance(url, str)
        assert url.startswith('http')
        assert url.endswith(expected_link)

    def assert_transaction_ids(self, response, expected):
        """Asserts the header's first transaction id matches expected."""
        assert 'transaction_ids' in response['header']
        assert response['header']['transaction_ids'][0] == expected

    def assert_valid_paging(self, response, expected_link):
        """Asserts a response has a paging dict with the expected values."""
        assert 'paging' in response
        paging = response['paging']

        if 'next' in paging and expected_link is not None:
            assert 'next_position' in paging
            self.assert_valid_url(response['link'], expected_link)
        else:
            assert 'next' not in paging
            # Identity comparison: these are literal JSON nulls.
            assert paging['start'] is None
            assert paging['limit'] is None

    def assert_valid_error(self, response, expected_code):
        """Asserts a response is only an error dict with the expected code."""
        assert 'error' in response
        assert len(response) == 1

        error = response['error']
        assert 'code' in error
        assert error['code'] == expected_code
        assert 'title' in error
        assert isinstance(error['title'], str)
        assert 'message' in error
        assert isinstance(error['message'], str)

    def assert_valid_data(self, response):
        """Asserts a response has a data list of dicts."""
        assert 'data' in response
        data = response['data']
        assert isinstance(data, list)
        self.assert_items(data, dict)

    def assert_valid_data_list(self, response, expected_length):
        """Asserts the given collection has the expected length."""
        assert len(response) == expected_length

    def assert_check_block_seq(self, blocks, expected_batches, expected_txns,
                               payload=None, signer_key=None):
        """Asserts blocks are constructed properly after submitting batches.

        ``payload`` and ``signer_key`` are optional (backward compatible):
        when both are supplied, each block's batches are verified in depth
        through assert_check_batch_seq — the original 3-argument call site
        could never satisfy that method's 5-argument signature.
        """
        if not isinstance(blocks, list):
            blocks = [blocks]

        for block, expected_batch, expected_txn in zip(
                blocks, expected_batches, expected_txns):
            assert isinstance(block, dict)
            assert isinstance(block['header'], dict)
            # The consensus field is base64 encoded in JSON responses.
            assert CONSENSUS_ALGO == b64decode(block['header']['consensus'])
            batches = block['batches']
            assert isinstance(batches, list)
            assert len(batches) == 1
            # Fixed: original asserted isinstance(batches, dict) right after
            # asserting it was a list, which always failed.
            self.assert_items(batches, dict)
            if payload is not None and signer_key is not None:
                self.assert_check_batch_seq(batches, expected_batch,
                                            expected_txn, payload, signer_key)

    def assert_check_batch_seq(self, batches, expected_batches, expected_txns,
                               payload, signer_key):
        """Asserts each batch is constructed properly (ids, txns, signer)."""
        if not isinstance(batches, list):
            batches = [batches]

        if not isinstance(expected_batches, list):
            expected_batches = [expected_batches]

        if not isinstance(expected_txns, list):
            expected_txns = [expected_txns]

        for batch, expected_batch, expected_txn in zip(
                batches, expected_batches, expected_txns):
            assert expected_batch == batch['header_signature']
            assert isinstance(batch['header'], dict)
            txns = batch['transactions']
            assert isinstance(txns, list)
            assert len(txns) == 1
            self.assert_items(txns, dict)
            self.assert_transaction_ids(batch, expected_txn)
            self.assert_signer_public_key(batch, signer_key)
            self.assert_trace(batch)
            self.assert_check_transaction_seq(txns, expected_txn,
                                              payload[0], signer_key)

    def assert_check_transaction_seq(self, txns, expected_ids,
                                     payload, signer_key):
        """Asserts each transaction is constructed properly."""
        if not isinstance(txns, list):
            txns = [txns]

        if not isinstance(expected_ids, list):
            expected_ids = [expected_ids]

        for txn, expected_id in zip(txns, expected_ids):
            assert expected_id == txn['header_signature']
            assert isinstance(txn['header'], dict)
            self.assert_payload(txn, payload)
            self.assert_check_family(txn)
            self.assert_check_nonce(txn)
            self.assert_check_dependency(txn)
            self.assert_content(txn)
            self.assert_signer_public_key(txn, signer_key)
            self.assert_batcher_public_key(txn, signer_key)

    def assert_check_state_seq(self, state, expected):
        """Asserts state is updated properly (not yet implemented)."""
        pass


def wait_until_status(url, status_code=200, tries=5):
    """Pause the program until the given url returns the required status.

    Module-level helper (the original was indented inside the class but
    took no ``self``, so it could never be called as a method).

    Args:
        url (str): The url to query.
        status_code (int, optional): The required status code. Defaults
            to 200.
        tries (int, optional): The number of attempts to request the url
            for the given status. Defaults to 5.
    Raises:
        AssertionError: If the status is not received in the given number
            of tries.
    """
    attempts = tries
    while attempts > 0:
        try:
            response = urlopen(url)
            if response.getcode() == status_code:
                return
        except HTTPError as err:
            # An HTTP error status can still be the one we are waiting for.
            if err.code == status_code:
                return
            LOGGER.debug('failed to read url: %s', str(err))
        except URLError as err:
            LOGGER.debug('failed to read url: %s', str(err))

        # Linear backoff: 2s, 4s, 6s, ...
        sleep_time = (tries - attempts + 1) * 2
        LOGGER.debug('Retrying in %s secs', sleep_time)
        time.sleep(sleep_time)

        attempts -= 1

    raise AssertionError(
        "{} is not available within {} attempts".format(url, tries))


def wait_for_rest_apis(endpoints, tries=5):
    """Pause the program until all the given REST API endpoints are available.

    Args:
        endpoints (list of str): A list of host:port strings.
        tries (int, optional): The number of attempts to request the url
            for availability.
    """
    for endpoint in endpoints:
        http = 'http://'
        url = endpoint if endpoint.startswith(http) else http + endpoint
        wait_until_status(
            '{}/blocks'.format(url),
            status_code=200,
            tries=tries)
# ------------------------------------------------------------------------------
"""Pytest configuration (conftest.py): custom CLI options, marker-based
test filtering, and the session-scoped ``setup`` fixture that submits
intkey batches and records the expected REST API state."""

import pytest
import sys
import platform
import inspect
import logging
import urllib
import json
import os

from sawtooth_signing import create_context
from sawtooth_signing import CryptoFactory
from sawtooth_signing import ParseError
from sawtooth_signing.secp256k1 import Secp256k1PrivateKey

from sawtooth_rest_api.protobuf.validator_pb2 import Message
from sawtooth_rest_api.protobuf import client_batch_submit_pb2
from sawtooth_rest_api.protobuf import client_batch_pb2
from sawtooth_rest_api.protobuf import client_list_control_pb2

from sawtooth_rest_api.protobuf.batch_pb2 import Batch
from sawtooth_rest_api.protobuf.batch_pb2 import BatchList
from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader
from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader
from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction

from google.protobuf.json_format import MessageToDict

from utils import get_batches, get_transactions, get_state_address, \
    post_batch, get_blocks, get_state_list, _delete_genesis, \
    _start_validator, _stop_validator, _create_genesis, \
    _get_client_address, _stop_settings_tp, _start_settings_tp, \
    batch_count, transaction_count, get_batch_statuses

from payload import get_signer, create_intkey_transaction, create_batch


LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)


LIMIT = 100


def pytest_addoption(parser):
    """Registers the command line options used by this test suite."""
    parser.addoption(
        "--get", action="store_true", default=False, help="run get tests")
    parser.addoption(
        "--post", action="store_true", default=False, help="run post tests")
    parser.addoption(
        "--sn", action="store_true", default=False,
        help="run scenario based tests")
    parser.addoption(
        "--batch", action="store", metavar="NAME",
        help="only run batch tests.")
    parser.addoption(
        "--transaction", action="store", metavar="NAME",
        help="only run transaction tests.")
    parser.addoption(
        "--state", action="store", metavar="NAME",
        help="only run state tests.")
    # Fixed copy-paste: the original --block help text said "state tests".
    parser.addoption(
        "--block", action="store", metavar="NAME",
        help="only run block tests.")
    parser.addoption(
        "-E", action="store", metavar="NAME",
        help="only run tests matching the environment NAME.")
    parser.addoption(
        "-N", action="store", metavar="NAME",
        help="only run tests matching the Number.")
    parser.addoption(
        "-O", action="store", metavar="NAME",
        help="only run tests matching the OS release version.")


def _has_marker(item, marker_name):
    """True if the collected test item carries the given marker."""
    return any(marker.name == marker_name
               for marker in item.iter_markers())


def pytest_collection_modifyitems(config, items):
    """Filters collected tests based on markers when --get/--post/--sn
    are passed through the cli; -N limits the number of tests run.
    """
    try:
        num = int(config.getoption("-N"))
    except (TypeError, ValueError):
        # -N not supplied (None) or not an integer; run everything.
        num = None

    # The three marker options are mutually exclusive; first match wins,
    # mirroring the original if/elif chain.
    for option, marker_name in (("--get", "get"),
                                ("--post", "post"),
                                ("--sn", "scenario")):
        if config.getoption(option):
            selected_items = [item for item in items
                              if _has_marker(item, marker_name)]
            items[:] = selected_items[:num]
            return items

    items[:] = items[:num]
    return items


@pytest.fixture(scope="session", autouse=True)
def setup(request):
    """Submits two intkey 'set' batches (one txn per batch) and returns a
    dict of the expected REST API state for the whole session.
    """
    data = {}
    signer = get_signer()
    expected_trxns = {'trxn_id': [], 'payload': []}
    expected_batches = []
    # Recorded for parity with the original; currently unused.
    initial_state_length = len(get_state_list())
    initial_batch_length = batch_count()
    initial_transaction_length = transaction_count()
    address = _get_client_address()

    LOGGER.info("Creating intkey transactions with set operations")

    txns = [
        create_intkey_transaction("set", [], 50, signer),
        create_intkey_transaction("set", [], 50, signer),
    ]

    for txn in txns:
        # Renamed from 'dict' to avoid shadowing the builtin.
        txn_dict = MessageToDict(
            txn,
            including_default_value_fields=True,
            preserving_proto_field_name=True)

        # Fixed: the original overwrote these lists on every iteration,
        # recording only the last transaction.
        expected_trxns['trxn_id'].append(txn_dict['header_signature'])
        expected_trxns['payload'].append(txn_dict['payload'])

    LOGGER.info("Creating batches for transactions 1trn/batch")

    batches = [create_batch([txn], signer) for txn in txns]

    for batch in batches:
        batch_dict = MessageToDict(
            batch,
            including_default_value_fields=True,
            preserving_proto_field_name=True)
        expected_batches.append(batch_dict['header_signature'])

    # Fixed: the original counted len(expected_trxns) — the number of dict
    # keys — rather than the number of transactions.
    length_batches = len(expected_batches)
    length_transactions = len(expected_trxns['trxn_id'])

    data['expected_batch_length'] = initial_batch_length + length_batches
    data['expected_trn_length'] = (initial_transaction_length +
                                   length_transactions)
    # Newest-first, matching the REST API's default ordering.
    data['expected_txns'] = expected_trxns['trxn_id'][::-1]
    data['payload'] = expected_trxns['payload'][::-1]
    data['expected_batches'] = expected_batches[::-1]
    data['signer_key'] = signer.get_public_key().as_hex()

    post_batch_list = [BatchList(batches=[batch]).SerializeToString()
                       for batch in batches]

    LOGGER.info("Submitting batches to the handlers")

    for batch in post_batch_list:
        try:
            response = post_batch(batch)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is not reachable")
            response = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(response['error']['title'])
            LOGGER.info(response['error']['message'])

    block_list = get_blocks()
    data['block_list'] = block_list
    batch_list = get_batches()
    data['batch_list'] = batch_list
    transaction_list = get_transactions()
    data['transaction_list'] = transaction_list
    data['transaction_ids'] = [trans['header_signature']
                               for trans in transaction_list['data']]
    block_ids = [block['header_signature'] for block in block_list['data']]
    # Last entry dropped — presumably the genesis block; confirm.
    data['block_ids'] = block_ids[:-1]
    data['batch_ids'] = [block['header']['batch_ids'][0]
                         for block in block_list['data']]
    # Blocks are returned newest first, so the head is the first entry.
    data['expected_head'] = block_ids[0]
    state_addresses = [state['address']
                       for state in get_state_list()['data']]
    data['state_address'] = state_addresses
    data['state_head'] = [get_state_address(state_address)['head']
                          for state_address in state_addresses]
    data['address'] = address
    data['limit'] = LIMIT
    data['start'] = expected_batches[::-1][0]
    data['family_name'] = [
        block['batches'][0]['transactions'][0]['header']['family_name']
        for block in block_list['data']]
    return data
# ------------------------------------------------------------------------------

class RestApiError(Exception):
    """Base exception for REST API test failures (exceptions.py)."""
    pass


# === fixtures.py =============================================================
# Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
"""Function-scoped fixtures that manipulate the validator and submit
invalid or multi-transaction batches."""

import pytest
import logging
import urllib
import json
import os

from sawtooth_signing import create_context
from sawtooth_signing import CryptoFactory
from sawtooth_signing import ParseError
from sawtooth_signing.secp256k1 import Secp256k1PrivateKey

from sawtooth_rest_api.protobuf.validator_pb2 import Message
from sawtooth_rest_api.protobuf import client_batch_submit_pb2
from sawtooth_rest_api.protobuf import client_batch_pb2
from sawtooth_rest_api.protobuf import client_list_control_pb2

from sawtooth_rest_api.protobuf.batch_pb2 import Batch
from sawtooth_rest_api.protobuf.batch_pb2 import BatchList
from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader
from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader
from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction

from google.protobuf.json_format import MessageToDict

from utils import get_batches, get_transactions, get_state_address, \
    post_batch, get_blocks, get_state_list, _delete_genesis, \
    _start_validator, _stop_validator, _create_genesis, \
    wait_for_rest_apis, _get_client_address, \
    _stop_settings_tp, _start_settings_tp

from payload import get_signer, create_intkey_transaction, create_batch, \
    create_invalid_intkey_transaction


LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)


@pytest.fixture(scope="function")
def break_genesis(request):
    """Deletes the genesis data and restarts the validator without it.

    Waits for services to start again before sending the request again.
    """
    _stop_validator()
    LOGGER.info("Deleting the genesis data")
    _delete_genesis()
    _start_validator()


@pytest.fixture(scope="function")
def setup_settings_tp(request):
    """Stops the settings TP for the test and reconnects it on teardown."""
    _stop_settings_tp()
    print("settings tp is connected")

    def teardown():
        print("Connecting settings tp")
        _start_settings_tp()

    request.addfinalizer(teardown)


@pytest.fixture(scope="function")
def invalid_batch():
    """Creates and submits a single invalid intkey batch; returns the
    expected transaction/batch ids and the client address.
    """
    signer = get_signer()
    data = {}
    expected_trxns = {}
    expected_batches = []
    address = _get_client_address()

    LOGGER.info("Creating intkey transactions with set operations")

    txns = [
        create_invalid_intkey_transaction("set", [], 50, signer),
    ]

    for txn in txns:
        # Renamed from 'dict' to avoid shadowing the builtin.
        txn_dict = MessageToDict(
            txn,
            including_default_value_fields=True,
            preserving_proto_field_name=True)

        expected_trxns['trxn_id'] = [txn_dict['header_signature']]

    LOGGER.info("Creating batches for transactions 1trn/batch")

    batches = [create_batch([txn], signer) for txn in txns]

    for batch in batches:
        batch_dict = MessageToDict(
            batch,
            including_default_value_fields=True,
            preserving_proto_field_name=True)
        expected_batches.append(batch_dict['header_signature'])

    data['expected_txns'] = expected_trxns['trxn_id'][::-1]
    data['expected_batches'] = expected_batches[::-1]
    data['address'] = address

    post_batch_list = [BatchList(batches=[batch]).SerializeToString()
                       for batch in batches]

    for batch in post_batch_list:
        try:
            response = post_batch(batch)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is not reachable")
            response = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(response['error']['title'])
            LOGGER.info(response['error']['message'])

    return data


@pytest.fixture(scope="function")
def setup_batch_multiple_transaction():
    """Submits one batch containing 15 intkey 'set' transactions and
    returns the list of expected transaction ids.
    """
    signer = get_signer()
    transactions = []
    expected_trxns = []

    LOGGER.info("Creating intkey transactions with set operations")
    for _ in range(15):
        transactions.append(
            create_intkey_transaction("set", [], 50, signer))

    for txn in transactions:
        # The original rebound the unrelated name 'data' here; a dedicated
        # name keeps the per-transaction dict distinct.
        txn_dict = MessageToDict(
            txn,
            including_default_value_fields=True,
            preserving_proto_field_name=True)
        expected_trxns.append(txn_dict['header_signature'])

    batch = create_batch(transactions, signer)
    post_batch_list = BatchList(batches=[batch]).SerializeToString()

    LOGGER.info("Submitting batches to the handlers")

    try:
        response = post_batch(post_batch_list)
    except urllib.error.HTTPError as error:
        LOGGER.info("Rest Api is not reachable")
        response = json.loads(error.fp.read().decode('utf-8'))
        LOGGER.info(response['error']['title'])
        LOGGER.info(response['error']['message'])

    return expected_trxns
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error + + +from fixtures import break_genesis, invalid_batch +from utils import get_batches, get_batch_id, post_batch,\ + get_batch_statuses, post_batch_statuses,\ + _create_expected_link, _get_batch_list + +from base import RestApiBaseTest + +pytestmark = [pytest.mark.get , pytest.mark.batch] + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + + +START = 1 +LIMIT = 1 +COUNT = 0 +BAD_HEAD = 'f' +BAD_ID = 'f' +INVALID_START = -1 +INVALID_LIMIT = 0 +INVALID_RESOURCE_ID = 60 +INVALID_PAGING_QUERY = 54 +INVALID_COUNT_QUERY = 53 +VALIDATOR_NOT_READY = 15 +BATCH_NOT_FOUND = 71 +STATUS_ID_QUERY_INVALID = 66 +STATUS_BODY_INVALID = 43 +STATUS_WRONG_CONTENT_TYPE = 46 +WAIT = 10 + + +class TestBatchList(RestApiBaseTest): + """This class tests the batch list with different parameters + """ + def test_api_get_batch_list(self, setup): + """Tests the batch list by submitting intkey batches + """ + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_batch_length'] + payload = setup['payload'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + paging_link = '{}/batches?head={}&start={}'.format(address,\ + 
expected_head, start) + + try: + response = get_batches() + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + + batches = _get_batch_list(response) + + self.assert_valid_data(response) + self.assert_valid_head(response, expected_head) + self.assert_valid_data_list(batches, expected_length) + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) + + def test_api_get_batch_list_head(self, setup): + """Tests that GET /batches is reachable with head parameter + """ + LOGGER.info("Starting test for batch with head parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + expected_head = setup['expected_head'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + try: + response = get_batches(head_id=expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + + batches = response['data'][:-1] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) + + def test_api_get_batch_list_bad_head(self, setup): + """Tests that GET /batches is unreachable with bad head parameter + """ + LOGGER.info("Starting test for batch with bad head parameter") + + try: + batch_list = get_batches(head_id=BAD_HEAD) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + 
self.assert_valid_error(data, INVALID_RESOURCE_ID) + + + def test_api_get_batch_list_id(self, setup): + """Tests that GET /batches is reachable with id as parameter + """ + LOGGER.info("Starting test for batch with id parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + batch_ids = setup['batch_ids'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + + expected_id = batch_ids[0] + expected_length = len([expected_id]) + + expected_link = '{}/batches?head={}&start={}&limit={}&id={}'.format(address,\ + expected_head, start, limit, expected_id) + + try: + response = get_batches(id=expected_id) + except: + LOGGER.info("Rest Api is not reachable") + + + batches = response['data'][:-1] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + + def test_api_get_batch_list_bad_id(self, setup): + """Tests that GET /batches is unreachable with bad id parameter + """ + LOGGER.info("Starting test for batch with bad id parameter") + + try: + batch_list = get_batches(head_id=BAD_ID) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_RESOURCE_ID) + + def test_api_get_batch_list_head_and_id(self, setup): + """Tests GET /batches is reachable with head and id as parameters + """ + LOGGER.info("Starting test for batch with head and id parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + batch_ids = setup['batch_ids'] + start = setup['start'] + limit 
= setup['limit'] + address = setup['address'] + + expected_id = batch_ids[0] + expected_length = len([expected_id]) + + expected_link = '{}/batches?head={}&start={}&limit={}&id={}'.format(address,\ + expected_head, start, limit, expected_id) + + try: + response = get_batches(head_id=expected_head , id=expected_id) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + + + batches = response['data'][:-1] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + + def test_api_get_paginated_batch_list(self, setup): + """Tests GET /batches is reachable using paging parameters + """ + LOGGER.info("Starting test for batch with paging parameters") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = 1 + limit = 1 + + try: + response = get_batches(start=start , limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_PAGING_QUERY) + + def test_api_get_batch_list_limit(self, setup): + """Tests GET /batches is reachable using paging parameters + """ + LOGGER.info("Starting test for batch with paging parameters") + signer_key = setup['signer_key'] + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + payload = setup['payload'] + expected_id = batch_ids[0] + start = setup['start'] + address = setup['address'] + limit = 1 + + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + try: + response = get_batches(limit=limit) + except urllib.error.HTTPError as error: + data = 
json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + batches = response['data'][:-1] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response, expected_link) + + + def test_api_get_batch_list_invalid_start(self, setup): + """Tests that GET /batches is unreachable with invalid start parameter + """ + LOGGER.info("Starting test for batch with invalid start parameter") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = -1 + + try: + response = get_batches(start=start) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_PAGING_QUERY) + + + def test_api_get_batch_list_invalid_limit(self, setup): + """Tests that GET /batches is unreachable with bad limit parameter + """ + LOGGER.info("Starting test for batch with bad limit parameter") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + limit = 0 + + try: + response = get_batches(limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_COUNT_QUERY) + + def test_api_get_batch_list_reversed(self, setup): + """verifies that GET /batches is unreachable with bad head parameter + """ + LOGGER.info("Starting test for batch with bad head parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + setup_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + expected_length = 
setup['expected_batch_length'] + payload = setup['payload'] + start = setup['start'] + limit = setup['limit'] + address = setup['address'] + expected_batches = setup_batches[::-1] + + expected_link = '{}/batches?head={}&start={}&limit={}'.format(address,\ + expected_head, start, limit) + + reverse = True + + try: + response = get_batches(reverse=reverse) + except urllib.error.HTTPError as error: + assert response.code == 400 + + batches = response['data'][:-1] + + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + + self.assert_valid_head(response, expected_head) + self.assert_valid_link(response, expected_link) + self.assert_valid_paging(response) + + def test_api_get_batch_key_params(self, setup): + """Tests/ validate the block key parameters with data, head, link and paging + """ + response = get_batches() + assert 'link' in response + assert 'data' in response + assert 'paging' in response + assert 'head' in response + + def test_api_get_batch_param_link_val(self, setup): + """Tests/ validate the batch parameters with batches, head, start and limit + """ + try: + batch_list = get_batches() + for link in batch_list: + if(link == 'link'): + assert 'head' in batch_list['link'] + assert 'start' in batch_list['link'] + assert 'limit' in batch_list['link'] + assert 'batches' in batch_list['link'] + except urllib.error.HTTPError as error: + assert response.code == 400 + LOGGER.info("Link is not proper for batch and parameters are missing") + + def test_rest_api_check_batches_count(self, setup): + """Tests batches count from batch list + """ + count =0 + try: + batch_list = get_batches() + for batch in enumerate(batch_list['data']): + count = count+1 + except urllib.error.HTTPError as error: + LOGGER.info("Batch count not able to collect") + +class TestBatchGet(RestApiBaseTest): + def test_api_get_batch_id(self, setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = 
setup['expected_batches'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_batch_length'] + batch_ids = setup['batch_ids'] + expected_id = batch_ids[0] + payload = setup['payload'] + address = setup['address'] + + expected_link = '{}/batches/{}'.format(address, expected_batches[0]) + + try: + response = get_batch_id(expected_batches[0]) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + + batches = response['data'] + + self.assert_check_batch_seq(batches, expected_batches, + expected_txns, payload, + signer_key) + self.assert_valid_link(response, expected_link) + + def test_api_get_bad_batch_id(self, setup): + """verifies that GET /batches/{bad_batch_id} + is unreachable with bad head parameter + """ + try: + batch_list = get_batches(head_id=BAD_ID) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_RESOURCE_ID) + +class TestBatchStatusesList(RestApiBaseTest): + """This class tests the batch status list with different parameters + """ + def test_api_post_batch_status_15ids(self, setup): + """verifies that POST /batches_statuses with more than 15 ids + """ + LOGGER.info("Starting test for batch with bad head parameter") + data = {} + batch_ids = setup['batch_ids'] + data['batch_ids'] = batch_ids + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + data_str=json.dumps(data['batch_ids']).encode() + + try: + response = post_batch_statuses(data_str) + assert response['data'][0]['status'] == "COMMITTED" + except urllib.error.HTTPError as error: + assert response.code == 400 + + def test_api_post_batch_status_10ids(self, setup): + """verifies that POST /batches_status with less than 15 ids + """ + LOGGER.info("Starting test for batch with bad head parameter") + data = {} + values = [] + batch_ids = setup['batch_ids'] + 
data['batch_ids'] = batch_ids + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + for i in range(10): + values.append(data['batch_ids'][i]) + data_str=json.dumps(values).encode() + + try: + response = post_batch_statuses(data_str) + assert response['data'][0]['status'] == "COMMITTED" + except urllib.error.HTTPError as error: + assert response.code == 400 + + def test_api_get_batch_statuses(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + status = "COMMITTED" + + + expected_link = '{}/batch_statuses?id={}'.format(address, expected_batches[0]) + + try: + response = get_batch_statuses([expected_batches[0]]) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + def test_api_get_batch_statuses_many_ids(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + status = "COMMITTED" + + batches = ",".join(expected_batches) + + expected_link = '{}/batch_statuses?id={}'.format(address, batches) + + try: + response = get_batch_statuses(expected_batches) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + def test_api_get_batch_statuses_bad_id(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + + try: + response = get_batch_statuses(BAD_ID) + except urllib.error.HTTPError as error: + data = 
json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_RESOURCE_ID) + + def test_api_get_batch_statuses_invalid_query(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + + try: + response = get_batch_statuses() + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, STATUS_ID_QUERY_INVALID) + + def test_api_get_batch_statuses_wait(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + status = "COMMITTED" + + expected_link = '{}/batch_statuses?id={}&wait={}'.format(address, expected_batches[0], WAIT) + + try: + response = get_batch_statuses([expected_batches[0]],WAIT) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + + def test_api_get_batch_statuses_invalid(self, invalid_batch): + expected_batches = invalid_batch['expected_batches'] + address = invalid_batch['address'] + status = "INVALID" + + expected_link = '{}/batch_statuses?id={}'.format(address, expected_batches[0]) + + try: + response = get_batch_statuses([expected_batches[0]]) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + + def test_api_get_batch_statuses_unknown(self, setup): + address = 
setup['address'] + expected_batches = setup['expected_batches'] + unknown_batch = expected_batches[0] + status = "UNKNOWN" + + expected_link = '{}/batch_statuses?id={}'.format(address, unknown_batch) + + try: + response = get_batch_statuses([unknown_batch]) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) + + def test_api_get_batch_statuses_default_wait(self,setup): + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + address = setup['address'] + status = "COMMITTED" + + expected_link = '{}/batch_statuses?id={}&wait=300'.format(address, expected_batches[0]) + + try: + response = get_batch_statuses([expected_batches[0]],300) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_status(response,status) + self.assert_valid_link(response, expected_link) \ No newline at end of file diff --git a/rest_api/tests/api_test/get/test_rest_api_get_block.py b/rest_api/tests/api_test/get/test_rest_api_get_block.py new file mode 100644 index 0000000000..cf54b22d59 --- /dev/null +++ b/rest_api/tests/api_test/get/test_rest_api_get_block.py @@ -0,0 +1,410 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------

import pytest
import logging
import json
import urllib.request
import urllib.error

from utils import get_blocks, get_block_id, get_batches, get_transactions

from base import RestApiBaseTest


pytestmark = [pytest.mark.get, pytest.mark.block]


START = 1
LIMIT = 1
COUNT = 0
BAD_HEAD = 'f'
BAD_ID = 'f'
INVALID_START = -1
INVALID_LIMIT = 0
INVALID_RESOURCE_ID = 60
INVALID_PAGING_QUERY = 54
INVALID_COUNT_QUERY = 53
VALIDATOR_NOT_READY = 15
BLOCK_NOT_FOUND = 70
HEAD_LENGTH = 128
MAX_BATCH_IN_BLOCK = 100
FAMILY_NAME = 'xo'


LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)


class TestBlockList(RestApiBaseTest):
    """This class tests the blocks list with different parameters"""

    def test_api_get_block_list(self, setup):
        """Tests the block list by submitting intkey batches"""
        try:
            response = get_blocks()
        except urllib.error.HTTPError:
            LOGGER.info("Rest Api is Unreachable")
            raise
        # Bug fix: the original sliced ``response['data']`` and asserted
        # nothing; at least verify that some block data came back.
        assert response['data'], "expected block data in the response"

    def test_api_get_block_list_head(self, setup):
        """Tests that GET /blocks is reachable with head parameter"""
        LOGGER.info("Starting test for blocks with head parameter")
        expected_head = setup['expected_head']

        try:
            response = get_blocks(head_id=expected_head)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            # Bug fix: the original reused ``response`` for the error body
            # and then asserted ``response['head']`` on it (KeyError).
            raise

        assert response['head'] == expected_head, "request is not correct"

    def test_api_get_block_list_bad_head(self, setup):
        """Tests that GET /blocks is unreachable with bad head parameter"""
        LOGGER.info("Starting test for blocks with bad head parameter")
        try:
            get_blocks(head_id=BAD_HEAD)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            self.assert_valid_error(data, INVALID_RESOURCE_ID)
        else:
            assert False, "expected an HTTP error for a bad head"

    def test_api_get_block_list_id(self, setup):
        """Tests that GET /blocks is reachable with id as parameter"""
        LOGGER.info("Starting test for blocks with id parameter")
        expected_head = setup['expected_head']
        expected_id = setup['block_ids'][0]

        try:
            response = get_blocks(id=expected_id)
        except urllib.error.HTTPError:
            # Bug fix: a bare ``except`` previously swallowed the failure
            # and the unbound ``response`` raised NameError below.
            LOGGER.info("Rest Api is not reachable")
            raise

        assert response['head'] == expected_head, "request is not correct"
        # Idiom fix: ``is None`` instead of ``== None``.
        assert response['paging']['start'] is None, "request is not correct"
        assert response['paging']['limit'] is None, "request is not correct"

    def test_api_get_block_list_bad_id(self, setup):
        """Tests that GET /blocks is unreachable with bad id parameter"""
        LOGGER.info("Starting test for blocks with bad id parameter")
        try:
            # NOTE(review): the original passed ``head_id`` although this
            # test is about the ``id`` query parameter -- confirm intent.
            get_blocks(head_id=BAD_ID)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            self.assert_valid_error(data, INVALID_RESOURCE_ID)
        else:
            assert False, "expected an HTTP error for a bad id"

    def test_api_get_block_list_head_and_id(self, setup):
        """Tests GET /blocks is reachable with head and id as parameters"""
        LOGGER.info("Starting test for blocks with head and id parameter")
        expected_head = setup['expected_head']
        expected_id = setup['block_ids'][0]

        response = get_blocks(head_id=expected_head, id=expected_id)

        assert response['head'] == expected_head, "head is not matching"
        assert response['paging']['start'] is None, "start parameter is not correct"
        assert response['paging']['limit'] is None, "request is not correct"
        assert response['data'], "expected block data in the response"

    def test_api_get_paginated_block_list(self, setup):
        """Tests GET /blocks is reachable using paging parameters"""
        LOGGER.info("Starting test for blocks with paging parameters")
        expected_id = setup['block_ids'][0]

        try:
            get_blocks(start=1, limit=1, id=expected_id)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            # Bug fix: only assert on the error payload when an error
            # actually occurred (it was unbound otherwise).
            self.assert_valid_error(data, INVALID_PAGING_QUERY)

    def test_api_get_block_list_start_id(self, setup):
        """Tests GET /blocks is reachable using start and id parameters"""
        # NOTE(review): this body duplicated test_api_get_paginated_block_list
        # in the original -- confirm the intended start/id combination.
        LOGGER.info("Starting test for blocks with paging parameters")
        expected_id = setup['block_ids'][0]

        try:
            get_blocks(start=1, limit=1, id=expected_id)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            self.assert_valid_error(data, INVALID_PAGING_QUERY)

    def test_api_get_block_list_invalid_start(self, setup):
        """Tests that GET /blocks is unreachable with invalid start parameter"""
        LOGGER.info("Starting test for block with invalid start parameter")
        try:
            get_blocks(start=-1)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            self.assert_valid_error(data, INVALID_PAGING_QUERY)
        else:
            assert False, "expected an HTTP error for start=-1"

    def test_api_get_block_list_limit(self, setup):
        """Tests that GET /blocks is unreachable with invalid start parameter"""
        # NOTE(review): this body duplicated test_api_get_block_list_invalid_start
        # in the original (it never exercises ``limit``) -- confirm intent.
        LOGGER.info("Starting test for block with invalid start parameter")
        try:
            get_blocks(start=-1)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            self.assert_valid_error(data, INVALID_PAGING_QUERY)

    def test_api_get_block_list_invalid_limit(self, setup):
        """Tests that GET /blocks is unreachable with bad limit parameter"""
        LOGGER.info("Starting test for block with bad limit parameter")
        try:
            get_blocks(limit=0)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            self.assert_valid_error(data, INVALID_COUNT_QUERY)
        else:
            assert False, "expected an HTTP error for limit=0"

    def test_api_get_block_list_reversed(self, setup):
        """verifies that GET /blocks works with the reverse parameter"""
        LOGGER.info("Starting test for blocks with reverse parameter")
        expected_head = setup['expected_head']

        try:
            response = get_blocks(reverse=True)
        except urllib.error.HTTPError as error:
            # Bug fix: the original asserted on the unbound ``response``
            # here; the status code lives on ``error``.
            assert error.code == 400
            raise

        assert response['head'] == expected_head, "request is not correct"
        assert response['paging']['start'] is None, "request is not correct"
        assert response['paging']['limit'] is None, "request is not correct"
        assert response['data'], "expected block data in the response"

    def test_api_get_block_link_val(self, setup):
        """Tests/ validate the block parameters with blocks, head, start and limit"""
        try:
            block_list = get_blocks()
            for key in block_list:
                if key == 'link':
                    assert 'head' in block_list['link']
                    assert 'start' in block_list['link']
                    assert 'limit' in block_list['link']
                    assert 'blocks' in block_list['link']
        except urllib.error.HTTPError as error:
            LOGGER.info("Link is not proper for state and parameters are missing")
            # Bug fix: ``response`` was unbound here; use ``error.code``.
            assert error.code == 400

    def test_api_get_block_key_params(self, setup):
        """Tests/ validate the block key parameters with data, head, link and paging"""
        response = get_blocks()
        assert 'link' in response
        assert 'data' in response
        assert 'paging' in response
        assert 'head' in response

    def test_api_get_each_batch_id_length(self, setup):
        """Tests the each batch id length should be 128 hex character long"""
        try:
            block_list = get_blocks()
        except urllib.error.HTTPError:
            LOGGER.info("Batch id length is not 128 hex character long")
            raise
        # Bug fix: assert inside the loop so every block is checked and
        # the length variable can never be read while unbound.
        for block in block_list['data']:
            assert len(block['header']['batch_ids'][0]) == HEAD_LENGTH

    def test_api_get_first_block_id_length(self, setup):
        """Tests the first block id length should be 128 hex character long"""
        try:
            # Simplification: the original nested loops only ever inspected
            # the batch-list head; fetch it once and check its length.
            expected_head = get_batches()['head']
        except urllib.error.HTTPError:
            LOGGER.info("Block id length is not 128 hex character long")
            raise
        assert len(expected_head) == HEAD_LENGTH

    def test_rest_api_check_post_max_batches(self, setup):
        """Tests that allow max post batches in block
        Handled max 100 batches post in block and handle for extra batch
        """
        block_list = get_blocks()['data']
        for batchcount, _ in enumerate(block_list, start=1):
            if batchcount == MAX_BATCH_IN_BLOCK:
                # Consistency fix: use the module logger instead of print.
                LOGGER.info("Max 100 Batches are present in Block")

    def test_rest_api_check_head_signature(self, setup):
        """Tests that head signature of each batch of the block
        should be not none
        """
        block_list = get_blocks()['data']
        for block in block_list:
            signature = block['batches'][0]['header_signature']
            assert signature is not None, "missing header signature in block"

    def test_rest_api_check_family_version(self, setup):
        """Test batch transaction family version should be present
        for each transaction header
        """
        block_list = get_blocks()['data']
        for block in block_list:
            version = block['batches'][0]['transactions'][0]['header']['family_version']
            assert version is not None, "missing family version in block"

    def test_rest_api_check_input_output_content(self, setup):
        """Test batch input and output content should be same for
        each batch and unique from other
        """
        block_list = get_blocks()['data']
        txn_input = [block['batches'][0]['transactions'][0]['header']['inputs'][0]
                     for block in block_list]
        txn_output = [block['batches'][0]['transactions'][0]['header']['outputs'][0]
                      for block in block_list]
        # Bug fix: the original returned True instead of asserting, so the
        # test could never fail.
        assert txn_input == txn_output

    def test_rest_api_check_signer_public_key(self, setup):
        """Tests that signer public key is calculated for a block properly"""
        block_list = get_blocks()['data']
        for block in block_list:
            key = block['batches'][0]['header']['signer_public_key']
            assert key is not None, "signer public key is missing"

    def test_rest_api_check_blocks_count(self, setup):
        """Tests blocks count from block list"""
        try:
            block_list = get_blocks()
        except urllib.error.HTTPError:
            LOGGER.info("BLock count not able to collect")
            raise
        # Idiom fix: ``len`` replaces the manual enumerate/counter loop.
        count = len(block_list['data'])
        assert count >= 0

    def test_rest_api_blk_content_head_signature(self, setup):
        """Tests that head signature of each transaction of the block
        should be not none
        """
        try:
            # Simplification: the original looped over blocks and batches
            # only to fetch the same global transaction list each time.
            transaction_list = get_transactions()
        except urllib.error.HTTPError:
            LOGGER.info("Header signature is missing in some of the batches")
            raise
        for trans in transaction_list['data']:
            assert trans['header_signature'] is not None


class TestBlockGet(RestApiBaseTest):
    """Tests for GET /blocks/{block_id}."""

    def test_api_get_block_id(self, setup):
        """Tests that GET /blocks/{block_id} is reachable"""
        LOGGER.info("Starting test for blocks/{block_id}")
        expected_block_id = setup['block_ids'][0]

        try:
            get_block_id(block_id=expected_block_id)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            raise

    def test_api_get_bad_block_id(self, setup):
        """Tests that GET /blocks/{bad_block_id} is not reachable
        with bad id
        """
        LOGGER.info("Starting test for blocks/{bad_block_id}")
        try:
            get_block_id(block_id=BAD_ID)
        except urllib.error.HTTPError as error:
            data = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(data['error']['title'])
            LOGGER.info(data['error']['message'])
            # Robustness fix: actually assert on the error the API returned
            # (the original only logged it).
            self.assert_valid_error(data, INVALID_RESOURCE_ID)
        else:
            assert False, "expected an HTTP error for a bad block id"
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------

import pytest
import logging
import json
import urllib.request
import urllib.error

from utils import get_peers

from base import RestApiBaseTest

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

pytestmark = [pytest.mark.get, pytest.mark.peers]

PEER_LIST = []


class TestPeerList(RestApiBaseTest):
    """This class tests the peer list with different parameters"""

    def test_api_get_peer_list(self, setup):
        """Tests the peer list"""
        address = setup['address']
        expected_link = '{}/peers'.format(address)

        try:
            response = get_peers()
        except urllib.error.HTTPError:
            # Bug fix: ``response`` was read after this handler even though
            # it is unbound when the request fails.
            LOGGER.info("Rest Api is Unreachable")
            raise

        self.assert_valid_link(response, expected_link)
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
#
import pytest
import logging
import json
import urllib.request
import urllib.error

from conftest import setup
from utils import get_state_list, get_reciepts, post_receipts
from base import RestApiBaseTest
from fixtures import setup_batch_multiple_transaction


LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

pytestmark = [pytest.mark.get, pytest.mark.receipts]

RECEIPT_NOT_FOUND = 80
RECEIPT_WRONG_CONTENT_TYPE = 81
RECEIPT_BODY_INVALID = 82
RECEIPT_Id_QUERYINVALID = 83
INVALID_RESOURCE_ID = 60


class TestReceiptsList(RestApiBaseTest):
    """This class tests the receipt list with different parameters"""

    def test_api_get_reciept_invalid_id(self):
        """Tests the reciepts after submitting invalid transaction"""
        transaction_id = "s"
        try:
            get_reciepts(transaction_id)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is Unreachable")
            response = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(response['error']['title'])
            LOGGER.info(response['error']['message'])
            assert response['error']['code'] == INVALID_RESOURCE_ID
            assert response['error']['title'] == 'Invalid Resource Id'
        else:
            # Robustness fix: fail explicitly when the expected HTTP error
            # does not occur.
            assert False, "expected an HTTP error for an invalid id"

    def test_api_get_reciepts_multiple_transactions(self, setup_batch_multiple_transaction):
        """Test the get reciepts for multiple transaction."""
        txn_ids = setup_batch_multiple_transaction
        # Idiom fix: ``join`` over the reversed ids replaces the manual
        # string-concatenation loop, which built the same reversed,
        # comma-separated list with a trailing comma that was then sliced off.
        trans_list = ",".join(reversed(txn_ids))

        try:
            response = get_reciepts(trans_list)
        except urllib.error.HTTPError:
            # Bug fix: ``response`` is unbound on this path; fail loudly
            # instead of falling through to a NameError.
            LOGGER.info("Rest Api is Unreachable")
            raise

        for res, txn in zip(response['data'], reversed(txn_ids)):
            assert str(res['id']) == txn

    def test_api_get_reciepts_single_transactions(self, setup):
        """Tests get reciepts response for single transaction"""
        expected_transaction = setup['expected_txns']
        transaction_id = str(expected_transaction)[2:-2]

        try:
            get_reciepts(transaction_id)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is Unreachable")
            response = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(response['error']['title'])
            LOGGER.info(response['error']['message'])
            assert response['error']['code'] == RECEIPT_NOT_FOUND
            # NOTE(review): the original expected the title
            # 'Invalid Resource Id' together with RECEIPT_NOT_FOUND --
            # confirm which error the API really returns here.
            assert response['error']['title'] == 'Invalid Resource Id'

    def test_api_post_reciepts_single_transactions(self, setup):
        """Test post reciepts response for single transaction"""
        expected_transaction = setup['expected_txns']
        transaction_json = json.dumps(expected_transaction).encode()

        try:
            post_receipts(transaction_json)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is Unreachable")
            response = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(response['error']['title'])
            LOGGER.info(response['error']['message'])
            assert response['error']['code'] == INVALID_RESOURCE_ID
            assert response['error']['title'] == 'Invalid Resource Id'

    def test_api_post_reciepts_invalid_transactions(self):
        """test reciepts post for invalid transaction"""
        transaction_json = json.dumps("few").encode()

        try:
            post_receipts(transaction_json)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is Unreachable")
            response = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(response['error']['title'])
            LOGGER.info(response['error']['message'])
            assert response['error']['code'] == RECEIPT_BODY_INVALID
            assert response['error']['title'] == 'Bad Receipts Request'
        else:
            assert False, "expected an HTTP error for an invalid body"

    def test_api_post_reciepts_multiple_transactions(self, setup_batch_multiple_transaction):
        """Test the post reciepts response for multiple transaction."""
        transaction_list = setup_batch_multiple_transaction
        json_list = json.dumps(transaction_list).encode()

        try:
            response = post_receipts(json_list)
        except urllib.error.HTTPError:
            # Bug fix: ``response`` is unbound on this path; fail loudly
            # instead of falling through to a NameError.
            LOGGER.info("Rest Api is Unreachable")
            raise

        for res, txn in zip(response['data'], transaction_list):
            assert str(res['id']) == txn
+# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error + +from utils import get_state_list, get_state_address +from fixtures import invalid_batch + + +from base import RestApiBaseTest + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +pytestmark = [pytest.mark.get, pytest.mark.state] + +START = 1 +LIMIT = 1 +COUNT = 0 +BAD_HEAD = 'f' +BAD_ID = 'f' +BAD_ADDRESS = 'f' +INVALID_START = -1 +INVALID_LIMIT = 0 +INVALID_RESOURCE_ID = 60 +INVALID_PAGING_QUERY = 54 +INVALID_COUNT_QUERY = 53 +VALIDATOR_NOT_READY = 15 +STATE_ADDRESS_LENGTH = 70 +STATE_NOT_FOUND = 75 +INVALID_STATE_ADDRESS = 62 +HEAD_LENGTH = 128 + + +class TestStateList(RestApiBaseTest): + """This class tests the state list with different parameters + """ + def test_api_get_state_list(self, setup): + """Tests the state list by submitting intkey batches + """ + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_batches = setup['expected_batches'] + expected_txns = setup['expected_txns'] + + try: + response = get_state_list() + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + + state_list = response['data'][:-1] + + self.assert_valid_head(response , expected_head) + + def test_api_get_state_list_invalid_batch(self, invalid_batch): + """Tests that transactions are submitted and committed for + each block that are created by submitting invalid intkey batches + """ + batches = invalid_batch['expected_batches'] + try: + response = get_state_list() + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + def test_api_get_state_list_head(self, setup): + """Tests that GET /state is reachable with head parameter + """ + LOGGER.info("Starting test for state with head parameter") + expected_head = 
setup['expected_head'] + + try: + response = get_state_list(head_id=expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + assert response['head'] == expected_head , "request is not correct" + + def test_api_get_state_list_bad_head(self, setup): + """Tests that GET /state is unreachable with bad head parameter + """ + LOGGER.info("Starting test for state with bad head parameter") + bad_head = 'f' + + try: + batch_list = get_state_list(head_id=bad_head) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data , INVALID_RESOURCE_ID) + + + def test_api_get_state_list_address(self, setup): + """Tests that GET /state is reachable with address parameter + """ + LOGGER.info("Starting test for state with address parameter") + expected_head = setup['expected_head'] + address = setup['state_address'][0] + + try: + response = get_state_list(address=address) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + assert response['head'] == expected_head , "request is not correct" + + def test_api_get_state_list_bad_address(self, setup): + """Tests that GET /state is unreachable with bad address parameter + """ + LOGGER.info("Starting test for state with bad address parameter") + bad_address = 'f' + + try: + batch_list = get_state_list(address=bad_address) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + 
LOGGER.info(data['error']['message']) + + self.assert_valid_error(data , INVALID_RESOURCE_ID) + + def test_api_get_paginated_state_list(self, setup): + """Tests GET /state is reachbale using paging parameters + """ + LOGGER.info("Starting test for state with paging parameters") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = 1 + limit = 1 + + try: + response = get_state_list(start=start , limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data , INVALID_PAGING_QUERY) + + def test_api_get_paginated_state_list_limit(self, setup): + """Tests GET /state is reachbale using paging parameters + """ + LOGGER.info("Starting test for state with paging parameters") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + limit = 1 + + try: + response = get_state_list(limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + + def test_api_get_paginated_state_list_start(self, setup): + """Tests GET /state is reachbale using paging parameters + """ + LOGGER.info("Starting test for state with paging parameters") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + limit = 1 + + try: + response = get_state_list(limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + + def test_api_get_state_list_bad_paging(self, setup): + """Tests GET /state is reachbale using bad paging parameters + """ + LOGGER.info("Starting test for state with bad paging parameters") + batch_ids = setup['batch_ids'] + 
expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = -1 + limit = -1 + + try: + response = get_state_list(start=start , limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data , INVALID_COUNT_QUERY) + + + def test_api_get_state_list_invalid_start(self, setup): + """Tests that GET /state is unreachable with invalid start parameter + """ + LOGGER.info("Starting test for state with invalid start parameter") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = -1 + + try: + response = get_state_list(start=start) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data , INVALID_PAGING_QUERY) + + + def test_api_get_state_list_invalid_limit(self, setup): + """Tests that GET /state is unreachable with bad limit parameter + """ + LOGGER.info("Starting test for state with bad limit parameter") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + limit = 0 + + try: + response = get_state_list(limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data , INVALID_COUNT_QUERY) + + def test_api_get_state_list_reversed(self, setup): + """verifies that GET /state is unreachable with bad head parameter + """ + LOGGER.info("Starting test for state with bad head parameter") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + reverse = True + + try: + response = get_state_list(reverse=reverse) + except urllib.error.HTTPError as error: + 
assert response.code == 400 + + assert response['paging']['start'] == None , "request is not correct" + assert response['paging']['limit'] == None , "request is not correct" + assert bool(response['data']) == True + + def test_api_get_state_data_address_prefix_namespace(self, setup): + """Tests the state data address with 6 hex characters long + namespace prefix + """ + try: + for state in get_state_list()['data']: + #Access each address using namespace prefix + namespace = state['address'][:6] + res=get_state_list(address=namespace) + except urllib.error.HTTPError as error: + LOGGER.info("Not able to access related state address using namespace prefix") + + def test_api_get_state_data_head_wildcard_character(self, setup): + """Tests the state head with wildcard_character ***STL-1345*** + """ + pass +# try: +# for _ in get_state_list()['data']: +# expected_head = setup['expected_head'][:6] +# addressList = list(expected_head) +# addressList[2]='?' +# expected_head = ''.join(addressList) +# print("\nVALUE is: ", expected_head) +# res=get_state_list(head_id=expected_head) +# except urllib.error.HTTPError as error: +# LOGGER.info("Not able to access ") +# data = json.loads(error.fp.read().decode('utf-8')) +# if data: +# LOGGER.info(data['error']['title']) +# LOGGER.info(data['error']['message']) +# assert data['error']['code'] == 60 +# assert data['error']['title'] == 'Invalid Resource Id' + + + def test_api_get_state_data_head_partial_character(self, setup): + """Tests the state head with partial head address ***STL-1345*** + """ + try: + for _ in get_state_list()['data']: + expected_head = setup['expected_head'][:6] + res=get_state_list(head_id=expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Not able to access ") + data = json.loads(error.fp.read().decode('utf-8')) + if data: + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + assert data['error']['code'] == 60 + assert data['error']['title'] == 'Invalid Resource 
Id' + + def test_api_get_state_data_address_partial_character(self, setup): + """Tests the state address with partial head address ***STL-1346*** + """ + try: + for _ in get_state_list()['data']: + expected_head = setup['expected_head'][:6] + res=get_state_list(head_id=expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Not able to access ") + data = json.loads(error.fp.read().decode('utf-8')) + if data: + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + assert data['error']['code'] == 62 + assert data['error']['title'] == 'Invalid State Address' + + + def test_api_get_state_data_address_length(self, setup): + """Tests the state data address length is 70 hex character long + with proper prefix namespace + """ + try: + response = get_state_list() + for state in get_state_list()['data']: + #Access each address using of state + address = len(response['data'][0]['address']) + except urllib.error.HTTPError as error: + LOGGER.info("State address is not 70 character long") + assert address == STATE_ADDRESS_LENGTH + + + def test_api_get_state_data_address_with_odd_hex_value(self, setup): + """Tests the state data address fail with odd hex character + address + """ + try: + response = get_state_list() + for state in get_state_list()['data']: + #Access each address using of state + address = len(response['data'][0]['address']) + if(address%2 == 0): + pass + except urllib.error.HTTPError as error: + LOGGER.info("Odd state address is not correct") + + def test_api_get_state_data_address_with_reduced_length(self, setup): + """Tests the state data address with reduced even length hex character long + """ + try: + response = get_state_list() + for state in get_state_list()['data']: + #Access each address using of state + address = response['data'][0]['address'] + nhex = address[:-4] + get_state_list(address = nhex) + except urllib.error.HTTPError as error: + LOGGER.info("Reduced length data address failed to processed") + + + def 
test_api_get_state_data_address_64_Hex(self, setup): + """Tests the state data address with 64 hex give empty data + """ + try: + response = get_state_list() + for state in get_state_list()['data']: + #Access each address using of state + address = response['data'][0]['address'] + nhex = address[6:70] + naddress = get_state_list(address = nhex) + assert naddress['data'] == [] + except urllib.error.HTTPError as error: + LOGGER.info("state data address with 64 hex characters not processed ") + + + def test_api_get_state_data_address_alter_bytes(self, setup): + """Tests the state data address with alter bytes give empty data + """ + try: + response = get_state_list() + for state in get_state_list()['data']: + #Access each address using of state + address = response['data'][0]['address'] + nhex = address[6:8] + naddress = get_state_list(address = nhex) + addressList = list(naddress) + addressList[2]='z' + naddress = ''.join(addressList) + except urllib.error.HTTPError as error: + LOGGER.info("state data address with altered bytes not processed ") + + + def test_api_get_state_link_val(self, setup): + """Tests/ validate the state parameters with state, head, start and limit + """ + try: + state_list = get_state_list() + for link in state_list: + if(link == 'link'): + assert 'head' in state_list['link'] + assert 'start' in state_list['link'] + assert 'limit' in state_list['link'] + assert 'state' in state_list['link'] + except urllib.error.HTTPError as error: + assert response.code == 400 + LOGGER.info("Link is not proper for state and parameters are missing") + + def test_api_get_state_key_params(self, setup): + """Tests/ validate the state key parameters with data, head, link and paging + """ + response = get_state_list() + assert 'link' in response + assert 'data' in response + assert 'paging' in response + assert 'head' in response + + def test_api_get_each_state_head_length(self, setup): + """Tests the each state head length should be 128 hex character long + """ + 
try: + for _ in get_state_list()['data']: + expected_head = setup['expected_head'] + head_len = len(expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("State Head length is not 128 hex character long") + assert head_len == HEAD_LENGTH + + def test_rest_api_check_state_count(self, setup): + """Tests state count from state list + """ + count = 0 + try: + state_list = get_state_list()['data'] + for batch in enumerate(state_list): + count = count+1 + except urllib.error.HTTPError as error: + LOGGER.info("State count not able to collect") + + +class TestStateGet(RestApiBaseTest): + def test_api_get_state_address(self, setup): + """Tests/ validate the state key parameters with data, head, link and paging + """ + address = setup['state_address'][0] + try: + response = get_state_address(address=address) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + def test_api_get_bad_address(self, setup): + """Tests /state/{bad_state_address} + """ + try: + response = get_state_address(address=BAD_ADDRESS) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_STATE_ADDRESS) diff --git a/rest_api/tests/api_test/get/test_rest_api_get_transaction.py b/rest_api/tests/api_test/get/test_rest_api_get_transaction.py new file mode 100644 index 0000000000..6480a6f94f --- /dev/null +++ b/rest_api/tests/api_test/get/test_rest_api_get_transaction.py @@ -0,0 +1,379 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------------------------ + +import pytest +import logging +import json +import urllib.request +import urllib.error + +from fixtures import break_genesis + +from utils import get_transactions, get_transaction_id + +from base import RestApiBaseTest + +pytestmark = [pytest.mark.get , pytest.mark.transactions] + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.INFO) + +START = 1 +LIMIT = 1 +COUNT = 0 +BAD_HEAD = 'f' +BAD_ID = 'f' +INVALID_START = -1 +INVALID_LIMIT = 0 +INVALID_RESOURCE_ID = 60 +INVALID_PAGING_QUERY = 54 +INVALID_COUNT_QUERY = 53 +VALIDATOR_NOT_READY = 15 +TRANSACTION_NOT_FOUND = 72 +HEAD_LENGTH = 128 + + +class TestTransactionList(RestApiBaseTest): + def test_api_get_transaction_list(self, setup): + """Tests the transaction list after submitting intkey batches + """ + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_trn_length'] + payload = setup['payload'][0] + address = setup['address'] + start = expected_txns[::-1][0] + + expected_link = '{}/transactions?head={}&start={}&limit={}'.format(address,\ + expected_head, start, LIMIT) + + try: + response = get_transactions() + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is Unreachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + txns = response['data'][:-1] + +# self.assert_check_transaction_seq(txns, expected_txns, 
+# payload, signer_key) +# self.assert_valid_head(response , expected_head) +# self.assert_valid_paging(response) + + + def test_api_get_transaction_list_head(self, setup): + """Tests that GET /transactions is reachable with head parameter + """ + LOGGER.info("Starting test for transactions with head parameter") + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_trn_length'] + payload = setup['payload'][0] + address = setup['address'] + start = expected_txns[::-1][0] + + expected_link = '{}/transactions?head={}&start={}&limit={}'.format(address,\ + expected_head, start, LIMIT) + + try: + response = get_transactions(head_id=expected_head) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + txns = response['data'][:-1] + + self.assert_check_transaction_seq(txns, expected_txns, + payload, signer_key) + self.assert_valid_head(response , expected_head) + + def test_api_get_transaction_list_bad_head(self, setup): + """Tests that GET /transactions is unreachable with bad head parameter + """ + LOGGER.info("Starting test for transactions with bad head parameter") + + try: + response = get_transactions(head_id=BAD_HEAD) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_RESOURCE_ID) + + def test_api_get_transaction_list_id(self, setup): + """Tests that GET /transactions is reachable with id as parameter + """ + LOGGER.info("Starting test for transactions with id parameter") + + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_txns = setup['expected_txns'] + expected_length 
= setup['expected_trn_length'] + payload = setup['payload'][0] + address = setup['address'] + start = expected_txns[::-1][0] + transaction_ids = setup['transaction_ids'] + expected_id = transaction_ids[0] + expected_length = len([expected_id]) + + expected_link = '{}/transactions?head={}&start={}&limit={}&id={}'.format(address,\ + expected_head, start, LIMIT, expected_id) + + try: + response = get_transactions(id=expected_id) + except: + LOGGER.info("Rest Api is not reachable") + + + txns = response['data'][:-1] + + self.assert_check_transaction_seq(txns, expected_txns, + payload, signer_key) + + def test_api_get_transaction_list_bad_id(self, setup): + """Tests that GET /transactions is unreachable with bad id parameter + """ + LOGGER.info("Starting test for transactions with bad id parameter") + bad_id = 'f' + + try: + response = get_transactions(head_id=bad_id) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api is not reachable") + data = json.loads(error.fp.read().decode('utf-8')) + + self.assert_valid_error(data, INVALID_RESOURCE_ID) + + def test_api_get_transaction_list_head_and_id(self, setup): + """Tests GET /transactions is reachable with head and id as parameters + """ + LOGGER.info("Starting test for transactions with head and id parameter") + + signer_key = setup['signer_key'] + expected_head = setup['expected_head'] + expected_txns = setup['expected_txns'] + expected_length = setup['expected_trn_length'] + payload = setup['payload'][0] + address = setup['address'] + start = expected_txns[::-1][0] + transaction_ids = setup['transaction_ids'] + expected_id = transaction_ids[0] + expected_length = len([expected_id]) + + expected_link = '{}/transactions?head={}&start={}&limit={}&id={}'.format(address,\ + expected_head, start, LIMIT, expected_id) + + try: + response = get_transactions(head_id=expected_head , id=expected_id) + except: + LOGGER.info("Rest Api not reachable") + + + txns = response['data'][:-1] + + 
self.assert_check_transaction_seq(txns, expected_txns, + payload, signer_key) + self.assert_valid_head(response , expected_head) + + def test_api_get_paginated_transaction_list(self, setup): + """Tests GET /transactions is reachbale using paging parameters + """ + LOGGER.info("Starting test for transactions with paging parameters") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = 1 + limit = 1 + + try: + response = get_transactions(start=start , limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_PAGING_QUERY) + + def test_api_get_transaction_bad_paging(self, setup): + """Tests GET /transactions is reachbale using bad paging parameters + """ + LOGGER.info("Starting test for transactions with bad paging parameters") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = -1 + limit = -1 + + try: + response = get_transactions(start=start , limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_COUNT_QUERY) + + def test_api_get_transaction_list_invalid_start(self, setup): + """Tests that GET /transactions is unreachable with invalid start parameter + """ + LOGGER.info("Starting test for transactions with invalid start parameter") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + start = -1 + + try: + response = get_transactions(start=start) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, 
INVALID_PAGING_QUERY) + + def test_api_get_transaction_list_invalid_limit(self, setup): + """Tests that GET /transactions is unreachable with bad limit parameter + """ + LOGGER.info("Starting test for transactions with bad limit parameter") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + limit = 0 + + try: + response = get_transactions(limit=limit) + except urllib.error.HTTPError as error: + data = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(data['error']['title']) + LOGGER.info(data['error']['message']) + + self.assert_valid_error(data, INVALID_COUNT_QUERY) + + + def test_api_get_transaction_list_reversed(self, setup): + """verifies that GET /transactions with list reversed + """ + LOGGER.info("Starting test for transactions with list reversed") + batch_ids = setup['batch_ids'] + expected_head = setup['expected_head'] + expected_id = batch_ids[0] + reverse = True + + try: + response = get_transactions(reverse=reverse) + except urllib.error.HTTPError as error: + assert response.code == 400 + + assert response['paging']['start'] == None , "request is not correct" + assert response['paging']['limit'] == None , "request is not correct" + assert bool(response['data']) == True + + def test_api_get_transactions_link_val(self, setup): + """Tests/ validate the transactions parameters with transactions, head, start and limit + """ + try: + transactions_list = get_transactions() + for link in transactions_list: + if(link == 'link'): + assert 'head' in transactions_list['link'] + assert 'start' in transactions_list['link'] + assert 'limit' in transactions_list['link'] + assert 'transactions' in transactions_list['link'] + except urllib.error.HTTPError as error: + assert response.code == 400 + LOGGER.info("Link is not proper for transactions and parameters are missing") + + def test_api_get_transactions_key_params(self, setup): + """Tests/ validate the state key parameters with data, head, link and paging + 
""" + response = get_transactions() + assert 'link' in response + assert 'data' in response + assert 'paging' in response + assert 'head' in response + + def test_api_get_transaction_id_length(self, setup): + """Tests the transaction id length should be 128 hex character long + """ + try: + transaction_list = get_transactions() + for trans in transaction_list['data']: + transaction_ids = trans['header_signature'] + head_len = len(transaction_ids) + except urllib.error.HTTPError as error: + LOGGER.info("Transaction id length is not 128 hex character long") + assert head_len == HEAD_LENGTH + + def test_rest_api_check_transactions_count(self, setup): + """Tests transaction count from transaction list + """ + count =0 + try: + batch_list = get_transactions() + for batch in enumerate(batch_list['data']): + count = count+1 + except urllib.error.HTTPError as error: + LOGGER.info("Transaction count not able to collect") + +class TesttransactionGet(RestApiBaseTest): + def test_api_get_transaction_id(self, setup): + """Tests that GET /transactions/{transaction_id} is reachable + """ + LOGGER.info("Starting test for transaction/{transaction_id}") + expected_head = setup['expected_head'] + expected_id = setup['transaction_ids'][0] + address = setup['address'] + expected_length = 1 + + expected_link = '{}/transactions/{}'.format(address,expected_id) + + try: + response = get_transaction_id(transaction_id=expected_id) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + self.assert_valid_link(response, expected_link) + assert bool(response['data']) == True + + def test_api_get_transaction_bad_id(self, setup): + """Tests that GET /transactions/{transaction_id} is not reachable + with bad id + """ + LOGGER.info("Starting test for transactions/{transaction_id}") + try: + response = 
get_transaction_id(transaction_id=BAD_ID) + except urllib.error.HTTPError as error: + LOGGER.info("Rest Api not reachable") + response = json.loads(error.fp.read().decode('utf-8')) + LOGGER.info(response['error']['title']) + LOGGER.info(response['error']['message']) + + self.assert_valid_error(response, INVALID_RESOURCE_ID) + + + + \ No newline at end of file diff --git a/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py b/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py new file mode 100644 index 0000000000..6a7c73281a --- /dev/null +++ b/rest_api/tests/api_test/mul/test_rest_api_mul_validator.py @@ -0,0 +1,128 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------------------------ +import pytest +import logging +import json +import urllib.request +import urllib.error +import base64 +import argparse +import cbor +import subprocess +import shlex +import requests +import time +import paramiko +import sys +import threading +import os +import signal + + +from google.protobuf.json_format import MessageToDict + +from base import RestApiBaseTest +from payload import get_signer, create_intkey_transaction , create_batch +from utils import _get_client_address, _send_cmd, _get_node_list, \ + _get_node_chain, check_for_consensus, _stop_validator\ + +from workload import Workload +from ssh import SSH +from thread import Workload_thread, SSH_thread, Consensus_Thread,\ + wait_for_event, wait_for_event_timeout + + + +logging.basicConfig(level=logging.INFO, + format='[%(levelname)s] (%(threadName)-10s) %(message)s', + ) + +WAIT_TIME = 10 +PORT =22 +USERNAME = 'test' +PASSWORD = 'aditya9971' + +BLOCK_TO_CHECK_CONSENSUS = 1 + +pytestmark = pytest.mark.mul + + +class TestMultiple(RestApiBaseTest): + def test_rest_api_mul_val_intk(self): + """Tests that transactions are submitted and committed for + each block that are created by submitting intkey and XO batches + """ + signer = get_signer() + expected_trxns = {} + expected_batches = [] + node_list = [{_get_client_address()}] + + logging.info('Starting Test for Intkey payload') + + logging.info("Creating intkey batches") + + txns = [ + create_intkey_transaction("set", [] , 50 , signer), + create_intkey_transaction("set", [] , 50 , signer), + ] + + for txn in txns: + dict = MessageToDict( + txn, + including_default_value_fields=True, + preserving_proto_field_name=True) + + expected_trxns['trxn_id'] = [dict['header_signature']] + expected_trxns['payload'] = [dict['payload']] + + logging.info("Creating batches for transactions 1trn/batch") + + batches = [create_batch([txn], signer) for txn in txns] + + node_list = _get_node_list() + + 
chains = _get_node_chain(node_list) + check_for_consensus(chains , BLOCK_TO_CHECK_CONSENSUS) + + def test_rest_api_mul_val_Node(self): + """Tests that leaf nodes are brought up/down in a network + and checks are performed on the respective nodes + """ + leaf_nodes = ['10.223.155.134', '10.223.155.25'] + threads = [] + + workload_thread = Workload_thread() + workload_thread.setName('workload_thread') + workload_thread.start() + + consensus_thread = Consensus_Thread(leaf_nodes) + consensus_thread.setName('consensus_thread') + consensus_thread.setDaemon(True) + consensus_thread.start() + + for node in leaf_nodes: + ssh_thread = SSH_thread(node,PORT,USERNAME,PASSWORD) + ssh_thread.setName('ssh_thread') + threads.append(ssh_thread) + + for thread in threads: + thread.start() + thread.join() + + consensus_thread.join() + workload_thread.join() + + + \ No newline at end of file diff --git a/rest_api/tests/api_test/payload.py b/rest_api/tests/api_test/payload.py new file mode 100644 index 0000000000..4df8269b00 --- /dev/null +++ b/rest_api/tests/api_test/payload.py @@ -0,0 +1,273 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# ------------------------------------------------------------------------------

"""Transaction/batch builders shared by the REST API test suites.

All the ``create_*`` helpers produce a signed intkey ``Transaction``;
they differ only in the target address or family name, so the common
header/sign/assemble sequence lives in ``_build_transaction``.
"""

import pytest
import logging
import json
import base64
import argparse
import cbor
import hashlib
import os
import time
import random
import string


from sawtooth_signing import create_context
from sawtooth_signing import CryptoFactory
from sawtooth_signing import ParseError
from sawtooth_signing.secp256k1 import Secp256k1PrivateKey

from sawtooth_rest_api.protobuf.validator_pb2 import Message
from sawtooth_rest_api.protobuf import client_batch_submit_pb2
from sawtooth_rest_api.protobuf import client_batch_pb2
from sawtooth_rest_api.protobuf import client_list_control_pb2

from sawtooth_rest_api.protobuf.batch_pb2 import Batch
from sawtooth_rest_api.protobuf.batch_pb2 import BatchList
from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader
from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader
from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction

from google.protobuf.message import DecodeError
from google.protobuf.json_format import MessageToDict

# First six hex chars of sha512('intkey'): the intkey namespace prefix.
INTKEY_ADDRESS_PREFIX = hashlib.sha512(
    'intkey'.encode('utf-8')).hexdigest()[0:6]

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

WAIT = 300


class IntKeyPayload(object):
    """An intkey payload with lazily computed CBOR encoding and digest."""

    def __init__(self, verb, name, value):
        self._verb = verb
        self._name = name
        self._value = value

        self._cbor = None    # cached CBOR encoding
        self._sha512 = None  # cached hex digest of the CBOR encoding

    def to_hash(self):
        """Return the payload as the dict shape the intkey TP expects."""
        return {
            'Verb': self._verb,
            'Name': self._name,
            'Value': self._value
        }

    def to_cbor(self):
        """Return (and cache) the canonical CBOR encoding."""
        if self._cbor is None:
            self._cbor = cbor.dumps(self.to_hash(), sort_keys=True)
        return self._cbor

    def sha512(self):
        """Return (and cache) the sha512 hex digest of the CBOR payload."""
        if self._sha512 is None:
            self._sha512 = hashlib.sha512(self.to_cbor()).hexdigest()
        return self._sha512


def _build_transaction(signer, payload, addr, deps, family_name='intkey'):
    """Build and sign a Transaction for *payload* at address *addr*.

    Shared by every ``create_*`` helper below (previously copy-pasted
    five times).  The signer's public key doubles as the batcher key.
    """
    header = TransactionHeader(
        signer_public_key=signer.get_public_key().as_hex(),
        family_name=family_name,
        family_version='1.0',
        inputs=[addr],
        outputs=[addr],
        dependencies=deps,
        payload_sha512=payload.sha512(),
        batcher_public_key=signer.get_public_key().as_hex())

    header_bytes = header.SerializeToString()
    signature = signer.sign(header_bytes)

    return Transaction(
        header=header_bytes,
        payload=payload.to_cbor(),
        header_signature=signature)


def create_intkey_transaction(verb, deps, count, signer):
    """Create a valid intkey transaction on a randomly chosen name."""
    name = random.choice(random_word_list(count))
    payload = IntKeyPayload(verb=verb, name=name, value=1)
    return _build_transaction(signer, payload, make_intkey_address(name), deps)


def create_invalid_intkey_transaction(verb, deps, count, signer):
    """Create a transaction whose address prefix is not intkey's
    (namespace 'invalid'), for negative-path tests."""
    name = random.choice(random_word_list(count))
    payload = IntKeyPayload(verb=verb, name=name, value=1)

    invalid_prefix = hashlib.sha512(
        'invalid'.encode('utf-8')).hexdigest()[0:6]
    addr = invalid_prefix + hashlib.sha512(
        name.encode('utf-8')).hexdigest()[-64:]

    return _build_transaction(signer, payload, addr, deps)


def create_intkey_same_transaction(verb, deps, count, signer):
    """Create an intkey transaction that always targets the key 'a',
    so repeated calls collide on the same state address."""
    name = 'a'
    payload = IntKeyPayload(verb=verb, name=name, value=1)
    return _build_transaction(signer, payload, make_intkey_address(name), deps)


def create_intkey_transaction_inv_add(verb, deps, count, signer):
    """Create an intkey transaction with a malformed (too-short) address,
    for negative-path tests."""
    name = random.choice(random_word_list(count))
    payload = IntKeyPayload(verb=verb, name=name, value=1)
    return _build_transaction(
        signer, payload, make_intkey_bad_address(name), deps)


def create_intkey_transaction_inv_fam_nam(verb, deps, count, signer):
    """Create a transaction with an unknown family name ('abcdef'),
    for negative-path tests."""
    name = random.choice(random_word_list(count))
    payload = IntKeyPayload(verb=verb, name=name, value=1)
    return _build_transaction(
        signer, payload, make_intkey_address(name), deps,
        family_name='abcdef')


def create_batch(transactions, signer):
    """Wrap *transactions* (a list of Transaction) into one signed Batch."""
    transaction_signatures = [t.header_signature for t in transactions]

    header = BatchHeader(
        signer_public_key=signer.get_public_key().as_hex(),
        transaction_ids=transaction_signatures)

    header_bytes = header.SerializeToString()
    signature = signer.sign(header_bytes)

    return Batch(
        header=header_bytes,
        transactions=transactions,
        header_signature=signature)


def get_signer():
    """Return a signer backed by a fresh random secp256k1 private key."""
    context = create_context('secp256k1')
    private_key = context.new_random_private_key()
    return CryptoFactory(context).new_signer(private_key)


def make_intkey_address(name):
    """Return the 70-char intkey state address for key *name*."""
    return INTKEY_ADDRESS_PREFIX + hashlib.sha512(
        name.encode('utf-8')).hexdigest()[-64:]


def make_intkey_bad_address(name):
    """Return a deliberately invalid (68-char) intkey address."""
    return INTKEY_ADDRESS_PREFIX + hashlib.sha512(
        name.encode('utf-8')).hexdigest()[-62:]


def random_word():
    """Return a random six-letter ASCII word."""
    return ''.join(random.choice(string.ascii_letters) for _ in range(6))


def random_word_list(count):
    """Return the first *count* system-dictionary words when available,
    otherwise *count* random words."""
    if os.path.isfile('/usr/share/dict/words'):
        with open('/usr/share/dict/words', 'r') as fd:
            return [x.strip() for x in fd.readlines()[0:count]]
    return [random_word() for _ in range(count)]
# ------------------------------------------------------------------------------
"""POST /batches tests for the REST API: happy path, malformed input,
wrong headers, duplicate keys, and mismatched signers."""
import pytest
import logging
import json
import urllib.request
import urllib.error
import base64
import argparse
import cbor
import subprocess
import shlex
import requests
import hashlib

from google.protobuf.json_format import MessageToDict


from sawtooth_signing import create_context
from sawtooth_signing import CryptoFactory
from sawtooth_signing import ParseError
from sawtooth_signing.secp256k1 import Secp256k1PrivateKey

from sawtooth_rest_api.protobuf.batch_pb2 import Batch
from sawtooth_rest_api.protobuf.batch_pb2 import BatchList
from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader
from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader
from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction

from utils import post_batch, get_state_list, get_blocks, get_transactions, \
    get_batches, get_state_address, check_for_consensus, \
    _get_node_list, _get_node_chains


from payload import get_signer, create_intkey_transaction, create_batch, \
    create_intkey_same_transaction, create_intkey_transaction_inv_add, \
    create_intkey_transaction_inv_fam_nam

from base import RestApiBaseTest

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

BAD_PROTOBUF = b'BAD_PROTOBUF'
EMPTY_BATCH = b''
NO_BATCHES_SUBMITTED = 34
BAD_PROTOBUF_SUBMITTED = 35
BATCH_QUEUE_FULL = 31
INVALID_BATCH = 30
WRONG_CONTENT_TYPE = 43

BLOCK_TO_CHECK_CONSENSUS = 1

pytestmark = pytest.mark.post


def _header_signatures(messages):
    """Return the header_signature of each protobuf message (txn or batch)."""
    return [
        MessageToDict(
            message,
            including_default_value_fields=True,
            preserving_proto_field_name=True)['header_signature']
        for message in messages]


def _serialized_batch_lists(batches):
    """Serialize each batch into its own one-element BatchList."""
    return [BatchList(batches=[batch]).SerializeToString()
            for batch in batches]


class TestPost(RestApiBaseTest):
    def test_rest_api_post_batch_inv_fam_nam(self):
        """A batch whose transaction names an unknown family stays PENDING."""
        signer = get_signer()

        LOGGER.info("Creating wrong family named intkey transactions with set operations")
        txns = [
            create_intkey_transaction_inv_fam_nam("set", [], 50, signer),
        ]
        expected_trxn_ids = _header_signatures(txns)

        LOGGER.info("Creating batches for transactions 1trn/batch")
        batches = [create_batch([txn], signer) for txn in txns]
        expected_batch_ids = _header_signatures(batches)

        for batch in _serialized_batch_lists(batches):
            response = None
            try:
                response = post_batch(batch)
            except urllib.error.HTTPError as error:
                LOGGER.info(error.fp.read().decode('utf-8'))
            # Bug fix: the original read `response` inside the except
            # handler, where it may never have been assigned.
            if response and response['data'][0]['status'] == 'PENDING':
                LOGGER.info('Batch status is pending as family name is not valid')

    def test_rest_api_post_batch_inv_intkey_add(self):
        """A batch whose transaction targets a malformed address is INVALID."""
        signer = get_signer()

        LOGGER.info("Creating wrong family address intkey transactions with set operations")
        txns = [
            create_intkey_transaction_inv_add("set", [], 50, signer),
        ]
        expected_trxn_ids = _header_signatures(txns)

        LOGGER.info("Creating batches for transactions 1trn/batch")
        batches = [create_batch([txn], signer) for txn in txns]
        expected_batch_ids = _header_signatures(batches)

        for batch in _serialized_batch_lists(batches):
            response = None
            try:
                response = post_batch(batch)
            except urllib.error.HTTPError as error:
                LOGGER.info(error.fp.read().decode('utf-8'))
            # Bug fix: guard against `response` never having been assigned.
            if response and response['data'][0]['status'] == 'INVALID':
                LOGGER.info('Batch status is invalid as family address is not valid')

    def test_rest_api_post_batch(self):
        """Valid intkey batches get COMMITTED, appear in blocks, extend
        state, and all nodes reach consensus afterwards.
        """
        LOGGER.info('Starting test for batch post')

        signer = get_signer()
        initial_state_length = len(get_state_list()['data'])

        LOGGER.info("Creating intkey transactions with set operations")
        txns = [
            create_intkey_transaction("set", [], 50, signer),
            create_intkey_transaction("set", [], 50, signer),
        ]
        expected_trxn_ids = _header_signatures(txns)

        LOGGER.info("Creating batches for transactions 1trn/batch")
        batches = [create_batch([txn], signer) for txn in txns]
        expected_batch_ids = _header_signatures(batches)

        LOGGER.info("Submitting batches to the handlers")
        response = None
        for batch in _serialized_batch_lists(batches):
            try:
                response = post_batch(batch)
            except urllib.error.HTTPError as error:
                LOGGER.info(error.fp.read().decode('utf-8'))

        block_batch_ids = [block['header']['batch_ids'][0]
                           for block in get_blocks()['data']]
        state_addresses = [state['address']
                           for state in get_state_list()['data']]
        state_head_list = [get_state_address(address)['head']
                           for address in state_addresses]
        committed_transaction_list = get_transactions()['data']

        if response['data'][0]['status'] == 'COMMITTED':
            LOGGER.info('Batch is committed')
            for batch in expected_batch_ids:
                if batch in block_batch_ids:
                    LOGGER.info("Block is created for the respective batch")
        elif response['data'][0]['status'] == 'INVALID':
            LOGGER.info('Batch submission failed')
            if 'message' in response['data'][0]['invalid_transactions'][0]:
                LOGGER.info(
                    response['data'][0]['invalid_transactions'][0]['message'])
            # Bug fix: the original iterated the undefined name
            # `batch_ids` here (NameError); use the collected ids.
            for batch in expected_batch_ids:
                if batch in block_batch_ids:
                    LOGGER.info("Block is created for the respective batch")

        final_state_length = len(get_state_list()['data'])
        node_list = _get_node_list()
        chains = _get_node_chains(node_list)
        assert final_state_length == initial_state_length + len(expected_batch_ids)
        assert check_for_consensus(chains, BLOCK_TO_CHECK_CONSENSUS)

    def test_rest_api_no_batches(self):
        """Posting an empty body must yield error code 34."""
        LOGGER.info("Starting test for batch with no batches")

        try:
            response = post_batch(batch=EMPTY_BATCH)
        except urllib.error.HTTPError as error:
            response = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(response['error']['title'])
            LOGGER.info(response['error']['message'])

        self.assert_valid_error(response, NO_BATCHES_SUBMITTED)

    def test_rest_api_bad_protobuf(self):
        """Posting undecodable protobuf must yield error code 35."""
        LOGGER.info("Starting test for batch with bad protobuf")

        try:
            response = post_batch(batch=BAD_PROTOBUF)
        except urllib.error.HTTPError as error:
            response = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(response['error']['title'])
            LOGGER.info(response['error']['message'])

        self.assert_valid_error(response, BAD_PROTOBUF_SUBMITTED)

    def test_rest_api_post_wrong_header(self, setup):
        """Posting with the wrong Content-Type must yield 400/code 42."""
        LOGGER.info('Starting test for batch post')

        signer = get_signer()

        LOGGER.info("Creating intkey transactions with set operations")
        txns = [
            create_intkey_transaction("set", [], 50, signer),
            create_intkey_transaction("set", [], 50, signer),
            create_intkey_transaction("set", [], 50, signer),
        ]
        expected_trxn_ids = _header_signatures(txns)

        LOGGER.info("Creating batches for transactions 1trn/batch")
        batches = [create_batch([txn], signer) for txn in txns]
        expected_batch_ids = _header_signatures(batches)

        LOGGER.info("Submitting batches to the handlers")
        for batch in _serialized_batch_lists(batches):
            try:
                # headers="True" makes post_batch send application/json.
                response = post_batch(batch, headers="True")
            except urllib.error.HTTPError as e:
                error = json.loads(e.file.read().decode("utf-8"))
                LOGGER.info(error['error']['message'])
                assert error['error']['code'] == 42
                assert e.code == 400

    def test_rest_api_post_same_txns(self, setup):
        """Submitting several transactions on the same key must be INVALID."""
        LOGGER.info('Starting test for batch post')

        signer = get_signer()

        LOGGER.info("Creating intkey transactions with set operations")
        txns = [
            create_intkey_same_transaction("set", [], 50, signer),
            create_intkey_same_transaction("set", [], 50, signer),
            create_intkey_same_transaction("set", [], 50, signer),
        ]
        expected_trxn_ids = _header_signatures(txns)

        LOGGER.info("Creating batches for transactions 1trn/batch")
        batches = [create_batch([txn], signer) for txn in txns]
        expected_batch_ids = _header_signatures(batches)

        LOGGER.info("Submitting batches to the handlers")
        for batch in _serialized_batch_lists(batches):
            try:
                response = post_batch(batch, headers="None")
                assert response['data'][0]['status'] == "INVALID"
            except urllib.error.HTTPError as e:
                error = json.loads(e.file.read().decode("utf-8"))
                LOGGER.info(error['error']['message'])
                assert error['error']['code'] == 42
                assert e.code == 400

    def test_rest_api_multiple_txns_batches(self, setup):
        """Submit multiple transactions inside a single batch and check
        the state list afterwards.
        """
        LOGGER.info('Starting test for batch post')

        signer = get_signer()
        # NOTE(review): this measures the length of the response dict
        # (its key count), not of its 'data' list — kept as the original
        # behaved, but probably intended len(get_state_list()['data']).
        initial_state_length = len(get_state_list())

        LOGGER.info("Creating intkey transactions with set operations")
        txns = [
            create_intkey_transaction("set", [], 50, signer),
            create_intkey_transaction("set", [], 50, signer),
            create_intkey_transaction("set", [], 50, signer),
        ]
        expected_trxn_ids = _header_signatures(txns)

        LOGGER.info("Creating one batch holding all transactions")
        # Bug fix: the original called create_batch([txns], signer),
        # wrapping the txn list in another list and crashing on
        # `t.header_signature`; pass the transactions directly.
        batches = [create_batch(txns, signer)]
        expected_batch_ids = _header_signatures(batches)

        LOGGER.info("Submitting batches to the handlers")
        for batch in _serialized_batch_lists(batches):
            try:
                response = post_batch(batch, headers="None")
                response = get_state_list()
            except urllib.error.HTTPError as e:
                error = json.loads(e.file.read().decode("utf-8"))
                LOGGER.info(error['error']['message'])
                assert error['error']['code'] == 17
                assert e.code == 400

        final_state_length = len(get_state_list())
        assert initial_state_length == final_state_length

    def test_api_post_batch_different_signer(self, setup):
        """A batch signed by a different key than its transactions must be
        rejected with code 30 (Submitted Batches Invalid).

        Note: the original file defined this test twice with identical
        bodies; the duplicate (which silently shadowed this one) was
        removed.
        """
        signer_trans = get_signer()
        intkey = create_intkey_transaction("set", [], 50, signer_trans)
        translist = [intkey]
        signer_batch = get_signer()
        batch = create_batch(translist, signer_batch)
        batch_list = [BatchList(batches=[batch]).SerializeToString()]
        for batc in batch_list:
            try:
                response = post_batch(batc)
                print(response)
            except urllib.error.HTTPError as error:
                LOGGER.info("Rest Api is not reachable")
                data = json.loads(error.fp.read().decode('utf-8'))
                LOGGER.info(data['error']['title'])
                LOGGER.info(data['error']['message'])
                assert data['error']['code'] == 30
                assert data['error']['title'] == 'Submitted Batches Invalid'
--json-report --json-report-file=report.json +python_files = test_rest*.py +log_cli_date_format = %Y-%m-%d %H:%M:%S +log_cli_format = %(asctime)s %(levelname)s %(message)s +log_date_format = %Y-%m-%d %H:%M:%S +log_file = pytest-logs.txt +log_file_date_format = %Y-%m-%d %H:%M:%S +log_file_format = %(asctime)s %(levelname)s %(message)s +log_format = %(asctime)s %(levelname)s %(message)s diff --git a/rest_api/tests/api_test/scenario/test_rest_api_scenario.py b/rest_api/tests/api_test/scenario/test_rest_api_scenario.py new file mode 100644 index 0000000000..bc4510c05d --- /dev/null +++ b/rest_api/tests/api_test/scenario/test_rest_api_scenario.py @@ -0,0 +1,130 @@ +# Copyright 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# ------------------------------------------------------------------------------

"""Scenario test: mixed intkey batches plus a full XO game driven through
the CLI, followed by a network-wide consensus check."""

import pytest
import logging
import json
import urllib.request
import urllib.error
import base64
import argparse
import cbor
import subprocess
import shlex
import requests
import time

from google.protobuf.json_format import MessageToDict

from payload import get_signer, create_intkey_transaction, create_batch
from utils import _get_client_address, _send_cmd, _get_node_list, \
    _get_node_chain, check_for_consensus

from base import RestApiBaseTest

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)
WAIT = 300

WORKLOAD_TIME = 5

BLOCK_TO_CHECK_CONSENSUS = 1

# Namespace prefixes of the two transaction families exercised here.
INTKEY_PREFIX = '1cf126'
XO_PREFIX = '5b7349'


pytestmark = pytest.mark.scenario


class TestScenario(RestApiBaseTest):
    def test_rest_api_mul_val_intk_xo(self):
        """Submit intkey batches, play (and delete) an XO game via the
        CLI, then verify every node reaches consensus.
        """
        signer = get_signer()
        expected_trxns = {'trxn_id': [], 'payload': []}

        LOGGER.info('Starting Test for Intkey and Xo as payload')
        LOGGER.info("Creating intkey batches")

        txns = [
            create_intkey_transaction("set", [], 50, signer),
            create_intkey_transaction("set", [], 50, signer),
        ]

        for txn in txns:
            txn_dict = MessageToDict(
                txn,
                including_default_value_fields=True,
                preserving_proto_field_name=True)
            # Bug fix: the original overwrote these lists on every
            # iteration (and shadowed the builtin `dict`); accumulate
            # one entry per transaction instead.
            expected_trxns['trxn_id'].append(txn_dict['header_signature'])
            expected_trxns['payload'].append(txn_dict['payload'])

        LOGGER.info("Creating batches for transactions 1trn/batch")

        batches = [create_batch([txn], signer) for txn in txns]

        LOGGER.info("Creating keys for xo users")

        for username in ('aditya', 'singh'):
            _send_cmd('sawtooth keygen {} --force'.format(username))

        LOGGER.info("Submitting xo batches to the handlers")

        xo_cmds = (
            'xo create game-1 --username aditya',
            'xo take game-1 1 --username singh',
            'xo take game-1 4 --username aditya',
            'xo take game-1 2 --username singh',
        )

        for cmd in xo_cmds:
            _send_cmd(
                '{} --url {} --wait {}'.format(
                    cmd,
                    _get_client_address(),
                    WAIT))

        xo_cli_cmds = (
            'xo list',
            'xo show game-1',
        )

        for cmd in xo_cli_cmds:
            _send_cmd(
                '{} --url {}'.format(
                    cmd,
                    _get_client_address()))

        xo_delete_cmds = (
            'xo delete game-1 --username aditya',
        )

        for cmd in xo_delete_cmds:
            _send_cmd(
                '{} --url {} --wait {}'.format(
                    cmd,
                    _get_client_address(),
                    WAIT))

        node_list = _get_node_list()
        chains = _get_node_chain(node_list)
        check_for_consensus(chains, BLOCK_TO_CHECK_CONSENSUS)
# ------------------------------------------------------------------------------

"""Thin paramiko wrapper used by the multi-node tests."""

import sys

import paramiko


class SSH():
    def do_ssh(self, hostname, port, username, password):
        """Connect to *hostname* and return the output of
        ``ps aux | grep sawtooth``.

        Exits the process with status 1 on bad credentials and 2 on any
        other connection failure (mirrors the original behaviour, but
        via sys.exit instead of the interactive-only `exit` builtin).
        """
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        try:
            ssh.connect(hostname, port, username, password)
        except paramiko.AuthenticationException:
            print("Failed to connect to {} due to wrong username/password".format(hostname))
            sys.exit(1)
        except Exception:
            # Narrowed from a bare `except:` that also swallowed
            # KeyboardInterrupt/SystemExit.
            print("Failed to connect to {}".format(hostname))
            sys.exit(2)

        try:
            command = 'ps aux | grep sawtooth'
            stdin, stdout, stderr = ssh.exec_command(command)
            resp = ''.join(stdout.readlines())
        finally:
            # Bug fix: close the connection even if exec_command fails.
            ssh.close()
        # Bug fix: the original computed `resp` and dropped it.
        return resp
# ------------------------------------------------------------------------------
"""Worker threads used by the multi-node REST API tests: workload
generation, SSH sessions to validators, and a consensus watcher."""
import queue
import threading
import os
import logging


from workload import Workload
from ssh import SSH
from utils import _get_node_chains

logging.basicConfig(level=logging.INFO,
                    format='(%(threadName)-10s) %(message)s',
                    )


def wait_for_event(e):
    """Block until event *e* is set."""
    logging.debug('wait_for_event starting')
    event_is_set = e.wait()
    logging.debug('event set: %s', event_is_set)


def wait_for_event_timeout(e, t):
    """Poll event *e* every *t* seconds until it is set."""
    # `is_set()` replaces the deprecated camelCase alias `isSet()`.
    while not e.is_set():
        logging.debug('wait_for_event_timeout starting')
        event_is_set = e.wait(t)
        logging.debug('event set: %s', event_is_set)
        if event_is_set:
            logging.debug('processing event')
        else:
            logging.debug('doing other work')


class Workload_thread(threading.Thread):
    """Runs the intkey workload generator to completion."""

    def __init__(self):
        threading.Thread.__init__(self)
        self.shutdown_flag = threading.Event()

    def run(self):
        logging.info('Starting Workload')
        workload = Workload()
        workload.do_workload()

    def stop(self):
        # Placeholder: the workload currently runs to completion on its own.
        pass


class SSH_thread(threading.Thread):
    """Opens an SSH session to one validator node."""

    def __init__(self, hostname, port, username, password):
        threading.Thread.__init__(self)
        self.hostname = hostname
        self.port = port
        self.username = username
        self.password = password

    def run(self):
        logging.info('starting ssh thread')
        logging.info('Logging into Validation Network')
        self.ssh()
        logging.info('Exiting ssh thread')

    def ssh(self):
        logging.info('creating ssh object')
        ssh = SSH()
        logging.info('performing ssh')
        ssh.do_ssh(self.hostname, self.port, self.username, self.password)

    def stop_validator(self):
        # Bug fix: the original called the undefined name `loggin.info`,
        # which raised NameError when invoked.
        logging.info("stopping validator service")

    def start_validator(self):
        # Bug fix: same `loggin` -> `logging` typo as above.
        logging.info("starting validator service")


class Consensus_Thread(threading.Thread):
    """Fetches each node's chain and compares them for consensus."""

    def __init__(self, nodes):
        threading.Thread.__init__(self)
        self.shutdown_flag = threading.Event()
        self.nodes = nodes

    def run(self):
        logging.info('starting consensus thread')
        logging.info('calculating block list from the nodes')
        chains = self.calculate_block_list()
        self.compare_chains(chains)

    def calculate_block_list(self):
        logging.info('getting block list from the nodes')
        # NOTE(review): hard-coded node address; presumably this should
        # be derived from self.nodes — confirm before relying on it.
        node_list = ['http://10.223.155.43:8008']
        chains = _get_node_chains(node_list)
        return chains

    def compare_chains(self, chains):
        # Placeholder: comparison logic not implemented yet.
        logging.info('comparing chains for equality')

    def calculate_sync_time(self):
        pass
# ------------------------------------------------------------------------------
"""Shared helpers for the Sawtooth REST API integration tests.

Thin wrappers around the REST API's list/fetch endpoints (/blocks, /batches,
/transactions, /state, /batch_statuses, /receipts, /peers) plus process
helpers used to (re)start the validator and transaction processors on the
test host.
"""

import pytest
import logging
import json
import urllib.request
import urllib.error
from urllib.request import urlopen
from urllib.error import HTTPError
from urllib.error import URLError
import base64
import argparse
import cbor
import subprocess
import shlex
import requests
import hashlib
import os
import time
import socket
import netifaces


LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

# Maximum number of seconds the REST API is asked to wait for a batch commit.
WAIT = 300


def _query_with_filters(endpoint, head_id=None, key_name=None, key_value=None,
                        start=None, limit=None, reverse=None):
    """Submit a list query against *endpoint* with the filter precedence
    used throughout these tests:

        head+key > start+limit > limit > start > head > key > reverse > none
    """
    if head_id is not None and key_value is not None:
        return query_rest_api(
            '/{}?head={}&{}={}'.format(endpoint, head_id, key_name, key_value))
    if start is not None and limit is not None:
        return query_rest_api(
            '/{}?start={}&limit={}'.format(endpoint, start, limit))
    if limit is not None:
        return query_rest_api('/{}?limit={}'.format(endpoint, limit))
    if start is not None:
        return query_rest_api('/{}?start={}'.format(endpoint, start))
    if head_id is not None:
        return query_rest_api('/{}?head={}'.format(endpoint, head_id))
    if key_value is not None:
        return query_rest_api('/{}?{}={}'.format(endpoint, key_name, key_value))
    if reverse:
        # The REST API accepts a bare ?reverse flag with no value.
        return query_rest_api('/{}?reverse'.format(endpoint))
    return query_rest_api('/{}'.format(endpoint))


def get_blocks(head_id=None, id=None, start=None, limit=None, reverse=None):
    """Fetch /blocks, optionally filtered (see _query_with_filters)."""
    return _query_with_filters('blocks', head_id=head_id, key_name='id',
                               key_value=id, start=start, limit=limit,
                               reverse=reverse)


def get_batches(head_id=None, id=None, start=None, limit=None, reverse=None):
    """Fetch /batches, optionally filtered (see _query_with_filters)."""
    return _query_with_filters('batches', head_id=head_id, key_name='id',
                               key_value=id, start=start, limit=limit,
                               reverse=reverse)


def get_batch_id(batch_id):
    """Fetch a single batch by its id."""
    return query_rest_api('/batches/%s' % batch_id)


def get_block_id(block_id):
    """Fetch a single block by its id."""
    return query_rest_api('/blocks/%s' % block_id)


def get_transaction_id(transaction_id):
    """Fetch a single transaction by its id."""
    return query_rest_api('/transactions/%s' % transaction_id)


def get_peers():
    """Fetch the validator's peer list."""
    return query_rest_api('/peers')


def get_transactions(head_id=None, id=None, start=None, limit=None,
                     reverse=None):
    """Fetch /transactions, optionally filtered (see _query_with_filters)."""
    return _query_with_filters('transactions', head_id=head_id, key_name='id',
                               key_value=id, start=start, limit=limit,
                               reverse=reverse)


def get_state_list(head_id=None, address=None, start=None, limit=None,
                   reverse=None):
    """Fetch /state, optionally filtered; state filters by address, not id."""
    return _query_with_filters('state', head_id=head_id, key_name='address',
                               key_value=address, start=start, limit=limit,
                               reverse=reverse)


def get_state_address(address):
    """Fetch a single state entry by address."""
    return query_rest_api('/state/%s' % address)


def post_batch(batch, headers="None"):
    """POST a serialized batch list, then poll its status link.

    ``headers="True"`` posts with a JSON content type (used by negative
    tests); any other value posts the default octet-stream.
    """
    if headers == "True":
        content_type = {'Content-Type': 'application/json'}
    else:
        content_type = {'Content-Type': 'application/octet-stream'}

    response = query_rest_api('/batches', data=batch, headers=content_type)
    # Follow the returned status link, waiting up to WAIT seconds for commit.
    return submit_request('{}&wait={}'.format(response['link'], WAIT))


def query_rest_api(suffix='', data=None, headers=None):
    """Send a request to this node's REST API and return the decoded JSON."""
    if headers is None:
        headers = {}
    url = _get_client_address() + suffix
    return submit_request(urllib.request.Request(url, data, headers))


def submit_request(request):
    """Submit *request* (Request object or URL string); decode the JSON reply."""
    response = urllib.request.urlopen(request).read().decode('utf-8')
    return json.loads(response)


def _delete_genesis():
    """Remove all files from the validator's data directory."""
    folder = '/var/lib/sawtooth'
    for the_file in os.listdir(folder):
        file_path = os.path.join(folder, the_file)
        try:
            if os.path.isfile(file_path):
                os.unlink(file_path)
        except OSError as e:
            # Best effort: report the failure but keep deleting the rest.
            print(e)


def _get_node_chains(node_list):
    """Return each reachable node's block list; skip nodes that are down."""
    chain_list = []
    for node in node_list:
        try:
            result = requests.get(node + "/blocks").json()
            chain_list.append(result['data'])
        except (requests.exceptions.RequestException, ValueError, KeyError):
            # Narrowed from a bare except: connection/JSON/shape failures only.
            LOGGER.warning("Couldn't connect to %s REST API", node)
    return chain_list


def _get_node_chain(node_list):
    """Alias of _get_node_chains, kept for existing callers."""
    return _get_node_chains(node_list)


def _get_node_list():
    """Return REST API URLs for this node and all of its peers."""
    client_address = _get_client_address()
    node_list = [_make_http_address(peer)
                 for peer in _get_peers_list(client_address)]
    # Hoisted: reuse the already-computed client address instead of a second
    # subprocess call.
    node_list.append(client_address)
    return node_list


def _get_peers_list(rest_client, fmt='json'):
    """Run `sawtooth peer list` against *rest_client*; return a set of peers."""
    cmd_output = _run_peer_command(
        'sawtooth peer list --url {} --format {}'.format(rest_client, fmt))

    if fmt == 'json':
        parsed = json.loads(cmd_output)
    elif fmt == 'csv':
        parsed = cmd_output.split(',')
    else:
        # The original fell through to a NameError here; fail explicitly.
        raise ValueError('unsupported peer list format: {}'.format(fmt))

    return set(parsed)


def check_for_consensus(chains, block_num):
    """Return True when every chain agrees on the block at height *block_num*.

    *chains* is a list of block lists (newest first, as /blocks returns them),
    so ``chain[-(block_num + 1)]`` addresses the block at that height.
    A None entry means a node was unreachable: no consensus.
    """
    LOGGER.info("Checking Consensus on block number %s", block_num)
    blocks = []
    for chain in chains:
        if chain is None:
            return False
        blocks.append(chain[-(block_num + 1)])

    block0 = blocks[0]
    for block in blocks[1:]:
        if block0["header_signature"] != block["header_signature"]:
            LOGGER.error("Validators not in consensus on block %s", block_num)
            LOGGER.error("BLOCK DUMP: %s", blocks)
            return False
        LOGGER.info('Validators in Consensus on block number %s', block_num)
    return True


def _run_peer_command(command):
    """Run a CLI command; return stdout with single quotes made JSON-friendly."""
    return subprocess.check_output(
        shlex.split(command)).decode().strip().replace("'", '"')


def _send_cmd(cmd_str):
    """Run *cmd_str*, raising CalledProcessError on a non-zero exit."""
    LOGGER.info('Sending %s', cmd_str)
    subprocess.run(shlex.split(cmd_str), check=True)


def _make_http_address(node_number):
    """Translate a validator endpoint (tcp://host:8800) to its REST API URL."""
    return node_number.replace('tcp', 'http').replace('8800', '8008')


def _get_client_address():
    """Return this host's REST API URL, derived from its first IP address."""
    command = "hostname -I | awk '{print $1}'"
    node_ip = subprocess.check_output(
        command, shell=True).decode().strip().replace("'", '"')
    return 'http://' + node_ip + ':8008'


def _start_validator():
    """Launch a validator in the background."""
    LOGGER.info('Starting the validator')
    cmd = "sudo -u sawtooth sawtooth-validator -vv"
    subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)


def _stop_validator():
    """Kill any running validator processes."""
    LOGGER.info('Stopping the validator')
    cmd = "sudo kill -9 $(ps aux | grep 'sawtooth-validator' | awk '{print $2}')"
    subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)


def _start_settings_tp():
    """Launch the settings transaction processor in the background."""
    LOGGER.info('Starting settings-tp')
    cmd = " sudo -u sawtooth settings-tp -vv "
    subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)


def _stop_settings_tp():
    """Kill any running settings-tp processes."""
    LOGGER.info('Stopping the settings-tp')
    cmd = "sudo kill -9 $(ps aux | grep 'settings-tp' | awk '{print $2}')"
    subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)


def _create_genesis():
    """Create the genesis block from the config-genesis batch."""
    LOGGER.info("creating the genesis data")
    _create_genesis_batch()
    # NOTE(review): hard-coded to a developer's home directory — parameterize.
    os.chdir("/home/aditya")
    cmd = "sawadm genesis config-genesis.batch"
    subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)


def _create_genesis_batch():
    """Create the config genesis batch (sawset genesis --force)."""
    LOGGER.info("creating the config genesis batch")
    # NOTE(review): hard-coded to a developer's home directory — parameterize.
    os.chdir("/home/aditya")
    cmd = "sawset genesis --force"
    subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)


def post_batch_statuses(batch):
    """POST a JSON list of batch ids to /batch_statuses."""
    headers = {'content-type': 'application/json'}
    return query_rest_api('/batch_statuses', data=batch, headers=headers)


def get_batch_statuses(batch_ids=None, wait=None):
    """GET /batch_statuses for *batch_ids* (joined with commas).

    ``wait='default'`` sends a bare ?wait flag; any other truthy *wait* is
    sent as an explicit timeout value.
    """
    # The original wrapped the join in a bare except; be explicit instead.
    batches = ",".join(batch_ids) if batch_ids else None

    if batches:
        if wait == 'default':
            return query_rest_api('/batch_statuses?wait&id={}'.format(batches))
        if wait:
            return query_rest_api(
                '/batch_statuses?id={}&wait={}'.format(batches, wait))
        return query_rest_api('/batch_statuses?id=%s' % batches)
    return query_rest_api('/batch_statuses')


def get_state_limit(limit):
    """GET /state with an explicit page-size limit."""
    return query_rest_api('/state?limit=%s' % limit)


def get_reciepts(reciept_id):
    """GET /receipts for an id. (Misspelled name kept for existing callers.)"""
    return query_rest_api('/receipts?id=%s' % reciept_id)


def post_receipts(receipts):
    """POST a JSON list of transaction ids to /receipts."""
    headers = {'Content-Type': 'application/json'}
    return query_rest_api('/receipts', data=receipts, headers=headers)


def _next_position(page):
    """Return the paging cursor of a list response, or None on the last page."""
    try:
        return page['paging']['next_position']
    except KeyError:
        return None


def batch_count():
    """Count all batches on the chain, following paging links."""
    page = get_batches()
    count = len(page['data'])
    next_position = _next_position(page)

    while next_position:
        page = get_batches(start=next_position)
        next_position = _next_position(page)
        count += len(page['data'])
    return count


def transaction_count():
    """Count all transactions on the chain, following paging links."""
    page = get_transactions()
    count = len(page['data'])
    next_position = _next_position(page)

    while next_position:
        page = get_transactions(start=next_position)
        next_position = _next_position(page)
        count += len(page['data'])
    return count


def _create_expected_link(expected_ids):
    """Build the expected /batch_statuses link for the last id in *expected_ids*.

    Bug fix: the original referenced an undefined name ``address`` and passed
    two arguments to a three-placeholder format string, so any call raised.
    """
    link = None
    address = _get_client_address()
    for id in expected_ids:
        link = '{}/batch_statuses?id={}'.format(address, id)
    return link


def _get_batch_list(response):
    """Collect the full batch list starting from *response*, following paging."""
    batch_list = response['data']
    next_position = _next_position(response)

    while next_position:
        response = get_batches(start=next_position)
        next_position = _next_position(response)
        batch_list += response['data']
    return batch_list


def _get_transaction_list(response):
    """Collect the full transaction list starting from *response*,
    following paging."""
    transaction_list = response['data']
    next_position = _next_position(response)

    while next_position:
        response = get_transactions(start=next_position)
        next_position = _next_position(response)
        transaction_list += response['data']
    return transaction_list

# ------------------------------------------------------------------------------
# Diff residue preserved from the original patch text (non-Python files):
#
# rest_api/tests/api_test/validators_down.sh (new file, mode 100644):
#   #!/bin/bash
#   sudo kill -9 $(ps aux | grep 'sawtooth' | awk '{print $2}')
#   echo "$(ps aux | grep 'sawtooth')"
#
# rest_api/tests/api_test/validators_up.sh (new file, mode 100644):
#   #!/bin/bash
#   sudo -u sawtooth sawtooth-validator -vv &
#   sudo -u sawtooth settings-tp -vv &
#   sudo -u sawtooth intkey-tp-python -C tcp://127.0.0.1:4004 -v &
#   sudo -u sawtooth xo-tp-python -C tcp://127.0.0.1:4004 -v &
#
# rest_api/tests/api_test/workload.py (new file, mode 100644) begins with the
# standard Apache License, Version 2.0 header:
#   Copyright 2018 Intel Corporation
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#       http://www.apache.org/licenses/LICENSE-2.0
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.
# ------------------------------------------------------------------------------
"""Intkey workload driver for the REST API tests."""

import logging
import subprocess

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)


class Workload():
    """Starts and stops a background intkey workload against a test node."""

    def do_workload(self):
        """Announce the workload start.

        The actual subprocess launch is intentionally left disabled; only the
        log line runs today.
        """
        LOGGER.info('Starting Intkey Workload')
        # cmd = "intkey workload --url 10.223.155.43:8008 --rate 1 -d 1"
        # subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)

    def stop_workload(self):
        """Placeholder — stopping the workload is not implemented yet."""
        pass