from base64 import b64decode


class RestApiBaseTest(object):
    """Base class for REST API tests that provides shared assertion
    helpers for validating block, batch, transaction and state responses.
    """

    def assert_check_batch_nonce(self, response):
        # Placeholder: no batch-level nonce check implemented yet.
        pass

    def assert_check_txn_nonce(self, txn, expected_id):
        # BUG FIX: the original evaluated the comparison without asserting it.
        assert expected_id == txn['header']['nonce']

    def assert_check_family(self, response):
        assert 'family_name' in response
        assert 'family_version' in response

    def assert_check_dependency(self, response):
        assert 'dependencies' in response

    def assert_check_content(self, response):
        assert 'inputs' in response
        assert 'outputs' in response

    def assert_check_payload_algo(self, response):
        assert 'payload_sha512' in response

    def assert_check_payload(self, response, expected_payload=None):
        """Asserts a payload is present; when *expected_payload* (raw bytes)
        is given, also checks it matches the base64-decoded payload.

        BUG FIX: the original referenced undefined names (`payload`, `txn`)
        and called assert_check_payload_algo() without its argument.
        """
        assert 'payload' in response
        if expected_payload is not None:
            assert expected_payload == b64decode(response['payload'])
        self.assert_check_payload_algo(response)

    def assert_batcher_public_key(self, public_key, batch):
        assert public_key == batch['header']['signer_public_key']

    def assert_signer_public_key(self, signer_key, batch):
        # BUG FIX: the original compared the undefined name `public_key`.
        assert signer_key == batch['header']['signer_public_key']

    def assert_check_batch_trace(self, trace):
        assert bool(trace)

    # Backward-compatible alias for the original misspelled method name.
    aasert_check_batch_trace = assert_check_batch_trace

    def assert_check_consensus(self):
        pass

    def assert_state_root_hash(self):
        pass

    def assert_check_previous_block_id(self):
        pass

    def assert_check_block_num(self):
        pass

    def assert_items(self, items, cls):
        """Asserts that all items in a collection are instances of *cls*."""
        for item in items:
            assert isinstance(item, cls)

    def assert_valid_head(self, response, expected):
        """Asserts a response has a head string with an expected value."""
        assert 'head' in response
        head = response['head']
        assert isinstance(head, str)
        assert head == expected

    def assert_valid_link(self, response, expected):
        """Asserts a response has a link url string with an expected ending.

        BUG FIX: the original referenced an undefined local `link`.
        """
        assert 'link' in response
        self.assert_valid_url(response['link'], expected)

    def assert_valid_paging(self, js_response, pb_paging,
                            next_link=None, previous_link=None):
        """Asserts a response has a paging dict with the expected values."""
        assert 'paging' in js_response
        js_paging = js_response['paging']

        if pb_paging.next:
            assert 'next_position' in js_paging

        if next_link is not None:
            assert 'next' in js_paging
            self.assert_valid_url(js_paging['next'], next_link)
        else:
            assert 'next' not in js_paging

    def assert_valid_error(self, response, expected_code):
        """Asserts a response has only an error dict with an expected code."""
        assert 'error' in response
        assert len(response) == 1

        error = response['error']
        assert 'code' in error
        assert error['code'] == expected_code
        assert 'title' in error
        assert isinstance(error['title'], str)
        assert 'message' in error
        assert isinstance(error['message'], str)

    def assert_valid_data_list(self, response, expected_length):
        """Asserts a response has a data list of dicts of an expected length.
        """
        assert 'data' in response
        data = response['data']
        assert isinstance(data, list)
        assert expected_length == len(data)
        self.assert_items(data, dict)

    def assert_valid_url(self, url, expected_ending=''):
        """Asserts a url is valid, and ends with the expected value."""
        assert isinstance(url, str)
        assert url.startswith('http')
        assert url.endswith(expected_ending)

    def assert_check_block_seq(self, blocks, expected_blocks,
                               expected_batches, expected_txns):
        """Asserts each block matches its expected id and consensus payload,
        then checks its contained batch recursively.
        """
        if not isinstance(blocks, list):
            blocks = [blocks]

        # Dev-mode consensus payload expected in each block header.
        consensus = b'Devmode'

        zipped = zip(blocks, expected_blocks, expected_batches, expected_txns)
        for block, expected_block, expected_batch, expected_txn in zipped:
            assert isinstance(block, dict)
            assert expected_block == block['header_signature']
            assert isinstance(block['header'], dict)
            assert consensus == b64decode(block['header']['consensus'])
            batches = block['batches']
            assert isinstance(batches, list)
            assert len(batches) == 1
            self.assert_check_batch_seq(batches, expected_batch, expected_txn)

    def assert_check_batch_seq(self, batches, expected_batches, expected_txns):
        """Asserts each batch matches its expected id, then checks its
        contained transactions recursively.
        """
        if not isinstance(batches, list):
            batches = [batches]
        if not isinstance(expected_batches, list):
            expected_batches = [expected_batches]
        if not isinstance(expected_txns, list):
            expected_txns = [expected_txns]

        for batch, expected_batch, expected_txn in zip(
                batches, expected_batches, expected_txns):
            assert expected_batch == batch['header_signature']
            txns = batch['transactions']
            self.assert_check_transaction_seq(txns, expected_txn)

    def assert_check_transaction_seq(self, txns, expected_ids):
        """Asserts each transaction matches its expected header signature."""
        if not isinstance(txns, list):
            txns = [txns]
        if not isinstance(expected_ids, list):
            expected_ids = [expected_ids]

        for txn, expected_id in zip(txns, expected_ids):
            assert expected_id == txn['header_signature']
            assert isinstance(txn['header'], dict)

    def assert_check_state_seq(self, state, expected):
        # Placeholder: state sequence validation not implemented yet.
        pass
# ------------------------------------------------------------------------------

import pytest
import logging
import json
import urllib.request
import urllib.error
import base64
import argparse
import cbor
import sys

from sawtooth_intkey.intkey_message_factory import IntkeyMessageFactory
from sawtooth_intkey.client_cli.intkey_workload import do_workload


LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

# Seconds to wait on a submitted batch before giving up.
WAIT = 300


def get_blocks():
    """Returns the block list from the REST API."""
    response = query_rest_api('/blocks')
    return response['data']


def get_batches():
    """Returns the batch list from the REST API."""
    response = query_rest_api('/batches')
    return response['data']


def get_batch(batch_id):
    """Returns a single batch by its header signature."""
    response = query_rest_api('/batches/%s' % batch_id)
    return response['data']


def get_transactions():
    """Returns the transaction list from the REST API."""
    response = query_rest_api('/transactions')
    return response['data']


def get_transaction(transaction_id):
    """Returns a single transaction by its header signature."""
    response = query_rest_api('/transactions/%s' % transaction_id)
    return response['data']


def get_peer():
    """Returns the full /peers response."""
    response = query_rest_api('/peers')
    return response


def post_receipts(transaction_id, header=None, resourceid=None):
    """Posts *transaction_id* to /receipts and returns the receipt data.

    BUG FIX: the original returned None unless resourceid == "NO_RESOURCE";
    the receipt data is now returned on every path.
    """
    headers = {'content-type': 'application/json'}
    response = query_rest_api('/receipts', data=transaction_id,
                              headers=headers)

    if resourceid == "NO_RESOURCE":
        # Re-issue the request for the missing-resource scenario.
        response = query_rest_api('/receipts', data=transaction_id,
                                  headers=headers)
    return response['data']


def get_receipts():
    """Returns the full /receipts response."""
    headers = {'content-type': 'application/json'}
    response = query_rest_api('/receipts', headers=headers)
    return response


def get_state_list(address=None):
    """Returns the state list, filtered by *address* (full address or
    namespace prefix) when one is given.

    BUG FIX: the original passed *address* as the request body instead of
    interpolating it into the URL, and fell off the end (returning None)
    when address was None.
    """
    if address is None:
        return query_rest_api('/state')
    return query_rest_api('/state?address=%s' % address)


def get_state(address):
    """Returns the state entry at a full 70-hex-character address."""
    response = query_rest_api('/state/%s' % address)
    return response


def post_batch(batch):
    """Submits a serialized batch and waits (up to WAIT s) for commit."""
    headers = {'Content-Type': 'application/octet-stream'}
    response = query_rest_api('/batches', data=batch, headers=headers)
    response = submit_request('{}&wait={}'.format(response['link'], WAIT))
    return response


def query_rest_api(suffix='', data=None, headers=None):
    """Sends a request to the local REST API and returns parsed JSON."""
    if headers is None:
        headers = {}
    url = 'http://localhost:8008' + suffix
    return submit_request(urllib.request.Request(url, data, headers))


def submit_request(request):
    """Executes *request* and decodes its JSON response body."""
    response = urllib.request.urlopen(request).read().decode('utf-8')
    return json.loads(response)


def make_batches(keys):
    """Returns one intkey 'set <key> 0' batch per key."""
    imf = IntkeyMessageFactory()
    return [imf.create_batch([('set', k, 0)]) for k in keys]


def data_gen():
    """Returns the contents of source.txt (sample payload data)."""
    with open("source.txt", mode="r", encoding="utf8") as f:
        return f.read()
from google.protobuf.json_format import MessageToDict


from utils import get_batches, get_transactions, get_state, post_batch, \
    get_signer, get_blocks, create_batch, create_intkey_transaction, \
    get_state_list, _delete_genesis, _start_validator, _stop_validator, \
    _create_genesis, wait_for_rest_apis, _get_client_address, \
    _stop_settings_tp, _start_settings_tp, create_invalid_intkey_transaction


LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)


@pytest.fixture(scope="session")
def setup(request):
    """Posts one intkey 'set' batch and collects ids and state for the
    tests.

    Returns:
        dict: expected txn/batch ids, signer key, current block, batch and
        transaction lists, head id, state addresses and state heads.
    """
    data = {}
    signer = get_signer()
    expected_trxns = []
    expected_batches = []

    LOGGER.info("Creating intkey transactions with set operations")
    txns = [
        create_intkey_transaction("set", 'a', 0, [], signer)
    ]

    for txn in txns:
        # BUG FIX: the original rebound `data` to this dict, clobbering the
        # fixture's result dict; use a separate local instead.
        txn_dict = MessageToDict(
            txn,
            including_default_value_fields=True,
            preserving_proto_field_name=True)
        expected_trxns.append(txn_dict['header_signature'])

    LOGGER.info("Creating batches for transactions 1trn/batch")
    batches = [create_batch([txn], signer) for txn in txns]

    for batch in batches:
        batch_dict = MessageToDict(
            batch,
            including_default_value_fields=True,
            preserving_proto_field_name=True)
        expected_batches.append(batch_dict['header_signature'])

    data['expected_txns'] = expected_trxns[::-1]
    data['expected_batches'] = expected_batches[::-1]
    data['signer_key'] = signer.get_public_key().as_hex()

    post_batch_list = [BatchList(batches=[batch]).SerializeToString()
                       for batch in batches]

    LOGGER.info("Submitting batches to the handlers")
    for batch in post_batch_list:
        try:
            post_batch(batch)
        except urllib.error.HTTPError as error:
            LOGGER.info("Rest Api is not reachable")
            # BUG FIX: the error body was parsed into `data`, destroying the
            # result dict; parse into a throwaway local instead.
            err = json.loads(error.fp.read().decode('utf-8'))
            LOGGER.info(err['error']['title'])
            LOGGER.info(err['error']['message'])

    block_list = get_blocks()
    data['block_list'] = block_list
    batch_list = get_batches()
    data['batch_list'] = batch_list
    transaction_list = get_transactions()
    data['transaction_list'] = transaction_list
    data['transaction_ids'] = [
        trans['header_signature'] for trans in transaction_list['data']]
    block_ids = [block['header_signature'] for block in block_list['data']]
    # Drop the genesis block id from the expected list.
    data['block_ids'] = block_ids[:-1]
    data['batch_ids'] = [
        block['header']['batch_ids'][0] for block in block_list['data']]
    data['expected_head'] = block_ids[0]
    state_addresses = [
        state['address'] for state in get_state_list()['data']]
    data['address'] = state_addresses
    data['state_head'] = [
        get_state(address)['head'] for address in state_addresses]
    return data


@pytest.fixture(scope="function")
def break_genesis(request):
    """Setup function for deleting the genesis data and restarting the
    validator with no genesis.

    Waits for services to start again before sending the request again.
    """
    _stop_validator()
    LOGGER.info("Deleting the genesis data")
    _delete_genesis()
    _start_validator()


@pytest.fixture(scope="function")
def setup_settings_tp(request):
    """Stops the settings TP for the test and restarts it on teardown."""
    _stop_settings_tp()
    print("settings tp is connected")

    def teardown():
        print("Connecting settings tp")
        _start_settings_tp()

    request.addfinalizer(teardown)


@pytest.fixture(scope="function")
def invalid_batch():
    """Returns serialized batch lists built from invalid transactions
    (bogus signer key in the header).
    """
    signer = get_signer()

    LOGGER.info("Creating intkey transactions with set operations")
    txns = [
        create_invalid_intkey_transaction("set", 'a', 0, [], signer),
    ]

    LOGGER.info("Creating batches for transactions 1trn/batch")
    batches = [create_batch([txn], signer) for txn in txns]

    return [BatchList(batches=[batch]).SerializeToString()
            for batch in batches]
import logging
import urllib.error

from fixtures import setup
from utils import get_state_list

from base import RestApiBaseTest

# BUG FIX: LOGGER was used throughout this module but never defined.
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

# Expected length (hex characters) of a full state address.
state_address = 70


class TestStateList(RestApiBaseTest):
    """This class tests the state list with different parameters."""

    def test_api_get_state_data_address_prefix_namespace(self, setup):
        """Tests the state data address with 6 hex characters long
        namespace prefix.
        """
        try:
            for state in get_state_list()['data']:
                # Access each address using its namespace prefix.
                namespace = state['address'][:6]
                get_state_list(address=namespace)
        except urllib.error.HTTPError:
            LOGGER.info(
                "Not able to access related state address using "
                "namespace prefix")

    def test_api_get_state_data_address_length(self, setup):
        """Tests every state data address is 70 hex characters long."""
        # BUG FIX: the original measured only entry 0 on each iteration
        # and asserted just once after the loop.
        for state in get_state_list()['data']:
            assert len(state['address']) == state_address

    def test_api_get_state_data_address_with_odd_hex_value(self, setup):
        """Tests that no state data address has an odd hex length.

        NOTE(review): the original body never issued an odd-length
        request; kept as a length-parity check pending a real negative
        test.
        """
        for state in get_state_list()['data']:
            assert len(state['address']) % 2 == 0

    def test_api_get_state_data_address_with_reduced_length(self, setup):
        """Tests the state data address with reduced even length hex
        characters.
        """
        try:
            for state in get_state_list()['data']:
                # Drop the last four hex characters of each address.
                nhex = state['address'][:-4]
                get_state_list(address=nhex)
        except urllib.error.HTTPError:
            LOGGER.info("Reduced length data address failed to processed")

    def test_api_get_state_data_address_64_Hex(self, setup):
        """Tests that a 64-hex address (prefix stripped) yields no data."""
        try:
            for state in get_state_list()['data']:
                nhex = state['address'][6:70]
                naddress = get_state_list(address=nhex)
                assert naddress['data'] == []
        except urllib.error.HTTPError:
            LOGGER.info(
                "state data address with 64 hex characters not processed ")

    def test_api_get_state_data_address_alter_bytes(self, setup):
        """Tests that an altered (mid-address) fragment yields no data."""
        try:
            for state in get_state_list()['data']:
                nhex = state['address'][6:8]
                naddress = get_state_list(address=nhex)
                assert naddress['data'] == []
        except urllib.error.HTTPError:
            LOGGER.info(
                "state data address with altered bytes not processed ")
import pytest
import logging
import json
import urllib.request
import urllib.error
from urllib.request import urlopen
from urllib.error import HTTPError
from urllib.error import URLError
import base64
import argparse
import cbor
import subprocess
import shlex
import requests
import hashlib
import os
import time


from sawtooth_signing import create_context
from sawtooth_signing import CryptoFactory
from sawtooth_signing import ParseError
from sawtooth_signing.secp256k1 import Secp256k1PrivateKey

from sawtooth_rest_api.protobuf.validator_pb2 import Message
from sawtooth_rest_api.protobuf import client_batch_submit_pb2
from sawtooth_rest_api.protobuf import client_batch_pb2
from sawtooth_rest_api.protobuf import client_list_control_pb2

from sawtooth_rest_api.protobuf.batch_pb2 import Batch
from sawtooth_rest_api.protobuf.batch_pb2 import BatchList
from sawtooth_rest_api.protobuf.batch_pb2 import BatchHeader
from sawtooth_rest_api.protobuf.transaction_pb2 import TransactionHeader
from sawtooth_rest_api.protobuf.transaction_pb2 import Transaction

from google.protobuf.message import DecodeError
from google.protobuf.json_format import MessageToDict

# First six hex chars of sha512('intkey') — the intkey namespace prefix.
INTKEY_ADDRESS_PREFIX = hashlib.sha512(
    'intkey'.encode('utf-8')).hexdigest()[0:6]

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

# Seconds to wait on a submitted batch before giving up.
WAIT = 300


def _query_list_endpoint(base, head_id=None, id=None, start=None, limit=None,
                         reverse=None, count=None, id_key='id'):
    """Queries a list endpoint, honouring the original parameter
    precedence: head+id, start+limit, limit, start, head, id, count,
    reverse, then no filter.  *id_key* is the query-string key used for
    the id filter ('id' for blocks/batches/transactions, 'address' for
    state).
    """
    if head_id is not None and id is not None:
        return query_rest_api(
            '{}?head={}&{}={}'.format(base, head_id, id_key, id))
    if start is not None and limit is not None:
        return query_rest_api(
            '{}?start={}&limit={}'.format(base, start, limit))
    if limit is not None:
        return query_rest_api('{}?limit={}'.format(base, limit))
    if start is not None:
        return query_rest_api('{}?start={}'.format(base, start))
    if head_id is not None:
        return query_rest_api('{}?head={}'.format(base, head_id))
    if id is not None:
        return query_rest_api('{}?{}={}'.format(base, id_key, id))
    if count is not None:
        return query_rest_api('{}?count={}'.format(base, count))
    if reverse:
        return query_rest_api('{}?reverse'.format(base))
    return query_rest_api(base)


def get_blocks(head_id=None, id=None, start=None, limit=None, reverse=None):
    """Returns /blocks filtered by the given query parameters."""
    return _query_list_endpoint(
        '/blocks', head_id=head_id, id=id, start=start,
        limit=limit, reverse=reverse)


def get_batches(head_id=None, id=None, start=None, limit=None,
                reverse=None, count=None):
    """Returns /batches filtered by the given query parameters."""
    return _query_list_endpoint(
        '/batches', head_id=head_id, id=id, start=start,
        limit=limit, reverse=reverse, count=count)


def get_batch(batch_id):
    """Returns a single batch by its header signature."""
    return query_rest_api('/batches/%s' % batch_id)


def get_transactions(head_id=None, id=None, start=None, limit=None,
                     reverse=None, count=None):
    """Returns /transactions filtered by the given query parameters."""
    return _query_list_endpoint(
        '/transactions', head_id=head_id, id=id, start=start,
        limit=limit, reverse=reverse, count=count)


def get_transaction(transaction_id):
    """Returns the data of a single transaction by header signature."""
    response = query_rest_api('/transactions/%s' % transaction_id)
    return response['data']


def get_state_list(head_id=None, address=None, start=None, limit=None,
                   reverse=None, count=None):
    """Returns /state filtered by the given query parameters; *address*
    may be a full address or a namespace prefix.
    """
    return _query_list_endpoint(
        '/state', head_id=head_id, id=address, start=start,
        limit=limit, reverse=reverse, count=count, id_key='address')


def get_state(address):
    """Returns the state entry at a full 70-hex-character address."""
    return query_rest_api('/state/%s' % address)


def post_batch(batch):
    """Submits a serialized batch and waits (up to WAIT s) for commit."""
    headers = {'Content-Type': 'application/octet-stream'}
    response = query_rest_api('/batches', data=batch, headers=headers)
    response = submit_request('{}&wait={}'.format(response['link'], WAIT))
    return response
# Module logger (re-bound here so this section is self-contained).
LOGGER = logging.getLogger(__name__)


def query_rest_api(suffix='', data=None, headers=None):
    """Sends a request to the validator's REST API and returns parsed
    JSON."""
    if headers is None:
        headers = {}
    url = _get_client_address() + suffix
    return submit_request(urllib.request.Request(url, data, headers))


def submit_request(request):
    """Executes *request* and decodes its JSON response body."""
    response = urllib.request.urlopen(request).read().decode('utf-8')
    return json.loads(response)


def _delete_genesis():
    """Removes all files from the validator data directory."""
    folder = '/var/lib/sawtooth'
    for the_file in os.listdir(folder):
        file_path = os.path.join(folder, the_file)
        try:
            if os.path.isfile(file_path):
                os.unlink(file_path)
        except OSError as e:
            # Best-effort cleanup: report and keep deleting the rest.
            print(e)


def _get_node_chain(node_list):
    """Returns the block list of every reachable node in *node_list*."""
    chain_list = []
    for node in node_list:
        try:
            result = requests.get(node + "/blocks").json()
            chain_list.append(result['data'])
        except Exception:
            # BUG FIX: narrowed from a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.
            LOGGER.warning("Couldn't connect to %s REST API", node)
    return chain_list


def _get_node_list():
    """Returns REST API urls for this node and all of its peers."""
    client_address = _get_client_address()
    node_list = [_make_http_address(peer)
                 for peer in _get_peers_list(client_address)]
    node_list.append(client_address)
    return node_list


def _get_peers_list(rest_client, fmt='json'):
    """Returns the set of peer endpoints reported by `sawtooth peer
    list`."""
    cmd_output = _run_peer_command(
        'sawtooth peer list --url {} --format {}'.format(rest_client, fmt))

    if fmt == 'json':
        parsed = json.loads(cmd_output)
    elif fmt == 'csv':
        parsed = cmd_output.split(',')
    else:
        # BUG FIX: the original raised NameError on an unknown format.
        raise ValueError('unsupported format: {}'.format(fmt))

    return set(parsed)


def _get_node_chains(node_list):
    """Alias of _get_node_chain, kept for backward compatibility."""
    return _get_node_chain(node_list)


def check_for_consensus(chains, block_num):
    """Returns True when every chain agrees on the block at *block_num*
    (counted from the genesis end of the chain); False otherwise.
    """
    LOGGER.info("Checking Consensus on block number %s", block_num)
    blocks = []
    for chain in chains:
        if chain is None:
            return False
        blocks.append(chain[-(block_num + 1)])

    block0 = blocks[0]
    for block in blocks[1:]:
        # BUG FIX: the original returned after comparing only the first
        # pair of chains; every chain must agree before declaring consensus.
        if block0["header_signature"] != block["header_signature"]:
            LOGGER.error("Validators not in consensus on block %s", block_num)
            LOGGER.error("BLOCK DUMP: %s", blocks)
            return False
    LOGGER.info('Validators in Consensus on block number %s', block_num)
    return True
False + block0 = blocks[0] + for block in blocks[1:]: + if block0["header_signature"] != block["header_signature"]: + LOGGER.error("Validators not in consensus on block %s", block_num) + LOGGER.error("BLOCK DUMP: %s", blocks) + return False + else: + LOGGER.info('Validators in Consensus on block number %s' , block_num) + return True + +def _run_peer_command(command): + return subprocess.check_output( + shlex.split(command) + ).decode().strip().replace("'", '"') + +def _send_cmd(cmd_str): + LOGGER.info('Sending %s', cmd_str) + + subprocess.run( + shlex.split(cmd_str), + check=True) + +def _make_http_address(node_number): + node = node_number.replace('tcp' , 'http') + node_number = node.replace('8800' , '8008') + return node_number + +def _get_client_address(): + command = "ifconfig lo | grep 'inet addr' | cut -d ':' -f 2 | cut -d ' ' -f 1" + node_ip = subprocess.check_output(command , shell=True).decode().strip().replace("'", '"') + return 'http://' + node_ip + ':8008' + +def _start_validator(): + LOGGER.info('Starting the validator') + cmd = "sudo -u sawtooth sawtooth-validator -vv" + subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) + +def _stop_validator(): + LOGGER.info('Stopping the validator') + cmd = "sudo kill -9 $(ps aux | grep 'sawtooth-validator' | awk '{print $2}')" + subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) + + +def _start_settings_tp(): + LOGGER.info('Starting settings-tp') + cmd = " sudo -u sawtooth settings-tp -vv " + subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) + +def _stop_settings_tp(): + LOGGER.info('Stopping the settings-tp') + cmd = "sudo kill -9 $(ps aux | grep 'settings-tp' | awk '{print $2}')" + subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE) + +def make_intkey_address(name): + return INTKEY_ADDRESS_PREFIX + hashlib.sha512( + name.encode('utf-8')).hexdigest()[-64:] + + +class IntKeyPayload(object): + def __init__(self, verb, name, value): + self._verb = verb + self._name = name + self._value = 
def _build_intkey_transaction(verb, name, value, deps, signer,
                              signer_public_key):
    """Builds an intkey Transaction signed by *signer*, stamping the
    header with *signer_public_key* (pass a bogus key to produce an
    invalid transaction).
    """
    payload = IntKeyPayload(verb=verb, name=name, value=value)

    # The prefix should eventually be looked up from the
    # validator's namespace registry.
    addr = make_intkey_address(name)

    header = TransactionHeader(
        signer_public_key=signer_public_key,
        family_name='intkey',
        family_version='1.0',
        inputs=[addr],
        outputs=[addr],
        dependencies=deps,
        payload_sha512=payload.sha512(),
        batcher_public_key=signer.get_public_key().as_hex())

    header_bytes = header.SerializeToString()

    return Transaction(
        header=header_bytes,
        payload=payload.to_cbor(),
        header_signature=signer.sign(header_bytes))


def create_intkey_transaction(verb, name, value, deps, signer):
    """Returns a valid, signed intkey Transaction."""
    return _build_intkey_transaction(
        verb, name, value, deps, signer,
        signer_public_key=signer.get_public_key().as_hex())


def create_invalid_intkey_transaction(verb, name, value, deps, signer):
    """Returns an intkey Transaction whose header carries a deliberately
    wrong signer key ('signer'), making it invalid."""
    return _build_intkey_transaction(
        verb, name, value, deps, signer, signer_public_key='signer')


def create_batch(transactions, signer):
    """Wraps *transactions* in a Batch signed by *signer*."""
    transaction_signatures = [t.header_signature for t in transactions]

    header = BatchHeader(
        signer_public_key=signer.get_public_key().as_hex(),
        transaction_ids=transaction_signatures)

    header_bytes = header.SerializeToString()

    return Batch(
        header=header_bytes,
        transactions=transactions,
        header_signature=signer.sign(header_bytes))


def get_signer():
    """Returns a signer backed by a fresh random secp256k1 key."""
    context = create_context('secp256k1')
    private_key = context.new_random_private_key()
    return CryptoFactory(context).new_signer(private_key)


def _create_genesis():
    """Creates the genesis block from the config-genesis batch.

    NOTE(review): the working directory '/home/aditya' is hard-coded;
    it should come from configuration.
    """
    LOGGER.info("creating the genesis data")
    _create_genesis_batch()
    os.chdir("/home/aditya")
    cmd = "sawadm genesis config-genesis.batch"
    subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)


def _create_genesis_batch():
    """Creates the config genesis batch with `sawset genesis --force`."""
    LOGGER.info("creating the config genesis batch")
    os.chdir("/home/aditya")
    cmd = "sawset genesis --force"
    subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)
def wait_until_status(url, status_code=200, tries=5):
    """Pause the program until the given url returns the required status.

    Args:
        url (str): The url to query.
        status_code (int, optional): The required status code. Defaults
            to 200.
        tries (int, optional): The number of attempts to request the url
            for the given status. Defaults to 5.

    Raises:
        AssertionError: If the status is not received in the given number
            of tries.
    """
    attempts = tries
    while attempts > 0:
        try:
            # BUG FIX: close the connection instead of leaking it.
            with urlopen(url) as response:
                if response.getcode() == status_code:
                    return

        except HTTPError as err:
            if err.code == status_code:
                return

            LOGGER.debug('failed to read url: %s', str(err))
        except URLError as err:
            LOGGER.debug('failed to read url: %s', str(err))

        # Back off a little longer on each retry (2s, 4s, 6s, ...).
        sleep_time = (tries - attempts + 1) * 2
        LOGGER.debug('Retrying in %s secs', sleep_time)
        time.sleep(sleep_time)

        attempts -= 1

    raise AssertionError(
        "{} is not available within {} attempts".format(url, tries))


def wait_for_rest_apis(endpoints, tries=5):
    """Pause the program until all the given REST API endpoints are
    available.

    Args:
        endpoints (list of str): A list of host:port strings.
        tries (int, optional): The number of attempts to request the url
            for availability.
    """
    for endpoint in endpoints:
        http = 'http://'
        url = endpoint if endpoint.startswith(http) else http + endpoint
        wait_until_status(
            '{}/blocks'.format(url),
            status_code=200,
            tries=tries)