Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 9 additions & 2 deletions scripts/check-python-files.sh
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,14 @@ $PYTHON -m pylint framework/scripts/*.py framework/scripts/mbedtls_framework/*.p

echo
echo 'Running mypy ...'
$PYTHON -m mypy framework/scripts/*.py framework/scripts/mbedtls_framework/*.py scripts/*.py tests/scripts/*.py ||
ret=1
$PYTHON -m mypy framework/scripts/*.py framework/scripts/mbedtls_framework/*.py || {
echo >&2 "mypy reported errors in the framework"
ret=1
}

$PYTHON -m mypy scripts/*.py tests/scripts/*.py || {
echo >&2 "mypy reported errors in the parent repository"
ret=1
}

exit $ret
2 changes: 1 addition & 1 deletion scripts/check_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -310,7 +310,7 @@ class TrailingWhitespaceIssueTracker(LineIssueTracker):
"""Track lines with trailing whitespace."""

heading = "Trailing whitespace:"
suffix_exemptions = frozenset([".dsp", ".md"])
suffix_exemptions = frozenset([".diff", ".dsp", ".md", ".patch"])

def issue_with_line(self, line, _filepath, _line_number):
    """Report whether *line* ends in whitespace other than its EOL marker."""
    # Strip only the line terminator first; if stripping all trailing
    # whitespace removes anything more, the line had trailing whitespace.
    without_eol = line.rstrip(b"\r\n")
    return without_eol != without_eol.rstrip()
Expand Down
2 changes: 2 additions & 0 deletions scripts/mbedtls_framework/crypto_knowledge.py
Original file line number Diff line number Diff line change
Expand Up @@ -355,12 +355,14 @@ def determine_head(expr: str) -> str:
'TLS12_PRF': AlgorithmCategory.KEY_DERIVATION,
'TLS12_PSK_TO_MS': AlgorithmCategory.KEY_DERIVATION,
'TLS12_ECJPAKE_TO_PMS': AlgorithmCategory.KEY_DERIVATION,
'SP800_108': AlgorithmCategory.KEY_DERIVATION,
'PBKDF': AlgorithmCategory.KEY_DERIVATION,
'ECDH': AlgorithmCategory.KEY_AGREEMENT,
'FFDH': AlgorithmCategory.KEY_AGREEMENT,
# KEY_AGREEMENT(...) is a key derivation with a key agreement component
'KEY_AGREEMENT': AlgorithmCategory.KEY_DERIVATION,
'JPAKE': AlgorithmCategory.PAKE,
'SPAKE2P': AlgorithmCategory.PAKE,
}
for x in BLOCK_MAC_MODES:
CATEGORY_FROM_HEAD[x] = AlgorithmCategory.MAC
Expand Down
33 changes: 29 additions & 4 deletions scripts/mbedtls_framework/macro_collector.py
Original file line number Diff line number Diff line change
Expand Up @@ -279,12 +279,27 @@ def record_algorithm_subtype(self, name: str, expansion: str) -> None:
r'(.+)')
_deprecated_definition_re = re.compile(r'\s*MBEDTLS_DEPRECATED')

# Macro that is a destructor, not a constructor (i.e. takes a thing as
# an argument and analyzes it, rather than constructing a thing).
_destructor_name_re = re.compile(r'.*(_GET_|_HAS_|_IS_)|.*_LENGTH\Z')

# Macro that converts between things, rather than building a thing from
# scratch.
_conversion_macro_names = frozenset([
'PSA_KEY_TYPE_KEY_PAIR_OF_PUBLIC_KEY',
'PSA_KEY_TYPE_PUBLIC_KEY_OF_KEY_PAIR',
'PSA_ALG_FULL_LENGTH_MAC',
'PSA_ALG_AEAD_WITH_DEFAULT_LENGTH_TAG',
'PSA_JPAKE_EXPECTED_INPUTS',
'PSA_JPAKE_EXPECTED_OUTPUTS',
])

def read_line(self, line):
"""Parse a C header line and record the PSA identifier it defines if any.
This function analyzes lines that start with "#define PSA_"
(up to non-significant whitespace) and skips all non-matching lines.
"""
# pylint: disable=too-many-branches
# pylint: disable=too-many-branches,too-many-return-statements
m = re.match(self._define_directive_re, line)
if not m:
return
Expand All @@ -297,6 +312,12 @@ def read_line(self, line):
# backward compatibility aliases that share
# numerical values with non-deprecated values.
return
if re.match(self._destructor_name_re, name):
# Not a constructor
return
if name in self._conversion_macro_names:
# Not a constructor
return
if self.is_internal_name(name):
# Macro only to build actual values
return
Expand Down Expand Up @@ -324,9 +345,13 @@ def read_line(self, line):
self.algorithms_from_hash[name] = self.algorithm_tester(name)
elif name.startswith('PSA_KEY_USAGE_') and not parameter:
self.key_usage_flags.add(name)
else:
# Other macro without parameter
elif parameter is None:
# Macro with no parameter, whose name does not start with one
# of the prefixes we look for. Just ignore it.
return
else:
raise Exception("Unsupported macro and parameter name: {}({})"
.format(name, parameter))

_nonascii_re = re.compile(rb'[^\x00-\x7f]+')
_continued_line_re = re.compile(rb'\\\r?\n\Z')
Expand Down Expand Up @@ -451,7 +476,7 @@ def get_names(self, type_word: str) -> Set[str]:
r'(PSA_((?:(?:DH|ECC|KEY)_)?[A-Z]+)_\w+)' +
r'(?:\(([^\n()]*)\))?')
# Regex of macro names to exclude.
_excluded_name_re = re.compile(r'_(?:GET|IS|OF)_|_(?:BASE|FLAG|MASK)\Z')
_excluded_name_re = re.compile(r'_(?:GET|HAS|IS|OF)_|_(?:BASE|FLAG|MASK)\Z')
# Additional excluded macros.
_excluded_names = set([
# Macros that provide an alternative way to build the same
Expand Down
189 changes: 189 additions & 0 deletions scripts/mbedtls_framework/psa_compliance.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,189 @@
"""Run the PSA Crypto API compliance test suite.
Clone the repo and check out the commit specified by PSA_ARCH_TEST_REPO and PSA_ARCH_TEST_REF,
then compile and run the test suite. The clone is stored at <repository root>/psa-arch-tests.
Known defects in either the test suite or mbedtls / TF-PSA-Crypto - identified by their test
number - are ignored, while unexpected failures AND successes are reported as errors, to help
keep the list of known defects as up to date as possible.
"""

# Copyright The Mbed TLS Contributors
# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later

import argparse
import glob
import os
import re
import shutil
import subprocess
import sys
from typing import List, Optional
from pathlib import Path

from . import build_tree

PSA_ARCH_TESTS_REPO = 'https://github.com/ARM-software/psa-arch-tests.git'

#pylint: disable=too-many-branches,too-many-statements,too-many-locals
def test_compliance(library_build_dir: str,
                    psa_arch_tests_ref: str,
                    patch_files: List[str],
                    expected_failures: List[int]) -> int:
    """Check out, build and run the PSA compliance test suite.

    library_build_dir: path where our library will be built.
    psa_arch_tests_ref: tag or sha to use for the arch-tests.
    patch_files: patches to apply to the arch-tests with ``patch -p1``.
    expected_failures: test numbers that are expected to fail.

    Return 0 if exactly the expected failures failed, 1 otherwise
    (unexpected failures and unexpected successes are both errors).
    """
    root_dir = os.getcwd()
    install_dir = Path(library_build_dir + "/install_dir").resolve()
    # Copy the environment rather than aliasing os.environ, so that we
    # don't clobber CC for the rest of this process.
    tmp_env = os.environ.copy()
    tmp_env['CC'] = 'gcc'
    subprocess.check_call(['cmake', '.', '-GUnix Makefiles',
                           '-B' + library_build_dir,
                           '-DCMAKE_INSTALL_PREFIX=' + str(install_dir)],
                          env=tmp_env)
    subprocess.check_call(['cmake', '--build', library_build_dir, '--target', 'install'])

    # The installed library name differs between Mbed TLS 3.6 and TF-PSA-Crypto.
    if build_tree.is_mbedtls_3_6():
        crypto_library_path = install_dir.joinpath("lib/libmbedcrypto.a")
    else:
        crypto_library_path = install_dir.joinpath("lib/libtfpsacrypto.a")

    psa_arch_tests_dir = 'psa-arch-tests'
    os.makedirs(psa_arch_tests_dir, exist_ok=True)
    try:
        os.chdir(psa_arch_tests_dir)

        # Reuse an existing local clone if there is one.
        subprocess.check_call(['git', 'init'])
        subprocess.check_call(['git', 'fetch', PSA_ARCH_TESTS_REPO, psa_arch_tests_ref])
        subprocess.check_call(['git', 'checkout', '--force', 'FETCH_HEAD'])

        if patch_files:
            # Discard any patches applied on a previous run before
            # applying this run's patches on top of a clean tree.
            subprocess.check_call(['git', 'reset', '--hard'])
            for patch_file in patch_files:
                with open(os.path.join(root_dir, patch_file), 'rb') as patch:
                    subprocess.check_call(['patch', '-p1'],
                                          stdin=patch)

        build_dir = 'api-tests/build'
        # Always build the test suite from scratch.
        try:
            shutil.rmtree(build_dir)
        except FileNotFoundError:
            pass
        os.mkdir(build_dir)
        os.chdir(build_dir)

        #pylint: disable=bad-continuation
        subprocess.check_call([
            'cmake', '..',
            '-GUnix Makefiles',
            '-DTARGET=tgt_dev_apis_stdc',
            '-DTOOLCHAIN=HOST_GCC',
            '-DSUITE=CRYPTO',
            '-DPSA_CRYPTO_LIB_FILENAME={}'.format(str(crypto_library_path)),
            '-DPSA_INCLUDE_PATHS=' + str(install_dir.joinpath("include"))
        ])

        subprocess.check_call(['cmake', '--build', '.'])

        proc = subprocess.Popen(['./psa-arch-tests-crypto'],
                                bufsize=1, stdout=subprocess.PIPE, universal_newlines=True)

        test_re = re.compile(
            '^TEST: (?P<test_num>[0-9]*)|'
            '^TEST RESULT: (?P<test_result>FAILED|PASSED)'
        )
        test = -1
        # Use a set: set.remove() raises KeyError (caught below) when a
        # failing test was not in the expected list. A list's remove()
        # would raise ValueError instead and crash the script on the
        # first unexpected failure.
        unexpected_successes = set(expected_failures)
        expected_failures.clear()
        unexpected_failures = [] # type: List[int]
        if proc.stdout is None:
            return 1

        for line in proc.stdout:
            print(line, end='')
            match = test_re.match(line)
            if match is not None:
                groupdict = match.groupdict()
                test_num = groupdict['test_num']
                if test_num is not None:
                    test = int(test_num)
                elif groupdict['test_result'] == 'FAILED':
                    try:
                        unexpected_successes.remove(test)
                        expected_failures.append(test)
                        print('Expected failure, ignoring')
                    except KeyError:
                        unexpected_failures.append(test)
                        print('ERROR: Unexpected failure')
                elif test in unexpected_successes:
                    # The test passed although it was expected to fail.
                    print('ERROR: Unexpected success')
        proc.wait()

        print()
        print('***** test_psa_compliance.py report ******')
        print()
        print('Expected failures:', ', '.join(str(i) for i in expected_failures))
        print('Unexpected failures:', ', '.join(str(i) for i in unexpected_failures))
        print('Unexpected successes:', ', '.join(str(i) for i in sorted(unexpected_successes)))
        print()
        if unexpected_successes or unexpected_failures:
            if unexpected_successes:
                print('Unexpected successes encountered.')
                print('Please remove the corresponding tests from '
                      'EXPECTED_FAILURES in tests/scripts/compliance_test.py')
                print()
            print('FAILED')
            return 1
        else:
            print('SUCCESS')
            return 0
    finally:
        # Restore the working directory no matter how we exit.
        os.chdir(root_dir)

def main(psa_arch_tests_ref: str,
         expected_failures: Optional[List[int]] = None) -> None:
    """Command line entry point.

    psa_arch_tests_ref: tag or sha to use for the arch-tests.
    expected_failures: default list of expected failures, overridable
    on the command line with --expected-failures.
    """
    build_dir = 'out_of_source_build'
    default_patch_directory = os.path.join(build_tree.guess_project_root(),
                                           'scripts/data_files/psa-arch-tests')

    # pylint: disable=invalid-name
    parser = argparse.ArgumentParser()
    parser.add_argument('--build-dir', nargs=1,
                        help='path to Mbed TLS / TF-PSA-Crypto build directory')
    parser.add_argument('--expected-failures', nargs='+',
                        help='''set the list of test codes which are expected to fail
                        from the command line. If omitted the list given by
                        EXPECTED_FAILURES (inside the script) is used.''')
    # No nargs=1 here: with nargs=1, a user-supplied value would be stored
    # as a one-element list while the default is a plain string, and the
    # os.path.join() below would crash on the list. Without nargs, the
    # attribute is always a string and the CLI syntax is unchanged.
    parser.add_argument('--patch-directory',
                        default=default_patch_directory,
                        help='Directory containing patches (*.patch) to apply to psa-arch-tests')
    args = parser.parse_args()

    if args.build_dir is not None:
        build_dir = args.build_dir[0]

    if expected_failures is None:
        expected_failures = []
    if args.expected_failures is not None:
        expected_failures_list = [int(i) for i in args.expected_failures]
    else:
        expected_failures_list = expected_failures

    if args.patch_directory:
        patch_file_glob = os.path.join(args.patch_directory, '*.patch')
        # Sort for a deterministic application order.
        patch_files = sorted(glob.glob(patch_file_glob))
    else:
        patch_files = []

    sys.exit(test_compliance(build_dir,
                             psa_arch_tests_ref,
                             patch_files,
                             expected_failures_list))
8 changes: 7 additions & 1 deletion scripts/mbedtls_framework/psa_information.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,12 +26,18 @@ def remove_unwanted_macros(
"""Remove constructors that should be exckuded from systematic testing."""
# Mbed TLS does not support finite-field DSA, but 3.6 defines DSA
# identifiers for historical reasons.
# Mbed TLS and TF-PSA-Crypto 1.0 do not support SPAKE2+, although
# TF-PSA-Crypto 1.0 defines SPAKE2+ identifiers to be able to build
# the psa-arch-tests compliance test suite.
#
# Don't attempt to generate any related test case.
# The corresponding test cases would be commented out anyway,
# but for DSA, we don't have enough support in the test scripts
# but for these types, we don't have enough support in the test scripts
# to generate these test cases.
constructors.key_types.discard('PSA_KEY_TYPE_DSA_KEY_PAIR')
constructors.key_types.discard('PSA_KEY_TYPE_DSA_PUBLIC_KEY')
constructors.key_types.discard('PSA_KEY_TYPE_SPAKE2P_KEY_PAIR')
constructors.key_types.discard('PSA_KEY_TYPE_SPAKE2P_PUBLIC_KEY')

def read_psa_interface(self) -> macro_collector.PSAMacroEnumerator:
"""Return the list of known key types, algorithms, etc."""
Expand Down
Loading