2018-03-01 15:53:49 +01:00
|
|
|
#!/usr/bin/env python3
|
2018-04-06 12:23:22 +02:00
|
|
|
"""
|
2021-04-23 16:37:12 +02:00
|
|
|
This script compares the interfaces of two versions of Mbed TLS, looking
|
|
|
|
for backward incompatibilities between two different Git revisions within
|
|
|
|
an Mbed TLS repository. It must be run from the root of a Git working tree.
|
|
|
|
|
|
|
|
For the source (API) and runtime (ABI) interface compatibility, this script
|
|
|
|
is a small wrapper around the abi-compliance-checker and abi-dumper tools,
|
|
|
|
applying them to compare the header and library files.
|
|
|
|
|
|
|
|
For the storage format, this script compares the automatically generated
|
2022-02-22 19:02:44 +01:00
|
|
|
storage tests and the manual read tests, and complains if there is a
|
2022-03-04 19:59:55 +01:00
|
|
|
reduction in coverage. A change in test data will be signaled as a
|
2022-02-22 19:02:44 +01:00
|
|
|
coverage reduction since the old test data is no longer present. A change in
|
2022-03-04 19:59:55 +01:00
|
|
|
how test data is presented will be signaled as well; this would be a false
|
2022-02-22 19:02:44 +01:00
|
|
|
positive.
|
2021-04-23 16:37:12 +02:00
|
|
|
|
2022-02-22 19:02:44 +01:00
|
|
|
The results of the API/ABI comparison are either formatted as HTML and stored
|
|
|
|
at a configurable location, or are given as a brief list of problems.
|
|
|
|
Returns 0 on success, 1 on non-compliance, and 2 if there is an error
|
2021-04-23 16:37:12 +02:00
|
|
|
while running the script.
|
2022-03-03 10:23:09 +01:00
|
|
|
|
|
|
|
You must run this test from an Mbed TLS root.
|
2018-04-06 12:23:22 +02:00
|
|
|
"""
|
2018-03-01 15:53:49 +01:00
|
|
|
|
2020-08-07 13:07:28 +02:00
|
|
|
# Copyright The Mbed TLS Contributors
|
2020-05-26 01:54:15 +02:00
|
|
|
# SPDX-License-Identifier: Apache-2.0
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
|
|
# not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
|
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2022-02-22 19:02:44 +01:00
|
|
|
import glob
|
2018-03-01 15:53:49 +01:00
|
|
|
import os
|
2021-04-23 16:37:12 +02:00
|
|
|
import re
|
2018-03-01 15:53:49 +01:00
|
|
|
import sys
|
|
|
|
import traceback
|
|
|
|
import shutil
|
|
|
|
import subprocess
|
|
|
|
import argparse
|
|
|
|
import logging
|
|
|
|
import tempfile
|
2019-02-25 12:35:05 +01:00
|
|
|
import fnmatch
|
2019-04-09 10:14:17 +02:00
|
|
|
from types import SimpleNamespace
|
2018-03-01 15:53:49 +01:00
|
|
|
|
2019-02-21 14:09:26 +01:00
|
|
|
import xml.etree.ElementTree as ET
|
|
|
|
|
2018-03-01 15:53:49 +01:00
|
|
|
|
2020-03-24 18:25:17 +01:00
|
|
|
class AbiChecker:
    """API and ABI checker.

    Compares two versions of Mbed TLS (each described by a RepoVersion-like
    namespace) for API/ABI compatibility and storage format test coverage.
    """

    def __init__(self, old_version, new_version, configuration):
        """Instantiate the API/ABI checker.

        old_version: RepoVersion containing details to compare against
        new_version: RepoVersion containing details to check
        configuration.report_dir: directory for output files
        configuration.keep_all_reports: if false, delete old reports
        configuration.brief: if true, output shorter report to stdout
        configuration.check_abi: if true, compare ABIs
        configuration.check_api: if true, compare APIs
        configuration.check_storage: if true, compare storage format tests
        configuration.skip_file: path to file containing symbols and types to skip
        """
        self.repo_path = "."
        self.log = None
        self.verbose = configuration.verbose
        self._setup_logger()
        self.report_dir = os.path.abspath(configuration.report_dir)
        self.keep_all_reports = configuration.keep_all_reports
        # Only remove the report directory at the end if we created it
        # ourselves and the user didn't ask to keep reports.
        self.can_remove_report_dir = not (os.path.exists(self.report_dir) or
                                          self.keep_all_reports)
        self.old_version = old_version
        self.new_version = new_version
        self.skip_file = configuration.skip_file
        self.check_abi = configuration.check_abi
        self.check_api = configuration.check_api
        # abi-compliance-checker analyzes API and ABI together; we cannot
        # run one without the other.
        if self.check_abi != self.check_api:
            raise Exception('Checking API without ABI or vice versa is not supported')
        self.check_storage_tests = configuration.check_storage
        self.brief = configuration.brief
        self.git_command = "git"
        self.make_command = "make"

    @staticmethod
    def check_repo_path():
        """Raise if the current directory does not look like an Mbed TLS root."""
        if not all(os.path.isdir(d) for d in ["include", "library", "tests"]):
            raise Exception("Must be run from Mbed TLS root")

    def _setup_logger(self):
        """Configure the root logger: DEBUG if verbose, INFO otherwise."""
        self.log = logging.getLogger()
        if self.verbose:
            self.log.setLevel(logging.DEBUG)
        else:
            self.log.setLevel(logging.INFO)
        self.log.addHandler(logging.StreamHandler())

    @staticmethod
    def check_abi_tools_are_installed():
        """Raise if abi-dumper or abi-compliance-checker is not on PATH."""
        for command in ["abi-dumper", "abi-compliance-checker"]:
            if not shutil.which(command):
                raise Exception("{} not installed, aborting".format(command))

    def _get_clean_worktree_for_git_revision(self, version):
        """Make a separate worktree with version.revision checked out.

        Do not modify the current worktree."""
        git_worktree_path = tempfile.mkdtemp()
        if version.repository:
            self.log.debug(
                "Checking out git worktree for revision {} from {}".format(
                    version.revision, version.repository
                )
            )
            # Fetch from the remote repository so the revision is available
            # locally as FETCH_HEAD.
            fetch_output = subprocess.check_output(
                [self.git_command, "fetch",
                 version.repository, version.revision],
                cwd=self.repo_path,
                stderr=subprocess.STDOUT
            )
            self.log.debug(fetch_output.decode("utf-8"))
            worktree_rev = "FETCH_HEAD"
        else:
            self.log.debug("Checking out git worktree for revision {}".format(
                version.revision
            ))
            worktree_rev = version.revision
        worktree_output = subprocess.check_output(
            [self.git_command, "worktree", "add", "--detach",
             git_worktree_path, worktree_rev],
            cwd=self.repo_path,
            stderr=subprocess.STDOUT
        )
        self.log.debug(worktree_output.decode("utf-8"))
        # Record the resolved commit so reports can show it alongside the
        # symbolic revision.
        version.commit = subprocess.check_output(
            [self.git_command, "rev-parse", "HEAD"],
            cwd=git_worktree_path,
            stderr=subprocess.STDOUT
        ).decode("ascii").rstrip()
        self.log.debug("Commit is {}".format(version.commit))
        return git_worktree_path

    def _update_git_submodules(self, git_worktree_path, version):
        """If the crypto submodule is present, initialize it.

        if version.crypto_revision exists, update it to that revision,
        otherwise update it to the default revision"""
        update_output = subprocess.check_output(
            [self.git_command, "submodule", "update", "--init", '--recursive'],
            cwd=git_worktree_path,
            stderr=subprocess.STDOUT
        )
        self.log.debug(update_output.decode("utf-8"))
        # Nothing more to do unless a crypto submodule exists and a specific
        # crypto revision was requested.
        if not (os.path.exists(os.path.join(git_worktree_path, "crypto"))
                and version.crypto_revision):
            return

        if version.crypto_repository:
            fetch_output = subprocess.check_output(
                [self.git_command, "fetch", version.crypto_repository,
                 version.crypto_revision],
                cwd=os.path.join(git_worktree_path, "crypto"),
                stderr=subprocess.STDOUT
            )
            self.log.debug(fetch_output.decode("utf-8"))
            crypto_rev = "FETCH_HEAD"
        else:
            crypto_rev = version.crypto_revision

        checkout_output = subprocess.check_output(
            [self.git_command, "checkout", crypto_rev],
            cwd=os.path.join(git_worktree_path, "crypto"),
            stderr=subprocess.STDOUT
        )
        self.log.debug(checkout_output.decode("utf-8"))

    def _build_shared_libraries(self, git_worktree_path, version):
        """Build the shared libraries in the specified worktree."""
        my_environment = os.environ.copy()
        # Debug info (-g) is required by abi-dumper; -Og keeps symbols usable.
        my_environment["CFLAGS"] = "-g -Og"
        my_environment["SHARED"] = "1"
        if os.path.exists(os.path.join(git_worktree_path, "crypto")):
            my_environment["USE_CRYPTO_SUBMODULE"] = "1"
        make_output = subprocess.check_output(
            [self.make_command, "lib"],
            env=my_environment,
            cwd=git_worktree_path,
            stderr=subprocess.STDOUT
        )
        self.log.debug(make_output.decode("utf-8"))
        # Record the path of every built shared object, keyed by module name.
        for root, _dirs, files in os.walk(git_worktree_path):
            for file in fnmatch.filter(files, "*.so"):
                version.modules[os.path.splitext(file)[0]] = (
                    os.path.join(root, file)
                )

    @staticmethod
    def _pretty_revision(version):
        """Return a human-readable form of the revision, with the resolved
        commit appended when it differs from the symbolic name."""
        if version.revision == version.commit:
            return version.revision
        else:
            return "{} ({})".format(version.revision, version.commit)

    def _get_abi_dumps_from_shared_libraries(self, version):
        """Generate the ABI dumps for the specified git revision.

        The shared libraries must have been built and the module paths
        present in version.modules."""
        for mbed_module, module_path in version.modules.items():
            output_path = os.path.join(
                self.report_dir, "{}-{}-{}.dump".format(
                    mbed_module, version.revision, version.version
                )
            )
            abi_dump_command = [
                "abi-dumper",
                module_path,
                "-o", output_path,
                "-lver", self._pretty_revision(version),
            ]
            abi_dump_output = subprocess.check_output(
                abi_dump_command,
                stderr=subprocess.STDOUT
            )
            self.log.debug(abi_dump_output.decode("utf-8"))
            version.abi_dumps[mbed_module] = output_path

    @staticmethod
    def _normalize_storage_test_case_data(line):
        """Eliminate cosmetic or irrelevant details in storage format test cases."""
        line = re.sub(r'\s+', r'', line)
        return line

    def _read_storage_tests(self,
                            directory,
                            filename,
                            is_generated,
                            storage_tests):
        """Record storage tests from the given file.

        Populate the storage_tests dictionary with test cases read from
        filename under directory.
        """
        at_paragraph_start = True
        description = None
        full_path = os.path.join(directory, filename)
        with open(full_path) as fd:
            for line_number, line in enumerate(fd, 1):
                line = line.strip()
                if not line:
                    # Blank line: the next non-comment line starts a new
                    # test case paragraph (its description).
                    at_paragraph_start = True
                    continue
                if line.startswith('#'):
                    continue
                if at_paragraph_start:
                    description = line.strip()
                    at_paragraph_start = False
                    continue
                if line.startswith('depends_on:'):
                    continue
                # We've reached a test case data line
                test_case_data = self._normalize_storage_test_case_data(line)
                if not is_generated:
                    # In manual test data, only look at read tests.
                    function_name = test_case_data.split(':', 1)[0]
                    if 'read' not in function_name.split('_'):
                        continue
                metadata = SimpleNamespace(
                    filename=filename,
                    line_number=line_number,
                    description=description
                )
                storage_tests[test_case_data] = metadata

    @staticmethod
    def _list_generated_test_data_files(git_worktree_path):
        """List the generated test data files."""
        output = subprocess.check_output(
            ['tests/scripts/generate_psa_tests.py', '--list'],
            cwd=git_worktree_path,
        ).decode('ascii')
        return [line for line in output.split('\n') if line]

    def _get_storage_format_tests(self, version, git_worktree_path):
        """Record the storage format tests for the specified git version.

        The storage format tests are the test suite data files whose name
        contains "storage_format".

        The version must be checked out at git_worktree_path.

        This function creates or updates the generated data files.
        """
        # Existing test data files. This may be missing some automatically
        # generated files if they haven't been generated yet.
        storage_data_files = set(glob.glob(
            'tests/suites/test_suite_*storage_format*.data'
        ))
        # Discover and (re)generate automatically generated data files.
        to_be_generated = set()
        for filename in self._list_generated_test_data_files(git_worktree_path):
            if 'storage_format' in filename:
                storage_data_files.add(filename)
                to_be_generated.add(filename)
        subprocess.check_call(
            ['tests/scripts/generate_psa_tests.py'] + sorted(to_be_generated),
            cwd=git_worktree_path,
        )
        for test_file in sorted(storage_data_files):
            self._read_storage_tests(git_worktree_path,
                                     test_file,
                                     test_file in to_be_generated,
                                     version.storage_tests)

    def _cleanup_worktree(self, git_worktree_path):
        """Remove the specified git worktree."""
        shutil.rmtree(git_worktree_path)
        worktree_output = subprocess.check_output(
            [self.git_command, "worktree", "prune"],
            cwd=self.repo_path,
            stderr=subprocess.STDOUT
        )
        self.log.debug(worktree_output.decode("utf-8"))

    def _get_abi_dump_for_ref(self, version):
        """Generate the interface information for the specified git revision."""
        git_worktree_path = self._get_clean_worktree_for_git_revision(version)
        self._update_git_submodules(git_worktree_path, version)
        if self.check_abi:
            self._build_shared_libraries(git_worktree_path, version)
            self._get_abi_dumps_from_shared_libraries(version)
        if self.check_storage_tests:
            self._get_storage_format_tests(version, git_worktree_path)
        self._cleanup_worktree(git_worktree_path)

    def _remove_children_with_tag(self, parent, tag):
        """Recursively delete all descendants of parent with the given tag."""
        # Snapshot the children before mutating: Element.getchildren() was
        # removed in Python 3.9, and removing while iterating skips elements.
        children = list(parent)
        for child in children:
            if child.tag == tag:
                parent.remove(child)
            else:
                self._remove_children_with_tag(child, tag)

    def _remove_extra_detail_from_report(self, report_root):
        """Strip verbose sections from an abi-compliance-checker XML report,
        leaving only the problem descriptions."""
        for tag in ['test_info', 'test_results', 'problem_summary',
                    'added_symbols', 'affected']:
            self._remove_children_with_tag(report_root, tag)

        for report in report_root:
            # Iterate over a copy since we remove elements from report.
            for problems in list(report):
                if len(problems) == 0:
                    report.remove(problems)

    def _abi_compliance_command(self, mbed_module, output_path):
        """Build the command to run to analyze the library mbed_module.

        The report will be placed in output_path."""
        abi_compliance_command = [
            "abi-compliance-checker",
            "-l", mbed_module,
            "-old", self.old_version.abi_dumps[mbed_module],
            "-new", self.new_version.abi_dumps[mbed_module],
            "-strict",
            "-report-path", output_path,
        ]
        if self.skip_file:
            abi_compliance_command += ["-skip-symbols", self.skip_file,
                                       "-skip-types", self.skip_file]
        if self.brief:
            abi_compliance_command += ["-report-format", "xml",
                                       "-stdout"]
        return abi_compliance_command

    def _is_library_compatible(self, mbed_module, compatibility_report):
        """Test if the library mbed_module has remained compatible.

        Append a message regarding compatibility to compatibility_report."""
        output_path = os.path.join(
            self.report_dir, "{}-{}-{}.html".format(
                mbed_module, self.old_version.revision,
                self.new_version.revision
            )
        )
        try:
            subprocess.check_output(
                self._abi_compliance_command(mbed_module, output_path),
                stderr=subprocess.STDOUT
            )
        except subprocess.CalledProcessError as err:
            # abi-compliance-checker exits 1 when incompatibilities are
            # found; any other status is a real error.
            if err.returncode != 1:
                raise err
            if self.brief:
                self.log.info(
                    "Compatibility issues found for {}".format(mbed_module)
                )
                report_root = ET.fromstring(err.output.decode("utf-8"))
                self._remove_extra_detail_from_report(report_root)
                self.log.info(ET.tostring(report_root).decode("utf-8"))
            else:
                # Keep the HTML report around so the user can inspect it.
                self.can_remove_report_dir = False
                compatibility_report.append(
                    "Compatibility issues found for {}, "
                    "for details see {}".format(mbed_module, output_path)
                )
            return False
        compatibility_report.append(
            "No compatibility issues for {}".format(mbed_module)
        )
        if not (self.keep_all_reports or self.brief):
            os.remove(output_path)
        return True

    @staticmethod
    def _is_storage_format_compatible(old_tests, new_tests,
                                      compatibility_report):
        """Check whether all tests present in old_tests are also in new_tests.

        Append a message regarding compatibility to compatibility_report.
        """
        missing = frozenset(old_tests.keys()).difference(new_tests.keys())
        for test_data in sorted(missing):
            metadata = old_tests[test_data]
            compatibility_report.append(
                'Test case from {} line {} "{}" has disappeared: {}'.format(
                    metadata.filename, metadata.line_number,
                    metadata.description, test_data
                )
            )
        compatibility_report.append(
            'FAIL: {}/{} storage format test cases have changed or disappeared.'.format(
                len(missing), len(old_tests)
            ) if missing else
            'PASS: All {} storage format test cases are preserved.'.format(
                len(old_tests)
            )
        )
        compatibility_report.append(
            'Info: number of storage format tests cases: {} -> {}.'.format(
                len(old_tests), len(new_tests)
            )
        )
        return not missing

    def get_abi_compatibility_report(self):
        """Generate a report of the differences between the reference ABI
        and the new ABI. ABI dumps from self.old_version and self.new_version
        must be available."""
        compatibility_report = ["Checking evolution from {} to {}".format(
            self._pretty_revision(self.old_version),
            self._pretty_revision(self.new_version)
        )]
        compliance_return_code = 0

        if self.check_abi:
            # Only compare modules built for both versions.
            shared_modules = list(set(self.old_version.modules.keys()) &
                                  set(self.new_version.modules.keys()))
            for mbed_module in shared_modules:
                if not self._is_library_compatible(mbed_module,
                                                   compatibility_report):
                    compliance_return_code = 1

        if self.check_storage_tests:
            if not self._is_storage_format_compatible(
                    self.old_version.storage_tests,
                    self.new_version.storage_tests,
                    compatibility_report):
                compliance_return_code = 1

        # Clean up the intermediate ABI dumps; the HTML reports (if any)
        # are what the user keeps.
        for version in [self.old_version, self.new_version]:
            for mbed_module_dump in version.abi_dumps.values():
                os.remove(mbed_module_dump)
        if self.can_remove_report_dir:
            os.rmdir(self.report_dir)
        self.log.info("\n".join(compatibility_report))
        return compliance_return_code

    def check_for_abi_changes(self):
        """Generate a report of ABI differences
        between self.old_rev and self.new_rev."""
        self.check_repo_path()
        if self.check_api or self.check_abi:
            self.check_abi_tools_are_installed()
        self._get_abi_dump_for_ref(self.old_version)
        self._get_abi_dump_for_ref(self.new_version)
        return self.get_abi_compatibility_report()
|
|
|
|
|
|
|
|
|
|
|
|
def run_main():
    """Parse command-line arguments, run the checker, and exit.

    Exit status: 0 on success, 1 on non-compliance, 2 on error (matching
    the contract in the module docstring).
    """
    try:
        parser = argparse.ArgumentParser(
            description=__doc__
        )
        parser.add_argument(
            "-v", "--verbose", action="store_true",
            help="set verbosity level",
        )
        parser.add_argument(
            "-r", "--report-dir", type=str, default="reports",
            help="directory where reports are stored, default is reports",
        )
        parser.add_argument(
            "-k", "--keep-all-reports", action="store_true",
            help="keep all reports, even if there are no compatibility issues",
        )
        parser.add_argument(
            "-o", "--old-rev", type=str, help="revision for old version.",
            required=True,
        )
        parser.add_argument(
            "-or", "--old-repo", type=str, help="repository for old version."
        )
        parser.add_argument(
            "-oc", "--old-crypto-rev", type=str,
            help="revision for old crypto submodule."
        )
        parser.add_argument(
            "-ocr", "--old-crypto-repo", type=str,
            help="repository for old crypto submodule."
        )
        parser.add_argument(
            "-n", "--new-rev", type=str, help="revision for new version",
            required=True,
        )
        parser.add_argument(
            "-nr", "--new-repo", type=str, help="repository for new version."
        )
        parser.add_argument(
            "-nc", "--new-crypto-rev", type=str,
            help="revision for new crypto version"
        )
        parser.add_argument(
            "-ncr", "--new-crypto-repo", type=str,
            help="repository for new crypto submodule."
        )
        parser.add_argument(
            "-s", "--skip-file", type=str,
            help=("path to file containing symbols and types to skip "
                  "(typically \"-s identifiers\" after running "
                  "\"tests/scripts/list-identifiers.sh --internal\")")
        )
        parser.add_argument(
            "--check-abi",
            action='store_true', default=True,
            help="Perform ABI comparison (default: yes)"
        )
        parser.add_argument("--no-check-abi", action='store_false', dest='check_abi')
        parser.add_argument(
            "--check-api",
            action='store_true', default=True,
            help="Perform API comparison (default: yes)"
        )
        parser.add_argument("--no-check-api", action='store_false', dest='check_api')
        parser.add_argument(
            "--check-storage",
            action='store_true', default=True,
            help="Perform storage tests comparison (default: yes)"
        )
        parser.add_argument("--no-check-storage", action='store_false', dest='check_storage')
        parser.add_argument(
            "-b", "--brief", action="store_true",
            help="output only the list of issues to stdout, instead of a full report",
        )
        abi_args = parser.parse_args()
        if os.path.isfile(abi_args.report_dir):
            print("Error: {} is not a directory".format(abi_args.report_dir))
            # Exit with status 2: this is an error, not a success
            # (parser.exit() with no argument would exit with 0).
            parser.exit(2)
        old_version = SimpleNamespace(
            version="old",
            repository=abi_args.old_repo,
            revision=abi_args.old_rev,
            commit=None,
            crypto_repository=abi_args.old_crypto_repo,
            crypto_revision=abi_args.old_crypto_rev,
            abi_dumps={},
            storage_tests={},
            modules={}
        )
        new_version = SimpleNamespace(
            version="new",
            repository=abi_args.new_repo,
            revision=abi_args.new_rev,
            commit=None,
            crypto_repository=abi_args.new_crypto_repo,
            crypto_revision=abi_args.new_crypto_rev,
            abi_dumps={},
            storage_tests={},
            modules={}
        )
        configuration = SimpleNamespace(
            verbose=abi_args.verbose,
            report_dir=abi_args.report_dir,
            keep_all_reports=abi_args.keep_all_reports,
            brief=abi_args.brief,
            check_abi=abi_args.check_abi,
            check_api=abi_args.check_api,
            check_storage=abi_args.check_storage,
            skip_file=abi_args.skip_file
        )
        abi_check = AbiChecker(old_version, new_version, configuration)
        return_code = abi_check.check_for_abi_changes()
        sys.exit(return_code)
    except Exception: # pylint: disable=broad-except
        # Print the backtrace and exit explicitly so as to exit with
        # status 2, not 1.
        traceback.print_exc()
        sys.exit(2)
|
|
|
|
|
|
|
|
|
|
|
|
# Run only when executed as a script, not when imported.
if __name__ == "__main__":
    run_main()
|