#!/usr/bin/env python3

# Copyright The Mbed TLS Contributors
# SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later

"""
This script checks the current state of the source code for minor issues,
including incorrect file permissions, presence of tabs, non-Unix line endings,
trailing whitespace, and presence of UTF-8 BOM.
Note: requires Python 3, must be run from the Mbed TLS root.
"""

import argparse
import codecs
import inspect
import logging
import os
import re
import subprocess
import sys

try:
    from typing import FrozenSet, Optional, Pattern # pylint: disable=unused-import
except ImportError:
    pass

import scripts_path # pylint: disable=unused-import
from mbedtls_dev import build_tree


class FileIssueTracker:
    """Base class for file-wide issue tracking.

    To implement a checker that processes a file as a whole, inherit from
    this class, implement `check_file_for_issue` and define ``heading``.

    ``suffix_exemptions``: files whose name ends with a string in this set
    will not be checked.

    ``path_exemptions``: files whose path (relative to the root of the source
    tree) matches this regular expression will not be checked. This can be
    ``None`` to match no path. Paths are normalized and converted to ``/``
    separators before matching.

    ``heading``: human-readable description of the issue
    """

    suffix_exemptions = frozenset() #type: FrozenSet[str]
    path_exemptions = None #type: Optional[Pattern[str]]
    # heading must be defined in derived classes.
    # pylint: disable=no-member

    def __init__(self):
        self.files_with_issues = {}

    @staticmethod
    def normalize_path(filepath):
        """Normalize ``filepath`` with / as the directory separator."""
        filepath = os.path.normpath(filepath)
        # On Windows, we may have backslashes to separate directories.
        # We need slashes to match exemption lists.
        seps = os.path.sep
        if os.path.altsep is not None:
            seps += os.path.altsep
        # Split on any single separator character (str.split would only
        # split on the whole concatenated separator string).
        return '/'.join(re.split('[' + re.escape(seps) + ']', filepath))

    def should_check_file(self, filepath):
        """Whether the given file name should be checked.

        Files whose name ends with a string listed in ``self.suffix_exemptions``
        or whose path matches ``self.path_exemptions`` will not be checked.
        """
        for files_exemption in self.suffix_exemptions:
            if filepath.endswith(files_exemption):
                return False
        if self.path_exemptions and \
           re.match(self.path_exemptions, self.normalize_path(filepath)):
            return False
        return True

    def check_file_for_issue(self, filepath):
        """Check the specified file for the issue that this class is for.

        Subclasses must implement this method.
        """
        raise NotImplementedError

    def record_issue(self, filepath, line_number):
        """Record that an issue was found at the specified location."""
        if filepath not in self.files_with_issues.keys():
            self.files_with_issues[filepath] = []
        self.files_with_issues[filepath].append(line_number)

    def output_file_issues(self, logger):
        """Log all the locations where the issue was found."""
        if self.files_with_issues.values():
            logger.info(self.heading)
            for filename, lines in sorted(self.files_with_issues.items()):
                if lines:
                    logger.info("{}: {}".format(
                        filename, ", ".join(str(x) for x in lines)
                    ))
                else:
                    logger.info(filename)
            logger.info("")


BINARY_FILE_PATH_RE_LIST = [
    r'docs/.*\.pdf\Z',
    r'programs/fuzz/corpuses/[^.]+\Z',
    r'tests/data_files/[^.]+\Z',
    r'tests/data_files/.*\.(crt|csr|db|der|key|pubkey)\Z',
    r'tests/data_files/.*\.req\.[^/]+\Z',
    r'tests/data_files/.*malformed[^/]+\Z',
    r'tests/data_files/format_pkcs12\.fmt\Z',
    r'tests/data_files/.*\.bin\Z',
]
BINARY_FILE_PATH_RE = re.compile('|'.join(BINARY_FILE_PATH_RE_LIST))
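
# These regular expressions are applied with re.match() to paths that have
# been normalized to '/' separators (see FileIssueTracker.normalize_path),
# so they are effectively anchored at the start of the path relative to the
# source tree root.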


class LineIssueTracker(FileIssueTracker):
    """Base class for line-by-line issue tracking.

    To implement a checker that processes files line by line, inherit from
    this class and implement `issue_with_line`.
    """

    # Exclude binary files.
    path_exemptions = BINARY_FILE_PATH_RE

    def issue_with_line(self, line, filepath, line_number):
        """Check the specified line for the issue that this class is for.

        Subclasses must implement this method.
        """
        raise NotImplementedError

    def check_file_line(self, filepath, line, line_number):
        if self.issue_with_line(line, filepath, line_number):
            self.record_issue(filepath, line_number)

    def check_file_for_issue(self, filepath):
        """Check the lines of the specified file.

        Subclasses must implement the ``issue_with_line`` method.
        """
        with open(filepath, "rb") as f:
            for i, line in enumerate(iter(f.readline, b"")):
                self.check_file_line(filepath, line, i + 1)


def is_windows_file(filepath):
    _root, ext = os.path.splitext(filepath)
    return ext in ('.bat', '.dsp', '.dsw', '.sln', '.vcxproj')


class ShebangIssueTracker(FileIssueTracker):
    """Track files with a bad, missing or extraneous shebang line.

    Executable scripts must start with a valid shebang (#!) line.
    """

    heading = "Invalid shebang line:"

    # Allow either /bin/sh, /bin/bash, or /usr/bin/env.
    # Allow at most one argument (this is a Linux limitation).
    # For sh and bash, the argument if present must be options.
    # For env, the argument must be the base name of the interpreter.
    _shebang_re = re.compile(rb'^#! ?(?:/bin/(bash|sh)(?: -[^\n ]*)?'
                             rb'|/usr/bin/env ([^\n /]+))$')
    _extensions = {
        b'bash': 'sh',
        b'perl': 'pl',
        b'python3': 'py',
        b'sh': 'sh',
    }
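
    # Examples (illustrative only):
    #   b"#!/usr/bin/env python3"  -> valid for a file named *.py
    #   b"#!/bin/sh -e"            -> valid for a file named *.sh
    #   b"#!/usr/bin/python3"      -> rejected: interpreter paths other than
    #                                 /bin/sh, /bin/bash or /usr/bin/env fail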

    def is_valid_shebang(self, first_line, filepath):
        m = re.match(self._shebang_re, first_line)
        if not m:
            return False
        interpreter = m.group(1) or m.group(2)
        if interpreter not in self._extensions:
            return False
        if not filepath.endswith('.' + self._extensions[interpreter]):
            return False
        return True

    def check_file_for_issue(self, filepath):
        is_executable = os.access(filepath, os.X_OK)
        with open(filepath, "rb") as f:
            first_line = f.readline()
        if first_line.startswith(b'#!'):
            if not is_executable:
                # Shebang on a non-executable file
                self.files_with_issues[filepath] = None
            elif not self.is_valid_shebang(first_line, filepath):
                self.files_with_issues[filepath] = [1]
        elif is_executable:
            # Executable without a shebang
            self.files_with_issues[filepath] = None


class EndOfFileNewlineIssueTracker(FileIssueTracker):
    """Track files that end with an incomplete line
    (no newline character at the end of the last line)."""

    heading = "Missing newline at end of file:"

    path_exemptions = BINARY_FILE_PATH_RE

    def check_file_for_issue(self, filepath):
        with open(filepath, "rb") as f:
            try:
                f.seek(-1, 2)
            except OSError:
                # This script only works on regular files. If we can't seek
                # 1 before the end, it means that this position is before
                # the beginning of the file, i.e. that the file is empty.
                return
            if f.read(1) != b"\n":
                self.files_with_issues[filepath] = None


class Utf8BomIssueTracker(FileIssueTracker):
    """Track files that start with a UTF-8 BOM.
    Files should be ASCII or UTF-8. Valid UTF-8 does not start with a BOM."""

    heading = "UTF-8 BOM present:"

    suffix_exemptions = frozenset([".vcxproj", ".sln"])
    path_exemptions = BINARY_FILE_PATH_RE

    def check_file_for_issue(self, filepath):
        with open(filepath, "rb") as f:
            if f.read().startswith(codecs.BOM_UTF8):
                self.files_with_issues[filepath] = None


class UnicodeIssueTracker(LineIssueTracker):
    """Track lines with invalid characters or invalid text encoding."""

    heading = "Invalid UTF-8 or forbidden character:"

    # Only allow valid UTF-8, and within it only explicitly allowed characters.
    # We deliberately exclude all characters that aren't a simple non-blank,
    # non-zero-width glyph, apart from a very small set (tab, ordinary space,
    # line breaks, "basic" no-break space and soft hyphen). In particular,
    # non-ASCII control characters, combining characters, and Unicode state
    # changes (e.g. right-to-left text) are forbidden.
    # Note that we do allow some characters with a risk of visual confusion,
    # for example '-' (U+002D HYPHEN-MINUS) vs '­' (U+00AD SOFT HYPHEN) vs
    # '‐' (U+2010 HYPHEN), or 'A' (U+0041 LATIN CAPITAL LETTER A) vs
    # 'Α' (U+0391 GREEK CAPITAL LETTER ALPHA).
    GOOD_CHARACTERS = ''.join([
        '\t\n\r -~', # ASCII (tabs and line endings are checked separately)
        '\u00A0-\u00FF', # Latin-1 Supplement (for NO-BREAK SPACE and punctuation)
        '\u2010-\u2027\u2030-\u205E', # General Punctuation (printable)
        '\u2070\u2071\u2074-\u208E\u2090-\u209C', # Superscripts and Subscripts
        '\u2190-\u21FF', # Arrows
        '\u2200-\u22FF', # Mathematical Symbols
        '\u2500-\u257F' # Box Drawings characters used in markdown trees
    ])
    # Allow any of the characters and ranges above, and anything classified
    # as a word constituent.
    GOOD_CHARACTERS_RE = re.compile(r'[\w{}]+\Z'.format(GOOD_CHARACTERS))
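
    # For example, a line containing only "café" matches (both via \w and the
    # Latin-1 Supplement range), while a line containing U+202E
    # RIGHT-TO-LEFT OVERRIDE does not: that code point is neither a word
    # constituent nor in any of the ranges listed above.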

    def issue_with_line(self, line, _filepath, line_number):
        try:
            text = line.decode('utf-8')
        except UnicodeDecodeError:
            return True
        if line_number == 1 and text.startswith('\uFEFF'):
            # Strip BOM (U+FEFF ZERO WIDTH NO-BREAK SPACE) at the beginning.
            # Which files are allowed to have a BOM is handled in
            # Utf8BomIssueTracker.
            text = text[1:]
        return not self.GOOD_CHARACTERS_RE.match(text)


class UnixLineEndingIssueTracker(LineIssueTracker):
    """Track files with non-Unix line endings (i.e. files with CR)."""

    heading = "Non-Unix line endings:"

    def should_check_file(self, filepath):
        if not super().should_check_file(filepath):
            return False
        return not is_windows_file(filepath)

    def issue_with_line(self, line, _filepath, _line_number):
        return b"\r" in line


class WindowsLineEndingIssueTracker(LineIssueTracker):
    """Track files with non-Windows line endings (i.e. CR or LF not in CRLF)."""

    heading = "Non-Windows line endings:"

    def should_check_file(self, filepath):
        if not super().should_check_file(filepath):
            return False
        return is_windows_file(filepath)

    def issue_with_line(self, line, _filepath, _line_number):
        return not line.endswith(b"\r\n") or b"\r" in line[:-2]
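
    # For example: b"x\r\n" is fine, b"x\n" is flagged (no CR before LF), and
    # b"x\ry\r\n" is flagged (stray CR before the final CRLF).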


class TrailingWhitespaceIssueTracker(LineIssueTracker):
    """Track lines with trailing whitespace."""

    heading = "Trailing whitespace:"
    suffix_exemptions = frozenset([".dsp", ".md"])

    def issue_with_line(self, line, _filepath, _line_number):
        return line.rstrip(b"\r\n") != line.rstrip()


class TabIssueTracker(LineIssueTracker):
    """Track lines with tabs."""

    heading = "Tabs present:"
    suffix_exemptions = frozenset([
        ".pem", # some openssl dumps have tabs
        ".sln",
        "/Makefile",
        "/Makefile.inc",
        "/generate_visualc_files.pl",
    ])

    def issue_with_line(self, line, _filepath, _line_number):
        return b"\t" in line


class MergeArtifactIssueTracker(LineIssueTracker):
    """Track lines with merge artifacts.
    These are leftovers from a ``git merge`` that wasn't fully edited."""

    heading = "Merge artifact:"

    def issue_with_line(self, line, _filepath, _line_number):
        # Detect leftover git conflict markers.
        if line.startswith(b'<<<<<<< ') or line.startswith(b'>>>>>>> '):
            return True
        if line.startswith(b'||||||| '): # from merge.conflictStyle=diff3
            return True
        if line.rstrip(b'\r\n') == b'=======' and \
           not _filepath.endswith('.md'):
            return True
        return False


THIS_FILE_BASE_NAME = \
    os.path.basename(inspect.getframeinfo(inspect.currentframe()).filename)
LINE_NUMBER_BEFORE_LICENSE_ISSUE_TRACKER = \
    inspect.getframeinfo(inspect.currentframe()).lineno
class LicenseIssueTracker(LineIssueTracker):
    """Check copyright statements and license indications.

    This class only checks that statements are correct if present. It does
    not enforce the presence of statements in each file.
    """

    heading = "License issue:"

    LICENSE_EXEMPTION_RE_LIST = [
        # Third-party code, other than whitelisted third-party modules,
        # may be under a different license.
        r'3rdparty/(?!(p256-m)/.*)',
        # Documentation explaining the license may have accidental
        # false positives.
        r'(ChangeLog|LICENSE|[-0-9A-Z_a-z]+\.md)\Z',
        # Files imported from TF-M, and not used except in test builds,
        # may be under a different license.
        r'configs/crypto_config_profile_medium\.h\Z',
        r'configs/tfm_mbedcrypto_config_profile_medium\.h\Z',
        # Third-party file.
        r'dco\.txt\Z',
    ]
    path_exemptions = re.compile('|'.join(BINARY_FILE_PATH_RE_LIST +
                                          LICENSE_EXEMPTION_RE_LIST))

    COPYRIGHT_HOLDER = rb'The Mbed TLS Contributors'
    # Catch "Copyright foo", "Copyright (C) foo", "Copyright © foo", etc.
    COPYRIGHT_RE = re.compile(rb'.*\bcopyright\s+((?:\w|\s|[()]|[^ -~])*\w)', re.I)

    SPDX_HEADER_KEY = b'SPDX-License-Identifier'
    LICENSE_IDENTIFIER = b'Apache-2.0 OR GPL-2.0-or-later'
    SPDX_RE = re.compile(br'.*?(' +
                         re.escape(SPDX_HEADER_KEY) +
                         br')(:\s*(.*?)\W*\Z|.*)', re.I)
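
    # For illustration: matched against a line like
    #   b"// SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later"
    # group(1) captures the header key and group(3) the identifier text;
    # issue_with_line() below compares those to SPDX_HEADER_KEY and
    # LICENSE_IDENTIFIER.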

    def __init__(self):
        super().__init__()
        # Record what problem was caused. We can't easily report it due to
        # the structure of the script. To be fixed after
        # https://github.com/Mbed-TLS/mbedtls/pull/2506
        self.problem = None

    def issue_with_line(self, line, filepath, line_number):
        # Use endswith() rather than the more correct os.path.basename()
        # because experimentally, it makes a significant difference to
        # the running time.
        if filepath.endswith(THIS_FILE_BASE_NAME) and \
           line_number > LINE_NUMBER_BEFORE_LICENSE_ISSUE_TRACKER:
            # Avoid false positives from the code in this class.
            # Also skip the rest of this file, which is highly unlikely to
            # contain any problematic statements since we put those near the
            # top of files.
            return False

        m = self.COPYRIGHT_RE.match(line)
        if m and m.group(1) != self.COPYRIGHT_HOLDER:
            self.problem = 'Invalid copyright line'
            return True

        m = self.SPDX_RE.match(line)
        if m:
            if m.group(1) != self.SPDX_HEADER_KEY:
                self.problem = 'Misspelled ' + self.SPDX_HEADER_KEY.decode()
                return True
            if not m.group(3):
                self.problem = 'Improperly formatted SPDX license identifier'
                return True
            if m.group(3) != self.LICENSE_IDENTIFIER:
                self.problem = 'Wrong SPDX license identifier'
                return True

        return False


class IntegrityChecker:
    """Sanity-check files under the current directory."""

    def __init__(self, log_file):
        """Instantiate the sanity checker.
        Check files under the current directory.
        Write a report of issues to log_file."""
        build_tree.check_repo_path()
        self.logger = None
        self.setup_logger(log_file)
        self.issues_to_check = [
            ShebangIssueTracker(),
            EndOfFileNewlineIssueTracker(),
            Utf8BomIssueTracker(),
            UnicodeIssueTracker(),
            UnixLineEndingIssueTracker(),
            WindowsLineEndingIssueTracker(),
            TrailingWhitespaceIssueTracker(),
            TabIssueTracker(),
            MergeArtifactIssueTracker(),
            LicenseIssueTracker(),
        ]

    def setup_logger(self, log_file, level=logging.INFO):
        """Log to log_file if provided, or to the console (stderr) otherwise."""
        self.logger = logging.getLogger()
        self.logger.setLevel(level)
        if log_file:
            handler = logging.FileHandler(log_file)
            self.logger.addHandler(handler)
        else:
            console = logging.StreamHandler()
            self.logger.addHandler(console)

    @staticmethod
    def collect_files():
        """Return the list of files to check, as tracked by git."""
        bytes_output = subprocess.check_output(['git', 'ls-files', '-z'])
        bytes_filepaths = bytes_output.split(b'\0')[:-1]
        ascii_filepaths = map(lambda fp: fp.decode('ascii'), bytes_filepaths)
        # Prepend './' to files in the top-level directory so that
        # something like `'/Makefile' in fp` matches in the top-level
        # directory as well as in subdirectories.
        return [fp if os.path.dirname(fp) else os.path.join(os.curdir, fp)
                for fp in ascii_filepaths]

    def check_files(self):
        """Run each issue checker over every file that it should check."""
        for issue_to_check in self.issues_to_check:
            for filepath in self.collect_files():
                if issue_to_check.should_check_file(filepath):
                    issue_to_check.check_file_for_issue(filepath)

    def output_issues(self):
        """Log the issues found and return 1 if there were any, 0 otherwise."""
        integrity_return_code = 0
        for issue_to_check in self.issues_to_check:
            if issue_to_check.files_with_issues:
                integrity_return_code = 1
            issue_to_check.output_file_issues(self.logger)
        return integrity_return_code


def run_main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "-l", "--log_file", type=str, help="path to optional output log",
    )
    check_args = parser.parse_args()
    integrity_check = IntegrityChecker(check_args.log_file)
    integrity_check.check_files()
    return_code = integrity_check.output_issues()
    sys.exit(return_code)


if __name__ == "__main__":
    run_main()