#!/usr/bin/env python3
#
# Copyright The Mbed TLS Contributors
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
|
|
|
|
Make fuzz like testing for pkcs7 tests
|
|
|
|
Given a valid DER pkcs7 file add tests to the test_suite_pkcs7.data file
|
|
|
|
- It is expected that the pkcs7_asn1_fail( data_t *pkcs7_buf )
|
|
|
|
function is defined in test_suite_pkcs7.function
|
|
|
|
- This is not meant to be portable code, if anything it is meant to serve as
|
|
|
|
documentation for showing how those ugly tests in test_suite_pkcs7.data were created
|
|
|
|
"""

import sys
from os.path import exists

PKCS7_TEST_FILE = "../suites/test_suite_pkcs7.data"


class Test: # pylint: disable=too-few-public-methods
    """
    An instance of a test in test_suite_pkcs7.data.
    """
    def __init__(self, name, depends, func_call):
        self.name = name
        self.depends = depends
        self.func_call = func_call

    # pylint: disable=no-self-use
    def to_string(self):
        return "\n" + self.name + "\n" + self.depends + "\n" + self.func_call + "\n"


class TestData:
    """
    Take in the test_suite_pkcs7.data file.
    Allow new tests to be added.
    """
    mandatory_dep = "MBEDTLS_MD_CAN_SHA256"
    test_name = "PKCS7 Parse Failure Invalid ASN1"
    test_function = "pkcs7_asn1_fail:"

    def __init__(self, file_name):
        self.file_name = file_name
        self.last_test_num, self.old_tests = self.read_test_file(file_name)
        self.new_tests = []

    # pylint: disable=no-self-use
    def read_test_file(self, file):
        """
        Parse the test_suite_pkcs7.data file.
        """
        tests = []
        if not exists(file):
            print(file + " does not exist")
            sys.exit()
        with open(file, "r", encoding='UTF-8') as fp:
            data = fp.read()
        lines = [line.strip() for line in data.split('\n') if len(line.strip()) > 1]
        i = 0
        while i < len(lines):
            if "depends" in lines[i+1]:
                tests.append(Test(lines[i], lines[i+1], lines[i+2]))
                i += 3
            else:
                tests.append(Test(lines[i], None, lines[i+1]))
                i += 2
        latest_test_num = float(tests[-1].name.split('#')[1])
        return latest_test_num, tests

    def add(self, name, func_call):
        self.last_test_num += 1
        self.new_tests.append(Test(self.test_name + ": " + name + " #" + \
                str(self.last_test_num), "depends_on:" + self.mandatory_dep, \
                self.test_function + '"' + func_call + '"'))

    def write_changes(self):
        with open(self.file_name, 'a', encoding='UTF-8') as fw:
            fw.write("\n")
            for t in self.new_tests:
                fw.write(t.to_string())


def asn1_mutate(data):
    """
    We have been given an ASN.1 structure representing a PKCS7.
    We want to return a list of slightly modified versions of this data,
    modified in a way which makes the structure invalid.

    We know that ASN.1 structures are encoded as:
    |---1 byte showing data type---|----byte(s) for length of data---|---data content--|
    We know that some data types can contain other data types.

    Return a list of (reason, mutated data) pairs.
    """

    # off the bat just add bytes to start and end of the buffer
    mutations = []
    reasons = []
    mutations.append(["00"] + data)
    reasons.append("Add null byte to start")
    mutations.append(data + ["00"])
    reasons.append("Add null byte to end")
    # for every asn1 entry we should attempt to:
    #    - change the data type tag
    #    - make the length longer than actual
    #    - make the length shorter than actual
    i = 0
    while i < len(data):
        tag_i = i
        leng_i = tag_i + 1
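        # Long-form length: a first length byte of the form 8x says the actual
        # length is stored in the following x bytes, so the content starts after
        # those bytes; otherwise the single byte is itself the length.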
        data_i = leng_i + 1 + (int(data[leng_i][1], 16) if data[leng_i][0] == '8' else 0)
        if data[leng_i][0] == '8':
            length = int(''.join(data[leng_i + 1: data_i]), 16)
        else:
            length = int(data[leng_i], 16)

        tag = data[tag_i]
        print("Looking at asn1: offset " + str(i) + " tag = " + tag + \
                ", length = " + str(length) + ":")
        print(''.join(data[data_i:data_i+length]))
        # change tag to something else
        if tag == "02":
            # turn integers into octet strings
            new_tag = "04"
        else:
            # turn everything else into an integer
            new_tag = "02"
        mutations.append(data[:tag_i] + [new_tag] + data[leng_i:])
        reasons.append("Change tag " + tag + " to " + new_tag)

        # change lengths to too big
        # skip any edge cases which would cause carry over
        if int(data[data_i - 1], 16) < 255:
            new_length = str(hex(int(data[data_i - 1], 16) + 1))[2:]
            if len(new_length) == 1:
                new_length = "0" + new_length
            mutations.append(data[:data_i - 1] + [new_length] + data[data_i:])
            reasons.append("Change length from " + str(length) + " to " \
                    + str(length + 1))
            # we can add another test here for tags that contain other tags,
            # where they have more data than their containing tags account for
            if tag in ["30", "a0", "31"]:
                mutations.append(data[:data_i - 1] + [new_length] + \
                        data[data_i:data_i + length] + ["00"] + \
                        data[data_i + length:])
                reasons.append("Change contents of tag " + tag + " to contain \
                        one unaccounted extra byte")

        # change lengths to too small
        if int(data[data_i - 1], 16) > 0:
            new_length = str(hex(int(data[data_i - 1], 16) - 1))[2:]
            if len(new_length) == 1:
                new_length = "0" + new_length
            mutations.append(data[:data_i - 1] + [new_length] + data[data_i:])
            reasons.append("Change length from " + str(length) + " to " + str(length - 1))
2022-12-14 22:04:40 +01:00
|
|
|
|
|
|
|
# some tag types contain other tag types so we should iterate into the data
|
|
|
|
if tag in ["30", "a0", "31"]:
|
|
|
|
i = data_i
|
|
|
|
else:
|
|
|
|
i = data_i + length
|
|
|
|
|
|
|
|
return list(zip(reasons, mutations))
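

# A quick sanity check of asn1_mutate (hypothetical input, not part of the
# generator itself): asn1_mutate(['30', '03', '02', '01', '05']) yields pairs
# such as ("Add null byte to start", ['00', '30', '03', '02', '01', '05'])
# and ("Change tag 30 to 02", ['02', '03', '02', '01', '05']).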


if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("USAGE: " + sys.argv[0] + " <pkcs7_der_file>")
        sys.exit()

    DATA_FILE = sys.argv[1]
    TEST_DATA = TestData(PKCS7_TEST_FILE)
    with open(DATA_FILE, 'rb') as f:
        DATA_STR = f.read().hex()
    # make data an array of byte strings eg ['de','ad','be','ef']
    HEX_DATA = list(map(''.join, [[DATA_STR[i], DATA_STR[i+1]] for i in range(0, len(DATA_STR), \
            2)]))
    # returns tuples of test_names and modified data buffers
    MUT_ARR = asn1_mutate(HEX_DATA)

    print("made " + str(len(MUT_ARR)) + " new tests")
    for new_test in MUT_ARR:
        TEST_DATA.add(new_test[0], ''.join(new_test[1]))

    TEST_DATA.write_changes()