#!/usr/bin/env python3

"""Assemble Mbed TLS change log entries into the change log file.

Add changelog entries to the first level-2 section.
Create a new level-2 section for unreleased changes if needed.
Remove the input files unless --keep-entries is specified.

In each level-3 section, entries are sorted in chronological order
(oldest first). From oldest to newest:
* Merged entry files are sorted according to their merge date (date of
  the merge commit that brought the commit that created the file into
  the target branch).
* Committed but unmerged entry files are sorted according to the date
  of the commit that adds them.
* Uncommitted entry files are sorted according to their modification time.

You must run this program from within a git working directory.
"""

# Copyright The Mbed TLS Contributors
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
from collections import OrderedDict, namedtuple
import datetime
import functools
import glob
import os
import re
import subprocess
import sys


class InputFormatError(Exception):
    def __init__(self, filename, line_number, message, *args, **kwargs):
        message = '{}:{}: {}'.format(filename, line_number,
                                     message.format(*args, **kwargs))
        super().__init__(message)


class CategoryParseError(Exception):
    def __init__(self, line_offset, error_message):
        self.line_offset = line_offset
        self.error_message = error_message
        super().__init__('{}: {}'.format(line_offset, error_message))


class LostContent(Exception):
    def __init__(self, filename, line):
        message = ('Lost content from {}: "{}"'.format(filename, line))
        super().__init__(message)


# The category names we use in the changelog.
# If you edit this, update ChangeLog.d/README.md.
STANDARD_CATEGORIES = (
    b'API changes',
    b'Default behavior changes',
    b'Requirement changes',
    b'New deprecations',
    b'Removals',
    b'Features',
    b'Security',
    b'Bugfix',
    b'Changes',
)

# The maximum line length for an entry
MAX_LINE_LENGTH = 80

CategoryContent = namedtuple('CategoryContent', [
    'name', 'title_line', # Title text and line number of the title
    'body', 'body_line', # Body text and starting line number of the body
])
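
# Illustrative example (hypothetical values): a parsed category might look like
#     CategoryContent(name=b'Bugfix', title_line=3,
#                     body=b'   * Fix a bug.\n', body_line=4)
# where the line numbers count newlines from the start of the parsed text.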


class ChangelogFormat:
    """Virtual class documenting how to write a changelog format class."""

    @classmethod
    def extract_top_version(cls, changelog_file_content):
        """Split out the top version section.

        If the top version is already released, create a new top
        version section for an unreleased version.

        Return ``(header, top_version_title, top_version_body, trailer)``
        where the "top version" is the existing top version section if it's
        for unreleased changes, and a newly created section otherwise.
        To assemble the changelog after modifying top_version_body,
        concatenate the four pieces.
        """
        raise NotImplementedError

    @classmethod
    def version_title_text(cls, version_title):
        """Return the text of a formatted version section title."""
        raise NotImplementedError

    @classmethod
    def split_categories(cls, version_body):
        """Split a changelog version section body into categories.

        Return a list of `CategoryContent`; each name is the category title
        without any formatting.
        """
        raise NotImplementedError

    @classmethod
    def format_category(cls, title, body):
        """Construct the text of a category section from its title and body."""
        raise NotImplementedError


class TextChangelogFormat(ChangelogFormat):
    """The traditional Mbed TLS changelog format."""

    _unreleased_version_text = b'= mbed TLS x.x.x branch released xxxx-xx-xx'
    @classmethod
    def is_released_version(cls, title):
        # Look for an incomplete release date
        return not re.search(br'[0-9x]{4}-[0-9x]{2}-[0-9x]?x', title)
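
    # Illustrative examples of the heuristic above (not exhaustive):
    #     b'= mbed TLS 2.22.0 branch released 2020-04-14' -> True (released)
    #     b'= mbed TLS x.x.x branch released xxxx-xx-xx'  -> False (unreleased)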

    _top_version_re = re.compile(br'(?:\A|\n)(=[^\n]*\n+)(.*?\n)(?:=|$)',
                                 re.DOTALL)
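    # In this regex, group 1 captures the "=" title line of the most recent
    # version section (plus any blank lines after it) and group 2 captures
    # that section's body, up to but not including the next "=" line or the
    # end of the file.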
    @classmethod
    def extract_top_version(cls, changelog_file_content):
        """A version section starts with a line starting with '='."""
        m = re.search(cls._top_version_re, changelog_file_content)
        top_version_start = m.start(1)
        top_version_end = m.end(2)
        top_version_title = m.group(1)
        top_version_body = m.group(2)
        if cls.is_released_version(top_version_title):
            top_version_end = top_version_start
            top_version_title = cls._unreleased_version_text + b'\n\n'
            top_version_body = b''
        return (changelog_file_content[:top_version_start],
                top_version_title, top_version_body,
                changelog_file_content[top_version_end:])

    @classmethod
    def version_title_text(cls, version_title):
        # Strip everything from the first newline onwards.
        return re.sub(br'\n.*', b'', version_title, flags=re.DOTALL)

    _category_title_re = re.compile(br'(^\w.*)\n+', re.MULTILINE)
    @classmethod
    def split_categories(cls, version_body):
        """A category title is a line with the title in column 0."""
        if not version_body:
            return []
        title_matches = list(re.finditer(cls._category_title_re, version_body))
        if not title_matches or title_matches[0].start() != 0:
            # There is junk before the first category.
            raise CategoryParseError(0, 'Junk found where category expected')
        title_starts = [m.start(1) for m in title_matches]
        body_starts = [m.end(0) for m in title_matches]
        body_ends = title_starts[1:] + [len(version_body)]
        bodies = [version_body[body_start:body_end].rstrip(b'\n') + b'\n'
                  for (body_start, body_end) in zip(body_starts, body_ends)]
        title_lines = [version_body[:pos].count(b'\n') for pos in title_starts]
        body_lines = [version_body[:pos].count(b'\n') for pos in body_starts]
        return [CategoryContent(title_match.group(1), title_line,
                                body, body_line)
                for title_match, title_line, body, body_line
                in zip(title_matches, title_lines, bodies, body_lines)]

    @classmethod
    def format_category(cls, title, body):
        # `split_categories` ensures that each body ends with a newline.
        # Make sure that there is additionally a blank line between categories.
        if not body.endswith(b'\n\n'):
            body += b'\n'
        return title + b'\n' + body


class ChangeLog:
    """An Mbed TLS changelog.

    A changelog file consists of some header text followed by one or
    more version sections. The version sections are in reverse
    chronological order. Each version section consists of a title and a body.

    The body of a version section consists of zero or more category
    subsections. Each category subsection consists of a title and a body.

    A changelog entry file has the same format as the body of a version section.

    A `ChangelogFormat` object defines the concrete syntax of the changelog.
    Entry files must have the same format as the changelog file.
    """

    # Only accept dotted version numbers (e.g. "3.1", not "3").
    # Refuse ".x" in a version number where x is a letter: this indicates
    # a version that is not yet released. Something like "3.1a" is accepted.
    _version_number_re = re.compile(br'[0-9]+\.[0-9A-Za-z.]+')
    _incomplete_version_number_re = re.compile(br'.*\.[A-Za-z]')
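    # Illustrative behaviour of the two regexps above: "3.1", "3.14" and
    # "3.2.1" count as complete version numbers, whereas "3", "3.", "3.a"
    # and "3.x.1" do not.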

    def add_categories_from_text(self, filename, line_offset,
                                 text, allow_unknown_category):
        """Parse a version section or entry file."""
        try:
            categories = self.format.split_categories(text)
        except CategoryParseError as e:
            raise InputFormatError(filename, line_offset + e.line_offset,
                                   e.error_message)
        for category in categories:
            if not allow_unknown_category and \
               category.name not in self.categories:
                raise InputFormatError(filename,
                                       line_offset + category.title_line,
                                       'Unknown category: "{}"',
                                       category.name.decode('utf8'))

            body_split = category.body.splitlines()
            for line in body_split:
                if len(line) > MAX_LINE_LENGTH:
                    raise InputFormatError(filename,
                                           line_offset + category.title_line,
                                           'Category body line too long: "{} ({})"',
                                           category.name.decode('utf8'),
                                           len(line))

            self.categories[category.name] += category.body

    def __init__(self, input_stream, changelog_format):
        """Create a changelog object.

        Populate the changelog object from the content of the file
        input_stream.
        """
        self.format = changelog_format
        whole_file = input_stream.read()
        (self.header,
         self.top_version_title, top_version_body,
         self.trailer) = self.format.extract_top_version(whole_file)
        # Split the top version section into categories.
        self.categories = OrderedDict()
        for category in STANDARD_CATEGORIES:
            self.categories[category] = b''
        offset = (self.header + self.top_version_title).count(b'\n') + 1
        self.add_categories_from_text(input_stream.name, offset,
                                      top_version_body, True)

    def add_file(self, input_stream):
        """Add changelog entries from a file.
        """
        self.add_categories_from_text(input_stream.name, 1,
                                      input_stream.read(), False)

    def write(self, filename):
        """Write the changelog to the specified file.
        """
        with open(filename, 'wb') as out:
            out.write(self.header)
            out.write(self.top_version_title)
            for title, body in self.categories.items():
                if not body:
                    continue
                out.write(self.format.format_category(title, body))
            out.write(self.trailer)


@functools.total_ordering
class EntryFileSortKey:
    """This class defines an ordering on changelog entry files: older < newer.

    * Merged entry files are sorted according to their merge date (date of
      the merge commit that brought the commit that created the file into
      the target branch).
    * Committed but unmerged entry files are sorted according to the date
      of the commit that adds them.
    * Uncommitted entry files are sorted according to their modification time.

    This class assumes that the file is in a git working directory with
    the target branch checked out.
    """

    # Categories of files. A lower number is considered older.
    MERGED = 0
    COMMITTED = 1
    LOCAL = 2

    @staticmethod
    def creation_hash(filename):
        """Return the git commit id at which the given file was created.

        Return None if the file was never checked into git.
        """
        hashes = subprocess.check_output(['git', 'log', '--format=%H',
                                          '--follow',
                                          '--', filename])
        m = re.search(b'(.+)$', hashes)
        if not m:
            # The git output is empty. This means that the file was
            # never checked in.
            return None
        # The last commit in the log is the oldest one, which is when the
        # file was created.
        return m.group(0)

    @staticmethod
    def list_merges(some_hash, target, *options):
        """List merge commits from some_hash to target.

        Pass options to git to select which commits are included.
        """
        text = subprocess.check_output(['git', 'rev-list',
                                        '--merges', *options,
                                        b'..'.join([some_hash, target])])
        return text.rstrip(b'\n').split(b'\n')

    @classmethod
    def merge_hash(cls, some_hash):
        """Return the git commit id at which the given commit was merged.

        Return None if the given commit was never merged.
        """
        target = b'HEAD'
        # List the merges from some_hash to the target in two ways.
        # The ancestry list is the ones that are both descendants of
        # some_hash and ancestors of the target.
        ancestry = frozenset(cls.list_merges(some_hash, target,
                                             '--ancestry-path'))
        # The first_parents list only contains merges that are directly
        # on the target branch. We want it in reverse order (oldest first).
        first_parents = cls.list_merges(some_hash, target,
                                        '--first-parent', '--reverse')
        # Look for the oldest merge commit that's both on the direct path
        # and directly on the target branch. That's the place where some_hash
        # was merged on the target branch. See
        # https://stackoverflow.com/questions/8475448/find-merge-commit-which-include-a-specific-commit
        for commit in first_parents:
            if commit in ancestry:
                return commit
        return None

    @staticmethod
    def commit_timestamp(commit_id):
        """Return the timestamp of the given commit."""
        text = subprocess.check_output(['git', 'show', '-s',
                                        '--format=%ct',
                                        commit_id])
        return datetime.datetime.utcfromtimestamp(int(text))

    @staticmethod
    def file_timestamp(filename):
        """Return the modification timestamp of the given file."""
        mtime = os.stat(filename).st_mtime
        return datetime.datetime.fromtimestamp(mtime)

    def __init__(self, filename):
        """Determine position of the file in the changelog entry order.

        This constructor returns an object that can be used with comparison
        operators, with `sort` and `sorted`, etc. Older entries are sorted
        before newer entries.
        """
        self.filename = filename
        creation_hash = self.creation_hash(filename)
        if not creation_hash:
            self.category = self.LOCAL
            self.datetime = self.file_timestamp(filename)
            return
        merge_hash = self.merge_hash(creation_hash)
        if not merge_hash:
            self.category = self.COMMITTED
            self.datetime = self.commit_timestamp(creation_hash)
            return
        self.category = self.MERGED
        self.datetime = self.commit_timestamp(merge_hash)

    def sort_key(self):
        """Return a concrete sort key for this entry file sort key object.

        ``ts1 < ts2`` is implemented as ``ts1.sort_key() < ts2.sort_key()``.
        """
        return (self.category, self.datetime, self.filename)

    def __eq__(self, other):
        return self.sort_key() == other.sort_key()

    def __lt__(self, other):
        return self.sort_key() < other.sort_key()
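
# EntryFileSortKey objects are meant to be used as a sort key, for example
#     files_to_merge.sort(key=EntryFileSortKey)
# as done in list_files_to_merge() below.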


def check_output(generated_output_file, main_input_file, merged_files):
    """Make sanity checks on the generated output.

    The intent of these sanity checks is to have reasonable confidence
    that no content has been lost.

    The sanity check is that every line that is present in an input file
    is also present in an output file. This is not perfect but good enough
    for now.
    """
    generated_output = set(open(generated_output_file, 'rb'))
    for line in open(main_input_file, 'rb'):
        if line not in generated_output:
            raise LostContent('original file', line)
    for merged_file in merged_files:
        for line in open(merged_file, 'rb'):
            if line not in generated_output:
                raise LostContent(merged_file, line)


def finish_output(changelog, output_file, input_file, merged_files):
    """Write the changelog to the output file.

    The input file and the list of merged files are used only for sanity
    checks on the output.
    """
    if os.path.exists(output_file) and not os.path.isfile(output_file):
        # The output is a non-regular file (e.g. pipe). Write to it directly.
        output_temp = output_file
    else:
        # The output is a regular file. Write to a temporary file,
        # then move it into place atomically.
        output_temp = output_file + '.tmp'
    changelog.write(output_temp)
    check_output(output_temp, input_file, merged_files)
    if output_temp != output_file:
        os.rename(output_temp, output_file)


def remove_merged_entries(files_to_remove):
    for filename in files_to_remove:
        os.remove(filename)


def list_files_to_merge(options):
    """List the entry files to merge, oldest first.

    "Oldest" is defined by `EntryFileSortKey`.
    """
    files_to_merge = glob.glob(os.path.join(options.dir, '*.txt'))
    files_to_merge.sort(key=EntryFileSortKey)
    return files_to_merge


def merge_entries(options):
    """Merge changelog entries into the changelog file.

    Read the changelog file from options.input.
    Read entries to merge from the directory options.dir.
    Write the new changelog to options.output.
    Remove the merged entries if options.keep_entries is false.
    """
    with open(options.input, 'rb') as input_file:
        changelog = ChangeLog(input_file, TextChangelogFormat)
    files_to_merge = list_files_to_merge(options)
    if not files_to_merge:
        sys.stderr.write('There are no pending changelog entries.\n')
        return
    for filename in files_to_merge:
        with open(filename, 'rb') as input_file:
            changelog.add_file(input_file)
    finish_output(changelog, options.output, options.input, files_to_merge)
    if not options.keep_entries:
        remove_merged_entries(files_to_merge)


def show_file_timestamps(options):
    """List the files to merge and their timestamp.

    This is only intended for debugging purposes.
    """
    files = list_files_to_merge(options)
    for filename in files:
        ts = EntryFileSortKey(filename)
        print(ts.category, ts.datetime, filename)


def set_defaults(options):
    """Add default values for missing options."""
    output_file = getattr(options, 'output', None)
    if output_file is None:
        options.output = options.input
    if getattr(options, 'keep_entries', None) is None:
        options.keep_entries = (output_file is not None)


def main():
    """Command line entry point."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--dir', '-d', metavar='DIR',
                        default='ChangeLog.d',
                        help='Directory to read entries from'
                             ' (default: ChangeLog.d)')
    parser.add_argument('--input', '-i', metavar='FILE',
                        default='ChangeLog',
                        help='Existing changelog file to read from and augment'
                             ' (default: ChangeLog)')
    parser.add_argument('--keep-entries',
                        action='store_true', dest='keep_entries', default=None,
                        help='Keep the files containing entries'
                             ' (default: remove them if --output/-o is not specified)')
    parser.add_argument('--no-keep-entries',
                        action='store_false', dest='keep_entries',
                        help='Remove the files containing entries after they are merged'
                             ' (default: remove them if --output/-o is not specified)')
    parser.add_argument('--output', '-o', metavar='FILE',
                        help='Output changelog file'
                             ' (default: overwrite the input)')
    parser.add_argument('--list-files-only',
                        action='store_true',
                        help=('Only list the files that would be processed '
                              '(with some debugging information)'))
    options = parser.parse_args()
    set_defaults(options)
    if options.list_files_only:
        show_file_timestamps(options)
        return
    merge_entries(options)

if __name__ == '__main__':
    main()