mirror of
https://github.com/rclone/rclone.git
synced 2024-11-22 12:36:38 +08:00
build: Automatically compile the changelog to make editing easier
This commit is contained in:
parent
32ab4e9ac6
commit
8e2b3268be
6
Makefile
6
Makefile
|
@ -210,10 +210,10 @@ tag: doc
|
|||
echo -e "package fs\n\n// Version of rclone\nvar Version = \"$(NEW_TAG)\"\n" | gofmt > fs/version.go
|
||||
echo -n "$(NEW_TAG)" > docs/layouts/partials/version.html
|
||||
git tag -s -m "Version $(NEW_TAG)" $(NEW_TAG)
|
||||
bin/make_changelog.py $(LAST_TAG) $(NEW_TAG) > docs/content/changelog.md.new
|
||||
mv docs/content/changelog.md.new docs/content/changelog.md
|
||||
@echo "Edit the new changelog in docs/content/changelog.md"
|
||||
@echo " * $(NEW_TAG) -" `date -I` >> docs/content/changelog.md
|
||||
@git log $(LAST_TAG)..$(NEW_TAG) --oneline >> docs/content/changelog.md
|
||||
@echo "Then commit the changes"
|
||||
@echo "Then commit all the changes"
|
||||
@echo git commit -m \"Version $(NEW_TAG)\" -a -v
|
||||
@echo "And finally run make retag before make cross etc"
|
||||
|
||||
|
|
173
bin/make_changelog.py
Executable file
173
bin/make_changelog.py
Executable file
|
@ -0,0 +1,173 @@
|
|||
#!/usr/bin/python
"""
Generate a markdown changelog for the rclone project
"""

import os
import sys
import re
import datetime
import subprocess
from collections import defaultdict

# Commit subjects matching any of these patterns are release housekeeping
# and are dropped from the changelog entirely.
IGNORE_RES = [
    r"^Add .* to contributors$",
    r"^Start v\d+.\d+-DEV development$",
    r"^Version v\d.\d+$",
]

# Single combined regex built from IGNORE_RES.
IGNORE_RE = re.compile("(?:" + "|".join(IGNORE_RES) + ")")

# Matches the "category: message" convention used in rclone commit
# subjects; group 1 is a comma-separated category list, group 2 the rest.
CATEGORY = re.compile(r"(^[\w/ ]+(?:, *[\w/ ]+)*):\s*(.*)$")

# Backend names discovered from the source tree — assumes the script is
# run from the repository root (the "backend" directory must exist).
backends = [ x for x in os.listdir("backend") if x != "all"]

# Alternative category spellings seen in commit messages, mapped to the
# canonical backend directory name.
backend_aliases = {
    "amazon cloud drive" : "amazonclouddrive",
    "acd" : "amazonclouddrive",
    "google cloud storage" : "googlecloudstorage",
    "gcs" : "googlecloudstorage",
    "azblob" : "azureblob",
    "mountlib": "mount",
    "cmount": "mount",
    "mount/cmount": "mount",
}

# Section titles for backends whose str.title() form would be wrong
# (e.g. "Ftp" instead of "FTP").
backend_titles = {
    "amazonclouddrive": "Amazon Cloud Drive",
    "googlecloudstorage": "Google Cloud Storage",
    "azureblob": "Azure Blob",
    "ftp": "FTP",
    "sftp": "SFTP",
    "http": "HTTP",
    "webdav": "WebDAV",
}

# Strips trailing issue references such as " - fixes #123" from a subject.
STRIP_FIX_RE = re.compile(r"(\s+-)?\s+((fixes|addresses)\s+)?#\d+", flags=re.I)

# Strips a leading "backend/" or "fs/" path component from a category.
STRIP_PATH_RE = re.compile(r"^(backend|fs)/")

# Classifies a message as a bug fix rather than a new feature.
IS_FIX_RE = re.compile(r"\b(fix|fixes)\b", flags=re.I)
|
||||
|
||||
def make_out(data, indent=""):
    """Return a pair ``(out, out_lines)``.

    ``out(category, title=None)`` pops ``category`` from ``data`` and
    appends its messages, formatted as markdown bullet points, to the
    shared ``out_lines`` list.  Calling it with a category that is absent
    (or has no messages) is a no-op.  ``indent`` is prefixed to every
    emitted line; when ``indent`` is non-empty a single-message category
    is collapsed onto one "title: message" line.
    """
    out_lines = []

    def out(category, title=None):
        if title is None:  # was "== None"; identity test is the idiom
            title = category
        lines = data.get(category)
        if not lines:
            return
        # Remove the category so callers can see what is left unhandled.
        del data[category]
        # Compact form: an indented section with exactly one entry.
        if indent != "" and len(lines) == 1:
            out_lines.append(indent + "* " + title + ": " + lines[0])
            return
        out_lines.append(indent + "* " + title)
        for line in lines:
            out_lines.append(indent + " * " + line)

    return out, out_lines
|
||||
|
||||
|
||||
def process_log(log):
    """Process the incoming git log into a category dict of lists.

    ``log`` is the output of ``git log --pretty=format:%H|%an|%aI|%s``.
    Returns a ``defaultdict(list)`` mapping lower-cased category name to
    formatted messages, oldest commit first.  Subjects matching
    ``IGNORE_RE`` are skipped; issue references are stripped; the author
    is appended in parentheses.
    """
    by_category = defaultdict(list)
    # Reverse so the oldest commit comes first in each category list.
    for log_line in reversed(log.split("\n")):
        log_line = log_line.strip()
        if not log_line:
            # An empty commit range produces an empty string, which would
            # otherwise raise ValueError in the split below.
            continue
        # "hash" renamed: the original shadowed the builtin hash().
        commit_hash, author, timestamp, message = log_line.split("|", 3)
        message = message.strip()
        if IGNORE_RE.search(message):
            continue
        match = CATEGORY.search(message)
        categories = "UNKNOWN"
        if match:
            categories = match.group(1).lower()
            message = match.group(2)
        message = STRIP_FIX_RE.sub("", message)
        message = message + " (" + author + ")"
        message = message[0].upper() + message[1:]
        # A commit may list several categories; record the message once
        # per distinct (aliased) category.
        seen = set()
        for category in categories.split(","):
            category = category.strip()
            category = STRIP_PATH_RE.sub("", category)
            category = backend_aliases.get(category, category)
            if category in seen:
                continue
            by_category[category].append(message)
            seen.add(category)
    return by_category
|
||||
|
||||
def main():
    """Entry point: write the combined changelog to stdout.

    Usage: ``make_changelog.py LAST_TAG NEW_TAG``.  Must be run from the
    rclone repository root: it runs git and reads
    docs/content/changelog.md, emitting the updated document on stdout.
    """
    if len(sys.argv) != 3:
        # sys.stderr.write works on Python 2 and 3; the original
        # "print >>sys.stderr" is a syntax error under Python 3.
        sys.stderr.write("Syntax: %s vX.XX vX.XY\n" % sys.argv[0])
        sys.exit(1)
    version, next_version = sys.argv[1], sys.argv[2]
    # universal_newlines=True makes check_output return text on Python 3
    # (it returns bytes by default there) and is harmless on Python 2.
    log = subprocess.check_output(
        ["git", "log", "--pretty=format:%H|%an|%aI|%s",
         version + ".." + next_version],
        universal_newlines=True)
    by_category = process_log(log)

    # Output backends first so remaining in by_category are core items
    out, backend_lines = make_out(by_category)
    out("mount", title="Mount")
    out("vfs", title="VFS")
    out("local", title="Local")
    out("cache", title="Cache")
    out("crypt", title="Crypt")
    backend_names = sorted(x for x in by_category.keys() if x in backends)
    for backend_name in backend_names:
        if backend_name in backend_titles:
            backend_title = backend_titles[backend_name]
        else:
            backend_title = backend_name.title()
        out(backend_name, title=backend_title)

    # Split remaining in by_category into new features and fixes
    new_features = defaultdict(list)
    bugfixes = defaultdict(list)
    # .items() replaces Python-2-only .iteritems()
    for name, messages in by_category.items():
        for message in messages:
            if IS_FIX_RE.search(message):
                bugfixes[name].append(message)
            else:
                new_features[name].append(message)

    # Output new features
    out, new_features_lines = make_out(new_features, indent=" ")
    for name in sorted(new_features.keys()):
        out(name)

    # Output bugfixes
    out, bugfix_lines = make_out(bugfixes, indent=" ")
    for name in sorted(bugfixes.keys()):
        out(name)

    # Read old changelog and split it at the "# Changelog" heading
    with open("docs/content/changelog.md") as fd:
        old_changelog = fd.read()
    heading = "# Changelog"
    i = old_changelog.find(heading)
    if i < 0:
        raise AssertionError("Couldn't find heading in old changelog")
    i += len(heading)
    old_head, old_tail = old_changelog[:i], old_changelog[i:]

    # Update the build date
    old_head = re.sub(r"\d\d\d\d-\d\d-\d\d", str(datetime.date.today()), old_head)

    # Output combined changelog with new part.  The new section is
    # headed with next_version: the log range version..next_version
    # lists the commits that went INTO next_version (the original used
    # "version", i.e. the previous tag, here).
    sys.stdout.write(old_head)
    sys.stdout.write("""

## %s - %s

* New backends
* New commands
* New Features
%s
* Bug Fixes
%s
%s""" % (next_version, datetime.date.today(),
         "\n".join(new_features_lines),
         "\n".join(bugfix_lines),
         "\n".join(backend_lines)))
    sys.stdout.write(old_tail)


if __name__ == "__main__":
    main()
|
Loading…
Reference in New Issue
Block a user