# yt-dlp: devscripts/make_changelog.py
# (recovered from a git-blame web view; blame artifacts removed)
1from __future__ import annotations
2
392389b7 3# Allow direct execution
4import os
5import sys
6
7sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
8
d400e261
SS
9import enum
10import itertools
11import json
12import logging
13import re
d400e261
SS
14from collections import defaultdict
15from dataclasses import dataclass
16from functools import lru_cache
17from pathlib import Path
18
392389b7 19from devscripts.utils import read_file, run_process, write_file
20
d400e261
SS
21BASE_URL = 'https://github.com'
22LOCATION_PATH = Path(__file__).parent
392389b7 23HASH_LENGTH = 7
d400e261
SS
24
25logger = logging.getLogger(__name__)
26
27
class CommitGroup(enum.Enum):
    """Changelog section a commit is sorted into; the value is the section heading."""

    PRIORITY = 'Important'
    CORE = 'Core'
    EXTRACTOR = 'Extractor'
    DOWNLOADER = 'Downloader'
    POSTPROCESSOR = 'Postprocessor'
    MISC = 'Misc.'

    @classmethod
    @lru_cache
    def commit_lookup(cls):
        """Return a cached mapping of lowercase commit prefix -> CommitGroup."""
        return {
            name: group
            for group, names in {
                cls.PRIORITY: {''},
                cls.CORE: {
                    'aes',
                    'cache',
                    'compat_utils',
                    'compat',
                    'cookies',
                    'core',
                    'dependencies',
                    'jsinterp',
                    'outtmpl',
                    'plugins',
                    'update',
                    'upstream',
                    'utils',
                },
                cls.MISC: {
                    'build',
                    'cleanup',
                    'devscripts',
                    'docs',
                    'misc',
                    'test',
                },
                cls.EXTRACTOR: {'extractor', 'extractors'},
                cls.DOWNLOADER: {'downloader'},
                cls.POSTPROCESSOR: {'postprocessor'},
            }.items()
            for name in names
        }

    @classmethod
    def get(cls, value):
        """Return the group for *value* (a lowercase prefix), or None if unmapped."""
        result = cls.commit_lookup().get(value)
        if result:
            logger.debug(f'Mapped {value!r} => {result.name}')
        return result


# Prefixes that should not be promoted to "details" by details_from_prefix().
# Assigned after class creation: chaining `@classmethod` with `@property` (the
# original spelling) is deprecated since Python 3.11 and broken in 3.13, and a
# plain tuple attribute inside an Enum body would itself become a member.
CommitGroup.ignorable_prefixes = ('core', 'downloader', 'extractor', 'misc', 'postprocessor', 'upstream')
85
@dataclass
class Commit:
    """A single commit: full hash (may be None for synthetic entries), subject line, authors."""
    hash: str | None
    short: str
    authors: list[str]

    def __str__(self):
        # Assemble "'subject' (abbrev-hash) by author1, author2",
        # omitting the hash / authors parts when absent.
        pieces = [f'{self.short!r}']
        if self.hash:
            pieces.append(f'({self.hash[:HASH_LENGTH]})')
        if self.authors:
            pieces.append(f'by {", ".join(self.authors)}')
        return ' '.join(pieces)
104
@dataclass
class CommitInfo:
    """A parsed commit ready for changelog rendering."""
    details: str | None
    sub_details: tuple[str, ...]
    message: str
    issues: list[str]
    commit: Commit
    fixes: list[Commit]

    def key(self):
        """Sort key: case-insensitive details, then sub-details, then message."""
        lowered = self.details.lower() if self.details else ''
        return lowered, self.sub_details, self.message
117
23c39a4b
SS
def unique(items):
    """Deduplicate *items* case-insensitively (last occurrence wins), drop falsy
    entries, and return the surviving values sorted."""
    deduped = {}
    for item in items:
        if item:
            deduped[item.strip().lower()] = item
    return sorted(deduped.values())
class Changelog:
    """Renders grouped CommitInfo entries as a markdown changelog."""

    MISC_RE = re.compile(r'(?:^|\b)(?:lint(?:ing)?|misc|format(?:ting)?|fixes)(?:\b|$)', re.IGNORECASE)
    # Groups rendered before the collapsible <details> section starts
    ALWAYS_SHOWN = (CommitGroup.PRIORITY,)

    def __init__(self, groups, repo, collapsible=False):
        self._groups = groups
        self._repo = repo
        self._collapsible = collapsible

    def __str__(self):
        # Tabs are used internally as a nesting marker; expand them for output
        return '\n'.join(self._format_groups(self._groups)).replace('\t', '    ')

    def _format_groups(self, groups):
        first = True
        for item in CommitGroup:
            # Open the collapsible section just before the first non-priority group
            if self._collapsible and item not in self.ALWAYS_SHOWN and first:
                first = False
                yield '\n<details><summary><h3>Changelog</h3></summary>\n'

            group = groups[item]
            if group:
                yield self.format_module(item.value, group)

        if self._collapsible:
            yield '\n</details>'

    def format_module(self, name, group):
        header = f'\n#### {name} changes\n' if name else '\n'
        return header + '\n'.join(self._format_group(group))

    def _format_group(self, group):
        # groupby requires its input sorted on the same key
        ordered = sorted(group, key=CommitInfo.key)
        by_details = itertools.groupby(ordered, lambda item: (item.details or '').lower())
        for _, items in by_details:
            items = list(items)
            details = items[0].details

            if details == 'cleanup':
                items = self._prepare_cleanup_misc_items(items)

            prefix = '-'
            if details:
                if len(items) == 1:
                    prefix = f'- **{details}**:'
                else:
                    yield f'- **{details}**'
                    prefix = '\t-'

            by_sub_details = itertools.groupby(items, lambda item: tuple(map(str.lower, item.sub_details)))
            for sub_details, entries in by_sub_details:
                if not sub_details:
                    for entry in entries:
                        yield f'{prefix} {self.format_single_change(entry)}'
                    continue

                entries = list(entries)
                sub_prefix = f'{prefix} {", ".join(entries[0].sub_details)}'
                if len(entries) == 1:
                    yield f'{sub_prefix}: {self.format_single_change(entries[0])}'
                    continue

                yield sub_prefix
                for entry in entries:
                    yield f'\t{prefix} {self.format_single_change(entry)}'

    def _prepare_cleanup_misc_items(self, items):
        """Collapse lint/misc/formatting cleanup commits by author into single
        'Miscellaneous' entries; pass every other item through unchanged."""
        cleanup_misc_items = defaultdict(list)
        kept = []
        for item in items:
            if self.MISC_RE.search(item.message):
                cleanup_misc_items[tuple(item.commit.authors)].append(item)
            else:
                kept.append(item)

        for commit_infos in cleanup_misc_items.values():
            kept.append(CommitInfo(
                'cleanup', ('Miscellaneous',), ', '.join(
                    self._format_message_link(None, info.commit.hash)
                    for info in sorted(commit_infos, key=lambda item: item.commit.hash or '')),
                [], Commit(None, '', commit_infos[0].commit.authors), []))

        return kept

    def format_single_change(self, info):
        """Render one CommitInfo as a markdown line fragment with links."""
        message = self._format_message_link(info.message, info.commit.hash)
        if info.issues:
            message = f'{message} ({self._format_issues(info.issues)})'

        if info.commit.authors:
            message = f'{message} by {self._format_authors(info.commit.authors)}'

        if info.fixes:
            fix_message = ', '.join(f'{self._format_message_link(None, fix.hash)}' for fix in info.fixes)

            authors = sorted({author for fix in info.fixes for author in fix.authors}, key=str.casefold)
            # Only repeat the author list when the fixes were by different people
            if authors != info.commit.authors:
                fix_message = f'{fix_message} by {self._format_authors(authors)}'

            message = f'{message} (With fixes in {fix_message})'

        return message

    def _format_message_link(self, message, hash):
        assert message or hash, 'Improperly defined commit message or override'
        message = message if message else hash[:HASH_LENGTH]
        return f'[{message}]({self.repo_url}/commit/{hash})' if hash else message

    def _format_issues(self, issues):
        return ', '.join(f'[#{issue}]({self.repo_url}/issues/{issue})' for issue in issues)

    @staticmethod
    def _format_authors(authors):
        return ', '.join(f'[{author}]({BASE_URL}/{author})' for author in authors)

    @property
    def repo_url(self):
        return f'{BASE_URL}/{self._repo}'
240
class CommitRange:
    """Iterable view over `git log start..end`, with fix-commit folding,
    manual overrides, and grouping into CommitGroup buckets."""

    COMMAND = 'git'
    COMMIT_SEPARATOR = '-----'

    AUTHOR_INDICATOR_RE = re.compile(r'Authored by:? ', re.IGNORECASE)
    MESSAGE_RE = re.compile(r'''
        (?:\[(?P<prefix>[^\]]+)\]\ )?
        (?:(?P<sub_details>`?[^:`]+`?): )?
        (?P<message>.+?)
        (?:\ \((?P<issues>\#\d+(?:,\ \#\d+)*)\))?
        ''', re.VERBOSE | re.DOTALL)
    EXTRACTOR_INDICATOR_RE = re.compile(r'(?:Fix|Add)\s+Extractors?', re.IGNORECASE)
    FIXES_RE = re.compile(r'(?i:Fix(?:es)?(?:\s+bugs?)?(?:\s+in|\s+for)?|Revert)\s+([\da-f]{40})')
    UPSTREAM_MERGE_RE = re.compile(r'Update to ytdl-commit-([\da-f]+)')

    def __init__(self, start, end, default_author=None):
        self._start, self._end = start, end
        self._commits, self._fixes = self._get_commits_and_fixes(default_author)
        self._commits_added = []

    def __iter__(self):
        return iter(itertools.chain(self._commits.values(), self._commits_added))

    def __len__(self):
        return len(self._commits) + len(self._commits_added)

    def __contains__(self, commit):
        if isinstance(commit, Commit):
            if not commit.hash:
                return False
            commit = commit.hash
        return commit in self._commits

    def _get_commits_and_fixes(self, default_author):
        """Parse `git log` output into {hash: Commit} plus {fixed_hash: [fix commits]}."""
        result = run_process(
            self.COMMAND, 'log', f'--format=%H%n%s%n%b%n{self.COMMIT_SEPARATOR}',
            f'{self._start}..{self._end}' if self._start else self._end).stdout

        commits = {}
        fixes = defaultdict(list)
        lines = iter(result.splitlines(False))
        for i, commit_hash in enumerate(lines):
            short = next(lines)
            skip = short.startswith('Release ') or short == '[version] update'

            authors = [default_author] if default_author else []
            # Consume the body up to the separator, picking up "Authored by:" lines
            for line in iter(lambda: next(lines), self.COMMIT_SEPARATOR):
                match = self.AUTHOR_INDICATOR_RE.match(line)
                if match:
                    authors = sorted(map(str.strip, line[match.end():].split(',')), key=str.casefold)

            commit = Commit(commit_hash, short, authors)
            # With an explicit start (or at the very first commit) release commits
            # are skipped; otherwise hitting one marks the end of the range
            if skip and (self._start or not i):
                logger.debug(f'Skipped commit: {commit}')
                continue
            elif skip:
                logger.debug(f'Reached Release commit, breaking: {commit}')
                break

            fix_match = self.FIXES_RE.search(commit.short)
            if fix_match:
                commitish = fix_match.group(1)
                fixes[commitish].append(commit)

            commits[commit.hash] = commit

        # Fold fix commits into the commit they fix when both are in range
        for commitish, fix_commits in fixes.items():
            if commitish in commits:
                hashes = ', '.join(commit.hash[:HASH_LENGTH] for commit in fix_commits)
                logger.info(f'Found fix(es) for {commitish[:HASH_LENGTH]}: {hashes}')
                for fix_commit in fix_commits:
                    del commits[fix_commit.hash]
            else:
                logger.debug(f'Commit with fixes not in changes: {commitish[:HASH_LENGTH]}')

        return commits, fixes

    def apply_overrides(self, overrides):
        """Apply add/remove/change override entries, then restore oldest-first order."""
        for override in overrides:
            when = override.get('when')
            if when and when not in self and when != self._start:
                logger.debug(f'Ignored {when!r}, not in commits {self._start!r}')
                continue

            override_hash = override.get('hash')
            if override['action'] == 'add':
                commit = Commit(override.get('hash'), override['short'], override.get('authors') or [])
                logger.info(f'ADD {commit}')
                self._commits_added.append(commit)

            elif override['action'] == 'remove':
                if override_hash in self._commits:
                    logger.info(f'REMOVE {self._commits[override_hash]}')
                    del self._commits[override_hash]

            elif override['action'] == 'change':
                if override_hash not in self._commits:
                    continue
                commit = Commit(override_hash, override['short'], override['authors'])
                logger.info(f'CHANGE {self._commits[commit.hash]} -> {commit}')
                self._commits[commit.hash] = commit

        self._commits = {key: value for key, value in reversed(self._commits.items())}

    def groups(self):
        """Bucket all commits into {CommitGroup: [CommitInfo]}."""
        group_dict = defaultdict(list)
        for commit in self:
            upstream_re = self.UPSTREAM_MERGE_RE.search(commit.short)
            if upstream_re:
                commit.short = f'[upstream] Merged with youtube-dl {upstream_re.group(1)}'

            match = self.MESSAGE_RE.fullmatch(commit.short)
            if not match:
                logger.error(f'Error parsing short commit message: {commit.short!r}')
                continue

            prefix, sub_details_alt, message, issues = match.groups()
            issues = [issue.strip()[1:] for issue in issues.split(',')] if issues else []

            if prefix:
                # A prefix like "[extractor/foo,bar]" can name several areas
                groups, details, sub_details = zip(*map(self.details_from_prefix, prefix.split(',')))
                group = next(iter(filter(None, groups)), None)
                details = ', '.join(unique(details))
                sub_details = list(itertools.chain.from_iterable(sub_details))
            else:
                group = CommitGroup.CORE
                details = None
                sub_details = []

            if sub_details_alt:
                sub_details.append(sub_details_alt)
            sub_details = tuple(unique(sub_details))

            if not group:
                if self.EXTRACTOR_INDICATOR_RE.search(commit.short):
                    group = CommitGroup.EXTRACTOR
                else:
                    group = CommitGroup.POSTPROCESSOR
                logger.warning(f'Failed to map {commit.short!r}, selected {group.name.lower()}')

            commit_info = CommitInfo(
                details, sub_details, message.strip(),
                issues, commit, self._fixes[commit.hash])

            logger.debug(f'Resolved {commit.short!r} to {commit_info!r}')
            group_dict[group].append(commit_info)

        return group_dict

    @staticmethod
    def details_from_prefix(prefix):
        """Split one "[area/detail:sub]" prefix into (group, details, sub_details)."""
        if not prefix:
            return CommitGroup.CORE, None, ()

        prefix, _, details = prefix.partition('/')
        prefix = prefix.strip().lower()
        details = details.strip()

        group = CommitGroup.get(prefix)
        if group is CommitGroup.PRIORITY:
            prefix, _, details = details.partition('/')

        if not details and prefix and prefix not in CommitGroup.ignorable_prefixes:
            logger.debug(f'Replaced details with {prefix!r}')
            details = prefix or None

        if details == 'common':
            details = None

        if details:
            details, *sub_details = details.split(':')
        else:
            sub_details = []

        return group, details, sub_details
d400e261
SS
417
418
def get_new_contributors(contributors_path, commits):
    """Return the authors of *commits* that are not already listed in the
    CONTRIBUTORS file at *contributors_path*, sorted case-insensitively."""
    known = set()
    if contributors_path.exists():
        for line in read_file(contributors_path).splitlines():
            # Lines look like "name (extra)" or "alias1/alias2"
            author, _, _ = line.strip().partition(' (')
            known.update(map(str.casefold, author.split('/')))

    new_contributors = set()
    for commit in commits:
        for author in commit.authors:
            folded = author.casefold()
            if folded not in known:
                known.add(folded)
                new_contributors.add(author)

    return sorted(new_contributors, key=str.casefold)
436
437
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(
        description='Create a changelog markdown from a git commit range')
    parser.add_argument(
        'commitish', default='HEAD', nargs='?',
        help='The commitish to create the range from (default: %(default)s)')
    parser.add_argument(
        '-v', '--verbosity', action='count', default=0,
        help='increase verbosity (can be used twice)')
    parser.add_argument(
        '-c', '--contributors', action='store_true',
        help='update CONTRIBUTORS file (default: %(default)s)')
    parser.add_argument(
        '--contributors-path', type=Path, default=LOCATION_PATH.parent / 'CONTRIBUTORS',
        help='path to the CONTRIBUTORS file')
    parser.add_argument(
        '--no-override', action='store_true',
        help='skip override json in commit generation (default: %(default)s)')
    parser.add_argument(
        '--override-path', type=Path, default=LOCATION_PATH / 'changelog_override.json',
        help='path to the changelog_override.json file')
    parser.add_argument(
        '--default-author', default='pukkandan',
        help='the author to use without a author indicator (default: %(default)s)')
    parser.add_argument(
        '--repo', default='yt-dlp/yt-dlp',
        help='the github repository to use for the operations (default: %(default)s)')
    parser.add_argument(
        '--collapsible', action='store_true',
        help='make changelog collapsible (default: %(default)s)')
    args = parser.parse_args()

    # Each -v lowers the threshold by one level (WARNING -> INFO -> DEBUG)
    logging.basicConfig(
        datefmt='%Y-%m-%d %H-%M-%S', format='{asctime} | {levelname:<8} | {message}',
        level=logging.WARNING - 10 * args.verbosity, style='{', stream=sys.stderr)

    commits = CommitRange(None, args.commitish, args.default_author)

    if not args.no_override:
        if args.override_path.exists():
            overrides = json.loads(read_file(args.override_path))
            commits.apply_overrides(overrides)
        else:
            logger.warning(f'File {args.override_path.as_posix()} does not exist')

    logger.info(f'Loaded {len(commits)} commits')

    new_contributors = get_new_contributors(args.contributors_path, commits)
    if new_contributors:
        if args.contributors:
            write_file(args.contributors_path, '\n'.join(new_contributors) + '\n', mode='a')
        logger.info(f'New contributors: {", ".join(new_contributors)}')

    print(Changelog(commits.groups(), args.repo, args.collapsible))