-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #119 from dwhswenson/release-0.5.0
Release 0.5.0
- Loading branch information
Showing
14 changed files
with
413 additions
and
18 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -27,7 +27,6 @@ jobs: | |
- "3.10" | ||
- "3.9" | ||
- "3.8" | ||
- "3.7" | ||
|
||
steps: | ||
- uses: actions/checkout@v2 | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,137 @@ | ||
""" | ||
Writing release notes based on the GitHub v4 (GraphQL) API | ||
""" | ||
|
||
import typing | ||
import enum | ||
import datetime | ||
|
||
from .pull_requests import PRStatus, PR, graphql_get_all_prs | ||
from .releases import latest_release | ||
|
||
import logging | ||
_logger = logging.getLogger(__name__) | ||
|
||
def filter_release_prs(all_prs, prev_release_date, target_branch="main"):
    """Yield the PRs that belong in the next release's notes.

    A PR qualifies when it was merged into ``target_branch`` after
    ``prev_release_date``.

    Parameters
    ----------
    all_prs : iterable of PR
        Candidate pull requests.
    prev_release_date : datetime.datetime
        Publication time of the previous release; only PRs merged
        strictly after this are included.
    target_branch : str
        Base branch the PR must have targeted (default ``"main"``).

    Yields
    ------
    PR
        Each qualifying pull request, in input order.
    """
    def is_release_pr(pr):
        # status is checked first so merge_time is only compared for
        # merged PRs (merge_time may be None otherwise)
        return (
            pr.status == PRStatus.MERGED
            and pr.merge_time > prev_release_date
            and pr.target == target_branch
        )

    for pr in all_prs:
        # lazy %-style args avoid formatting work when INFO is disabled
        _logger.info("%s", pr)
        if is_release_pr(pr):
            _logger.info("Including %s", pr)
            yield pr
        else:
            _logger.info("Skipping")
|
||
|
||
def prs_since_latest_release(owner, repo, auth, target_branch="main"):
    """Return the PRs merged into ``target_branch`` since the latest
    GitHub release of ``owner/repo``.

    Parameters
    ----------
    owner : str
        GitHub account that owns the repository.
    repo : str
        Repository name.
    auth : tuple
        Credentials forwarded to the API query runner.
    target_branch : str
        Branch the PRs must target (default ``"main"``).

    Returns
    -------
    list of PR
        PRs merged after the latest release's publication date.
    """
    latest = latest_release(owner, repo, auth)
    _logger.info("Latest release: %s", latest)

    all_prs = [PR.from_api_response(pr)
               for pr in graphql_get_all_prs(owner, repo, auth,
                                             target_branch)]

    _logger.info("Loaded %d PRs", len(all_prs))
    new_prs = list(filter_release_prs(
        all_prs=all_prs,
        prev_release_date=latest.date,
        target_branch=target_branch,
    ))
    _logger.info("After filtering, found %d new PRs", len(new_prs))
    return new_prs
|
||
|
||
class PRCategory:
    """A release-notes category: a labeled group of PRs, optionally
    subdivided into topics.

    Parameters
    ----------
    label : str or None
        PR label that selects this category (None for the catch-all).
    heading : str
        Heading text used when writing the notes section.
    topics : dict
        Maps a topic label to the text describing that topic.
    """
    def __init__(self, label, heading, topics):
        self.label = label
        self.heading = heading
        self.topics = topics
        self.prs = []  # PRs in this category with no topic label
        # per-topic PR lists, keyed by topic label
        # (renamed loop variable: `l` is ambiguous / E741)
        self.topic_prs = {topic: [] for topic in topics}

    def append(self, pr):
        """Add ``pr`` to every topic it is labeled with, or to the
        general (topic-less) list when it matches no topic."""
        if matched := set(pr.labels) & set(self.topics):
            for topic in matched:
                self.topic_prs[topic].append(pr)
        else:
            self.prs.append(pr)
|
||
|
||
class NotesWriter:
    """Render markdown release notes from a collection of PRs.

    Categories are selected by PR label; within a category, PRs may be
    grouped under topics. Authors outside ``standard_contributors``
    get an explicit @-mention in the output.
    """

    def __init__(self, category_labels, topics, standard_contributors):
        self.category_labels = category_labels
        self.topics = topics
        self.standard_contributors = set(standard_contributors)

    @staticmethod
    def assign_prs_to_categories(prs, categories):
        """Place each PR into every category whose label it carries;
        PRs matching no category go into the ``None`` catch-all."""
        known_labels = set(categories)
        for pr in prs:
            targets = [categories[label]
                       for label in set(pr.labels) & known_labels]
            if not targets:
                targets = [categories[None]]
            for target in targets:
                target.append(pr)

    def _write_pr_details(self, pr, category_label, topic_label):
        """Return the parenthetical detail text for one PR: link,
        optional author mention, then any labels other than the
        category/topic labels."""
        pieces = [f"[#{pr.number}]({pr.url})"]
        if pr.author not in self.standard_contributors:
            pieces.append(f"@{pr.author}")
        skip = {category_label, topic_label}
        pieces.extend(f"#{label}" for label in pr.labels
                      if label not in skip)
        return " ".join(pieces)

    def write_single_pr(self, pr, category_label):
        """Return one markdown bullet for a PR outside any topic."""
        details = self._write_pr_details(pr, category_label, "")
        return f"* {pr.title} ({details})\n"

    def write_topic(self, category, topic):
        """Return the markdown bullet for one topic of a category, or
        an empty string when the topic has no PRs."""
        prs_for_topic = category.topic_prs[topic]
        if not prs_for_topic:
            return ""
        details = ", ".join(
            self._write_pr_details(pr, category.label, topic)
            for pr in prs_for_topic
        )
        return f"* {category.topics[topic]} ({details})\n"

    def write_category(self, category):
        """Return the markdown section for one category: heading,
        topic-less PR bullets, then one bullet per non-empty topic."""
        parts = [f"## {category.heading}\n\n"]
        parts.extend(self.write_single_pr(pr, category.label)
                     for pr in category.prs)
        parts.extend(self.write_topic(category, topic)
                     for topic in category.topics)
        return "".join(parts)

    def write(self, prs):
        """Return the complete release notes for ``prs`` as markdown."""
        categories = {
            label: PRCategory(label, heading, self.topics.get(label, {}))
            for label, heading in self.category_labels.items()
        }
        categories[None] = PRCategory(None, "Unlabeled PRs", {})
        self.assign_prs_to_categories(prs, categories)
        return "\n".join(self.write_category(cat)
                         for cat in categories.values())
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,117 @@ | ||
from .query_runner import QueryRunner | ||
|
||
import typing | ||
import enum | ||
import datetime | ||
|
||
from .utils import string_to_datetime | ||
|
||
|
||
|
||
class PRStatus(enum.Enum):
    """Lifecycle state of a pull request.

    Member names match the ``state`` strings returned by the GitHub
    GraphQL API (OPEN/CLOSED/MERGED), which ``PR.from_api_response``
    relies on when looking up the member by name.
    """
    OPEN = "open"
    CLOSED = "closed"
    MERGED = "merged"
|
||
|
||
class PR(typing.NamedTuple):
    """A pull request, as extracted from the GitHub GraphQL API.

    Attributes:
        number: PR number within the repository.
        target: base branch the PR targets (GraphQL ``baseRefName``).
        title: PR title.
        status: open/closed/merged state.
        author: GitHub login of the PR author.
        labels: label names attached to the PR.
        url: web URL of the PR.
        merge_time: when the PR was merged; None if never merged.
    """
    number: int
    target: str
    title: str
    status: PRStatus
    author: str
    labels: typing.List[str]
    url: str
    merge_time: typing.Optional[datetime.datetime]

    @classmethod
    def from_api_response(cls, api_pr):
        """Build a PR from one GraphQL ``pullRequests`` node.

        ``mergedAt`` is null for unmerged PRs, so the timestamp is
        only parsed when present (previously a null would crash the
        date parser despite merge_time being Optional).
        """
        merged_at = api_pr["mergedAt"]
        return cls(
            number=int(api_pr["number"]),
            target=api_pr["baseRefName"],
            title=api_pr["title"],
            # enum lookup by member name; raises KeyError on an
            # unexpected state rather than getattr's broader lookup
            status=PRStatus[api_pr["state"]],
            author=api_pr["author"]["login"],
            labels=[node["name"] for node in api_pr["labels"]["nodes"]],
            url=api_pr["url"],
            merge_time=(string_to_datetime(merged_at)
                        if merged_at is not None else None),
        )
|
||
# string.Template source for the paginated merged-PR query (filled in
# by QueryRunner): $repo_name / $repo_owner select the repository,
# $target_branch restricts the base branch, and $after carries the
# pagination clause (empty string for the first page).
PR_QUERY = """
{
  repository(name: "$repo_name", owner: "$repo_owner") {
    pullRequests(
      orderBy: {field: UPDATED_AT, direction: DESC}
      first: 100
      $after
      states: MERGED
      baseRefName: "$target_branch"
    ) {
      nodes {
        author {
          login
        }
        merged
        mergedAt
        number
        title
        headRefName
        closed
        baseRefName
        state
        url
        labels(first: 100) {
          nodes {
            name
          }
          pageInfo {
            endCursor
            hasNextPage
            startCursor
          }
        }
      }
      pageInfo {
        startCursor
        endCursor
        hasNextPage
        hasPreviousPage
      }
    }
  }
}
"""
|
||
def graphql_get_all_prs(owner, repo, auth, target_branch):
    """Fetch every merged PR targeting ``target_branch``, following
    cursor pagination through the GitHub GraphQL API.

    Parameters
    ----------
    owner, repo : str
        Repository owner and name (substituted into the query).
    auth : tuple
        Credentials forwarded to the query runner.
    target_branch : str
        Base branch to restrict the query to.

    Returns
    -------
    list
        Raw ``pullRequests`` node dicts from every page, in API order.
    """
    def extractor(result):
        # pull the list of PR nodes out of one page of results
        return result["data"]["repository"]["pullRequests"]["nodes"]

    def next_page_cursor(result):
        # cursor for the next page, or None when this was the last one
        info = result["data"]["repository"]["pullRequests"]["pageInfo"]
        return info["endCursor"] if info["hasNextPage"] else None

    query_runner = QueryRunner(PR_QUERY, auth=auth,
                               api_endpoint="https://api.github.com/graphql")

    # NOTE(review): each PR's labels are capped at the first 100; PRs
    # with more labels would need a follow-up query (inner pagination
    # is not handled here).
    default_kwargs = {
        'repo_owner': owner,
        'repo_name': repo,
        'target_branch': target_branch,
    }
    extracted_results = []
    # the first page uses an empty $after clause; later pages pass the
    # cursor from the previous page's pageInfo
    after = ""
    while True:
        result = query_runner(after=after, **default_kwargs)
        extracted_results.extend(extractor(result))
        cursor = next_page_cursor(result)
        if not cursor:
            break
        after = f'after: "{cursor}"'

    return extracted_results
|
||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,16 @@ | ||
import requests | ||
import string | ||
|
||
GITHUB_API_ENDPOINT = "https://api.github.com/graphql"


class QueryRunner:
    """Callable wrapper around a templated GraphQL query.

    The query text is held as a ``string.Template``; calling the
    instance substitutes the keyword arguments into the template and
    POSTs the result to the configured endpoint.
    """

    def __init__(self, query_template, auth,
                 api_endpoint=GITHUB_API_ENDPOINT):
        # $-style placeholders in the template are filled in per call
        self.query_template = string.Template(query_template)
        self.auth = auth
        self.api_endpoint = api_endpoint

    def __call__(self, **kwargs):
        """Substitute ``kwargs`` into the query template, run the
        query, and return the decoded JSON response."""
        query = self.query_template.substitute(**kwargs)
        payload = {'query': query}
        response = requests.post(self.api_endpoint, json=payload,
                                 auth=self.auth)
        return response.json()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,50 @@ | ||
from typing import NamedTuple | ||
from .query_runner import QueryRunner | ||
from .utils import string_to_datetime | ||
import datetime | ||
|
||
class Release(NamedTuple):
    """A GitHub release, as extracted from the GraphQL API.

    ``latest`` mirrors the API's ``isLatest`` flag; ``date`` is the
    parsed ``publishedAt`` timestamp.
    """
    name: str
    tag: str
    draft: bool
    prerelease: bool
    latest: bool
    date: datetime.datetime

    @classmethod
    def from_api(cls, api_release):
        """Build a Release from one GraphQL ``releases`` node."""
        published = string_to_datetime(api_release["publishedAt"])
        return cls(
            name=api_release["name"],
            tag=api_release["tagName"],
            draft=api_release["isDraft"],
            prerelease=api_release["isPrerelease"],
            latest=api_release["isLatest"],
            date=published,
        )
|
||
# string.Template source for the releases query: $repo_name and
# $repo_owner select the repository. Only the first 100 releases
# (newest first) are fetched; pagination is not handled here.
RELEASES_QUERY = """
{
  repository(name: "$repo_name", owner: "$repo_owner") {
    releases(orderBy: {field: CREATED_AT, direction: DESC}, first: 100) {
      nodes {
        publishedAt
        isLatest
        isPrerelease
        isDraft
        name
        tagName
      }
    }
  }
}
"""
|
||
def latest_release(owner, repo, auth):
    """Return the release GitHub marks as "latest" for ``owner/repo``.

    Parameters
    ----------
    owner, repo : str
        Repository owner and name.
    auth : tuple
        Credentials forwarded to the query runner.

    Returns
    -------
    Release

    Raises
    ------
    RuntimeError
        If the API does not report exactly one latest release (e.g.
        the repository has no releases yet).
    """
    # TODO: support paginated releases (only first 100 are queried)
    runner = QueryRunner(RELEASES_QUERY, auth)
    result = runner(repo_name=repo, repo_owner=owner)
    api_release_info = result['data']['repository']['releases']['nodes']
    releases = [Release.from_api(rel) for rel in api_release_info]
    claim_latest = [rel for rel in releases if rel.latest]
    # an assert here would vanish under `python -O`; raise explicitly
    if len(claim_latest) != 1:
        raise RuntimeError(
            f"Expected exactly 1 latest release; found {len(claim_latest)}"
        )
    return claim_latest[0]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
import datetime | ||
def string_to_datetime(string):
    """Parse a GitHub API UTC timestamp such as ``2021-05-01T12:00:00Z``.

    Parameters
    ----------
    string : str or None
        Timestamp in ``%Y-%m-%dT%H:%M:%SZ`` form, or None (the API
        reports null for e.g. the merge time of an unmerged PR).

    Returns
    -------
    datetime.datetime or None
        Parsed (naive, UTC-valued) datetime, or None when the input
        is None.
    """
    # TODO: move this elsewhere
    if string is None:
        # previously a null timestamp from the API crashed strptime
        return None
    return datetime.datetime.strptime(string, "%Y-%m-%dT%H:%M:%SZ")
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.