#!/usr/bin/env python3
"""Generate a list of GitHub issues that need attention."""

from __future__ import annotations

import multiprocessing
import sys

import requests
from tqdm import tqdm

OWNER = "rerun-io"
REPO = "rerun"

OFFICIAL_RERUN_DEVS = [
    "abey79",
    "emilk",
    "jleibs",
    "jondo2010",
    "jprochazk",
    "karimo87",
    "martenbjork",
    "nikolausWest",
    "roym899",
    "teh-cmc",
    "Wumpf",
]


def get_github_token() -> str:
    """Return a GitHub token from GH_ACCESS_TOKEN or ~/.githubtoken, or exit with an error."""
    import os

    token = os.environ.get("GH_ACCESS_TOKEN", "")
    if token != "":
        return token

    home_dir = os.path.expanduser("~")
    token_file = os.path.join(home_dir, ".githubtoken")
    try:
        with open(token_file) as f:
            return f.read().strip()
    except Exception:
        pass

    print("ERROR: expected a GitHub token in the environment variable GH_ACCESS_TOKEN or in ~/.githubtoken")
    sys.exit(1)


def fetch_issue(issue_json: dict) -> dict:
    """Fetch the full JSON payload for a single issue (runs in a pool worker process)."""
    url = issue_json["url"]
    gh_access_token = get_github_token()
    headers = {"Authorization": f"Token {gh_access_token}"}
    response = requests.get(url, headers=headers)
    json = response.json()
    if response.status_code != 200:
        print(f"ERROR {url}: {response.status_code} - {json['message']}")
        sys.exit(1)
    return json


def main() -> None:
    access_token = get_github_token()
    headers = {"Authorization": f"Bearer {access_token}"}

    # Fetch the issue list, following GitHub's Link-header pagination:
    all_issues = []
    urls = [f"https://api.github.com/repos/{OWNER}/{REPO}/issues"]
    while urls:
        url = urls.pop()
        print(f"Fetching {url}…")
        response = requests.get(url, headers=headers)
        json = response.json()
        if response.status_code != 200:
            print(f"ERROR {url}: {response.status_code} - {json['message']}")
            sys.exit(1)

        all_issues += list(json)

        # Check if there is a next page:
        if "Link" in response.headers:
            links = response.headers["Link"].split(", ")
            for link in links:
                if 'rel="next"' in link:
                    next_url = link.split(";")[0][1:-1]  # Strip the surrounding '<' and '>'.
                    urls += [next_url]

    # Fetch full details for each issue in parallel:
    with multiprocessing.Pool() as pool:
        issues_list = list(
            tqdm(
                pool.imap(fetch_issue, all_issues),
                total=len(all_issues),
                desc="Fetching issue details",
            )
        )

    issues_list.sort(key=lambda issue: issue["number"])

    # Print the issues that need attention:
    for issue in issues_list:
        author = issue["user"]["login"]
        html_url = issue["html_url"]
        comments = issue["comments"]
        state = issue["state"]
        labels = [label["name"] for label in issue["labels"]]
        if "👀 needs triage" in labels:
            print(f"{html_url} by {author} needs triage")
        elif len(labels) == 0:
            print(f"{html_url} by {author} has no labels")
        elif comments == 0 and state == "open" and author not in OFFICIAL_RERUN_DEVS:
            print(f"{html_url} by {author} has no comments")


if __name__ == "__main__":
    main()
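
# A minimal usage sketch (hypothetical invocation, assuming a token is available
# via GH_ACCESS_TOKEN or ~/.githubtoken and that requests + tqdm are installed):
#
#   GH_ACCESS_TOKEN=<your-token> ./highlight_issues.py
#
# The script prints one line per issue that needs triage, has no labels, or was
# filed by someone outside OFFICIAL_RERUN_DEVS and has no comments yet.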