forked from dslomov/bazel-gardening-tools
-
Notifications
You must be signed in to change notification settings - Fork 0
/
github.py
executable file
·136 lines (114 loc) · 3.7 KB
/
github.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
"""Methods for talking to the github API."""
import datetime
import json
import urllib.request
import ssl
import database
import reports
ssl._create_default_https_context = ssl._create_unverified_context
secrets = json.load(open("secrets.json"))
client_id = secrets["client_id"]
client_secret = secrets["client_secret"]
GITHUB_API_URL_BASE = 'https://api.github.com/repos/'
_DEBUG = False
def add_client_secret(url):
    """Return *url* with the OAuth client credentials appended to its query.

    Starts the query string with '?' when the URL has none yet, otherwise
    extends it with '&'.
    """
    joiner = '?' if '?' not in url else '&'
    return '%s%sclient_id=%s&client_secret=%s' % (
        url, joiner, client_id, client_secret)
def get_next_url(response):
    """Extract the next-page URL from a paginated API response.

    Args:
      response: a mapping of HTTP response headers (e.g. the object returned
        by ``urlopen(...).info()``).

    Returns:
      The URL inside the ``rel="next"`` segment of the ``Link`` header, or
      None when there is no next page (or no ``Link`` header at all).
    """
    if "Link" not in response:
        return None
    # The Link header is a comma-separated list of '<url>; rel="..."' parts.
    for segment in response["Link"].split(","):
        if 'rel="next"' not in segment:
            continue
        start = segment.index('<') + 1
        end = segment.index('>')
        return segment[start:end]
    return None
def fetch_issues(repo, query, modified_after=None, verbose=False):
    """Fetches issues from a repo, following pagination to the end.

    Args:
      repo: (str) '<organization>/<repo>'
      query: (str) optional query
      modified_after: (float) only fetch issues modified after this (UTC) time.
      verbose: (bool) print progress information while fetching.

    Returns:
      A list of issue dicts decoded from the GitHub JSON payload,
      de-duplicated by issue number.
    """
    query_args = [
        'state=all',  # needed to get closed issues
        'per_page=100',
    ]
    if query:
        query_args.append(query)
    if modified_after:
        utc_time_s = datetime.datetime.utcfromtimestamp(
            modified_after).strftime('%Y-%m-%dT%H:%M:%SZ')
        query_args.append('since=%s' % utc_time_s)
        if verbose:
            print('Fetching issues changed since: %s' % utc_time_s)
    url = GITHUB_API_URL_BASE + repo + '/issues?' + '&'.join(query_args)
    # Keyed by issue number so an issue seen on two pages (the listing can
    # shift while we paginate) is only counted once.
    result = dict()
    while url:
        if _DEBUG:
            print(url)
        response = urllib.request.urlopen(add_client_secret(url))
        issues = json.loads(response.read())
        for issue in issues:
            result[issue["number"]] = issue
        url = get_next_url(response.info())
        if verbose or _DEBUG:
            print('%d issues' % len(result))
    return list(result.values())
def _fetch_all_from_repo(repo, resource):
    """Fetch every page of a repo-scoped `resource` (e.g. 'labels').

    Returns the concatenated list of JSON-decoded items from all pages.
    """
    items = []
    url = GITHUB_API_URL_BASE + repo + '/' + resource
    while url:
        if _DEBUG:
            print(url)
        response = urllib.request.urlopen(add_client_secret(url))
        items.extend(json.loads(response.read()))
        url = get_next_url(response.info())
    return items
def fetch_labels(repo):
    """Return every label defined in the given repo."""
    return _fetch_all_from_repo(repo, 'labels')
def fetch_releases(repo):
    """Return every release published in the given repo."""
    return _fetch_all_from_repo(repo, 'releases')
def fetch_repos(org):
    """Return the full names ('org/repo') of all repositories in an org."""
    # Org listings live under /orgs/, not /repos/, so this cannot reuse
    # _fetch_all_from_repo; the pagination loop is the same shape.
    names = []
    url = 'https://api.github.com/orgs/' + org + '/repos'
    while url:
        if _DEBUG:
            print(url)
        response = urllib.request.urlopen(add_client_secret(url))
        names.extend(entry['full_name']
                     for entry in json.loads(response.read()))
        url = get_next_url(response.info())
    return names
def fetch_pr_reviews(repo, pr_number):
    """Fetch all reviews on a pull request.

    Args:
      repo: (str) '<organization>/<repo>'
      pr_number: (int) pull request number.

    Returns:
      A list of review dicts from the GitHub API.
    """
    # Same pagination pattern as fetch_labels/fetch_releases — reuse the
    # shared helper instead of duplicating the page-walking loop. The URL
    # it builds is identical to the previous inline version.
    return _fetch_all_from_repo(repo, 'pulls/%d/reviews' % pr_number)
def delete_pr_review(repo, pr_number, review_id):
    """Delete a single review from a pull request (best effort).

    Args:
      repo: (str) '<organization>/<repo>'
      pr_number: (int) pull request number.
      review_id: (int) id of the review to delete.

    Failures are printed rather than raised, so callers can sweep many
    reviews without aborting on the first error.
    """
    url = GITHUB_API_URL_BASE + repo + '/pulls/%d/reviews/%d' % (
        pr_number, review_id)
    # DELETE needs a personal access token. Read it from secrets.json
    # instead of hard-coding a credential in source (the previous value was
    # a 'ghp_XXXXXX' placeholder that could never work). Requires a "token"
    # entry in secrets.json.
    req = urllib.request.Request(
        url=add_client_secret(url),
        method='DELETE',
        headers={'Authorization': 'Token ' + secrets.get('token', '')},
    )
    if _DEBUG:
        print(url, str(req))
    try:
        response = urllib.request.urlopen(req)
        print(response)
    except Exception as e:
        # Deliberately broad: report and continue (best-effort delete).
        print(e)