forked from enarx-archive/bot
-
Notifications
You must be signed in to change notification settings - Fork 0
/
bot.py
209 lines (185 loc) · 7.17 KB
/
bot.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
# SPDX-License-Identifier: Apache-2.0
import requests
import json
import os
import types
# GitHub GraphQL *global node IDs* (opaque base64 strings) for the project
# boards this bot automates. These are not referenced elsewhere in this file;
# presumably they are consumed by callers that import this module — verify
# against the workflow scripts that use it.
PROJECTS = {
    'Conferences': 'MDc6UHJvamVjdDQ5OTI0MzM=',
    'Planning': 'MDc6UHJvamVjdDQ5NjA4NDg=',
    'Sprint': 'MDc6UHJvamVjdDQ4MjA5OTM='
}
# Node IDs of each column within the boards above, keyed first by board name
# (matching the keys of PROJECTS) and then by the column's display name.
COLUMNS = {
    'Conferences': {
        'Accepted': 'MDEzOlByb2plY3RDb2x1bW4xMDA1NzA1Ng==',
        'Completed': 'MDEzOlByb2plY3RDb2x1bW4xMDA1NzA1OQ==',
        'Considering': 'MDEzOlByb2plY3RDb2x1bW4xMDA1NzA0OQ==',
        'Delivered': 'MDEzOlByb2plY3RDb2x1bW4xMDA1NzA1Nw==',
        'Submitted': 'MDEzOlByb2plY3RDb2x1bW4xMDA1NzA1Mw=='
    },
    'Planning': {
        'Accepted': 'MDEzOlByb2plY3RDb2x1bW4xMDAwMzg0OQ==',
        'Assigned': 'MDEzOlByb2plY3RDb2x1bW4xMDAwODk1MQ==',
        'Backlog': 'MDEzOlByb2plY3RDb2x1bW4xMDAwMzg0Nw==',
        'Done': 'MDEzOlByb2plY3RDb2x1bW4xMDAwOTI3OA==',
        'Nominated': 'MDEzOlByb2plY3RDb2x1bW4xMDAwMzg0OA==',
        'Triage': 'MDEzOlByb2plY3RDb2x1bW4xMDAwMzg0MA=='
    },
    'Sprint': {
        'Active': 'MDEzOlByb2plY3RDb2x1bW4xMDQxMTcwOA==',
        'Assigned': 'MDEzOlByb2plY3RDb2x1bW45ODA1MjQ5',
        'Done': 'MDEzOlByb2plY3RDb2x1bW45ODA0Mzc2',
        'Reviewed': 'MDEzOlByb2plY3RDb2x1bW4xMDQxMTcyMg==',
        'Reviewing': 'MDEzOlByb2plY3RDb2x1bW45ODA1MjY1'
    }
}
class HTTPError(Exception):
    """Raised when the GraphQL endpoint returns a non-200 HTTP status.

    Attributes:
        reply: the full `requests.Response` object, so callers can inspect
            the status code, headers, and body.
    """

    def __init__(self, reply):
        # Pass the payload to Exception so str(e)/e.args carry it; the
        # original implementation left args empty, making tracebacks and
        # logs show a bare "HTTPError" with no detail.
        super().__init__(reply)
        self.reply = reply
class GraphQLError(Exception):
    """Raised when the GraphQL reply contains an `errors` array.

    Attributes:
        errors: the decoded `errors` list from the GraphQL response body.
    """

    def __init__(self, errors):
        # Forward to Exception so the error list appears in str(e) and
        # e.args instead of being silently dropped from tracebacks.
        super().__init__(errors)
        self.errors = errors
class TokenError(Exception):
    """Raised when the BOT_TOKEN environment variable is unset or empty.

    Attributes:
        error: a human-readable explanation of how to provide a token.
    """

    def __init__(self, error):
        # Forward the message to Exception so it is visible in the
        # traceback; the original stored it only on self.error, leaving
        # str(e) empty.
        super().__init__(error)
        self.error = error
# A multiple, nested depagination example: fetch all issues, PRs, and PR
# timeline items in enarx/enarx.
#
# query = """
# query($owner:String!, $name:String!, $cursor1:String, $cursor2:String, $cursor3:String) {
# repository(owner:$owner, name:$name) {
# issues(first:100, after:$cursor1) {
# pageInfo { endCursor hasNextPage }
# nodes {
# number
# }
# }
# pullRequests(first:100, after:$cursor2) {
# pageInfo { endCursor hasNextPage }
# nodes {
# timelineItems(first:100, after:$cursor3) {
# pageInfo { endCursor hasNextPage }
# nodes {
# __typename
# }
# }
# }
# }
# }
# }
# """
#
# cursors = {
# 'cursor1': {
# 'path': ["repository", "issues"],
# },
# 'cursor2': {
# 'path': ["repository", "pullRequests"],
# 'next': {
# 'cursor3': {
# 'path': ["timelineItems"],
# }
# }
# }
# }
#
# data = graphql(query, cursors=cursors, owner="enarx", name="enarx")
#
# Your query:
# * MUST have a `$cursor:String` variable (it MUST NOT be required!)
# * MUST specify `after: $cursor` correctly
# * MUST fetch `pageInfo { endCursor hasNextPage }`
# * MUST have a `nodes` entity on the pagination object
# * SHOULD fetch as many objects as you can (i.e. `first: 100`)
#
# The results of depagination are merged. Therefore, you receive one big output list.
# Similarly, the `pageInfo` object is removed from the result.
def graphql(query, cursors=None, prev_path=None, **kwargs):
    """Perform a GraphQL query against the GitHub API, with depagination.

    Parameters:
        query: the GraphQL query string; see the usage comment above this
            function for the cursor/pageInfo requirements it must satisfy.
        cursors: optional dict mapping cursor variable names to either a
            path list or a dict with a 'path' key (and optionally a 'next'
            key for nested cursors). NOTE: this dict is mutated in place
            (path lists are converted to dicts).
        prev_path: path prefix used internally by recursive calls to locate
            the pagination object inside the full reply; callers normally
            leave this as None.
        **kwargs: GraphQL variables, JSON-encoded into the request; cursor
            values are injected here during depagination.

    Returns:
        The 'data' portion of the reply, with all pages of every requested
        cursor merged into a single 'nodes' list and 'pageInfo' removed.

    Raises:
        TokenError: if the BOT_TOKEN environment variable is unset/empty.
        HTTPError: if the endpoint returns a non-200 status.
        GraphQLError: if the reply body contains an 'errors' array.
    """
    # Endpoint is overridable for GitHub Enterprise / testing.
    url = os.environ.get("GITHUB_GRAPHQL_URL", "https://api.github.com/graphql")
    params = { "query": query.strip(), "variables": json.dumps(kwargs) }
    token = os.environ.get('BOT_TOKEN', None)
    headers = {}
    if token is not None and len(token) > 0:
        headers["Authorization"] = f"token {token}"
    else:
        raise TokenError(error="""
BOT_TOKEN is unset. If you wish to opt in to bot automation, provide an
appropriately-scoped personal access token as a shared secret named
BOT_TOKEN.
""")
    # Opt into preview API fields for PR merge status.
    headers["Accept"] = "application/vnd.github.merge-info-preview+json"
    # Do the request and check for HTTP errors.
    reply = requests.post(url, json=params, headers=headers)
    if reply.status_code != 200:
        raise HTTPError(reply)
    # Check for GraphQL errors.
    data = reply.json()
    if "errors" in data:
        raise GraphQLError(data["errors"])
    data = data["data"]
    # Do depagination.
    if cursors is None:
        return data
    for cursor in cursors.keys():
        # Cursors can simply be path lists. If they are, convert to dict.
        # (This mutates the caller-supplied cursors dict in place.)
        if isinstance(cursors[cursor], list):
            cursors[cursor] = {
                "path": cursors[cursor]
            }
        # Absolute path from the reply root to this cursor's paginated object.
        current_path = cursors[cursor]['path']
        if prev_path:
            current_path = prev_path + current_path
        # Walk down to the paginated object.
        obj = data
        for name in current_path:
            obj = obj[name]
        # Remove pageInfo from the result, as documented above.
        pi = obj.pop("pageInfo")
        if pi["hasNextPage"]:
            # Re-issue the same query starting after the last item seen,
            # then splice the next page's nodes onto this one.
            # NOTE(review): "next" shadows the builtin of the same name.
            kwargs[cursor] = pi["endCursor"]
            next = graphql(query, cursors={cursor:cursors[cursor]}, prev_path=prev_path, **kwargs)
            for name in current_path:
                next = next[name]
            obj["nodes"].extend(next["nodes"])
        # If there are nested cursors, depaginate them too.
        # (Only once this level is fully paginated, i.e. on the call whose
        # first page reported hasNextPage == False.)
        if cursors[cursor].get('next') is not None and not pi["hasNextPage"]:
            for i in range(len(obj["nodes"])):
                # Path to the i-th node of this level's merged node list;
                # note the path mixes string keys and an int list index.
                # NOTE(review): when prev_path is set this uses prev_path
                # rather than current_path as the base, which drops this
                # cursor's own path segment — confirm this is intentional
                # for doubly-nested cursors.
                if not prev_path:
                    current_path_nodes = current_path.copy()
                else:
                    current_path_nodes = prev_path.copy()
                current_path_nodes += ["nodes", i]
                # First, check if another recursive call is necessary to
                # fully depaginate.
                # This happens when
                # 1) a nested cursor has further nested cursors
                # 2) a nested cursor has more than one page
                node_data = data
                for name in current_path_nodes:
                    node_data = node_data[name]
                call_required = False
                for next_cursor in cursors[cursor]['next']:
                    # Normalize bare path lists, same as at the top level.
                    if isinstance(cursors[cursor]['next'][next_cursor], list):
                        cursors[cursor]['next'][next_cursor] = {
                            "path": cursors[cursor]['next'][next_cursor]
                        }
                    page_to_check = node_data
                    for name in cursors[cursor]['next'][next_cursor]['path']:
                        page_to_check = page_to_check[name]
                    hasNextPage = page_to_check['pageInfo']['hasNextPage']
                    nextNextCursor = cursors[cursor]['next'][next_cursor].get('next')
                    if hasNextPage or nextNextCursor is not None:
                        call_required = True
                if not call_required:
                    continue
                # If another call is required, make it.
                next = graphql(query, cursors=cursors[cursor]['next'], prev_path=current_path_nodes, **kwargs)
                # Weld the depaginated data together: replace this node's
                # nested node list with the fully merged one from the
                # recursive call.
                for next_cursor in cursors[cursor]['next']:
                    join_path = current_path_nodes + cursors[cursor]['next'][next_cursor]['path']
                    obj_nested = data
                    next_nested = next
                    for name in join_path:
                        obj_nested = obj_nested[name]
                        next_nested = next_nested[name]
                    obj_nested["nodes"] = next_nested["nodes"]
    return data