# downloader.py — fetch all issues of a GitHub repository via the REST API.
from requests import get, Response
from output import output_issue
from argparse import ArgumentParser
# Command-line interface: the single required argument is the target
# repository, in the "owner/name" form the GitHub API expects in its URLs.
parser = ArgumentParser(prog='downloader.py')
parser.add_argument('-r', '--repository', required=True, help="Which repository to download the issues from")
args = parser.parse_args()
# 100 is GitHub's maximum page size; requesting the maximum minimises round trips.
per_page = 100
# state=all fetches both open and closed issues.
url = f"https://api.github.com/repos/{args.repository}/issues?per_page={per_page}&state=all"
# Lightweight endpoint used only to verify the repository exists before paging.
check_url = f"https://api.github.com/repos/{args.repository}"
def pages_iterator(first: Response):
    """Yield every page of a paginated GitHub API result set.

    Follows the ``Link: rel="next"`` headers that requests exposes via
    ``Response.links`` until no further page is advertised.  Each page is
    validated with ``raise_for_status()`` before being yielded, so an HTTP
    error on any page aborts iteration instead of yielding bad data.

    The original implementation duplicated the raise/yield pair both inside
    and after the loop (and read ``links.get('next')`` twice per iteration);
    this version has a single raise/yield site with identical behaviour.

    Parameters:
        first: the Response for the first page of results.

    Yields:
        Response: one response per page, in request order.

    Raises:
        requests.HTTPError: if any page comes back with an error status.
    """
    current = first
    while True:
        current.raise_for_status()
        yield current
        next_link = current.links.get('next')
        if next_link is None:
            return
        current = get(url=next_link['url'])
def main():
    """Download every issue of the configured repository and pass each one,
    as a plain dict, to ``output_issue``.

    The repository is first probed via ``check_url`` so a typo in the
    repository name fails immediately rather than mid-download.
    """
    # Fail fast if the repository name is wrong or inaccessible.
    get(check_url).raise_for_status()
    for page_number, page in enumerate(pages_iterator(get(url)), start=1):
        print(f"Current page: {page_number}")
        for item in page.json():
            # The issues endpoint also returns pull requests; skip those.
            if "pull_request" in item:
                continue
            output_issue({
                "id": item['number'],
                "title": item['title'],
                "labels": [label['name'] for label in item['labels']],
                "description": item['body'],
            })


if __name__ == "__main__":
    main()