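"""GitHub API client for collecting repository metrics.

Covers traffic views, star count and ranking, first-time contributors and
issue creators, open PR/issue/discussion counts, repository events,
releases, and detailed analysis of open issues and pull requests.
"""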
import logging
import os
from datetime import datetime, timedelta

import requests

from utils import validate_token

# Set up logging
logger = logging.getLogger(__name__)


class GitHubClient:
    """
    GitHub API client for a single repository.

    Reads the target repository from config["github"] (owner, repo, api_url)
    and authenticates with the GITHUB_TOKEN environment variable.
    """

    def __init__(self, config):
        self.config = config
        self.token = os.getenv("GITHUB_TOKEN")
        validate_token("GITHUB_TOKEN", self.token)
        self.owner = config["github"]["owner"]
        self.repo = config["github"]["repo"]
        self.api_url = config["github"]["api_url"]
        self.headers = {
            "Authorization": f"token {self.token}",
            "Accept": "application/vnd.github.v3+json"
        }
        logger.info(f"Initialized GitHub client for {self.owner}/{self.repo}")

    def get_repository_traffic(self):
        """
        Get daily repository traffic (views).

        Note: the GitHub traffic API covers the last 14 days and requires
        push access to the repository.
        """
        url = f"{self.api_url}/repos/{self.owner}/{self.repo}/traffic/views"
        logger.info(f"Fetching repository traffic data from {url}")
        response = requests.get(url, headers=self.headers)
        if response.status_code != 200:
            raise Exception(f"Failed to fetch traffic data: {response.json()}")
        data = response.json()
        # Extract the per-day breakdown
        daily_entries = []
        for entry in data.get("views", []):
            date = entry["timestamp"].split("T")[0]
            count = entry["count"]
            uniques = entry["uniques"]
            daily_entries.append({
                "date": date,
                "total_views": count,
                "unique_visitors": uniques
            })
        logger.info(f"Retrieved {len(daily_entries)} days of traffic data")
        return daily_entries

    def get_repository_stars(self):
        """
        Get the repository star count and an approximate star ranking.
        """
        # Get the star count
        url = f"{self.api_url}/repos/{self.owner}/{self.repo}"
        logger.info(f"Fetching repository star count from {url}")
        response = requests.get(url, headers=self.headers)
        if response.status_code != 200:
            raise Exception(f"Failed to fetch repository data: {response.json()}")
        data = response.json()
        star_count = data.get("stargazers_count", 0)
        # Approximate the ranking: count repositories with strictly more
        # stars, then add 1 for this repository (ties are not broken)
        search_url = f"{self.api_url}/search/repositories?q=stars:>{star_count}&sort=stars&order=asc"
        logger.info(f"Fetching star ranking from {search_url}")
        search_response = requests.get(search_url, headers=self.headers)
        if search_response.status_code != 200:
            raise Exception(f"Failed to fetch star ranking: {search_response.json()}")
        search_data = search_response.json()
        ranking = search_data.get("total_count", 0) + 1
        return {
            "date": datetime.now().strftime("%Y-%m-%d"),
            "star_count": star_count,
            "ranking": ranking
        }

    def get_new_contributors(self, days_back=30):
        """
        Get first-time contributors who submitted PRs in the last N days.
        """
        # Calculate the date range
        end_date = datetime.now()
        start_date = end_date - timedelta(days=days_back)
        # Format the date for the GitHub search query
        start_date_str = start_date.strftime("%Y-%m-%d")
        # Search for PRs created in the date range
        search_url = f"{self.api_url}/search/issues?q=repo:{self.owner}/{self.repo}+is:pr+created:>={start_date_str}&per_page=100"
        logger.info(f"Searching for PRs created since {start_date_str}")
        response = requests.get(search_url, headers=self.headers)
        if response.status_code != 200:
            raise Exception(f"Failed to search for PRs: {response.json()}")
        data = response.json()
        prs = data.get("items", [])
        # Process each PR to find first-time contributors
        new_contributors = []
        for pr in prs:
            pr_number = pr.get("number")
            pr_url = pr.get("html_url")
            pr_title = pr.get("title")
            pr_created_at = pr.get("created_at")
            user = pr.get("user", {})
            username = user.get("login")
            # Skip if no username
            if not username:
                continue
            # Check whether this is the user's first PR in the repository:
            # a total_count of 1 means the PR found above is their only one
            user_prs_url = f"{self.api_url}/search/issues?q=repo:{self.owner}/{self.repo}+is:pr+author:{username}"
            user_prs_response = requests.get(user_prs_url, headers=self.headers)
            if user_prs_response.status_code != 200:
                logger.warning(f"Failed to check PR history for {username}: {user_prs_response.json()}")
                continue
            user_prs_data = user_prs_response.json()
            user_pr_count = user_prs_data.get("total_count", 0)
            # If this is their first PR, add them to the list
            if user_pr_count == 1:
                new_contributors.append({
                    "username": username,
                    "pr_number": pr_number,
                    "pr_url": pr_url,
                    "pr_title": pr_title,
                    "created_at": pr_created_at,
                    "date": pr_created_at.split("T")[0] if pr_created_at else None
                })
        logger.info(f"Found {len(new_contributors)} new contributors")
        return new_contributors

    def get_new_issue_creators(self, days_back=30):
        """
        Get first-time issue creators in the last N days.
        """
        # Calculate the date range
        end_date = datetime.now()
        start_date = end_date - timedelta(days=days_back)
        # Format the date for the GitHub search query
        start_date_str = start_date.strftime("%Y-%m-%d")
        # Search for issues created in the date range
        search_url = f"{self.api_url}/search/issues?q=repo:{self.owner}/{self.repo}+is:issue+created:>={start_date_str}&per_page=100"
        logger.info(f"Searching for issues created since {start_date_str}")
        response = requests.get(search_url, headers=self.headers)
        if response.status_code != 200:
            raise Exception(f"Failed to search for issues: {response.json()}")
        data = response.json()
        issues = data.get("items", [])
        # Process each issue to find first-time creators
        new_issue_creators = []
        for issue in issues:
            issue_number = issue.get("number")
            issue_url = issue.get("html_url")
            issue_title = issue.get("title")
            issue_created_at = issue.get("created_at")
            user = issue.get("user", {})
            username = user.get("login")
            # Skip if no username
            if not username:
                continue
            # Check whether this is the user's first issue in the repository:
            # a total_count of 1 means the issue found above is their only one
            user_issues_url = f"{self.api_url}/search/issues?q=repo:{self.owner}/{self.repo}+is:issue+author:{username}"
            user_issues_response = requests.get(user_issues_url, headers=self.headers)
            if user_issues_response.status_code != 200:
                logger.warning(f"Failed to check issue history for {username}: {user_issues_response.json()}")
                continue
            user_issues_data = user_issues_response.json()
            user_issue_count = user_issues_data.get("total_count", 0)
            # If this is their first issue, add them to the list
            if user_issue_count == 1:
                new_issue_creators.append({
                    "username": username,
                    "issue_number": issue_number,
                    "issue_url": issue_url,
                    "issue_title": issue_title,
                    "created_at": issue_created_at,
                    "date": issue_created_at.split("T")[0] if issue_created_at else None
                })
        logger.info(f"Found {len(new_issue_creators)} new issue creators")
        return new_issue_creators

    def get_repository_activity(self):
        """
        Get repository activity metrics (open PRs, open issues, open discussions).
        """
        # Get the open PR count
        prs_url = f"{self.api_url}/search/issues?q=repo:{self.owner}/{self.repo}+is:pr+is:open"
        logger.info(f"Fetching open PRs count from {prs_url}")
        prs_response = requests.get(prs_url, headers=self.headers)
        if prs_response.status_code != 200:
            raise Exception(f"Failed to fetch open PRs count: {prs_response.json()}")
        prs_data = prs_response.json()
        open_prs_count = prs_data.get("total_count", 0)
        # Get the open issue count
        issues_url = f"{self.api_url}/search/issues?q=repo:{self.owner}/{self.repo}+is:issue+is:open"
        logger.info(f"Fetching open issues count from {issues_url}")
        issues_response = requests.get(issues_url, headers=self.headers)
        if issues_response.status_code != 200:
            raise Exception(f"Failed to fetch open issues count: {issues_response.json()}")
        issues_data = issues_response.json()
        open_issues_count = issues_data.get("total_count", 0)
        # Get the open discussion count if the endpoint is available;
        # discussions are not exposed everywhere in the REST API, so this
        # degrades gracefully to 0 on failure
        discussions_count = 0
        try:
            discussions_url = f"{self.api_url}/repos/{self.owner}/{self.repo}/discussions"
            discussions_headers = self.headers.copy()
            discussions_headers["Accept"] = "application/vnd.github.discussions-preview+json"
            logger.info(f"Fetching open discussions count from {discussions_url}")
            discussions_response = requests.get(discussions_url, headers=discussions_headers)
            if discussions_response.status_code == 200:
                discussions_data = discussions_response.json()
                discussions_count = len([d for d in discussions_data if not d.get("closed_at")])
            else:
                logger.warning(f"Failed to fetch discussions: {discussions_response.status_code}")
        except Exception as e:
            logger.warning(f"Error fetching discussions: {e}")
        return {
            "date": datetime.now().strftime("%Y-%m-%d"),
            "open_prs": open_prs_count,
            "open_issues": open_issues_count,
            "open_discussions": discussions_count
        }

    def get_repository_events(self, days_back=1):
        """
        Get all GitHub events for the repository in the last N days.
        """
        # Calculate the date range
        end_date = datetime.now()
        start_date = end_date - timedelta(days=days_back)
        # Fetch events page by page
        events_url = f"{self.api_url}/repos/{self.owner}/{self.repo}/events"
        logger.info(f"Fetching repository events from {events_url}")
        events = []
        page = 1
        per_page = 100
        reached_start = False
        while True:
            paginated_url = f"{events_url}?page={page}&per_page={per_page}"
            response = requests.get(paginated_url, headers=self.headers)
            if response.status_code != 200:
                raise Exception(f"Failed to fetch events: {response.json()}")
            page_events = response.json()
            if not page_events:
                break
            # Filter events by date; events arrive newest-first, so stop
            # once one predates the window
            for event in page_events:
                created_at = event.get("created_at")
                if not created_at:
                    continue
                event_date = datetime.strptime(created_at, "%Y-%m-%dT%H:%M:%SZ")
                if event_date < start_date:
                    reached_start = True
                    break
                # Extract the relevant event data
                event_data = {
                    "id": event.get("id"),
                    "type": event.get("type"),
                    "created_at": created_at,
                    "date": created_at.split("T")[0],
                    "actor": event.get("actor", {}).get("login"),
                    "repo": event.get("repo", {}).get("name")
                }
                # Add payload data based on the event type
                payload = event.get("payload", {})
                if event.get("type") == "PushEvent":
                    event_data["commits"] = len(payload.get("commits", []))
                    event_data["ref"] = payload.get("ref")
                elif event.get("type") in ["PullRequestEvent", "IssuesEvent"]:
                    event_data["action"] = payload.get("action")
                    event_data["number"] = payload.get("number") or payload.get("issue", {}).get("number") or payload.get("pull_request", {}).get("number")
                events.append(event_data)
            # Stop at the last page or once the date range is exhausted
            if len(page_events) < per_page or reached_start:
                break
            page += 1
        logger.info(f"Retrieved {len(events)} events")
        return events

    def get_open_issues_analysis(self):
        """
        Get detailed statistics about open issues.
        """
        # Search for open issues (newest first, up to 100)
        issues_url = f"{self.api_url}/search/issues?q=repo:{self.owner}/{self.repo}+is:issue+is:open&sort=created&order=desc&per_page=100"
        logger.info(f"Fetching open issues from {issues_url}")
        response = requests.get(issues_url, headers=self.headers)
        if response.status_code != 200:
            raise Exception(f"Failed to fetch open issues: {response.json()}")
        data = response.json()
        issues = data.get("items", [])
        # Process each issue to extract detailed statistics
        issues_analysis = []
        for issue in issues:
            issue_number = issue.get("number")
            issue_url = issue.get("html_url")
            issue_title = issue.get("title")
            issue_created_at = issue.get("created_at")
            issue_updated_at = issue.get("updated_at")
            user = issue.get("user", {})
            username = user.get("login")
            # Get the issue details, including the comment count
            issue_detail_url = f"{self.api_url}/repos/{self.owner}/{self.repo}/issues/{issue_number}"
            detail_response = requests.get(issue_detail_url, headers=self.headers)
            if detail_response.status_code != 200:
                logger.warning(f"Failed to fetch details for issue #{issue_number}: {detail_response.json()}")
                continue
            issue_details = detail_response.json()
            # Get the comments to count unique commenters
            comments_url = issue_details.get("comments_url")
            comments_response = requests.get(comments_url, headers=self.headers, params={"per_page": 100})
            comments_count = issue_details.get("comments", 0)
            commenters = set()
            if comments_response.status_code == 200:
                for comment in comments_response.json():
                    commenter = comment.get("user", {}).get("login")
                    if commenter:
                        commenters.add(commenter)
            # Sum the individual reaction counts, skipping the metadata keys
            # ("url", "total_count") so reactions are not double-counted, and
            # excluding plain thumbs up/down as before
            reactions = issue_details.get("reactions", {})
            total_reactions = sum(
                count for key, count in reactions.items()
                if key not in ("url", "total_count", "+1", "-1")
            )
            # Calculate the age in days
            created_date = datetime.strptime(issue_created_at, "%Y-%m-%dT%H:%M:%SZ") if issue_created_at else None
            age_days = (datetime.now() - created_date).days if created_date else 0
            # Extract the labels
            labels = [label.get("name") for label in issue.get("labels", [])]
            issues_analysis.append({
                "issue_number": issue_number,
                "issue_url": issue_url,
                "issue_title": issue_title,
                "created_at": issue_created_at,
                "updated_at": issue_updated_at,
                "date": datetime.now().strftime("%Y-%m-%d"),
                "username": username,
                "comments_count": comments_count,
                "unique_commenters": len(commenters),
                "reactions_count": total_reactions,
                "age_days": age_days,
                "labels": ",".join(labels)
            })
        logger.info(f"Analyzed {len(issues_analysis)} open issues")
        return issues_analysis

    def get_repository_releases(self, days_back=365):
        """
        Get repository releases from the last year.

        Args:
            days_back: Number of days to go back (default: 365 for one year)

        Returns:
            List of dictionaries with release information
        """
        # Calculate the date range
        end_date = datetime.now()
        start_date = end_date - timedelta(days=days_back)
        # Fetch releases page by page
        releases_url = f"{self.api_url}/repos/{self.owner}/{self.repo}/releases"
        logger.info(f"Fetching repository releases from {releases_url}")
        releases = []
        page = 1
        per_page = 100
        reached_start = False
        while True:
            paginated_url = f"{releases_url}?page={page}&per_page={per_page}"
            response = requests.get(paginated_url, headers=self.headers)
            if response.status_code != 200:
                raise Exception(f"Failed to fetch releases: {response.json()}")
            page_releases = response.json()
            if not page_releases:
                break
            # Process each release
            for release in page_releases:
                published_at = release.get("published_at")
                if not published_at:
                    continue
                # Stop once a release predates the date range
                release_date = datetime.strptime(published_at, "%Y-%m-%dT%H:%M:%SZ")
                if release_date < start_date:
                    reached_start = True
                    break
                # Extract the release data
                release_data = {
                    "id": release.get("id"),
                    "tag_name": release.get("tag_name"),
                    "name": release.get("name"),
                    "url": release.get("html_url"),
                    "published_at": published_at,
                    "date": published_at.split("T")[0],
                    "is_prerelease": release.get("prerelease", False),
                    "is_draft": release.get("draft", False),
                    "author": release.get("author", {}).get("login"),
                    "body": release.get("body"),
                    "sha": None  # Populated below if the tag can be resolved
                }
                # Resolve the commit SHA for the release tag
                tag_name = release.get("tag_name")
                if tag_name:
                    tag_url = f"{self.api_url}/repos/{self.owner}/{self.repo}/git/refs/tags/{tag_name}"
                    tag_response = requests.get(tag_url, headers=self.headers)
                    if tag_response.status_code == 200:
                        tag_data = tag_response.json()
                        # The SHA lives in different places depending on the tag type
                        if "object" in tag_data:
                            if tag_data["object"]["type"] == "commit":
                                # Lightweight tag: points directly at a commit
                                release_data["sha"] = tag_data["object"]["sha"]
                            elif tag_data["object"]["type"] == "tag":
                                # Annotated tag: dereference the tag object
                                tag_object_url = tag_data["object"]["url"]
                                tag_object_response = requests.get(tag_object_url, headers=self.headers)
                                if tag_object_response.status_code == 200:
                                    tag_object_data = tag_object_response.json()
                                    if "object" in tag_object_data and tag_object_data["object"]["type"] == "commit":
                                        release_data["sha"] = tag_object_data["object"]["sha"]
                # Collect asset information
                assets = []
                for asset in release.get("assets", []):
                    assets.append({
                        "name": asset.get("name"),
                        "url": asset.get("browser_download_url"),
                        "size": asset.get("size"),
                        "download_count": asset.get("download_count"),
                        "content_type": asset.get("content_type")
                    })
                release_data["assets"] = assets
                release_data["assets_count"] = len(assets)
                releases.append(release_data)
            # Stop at the last page or once the date range is exhausted
            if len(page_releases) < per_page or reached_start:
                break
            page += 1
        logger.info(f"Retrieved {len(releases)} releases")
        return releases

    def get_open_prs_analysis(self):
        """
        Get detailed statistics about open pull requests.
        """
        # Search for open PRs (newest first, up to 100)
        prs_url = f"{self.api_url}/search/issues?q=repo:{self.owner}/{self.repo}+is:pr+is:open&sort=created&order=desc&per_page=100"
        logger.info(f"Fetching open PRs from {prs_url}")
        response = requests.get(prs_url, headers=self.headers)
        if response.status_code != 200:
            raise Exception(f"Failed to fetch open PRs: {response.json()}")
        data = response.json()
        prs = data.get("items", [])
        # Process each PR to extract detailed statistics
        prs_analysis = []
        for pr in prs:
            pr_number = pr.get("number")
            pr_url = pr.get("html_url")
            pr_title = pr.get("title")
            pr_created_at = pr.get("created_at")
            pr_updated_at = pr.get("updated_at")
            user = pr.get("user", {})
            username = user.get("login")
            # Get the PR details, including review and change counts
            pr_detail_url = f"{self.api_url}/repos/{self.owner}/{self.repo}/pulls/{pr_number}"
            detail_response = requests.get(pr_detail_url, headers=self.headers)
            if detail_response.status_code != 200:
                logger.warning(f"Failed to fetch details for PR #{pr_number}: {detail_response.json()}")
                continue
            pr_details = detail_response.json()
            # Get the comments to count unique commenters
            comments_url = pr_details.get("comments_url")
            comments_response = requests.get(comments_url, headers=self.headers, params={"per_page": 100})
            comments_count = pr_details.get("comments", 0)
            commenters = set()
            if comments_response.status_code == 200:
                for comment in comments_response.json():
                    commenter = comment.get("user", {}).get("login")
                    if commenter:
                        commenters.add(commenter)
            # Get the reviews to count unique reviewers
            reviews_url = f"{self.api_url}/repos/{self.owner}/{self.repo}/pulls/{pr_number}/reviews"
            reviews_response = requests.get(reviews_url, headers=self.headers, params={"per_page": 100})
            reviews_count = 0
            reviewers = set()
            if reviews_response.status_code == 200:
                reviews = reviews_response.json()
                reviews_count = len(reviews)
                for review in reviews:
                    reviewer = review.get("user", {}).get("login")
                    if reviewer:
                        reviewers.add(reviewer)
            # Get the change statistics
            additions = pr_details.get("additions", 0)
            deletions = pr_details.get("deletions", 0)
            changed_files = pr_details.get("changed_files", 0)
            # Calculate the age in days
            created_date = datetime.strptime(pr_created_at, "%Y-%m-%dT%H:%M:%SZ") if pr_created_at else None
            age_days = (datetime.now() - created_date).days if created_date else 0
            # Extract the labels
            labels = [label.get("name") for label in pr.get("labels", [])]
            # Get the mergeable state
            mergeable_state = pr_details.get("mergeable_state", "unknown")
            prs_analysis.append({
                "pr_number": pr_number,
                "pr_url": pr_url,
                "pr_title": pr_title,
                "created_at": pr_created_at,
                "updated_at": pr_updated_at,
                "date": datetime.now().strftime("%Y-%m-%d"),
                "username": username,
                "comments_count": comments_count,
                "unique_commenters": len(commenters),
                "reviews_count": reviews_count,
                "unique_reviewers": len(reviewers),
                "additions": additions,
                "deletions": deletions,
                "changed_files": changed_files,
                "age_days": age_days,
                "labels": ",".join(labels),
                "mergeable_state": mergeable_state
            })
        logger.info(f"Analyzed {len(prs_analysis)} open PRs")
        return prs_analysis
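

# ---------------------------------------------------------------------------
# Usage sketch (not part of the client): a minimal example assuming the
# config shape this module reads (config["github"]["owner"|"repo"|"api_url"])
# and a GITHUB_TOKEN environment variable that utils.validate_token accepts.
# The owner/repo values below are hypothetical placeholders.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    example_config = {
        "github": {
            "owner": "octocat",                   # hypothetical owner
            "repo": "hello-world",                # hypothetical repository
            "api_url": "https://api.github.com"   # public GitHub REST API
        }
    }
    client = GitHubClient(example_config)
    print(client.get_repository_stars())
    print(client.get_repository_activity())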