#!/usr/bin/env python3
import argparse
import json
import logging
import os
import re
import subprocess
from threading import Thread
import urllib.request

DELIMITER = '---------------'
LOG_FORMAT = r'%H;"%an %ae";"%cn %ce"'
LOG_REGEXP = r'(\w+);"(.*?)";"(.*?)"'
LOG_NAME_REGEXP = r'^(.*?)\s+(\S+)$'

GIT_EXTRACT_CMD = "git log --pretty='{}' --all".format(LOG_FORMAT)
GIT_CLONE_CMD = "git clone {}"

GITHUB_USER_STATS = 'https://api.github.com/users/{}'
GITHUB_USER_REPOS = 'https://api.github.com/users/{}/repos?per_page=100&page={}'
GITHUB_PER_PAGE_LIMIT = 100

SYSTEM_EMAILS = [
]
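
# Illustration (sample values invented): one line produced by GIT_EXTRACT_CMD
# looks like
#   1a2b3c4d;"Jane Doe jane@example.com";"Jane Doe jane@example.com"
# LOG_REGEXP splits it into (hash, author, committer), and LOG_NAME_REGEXP then
# splits each "Name email" part on the last run of whitespace, so multi-word
# names survive and the trailing token is taken as the email.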


def get_public_repos_count(nickname):
    url = GITHUB_USER_STATS
    req_url = url.format(nickname)
    req = urllib.request.Request(req_url)
    try:
        response = urllib.request.urlopen(req)
    except Exception as e:
        logging.debug(e)
    else:
        stats = json.loads(response.read().decode('utf8'))
        repos_count = stats["public_repos"]
        if repos_count:
            return repos_count


def get_github_repos(nickname, skip_forks=True, repos_count=GITHUB_PER_PAGE_LIMIT):
    # NB: when skip_forks is True, forked repos are excluded from the result
    repos_links = set()
    if not repos_count:
        return repos_links

    url = GITHUB_USER_REPOS
    last_page = (repos_count + GITHUB_PER_PAGE_LIMIT - 1) // GITHUB_PER_PAGE_LIMIT  # ceiling division
    for page_num in range(1, last_page + 1):
        req_url = url.format(nickname, page_num)
        req = urllib.request.Request(req_url)
        try:
            response = urllib.request.urlopen(req)
        except Exception as e:
            logging.debug(e)
        else:
            repos = json.loads(response.read().decode('utf8'))
            result = [r['html_url'] for r in repos if not skip_forks or not r['fork']]
            repos_links.update(set(result))
    return repos_links
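
# A minimal usage sketch (hypothetical nickname, needs network access):
#   count = get_public_repos_count('someuser')   # e.g. 142
#   links = get_github_repos('someuser', repos_count=count)
# With repos_count=142 the pagination loop above requests pages 1 and 2
# (ceil(142 / 100) == 2) and collects the html_url of every non-fork repo.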


def find_all_repos_recursively(path):
    git_dirs = []
    for current_dir, dirs, _ in os.walk(path):
        if current_dir.endswith('.git'):
            git_dirs.append(current_dir)
            # stop os.walk from descending into the repo internals
            dirs.clear()
    return git_dirs
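
# Example (hypothetical layout): given
#   projects/app/.git
#   projects/lib/vendored/.git
# find_all_repos_recursively('projects') returns both '.git' paths; clearing
# `dirs` in place keeps os.walk() from wasting time inside repo internals.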


class Commit:
    """
    Extract and store basic commit info
    """
    @staticmethod
    def _extract_name_email(log_str_part):
        extracted = re.search(LOG_NAME_REGEXP, log_str_part)
        if not extracted:
            logging.error('Could not extract name/email from "%s"', log_str_part)
            return ('', '')
        return extracted.groups()

    def __init__(self, log_str):
        extracted = re.search(LOG_REGEXP, log_str)
        if not extracted:
            logging.error('Could not extract commit info from "%s"', log_str)
        else:
            self.hash, self.author, self.committer = extracted.groups()
            self.author_name, self.author_email = Commit._extract_name_email(self.author)
            self.committer_name, self.committer_email = Commit._extract_name_email(self.committer)
            self.author_committer_names_same = self.author_name == self.committer_name
            self.author_committer_emails_same = self.author_email == self.committer_email
            self.author_committer_same = self.author_committer_names_same and self.author_committer_emails_same

    def __str__(self):
        return """Hash: {hash}
Author name: {author_name}
Author email: {author_email}
Committer name: {committer_name}
Committer email: {committer_email}
""".format(
            hash=self.hash,
            author_name=self.author_name, author_email=self.author_email,
            committer_name=self.committer_name, committer_email=self.committer_email,
        )
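
# Parsing sketch (sample values invented):
#   c = Commit('1a2b3c4d;"Jane Doe jane@example.com";"CI Bot bot@ci.example"')
#   c.author_name            -> 'Jane Doe'
#   c.committer_email        -> 'bot@ci.example'
#   c.author_committer_same  -> False, which later links the two identities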


class Git:
    """
    Make external git work
    """
    @staticmethod
    def get_tree_info(git_dir):
        process = subprocess.Popen(GIT_EXTRACT_CMD, cwd=git_dir, shell=True, stdout=subprocess.PIPE)
        stat = process.stdout.read().decode()
        return stat

    @staticmethod
    def clone(link):
        process = subprocess.Popen(GIT_CLONE_CMD.format(link), shell=True, stdout=subprocess.PIPE)
        res = process.stdout.read().decode()
        return res

    @staticmethod
    def get_verified_username(repo_url, commit, person):
        if not repo_url.startswith('https://github.com/'):
            return

        commit_link = repo_url.rstrip('/') + '/commit/' + commit.hash
        req = urllib.request.Request(commit_link)
        try:
            response = urllib.request.urlopen(req)
            page_source = response.read().decode('utf8', errors='ignore')
            # TODO: authored and committed
            extracted = re.search(r'<a href=".+?commits\?author=(.+?)"', page_source)
            if not extracted:
                return
            name = extracted.group(1)
            person.github_link = name
            logging.debug(commit_link + '\n' + name)
        except Exception as e:
            logging.debug(e)
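
# How the verification above works: a public GitHub commit page embeds a link
# of the form (user/repo are placeholders)
#   <a href="/<user>/<repo>/commits?author=<username>">
# for the account GitHub attributes the commit to; the regexp captures that
# <username>. This relies on GitHub's current page markup and may break if
# the markup changes.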


class Person:
    """
    Basic person info from commit
    """
    def __init__(self, desc):
        self.name = ''
        self.email = ''
        self.desc = desc
        self.as_author = 0
        self.as_committer = 0
        self.also_known = {}
        self.github_link = None

    def __str__(self):
        result = "Name:\t\t\t{name}\nEmail:\t\t\t{email}".format(name=self.name, email=self.email)
        if self.as_author:
            result += "\nAppears as author:\t{} times".format(self.as_author)
        if self.as_committer:
            result += "\nAppears as committer:\t{} times".format(self.as_committer)
        if self.github_link:
            result += "\nVerified account:\n\t\t\thttps://github.com/{}".format(self.github_link)
        if self.also_known:
            result += '\nAlso appears with:{}'.format(
                '\n\t\t\t'.join([''] + list(self.also_known.keys()))
            )
        return result
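
# Rendered sketch for one person (values invented, tabs shown as spaces):
#   Name:                   Jane Doe
#   Email:                  jane@example.com
#   Appears as author:      12 times
#   Verified account:
#                           https://github.com/janedoe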


class GitAnalyst:
    """
    Git analysis
    """
    def __init__(self):
        self.git = Git()
        self.commits = []
        self.persons = {}
        self.names = {}
        self.emails = {}
        self.repos = []
        self.same_emails_persons = {}

    def append(self, source=None):
        if not source:
            return

        if '://' not in source:
            git_dir = source
        else:
            self.git.clone(source)
            # `git clone` drops a trailing ".git" when it names the directory
            git_dir = re.sub(r'\.git$', '', source.split('/')[-1])

        self.repos.append(git_dir)
        git_info = self.git.get_tree_info(git_dir)
        text_commits = filter(lambda x: x, git_info.split('\n'))
        new_commits = list(map(Commit, text_commits))
        self.commits += new_commits
        self.analyze(new_commits, source)

    @property
    def sorted_persons(self):
        return sorted(self.persons.items(), key=lambda p: p[1].as_author + p[1].as_committer)

    def resolve_persons(self):
        threads = []
        for _, person in self.persons.items():
            if person.email in SYSTEM_EMAILS:
                continue
            # TODO: optimize
            thread = Thread(target=self.git.get_verified_username, args=(person.repo_url, person.commit, person))
            thread.start()
            threads.append(thread)
        for thread in threads:
            thread.join()

    def analyze(self, new_commits, repo_url):
        # save all authors and committers as unique persons
        for commit in new_commits:
            # author saving
            person = self.persons.get(commit.author, Person(commit.author))
            person.name = commit.author_name
            person.email = commit.author_email
            person.as_author += 1
            person.repo_url = repo_url
            person.commit = commit
            self.persons[commit.author] = person
            # committer saving
            person = self.persons.get(commit.committer, Person(commit.committer))
            person.name = commit.committer_name
            person.email = commit.committer_email
            person.as_committer += 1
            person.repo_url = repo_url
            person.commit = commit
            self.persons[commit.committer] = person

        # link persons in the graph based on author/committer mismatch
        for commit in new_commits:
            if not commit.author_committer_same:
                self.persons[commit.author].also_known[commit.committer] = self.persons[commit.committer]
                self.persons[commit.committer].also_known[commit.author] = self.persons[commit.author]

        # TODO: probabilistic graph links based on same names/emails and Levenshtein distance
        # just checking identical names for now
        for commit in new_commits:
            author_emails = self.names.get(commit.author_name, set())
            author_emails.add(commit.author_email)
            self.names[commit.author_name] = author_emails

            committer_emails = self.names.get(commit.committer_name, set())
            committer_emails.add(commit.committer_email)
            self.names[commit.committer_name] = committer_emails

        # group names that map to exactly the same set of emails
        for emails_set in self.names.values():
            names = [name for name, v in self.names.items() if v == emails_set]
            key = ','.join(sorted(names))
            if len(names) > 1 and key not in self.same_emails_persons:
                self.same_emails_persons[key] = (names, emails_set)

        return self.sorted_persons

    def __str__(self):
        result = 'Analysis of the git repo(s) "{}"'.format(', '.join(self.repos))
        result += '\nVerbose persons info:\n'
        for _, person in self.sorted_persons:
            result += "{}\n{}\n".format(DELIMITER, person)

        matching_result = ''
        for name, emails in self.names.items():
            if len(emails) > 1:
                matching_result += '\n{} is the owner of emails:\n\t\t\t{}\n'.format(name, '\n\t\t\t'.join(emails))
        if matching_result:
            result += '\nMatching info:\n{}{}'.format(DELIMITER, matching_result)

        for names, emails in self.same_emails_persons.values():
            result += '\n{} are the same person\n'.format(' and '.join(names))

        result += '\nStatistics info:\n{}'.format(DELIMITER)
        result += '\nTotal persons: {}'.format(len(self.persons))
        return result
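
# Programmatic usage sketch (path is hypothetical):
#   analyst = GitAnalyst()
#   analyst.append(source='/path/to/project/.git')
#   analyst.resolve_persons()  # optional: resolve GitHub usernames
#   print(analyst)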


def main():
    parser = argparse.ArgumentParser(description="Extract accounts' information from git repos and run some analysis on it.")
    parser.add_argument('-d', '--dir', help='directory with git project(s)')
    parser.add_argument('-u', '--url', help='url of git repo')
    parser.add_argument('--github', action='store_true', help='try to extract extended info from GitHub')
    parser.add_argument('--nickname', type=str, help='try to download repos from all platforms by nickname')
    parser.add_argument('-r', '--recursive', action='store_true', help='recursive directory processing')
    parser.add_argument('--debug', action='store_true', help='print debug information')
    # TODO: clone repos as bare
    # TODO: allow forks
    args = parser.parse_args()

    log_level = logging.DEBUG if args.debug else logging.INFO
    logging.basicConfig(level=log_level, format='-' * 40 + '\n%(levelname)s: %(message)s')

    analyst = GitAnalyst()

    repos = []
    repos.append(args.url)
    repos.append(args.dir and args.dir.rstrip('/'))

    if args.recursive and args.dir:
        repos += find_all_repos_recursively(args.dir)

    if args.nickname:
        repos_count = get_public_repos_count(args.nickname)
        if repos_count:
            print('found', repos_count, 'repos')
            repos += get_github_repos(args.nickname, repos_count=repos_count)

    for repo in repos:
        analyst.append(source=repo)

    logging.info('Resolving GitHub usernames, please wait...')
    analyst.resolve_persons()

    if analyst.repos:
        print(analyst)
    else:
        print('Run me with a git repo link or path!')
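
# CLI sketches (URL, path and nickname are placeholders):
#   ./gitcolombo.py -u https://github.com/<user>/<repo>
#   ./gitcolombo.py -d /path/to/projects -r
#   ./gitcolombo.py --nickname <user>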


if __name__ == '__main__':
    main()