forked from ansible/ansibullbot
Showing 4 changed files with 349 additions and 7 deletions.
@@ -0,0 +1,168 @@
#!/usr/bin/env python

import datetime
import json
import os
import sys

from ansibullbot.utils.receiver_client import get_receiver_metadata
from ansibullbot.utils.receiver_client import get_receiver_summaries

def main():

    # define where to dump the resulting files
    if len(sys.argv) > 1:
        destdir = sys.argv[1]
    else:
        destdir = '/tmp'

    if not os.path.isdir(destdir):
        os.makedirs(destdir)

    # aggregation buckets: issue metadata by url, issue urls by file,
    # filenames by issue url, issue urls by maintainer, age stats by file
    ISSUES = {}
    BYFILE = {}
    BYISSUE = {}
    BYMAINTAINER = {}
    BYFILE_STATS = {}

    summaries = get_receiver_summaries('ansible', 'ansible')
    for summary in summaries:
        number = summary['github_number']
        this_meta = get_receiver_metadata('ansible', 'ansible', number=number)

        if not this_meta:
            continue

        this_meta = this_meta[0]
        url = this_meta['html_url']
        ISSUES[url] = this_meta
        BYISSUE[url] = []

        # skip records whose metadata payload is malformed
        try:
            this_meta.get('component_matches', [])
        except Exception as e:
            print(e)
            #import epdb; epdb.st()
            continue

        if summary['state'] == 'open':
            created = datetime.datetime.strptime(
                summary['created_at'].split('T')[0], '%Y-%m-%d'
            )
            age = (datetime.datetime.now() - created).days
        else:
            # closed issues are not handled yet; inspect in the debugger
            print(summary)
            import epdb; epdb.st()

        for component in this_meta.get('component_matches', []):
            # we seem to have some variation in the keys ...
            filename = None
            try:
                filename = component['repo_filename']
            except KeyError:
                filename = component['filename']

            if not filename:
                continue

            if 'maintainers' in component:
                for maintainer in component['maintainers']:
                    if maintainer not in BYMAINTAINER:
                        BYMAINTAINER[maintainer] = []
                    if url not in BYMAINTAINER[maintainer]:
                        BYMAINTAINER[maintainer].append(url)

            if filename not in BYFILE_STATS:
                BYFILE_STATS[filename] = {
                    'open_ages': [],
                    'closed_ages': []
                }

            if summary['state'] == 'open':
                BYFILE_STATS[filename]['open_ages'].append([
                    age,
                    this_meta['template_data']['issue type']
                ])
            else:
                # closed_ages is never populated yet; inspect in the debugger
                import epdb; epdb.st()

            BYISSUE[url].append(filename)

            if filename not in BYFILE:
                BYFILE[filename] = []
            if url not in BYFILE[filename]:
                BYFILE[filename].append(url)

    # median closure time? (see the sketch after this listing)
    import epdb; epdb.st()

    # most active files? (see the sketch after this listing)
    import epdb; epdb.st()

    '''
    destfile = os.path.join(destdir, 'byissue.json')
    with open(destfile, 'w') as f:
        f.write(json.dumps(BYISSUE, indent=2, sort_keys=True))
    destfile = os.path.join(destdir, 'byfile.json')
    with open(destfile, 'w') as f:
        f.write(json.dumps(BYFILE, indent=2, sort_keys=True))
    tuples = BYFILE.items()
    for idx, x in enumerate(tuples):
        x = [x[0]] + x[1]
        tuples[idx] = x
    tuples.sort(key=len)
    tuples.reverse()
    destfile = os.path.join(destdir, 'byfile_sorted.txt')
    with open(destfile, 'w') as f:
        for tup in tuples:
            f.write('{}\n'.format(tup[0]))
            for issue in tup[1:]:
                issue = issue.encode('ascii', 'ignore')
                title = ISSUES[issue]['title']
                title = title.encode('ascii', 'ignore')
                f.write('\t{}\t{}\n'.format(issue, title))
    destfile = os.path.join(destdir, 'byfile_sorted.html')
    with open(destfile, 'w') as f:
        for tup in tuples:
            f.write('<div style="background-color: #cfc ; padding: 10px; border: 1px solid green;">\n')
            file_ref = '<a href="https://github.com/ansible/ansible/blob/devel/{}">https://github.com/ansible/ansible/blob/devel/{}</a>'.format(tup[0], tup[0])
            f.write('{}\n'.format(file_ref))
            f.write('</div>')
            f.write('<br>\n')
            for issue in tup[1:]:
                issue = issue.encode('ascii', 'ignore')
                title = ISSUES[issue]['title']
                title = title.encode('ascii', 'ignore')
                issue_ref = '<a href="{}">{}</a>'.format(issue, issue)
                f.write('\t{}\t{}<br>\n'.format(issue_ref, title))
            f.write('<br>\n')
    tuples = BYMAINTAINER.items()
    for idx, x in enumerate(tuples):
        x = [x[0]] + x[1]
        tuples[idx] = x
    tuples.sort(key=len)
    tuples.reverse()
    destfile = os.path.join(destdir, 'bymaintainer.json')
    with open(destfile, 'w') as f:
        f.write(json.dumps(BYMAINTAINER, indent=2, sort_keys=True))
    destfile = os.path.join(destdir, 'bymaintainer_sorted.txt')
    with open(destfile, 'w') as f:
        for tup in tuples:
            f.write('{}\n'.format(tup[0]))
            for issue in tup[1:]:
                f.write('\t{}\n'.format(issue))
    '''


if __name__ == "__main__":
    main()
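
The two stubs at the bottom of main() drop into epdb where the median-closure-time and most-active-files reports would go. Here is a minimal sketch of how both questions could be answered from the structures the script already builds, assuming Python 3 (for the stdlib statistics module) and assuming closed_ages eventually gets populated the same way as open_ages; neither helper exists in the commit:

import statistics

def median_closure_time(byfile_stats):
    # median age in days of closed issues, per file; files with no
    # closed issues are skipped so statistics.median never sees empty data
    return {
        fname: statistics.median(age for age, _ in stats['closed_ages'])
        for fname, stats in byfile_stats.items()
        if stats['closed_ages']
    }

def most_active_files(byfile, limit=10):
    # files referenced by the most issues, busiest first
    ranked = sorted(byfile.items(), key=lambda kv: len(kv[1]), reverse=True)
    return ranked[:limit]

Both helpers only read the dictionaries, so they could be called from the spots where the debugger stubs sit today, e.g. most_active_files(BYFILE).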
@@ -0,0 +1,161 @@
#!/usr/bin/env python

import argparse
import datetime
import json
import os
import requests
import sys  # only referenced by the disabled argv handling below

from ansibullbot.utils.receiver_client import get_receiver_metadata
from ansibullbot.utils.receiver_client import get_receiver_summaries

def main():

    parser = argparse.ArgumentParser()
    # nargs='?' keeps the positional optional so the default can apply
    parser.add_argument("destdir", nargs='?', default='/tmp/ansibot_data')
    parser.add_argument('--usecache', action="store_true")
    args = parser.parse_args()

    '''
    # define where to dump the resulting files
    if len(sys.argv) > 1:
        destdir = sys.argv[1]
    else:
        destdir = '/tmp/ansibot_data'
    '''
    destdir = args.destdir
    #import epdb; epdb.st()

    if not os.path.isdir(destdir):
        os.makedirs(destdir)

    META = {}
    MAINTAINERS = []
    USERNAMES = []
    HISTORY = []
    LAST_SEEN = {}

    meta_cachefile = os.path.join(destdir, '.meta.json')
    if args.usecache and os.path.isfile(meta_cachefile):
        try:
            with open(meta_cachefile, 'r') as f:
                META = json.loads(f.read())
        except Exception as e:
            print(e)
            META = {}

    summaries = get_receiver_summaries('ansible', 'ansible')
    for summary in summaries:
        number = summary['github_number']

        if args.usecache and str(number) in META:
            this_meta = META.get(str(number))
            if not this_meta:
                continue
        else:
            this_meta = get_receiver_metadata(
                'ansible',
                'ansible',
                number=number,
                keys=[
                    'html_url',
                    'submitter',
                    'created_at',
                    'history',
                    'component_matches'
                ]
            )
            if not this_meta:
                META[str(number)] = None
                continue
            this_meta = this_meta[0]

        if not this_meta:
            continue

        url = this_meta['html_url']
        META[str(number)] = this_meta.copy()

        created_by = this_meta.get('submitter', None)
        if created_by and created_by not in USERNAMES:
            USERNAMES.append(created_by)
            HISTORY.append({
                'actor': created_by,
                'event': 'opened',
                'created_at': this_meta['created_at']
            })
            if 'merged_at' in this_meta:
                # merge events are not handled yet; inspect in the debugger
                import epdb; epdb.st()
        elif 'merged_at' in this_meta:
            import epdb; epdb.st()

        for x in this_meta.get('history', []):
            newx = {
                'actor': x['actor'],
                'event': x['event'],
                'created_at': x['created_at']
            }
            HISTORY.append(newx)

        components = this_meta.get('component_matches', [])
        for component in components:
            if 'maintainers' in component:
                for x in component['maintainers']:
                    if x not in MAINTAINERS:
                        MAINTAINERS.append(x)

    if args.usecache:
        with open(meta_cachefile, 'w') as f:
            f.write(json.dumps(META))

    for x in HISTORY:
        actor = x['actor']
        timestamp = x['created_at']
        if actor not in LAST_SEEN:
            LAST_SEEN[actor] = timestamp
        else:
            # ISO-8601 strings with a uniform offset compare correctly as text
            if LAST_SEEN[actor] < timestamp:
                LAST_SEEN[actor] = timestamp

    for actor in USERNAMES:
        if actor not in LAST_SEEN:
            LAST_SEEN[actor] = None

    destfile = os.path.join(destdir, 'last_seen.json')
    with open(destfile, 'w') as f:
        f.write(json.dumps(LAST_SEEN, indent=2, sort_keys=True))

    ABSENT = {}
    for maintainer in MAINTAINERS:
        exists = True
        is_absent = False
        ts = LAST_SEEN.get(maintainer)
        if not ts:
            is_absent = True
        else:
            # 2018-01-26T13:24:08+00:00
            ts1 = ts.split('T')[0]
            ts1 = datetime.datetime.strptime(ts1, '%Y-%m-%d')
            days = (datetime.datetime.now() - ts1).days
            if days > 180:
                is_absent = True

        # does the github account still exist?
        rr = requests.get('https://github.com/{}'.format(maintainer))
        if rr.status_code != 200:
            exists = False

        if is_absent and maintainer not in ABSENT:
            ABSENT[maintainer] = {
                'last_seen': ts,
                'exists': exists
            }

    destfile = os.path.join(destdir, 'absent_maintainers.json')
    with open(destfile, 'w') as f:
        f.write(json.dumps(ABSENT, indent=2, sort_keys=True))


if __name__ == "__main__":
    main()
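
A minimal sketch of consuming the second script's output, assuming it has already been run with its default destdir; the file names and the last_seen/exists schema come from the script above, everything else here is illustrative:

import json
import os

DATADIR = '/tmp/ansibot_data'  # the script's default destdir

with open(os.path.join(DATADIR, 'absent_maintainers.json')) as f:
    absent = json.load(f)

# split the absentees into deleted accounts and merely inactive ones
deleted = sorted(m for m, info in absent.items() if not info['exists'])
inactive = sorted(m for m, info in absent.items() if info['exists'])

print('{} deleted accounts, {} inactive for 180+ days'.format(
    len(deleted), len(inactive)))
for name in inactive:
    print('{}\tlast seen {}'.format(name, absent[name]['last_seen']))

Re-running the collector with --usecache reuses the .meta.json cache written on the first pass, which avoids refetching per-issue metadata while iterating on reports like this one.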