|
| 1 | +import json, logging, datetime, sys |
| 2 | + |
| 3 | +from pprint import pprint,pformat |
| 4 | + |
| 5 | +from pygerrit.client import GerritClient |
| 6 | +from pygerrit2.rest import GerritRestAPI |
| 7 | + |
| 8 | +from requests.auth import HTTPDigestAuth |
| 9 | +from elasticsearch import Elasticsearch |
| 10 | + |
| 11 | +import re |
| 12 | +from jira import JIRA |
| 13 | + |
# Root logging config: one log file, overwritten per run (filemode='w'),
# with line numbers to ease debugging of the long event-listen loop.
# NOTE: datefmt uses %H (24-hour clock); the original %I without %p was
# ambiguous (e.g. 01:00 could be AM or PM).
logging.basicConfig(
    filename='log/gerrit_listener.log',
    filemode='w',
    level=logging.DEBUG,
    format='%(asctime)s %(name)s %(levelname)s %(lineno)d:%(message)s',
    datefmt='%m/%d/%Y %H:%M:%S',
)
log = logging.getLogger("gerrit_listener")
# Quieten chatty third-party loggers so the file stays readable.
logging.getLogger("paramiko").setLevel(logging.WARNING)
logging.getLogger("root").setLevel(logging.DEBUG)
logging.getLogger("urllib3").setLevel(logging.ERROR)
logging.getLogger("elasticsearch").setLevel(logging.ERROR)
| 20 | + |
class DGerritHandler:
    """Listen to a Gerrit SSH event stream, enrich each event with JIRA issue
    types and per-file change statistics, and index the result into a monthly
    Elasticsearch index for dashboarding.
    """

    # File actions Gerrit can report for a file in a patch set.
    FILE_ACTIONS = ("ADDED", "MODIFIED", "DELETED", "RENAMED", "COPIED", "REWRITE")

    def __init__(self, gerrit_host="gerrit.mmt.com"):
        """Connect to Gerrit (SSH + REST), JIRA and Elasticsearch, then start
        listening to the event stream.

        :param gerrit_host: host name of the Gerrit server to listen to.
        """
        self.client = None
        self.ELK_HOST = "127.0.0.1:9200"  # elastic search
        # Monthly index so old stats can be dropped by deleting whole indices.
        self.index_name = datetime.datetime.now().strftime('gerrit-stats-%Y-%m')
        # One ES client reused for every document instead of one per event;
        # the client connects lazily, so constructing it here is safe.
        self.elk = Elasticsearch(self.ELK_HOST)

        url = "http://127.0.0.1:8080"  # gerrit servers
        auth = HTTPDigestAuth('admin', 'pass')
        self.rest_client = GerritRestAPI(url=url, auth=auth)

        # establish connection with jira
        self.jira = JIRA(basic_auth=('jira', 'pass'), options={'server': '127.0.0.1'})  # Jira server
        # Matches JIRA issue keys such as "PROJ-123" in commit subjects.
        self.regex = r'([A-Z]+-[0-9]+)'

        log.info("creating a new connection with %s", gerrit_host)
        self.client = GerritClient(gerrit_host)
        log.info("Gerrit version is %s", self.client.gerrit_version())
        self.start_event_stream()

    def start_event_stream(self):
        """Begin listening to the gerrit event stream."""
        log.info("initiating listening to event stream")
        self.client.start_event_stream()

    def event_listen(self):
        """Main loop: block on the event stream forever, building and indexing
        one ELK document per event.  Exits the process on an ``error-event``.
        Any other failure is logged and the loop continues.
        """
        event_count = 0  # renamed from ``iter``, which shadowed the builtin
        while True:
            # Pre-bind so the except handler cannot hit a NameError when
            # get_event() itself raises on the first iteration.
            event = None
            try:
                elog = {}
                event = self.client.get_event()
                log.info("==============START=====================================")
                log.info("got a new event %s -- %s", event.name, type(event.json))
                log.info("actual event is %s", pformat(event.json))
                elog['type'] = event.name
                if event.name == "error-event":
                    log.info("got an error-event, exiting the script.............")
                    sys.exit()

                self._add_base_fields(elog, event)
                self._add_event_specific_fields(elog, event)
                self._add_jira_fields(elog)
                self._add_file_stats(elog)

                log.info("elk message %d is %s", event_count, json.dumps(elog, indent=2))
                log.info("==============END=====================================")

                self.log_to_elk(elog)
            except Exception as e:
                log.exception(e)
                if event:
                    log.info(str(event.json))
            finally:
                event_count += 1

    def _add_base_fields(self, elog, event):
        """Fill change/patch-set identification fields shared by all events."""
        elog['gerrit_id'] = event.change.number
        log.info(dir(event))
        # The author lives either on the event itself (comments, reviews) or
        # on the patch set (pushes); fall back accordingly.
        if hasattr(event, 'author'):
            elog['author_username'] = event.author.username
            elog['author_email'] = event.author.email
        elif hasattr(event, 'patchset'):
            has_author = hasattr(event.patchset, 'author')
            elog['author_username'] = event.patchset.author.username if has_author else None
            elog['author_email'] = event.patchset.author.email if has_author else None

        elog['project'] = event.change.project
        elog['owner'] = event.change.owner.username
        elog['branch'] = event.change.branch
        elog['patchset_number'] = event.patchset.number
        elog['patchset_size_deletions'] = event.json.get('patchSet').get('sizeDeletions')
        elog['patchset_size_insertions'] = event.json.get('patchSet').get('sizeInsertions')
        elog['subject'] = event.change.subject

    def _add_event_specific_fields(self, elog, event):
        """Record fields that only exist for particular event types
        (reviewer additions, merges, comment/approval events)."""
        if event.name == 'reviewer-added':
            elog['reviewers'] = event.reviewer.username

        elif event.name == 'change-merged':
            elog['submitter'] = event.submitter.username

        elif event.name == 'comment-added' and 'approvals' in event.json:
            for approval in event.json.get('approvals'):
                log.info(approval)
                if 'oldValue' in approval:
                    # A score actually changed: record it and stop scanning.
                    log.info("----found old value----")
                    elog['approval_approver'] = event.author.username
                    elog['approver_type'] = approval.get('type')
                    elog['approval_value'] = approval.get('value')
                    elog['approval_description'] = approval.get('description')
                    break
                else:
                    # Plain comment with no score change; may be overwritten
                    # by a later approval in the same event.
                    elog['approval_approver'] = event.author.username
                    elog['approver_type'] = 'comment-added'
                    elog['approval_value'] = '0'
                    elog['approval_description'] = 'comment-added'

    def _add_jira_fields(self, elog):
        """Extract JIRA issue keys from the subject and look up their types."""
        log.info("~~~~~~~~~~~~~~~~~~~ Start JIRA Analysis ~~~~~~~~~~~~~~~~~~~")
        elog['issue_id'] = re.findall(self.regex, elog['subject'])
        elog['issue_id_type'] = [
            self.jira.issue(issue).fields.issuetype.name
            for issue in elog['issue_id']
        ]
        log.info("~~~~~~~~~~~~~~~~~~~ End JIRA Analysis ~~~~~~~~~~~~~~~~~~~")

    def _add_file_stats(self, elog):
        """Query Gerrit for the current patch set and aggregate line counts
        per file action, skipping Gerrit's synthetic commit-message entries."""
        log.info("~~~~~~~~~~~~~~~~~~~ Start Gerrit File Actions ~~~~~~~~~~~~~~~~~~~")
        files_info = {
            action: {"lines_added": 0, "lines_removed": 0, "count": 0}
            for action in self.FILE_ACTIONS
        }
        query_result = self.client.run_command(
            "query --current-patch-set --format JSON --files change:{}".format(elog['gerrit_id']))
        output = query_result.stdout.read().split('\n')
        # The first JSON line holds the change; trailing lines are query stats.
        files = json.loads(output[0])
        log.info(elog['project'])
        for entry in files['currentPatchSet']['files']:
            if entry['file'] not in ['/COMMIT_MSG', '/MERGE_LIST']:
                files_info[entry['type']]['lines_added'] += entry['insertions']
                files_info[entry['type']]['lines_removed'] += entry['deletions']
                files_info[entry['type']]['count'] += 1

        elog['files'] = files_info
        log.info("~~~~~~~~~~~~~~~~~~~ End Gerrit File Actions ~~~~~~~~~~~~~~~~~~~")

    def stop_event_stream(self):
        """Stop listening to the gerrit event stream."""
        log.info("stop listening to event stream")
        self.client.stop_event_stream()

    def log_to_elk(self, log):
        """Index one event document into the monthly Elasticsearch index.

        :param log: dict of event fields; mutated in place to add a
            ``timestamp``.  NOTE(review): the parameter name shadows the
            module-level logger (unusable here as a result); kept unchanged
            for caller compatibility.
        """
        log['timestamp'] = datetime.datetime.now()
        self.elk.index(index=self.index_name, doc_type='gerrit_info', body=log)

    def get_reviewer_list(self, change_id):
        """Return the usernames of all reviewers on ``change_id`` via REST.

        Entries without a ``username`` (e.g. group reviewers) are skipped.
        """
        endpoint = "/changes/%s/reviewers/" % change_id
        data = self.rest_client.get(endpoint)
        return [entry.get('username') for entry in data
                if entry.get('username') is not None]
| 187 | + |
if __name__ == "__main__":
    # Constructing the handler also opens the event stream; event_listen()
    # then blocks forever, so stop_event_stream() only runs if it returns.
    handler = DGerritHandler()
    handler.event_listen()
    handler.stop_event_stream()