Also parse Github URLs

This will replace valid Github URLs with tags.
Hein-Pieter van Braam-Stewart
2021-01-26 23:32:22 +01:00
parent 82ff9a64f8
commit 9c6f01a610
2 changed files with 162 additions and 110 deletions
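A minimal, runnable sketch (not part of the commit) of the URL-to-tag rewrite described in the commit message, using the two regular expressions this change adds to bot.py. The sample message and the hard-coded GITHUB_PROJECT value are illustrative assumptions; the bot itself reads its configuration from environment variables.

import re

# The two patterns added to bot.py by this commit (written as raw strings here).
RE_TAG_PROG = re.compile(r'([A-Za-z0-9_.-]+)?#(\d+)')
RE_URL_PROG = re.compile(r'github.com/([A-Za-z0-9_.-]+)/([A-Za-z0-9_.-]+)/(issues|pulls)/(\d+)\S*')

GITHUB_PROJECT = 'godotengine'  # assumed value for this sketch

# Hypothetical chat message with a bare GitHub URL and an existing repo#number tag.
msg = "Please look at github.com/godotengine/godot/issues/100 and issue-bot#2"

# Rewrite matching URLs into repo#number tags, as the new replace_issue_tags() does
# for part-URLs; URLs belonging to other projects are left untouched.
for match in RE_URL_PROG.finditer(msg):
    project, repository, _kind, number = match.groups()
    if project == GITHUB_PROJECT:
        msg = msg.replace(match.group(0), f'{repository}#{number}')

print(msg)  # Please look at godot#100 and issue-bot#2

# Both tags can then be picked up by RE_TAG_PROG to fetch issue metadata.
print([m.groups() for m in RE_TAG_PROG.finditer(msg)])  # [('godot', '100'), ('issue-bot', '2')]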

bot.py

@@ -19,7 +19,8 @@ GITHUB_TOKEN=os.environ.get('GITHUB_TOKEN')
DEFAULT_AVATAR_URL=os.environ.get('DEFAULT_AVATAR_URL')
DEFAULT_REPOSITORY=os.environ.get('DEFAULT_REPOSITORY')
RE_MATCH = re.compile('([A-Za-z0-9_.-]+)?#(\d+)')
RE_TAG_PROG = re.compile('([A-Za-z0-9_.-]+)?#(\d+)')
RE_URL_PROG = re.compile('github.com/([A-Za-z0-9_.-]+)/([A-Za-z0-9_.-]+)/(issues|pulls)/(\d+)\S*')
def debug_print(msg):
    if DEBUG:
@@ -83,11 +84,147 @@ class Bot:
        }
        self.send(subscribe_msg)

    def update_msg(self, msg):
    def format_issue(self, repository, issue):
        headers = { 'User-Agent': 'Godot Issuebot by hpvb', }
        url = f"https://api.github.com/repos/{GITHUB_PROJECT}/{repository}/issues/{issue}"
        debug_print(f"GitHub API request: {url}")
        r = requests.get(url, headers=headers, auth=(GITHUB_USERNAME, GITHUB_TOKEN))
        if r.status_code != 200:
            debug_print(f"Github API returned an error {r.status_code}")
            debug_print(r.content)
            return None

        issue = r.json()

        avatar_url = DEFAULT_AVATAR_URL
        if 'avatar_url' in issue['user'] and issue['user']['avatar_url']:
            avatar_url = issue['user']['avatar_url']
        if 'gravatar_id' in issue['user'] and issue['user']['gravatar_id']:
            avatar_url = f"https://www.gravatar.com/avatar/{issue['user']['gravatar_id']}"

        is_pr = False
        pr_mergeable = None
        pr_merged = None
        pr_merged_by = None
        pr_draft = False
        pr_reviewers = None
        status = None
        closed_by = None

        if 'pull_request' in issue and issue['pull_request']:
            is_pr = True
            debug_print(f"GitHub API request: {issue['pull_request']['url']}")
            prr = requests.get(issue['pull_request']['url'], headers=headers, auth=(GITHUB_USERNAME, GITHUB_TOKEN))
            if prr.status_code == 200:
                pr = prr.json()
                status = pr['state']
                if 'merged_by' in pr and pr['merged_by']:
                    pr_merged_by = pr['merged_by']['login']
                if 'mergeable' in pr:
                    pr_mergeable = pr['mergeable']
                if 'merged' in pr:
                    pr_merged = pr['merged']
                if 'draft' in pr:
                    pr_draft = pr['draft']
                if 'requested_reviewers' in pr and pr['requested_reviewers']:
                    reviewers = []
                    for reviewer in pr['requested_reviewers']:
                        reviewers.append(reviewer['login'])
                    pr_reviewers = ', '.join(reviewers)
                if 'requested_teams' in pr and pr['requested_teams']:
                    teams = []
                    for team in pr['requested_teams']:
                        teams.append(f"team:{team['name']}")
                    if pr_reviewers:
                        pr_reviewers += ' and '
                        pr_reviewers += ', '.join(teams)
                    else:
                        pr_reviewers = ', '.join(teams)
        else:
            status = issue['state']
            if status == 'closed':
                if 'closed_by' in issue and issue['closed_by']:
                    closed_by = issue['closed_by']['login']

        issue_type = None
        if is_pr:
            issue_type = "Pull Request"
            if pr_merged:
                status = "PR merged"
                if pr_merged_by:
                    status += f" by {pr_merged_by}"
            elif status == 'closed':
                status = "PR closed"
            elif not pr_merged:
                status = "PR open"
            if pr_draft:
                status += " [draft]"
            if pr_mergeable != None:
                if pr_mergeable:
                    status += " [mergeable]"
                else:
                    status += " [needs rebase]"
            if pr_reviewers:
                status += f" reviews required from {pr_reviewers}"
        else:
            issue_type = "Issue"
            status = f"Status: {status}"

        if not pr_merged and closed_by and status == 'closed':
            status += f" by {closed_by}"

        return {
            "author_icon": avatar_url,
            "author_link": issue['html_url'],
            "author_name": f"{repository.title()} [{issue_type}]: {issue['title']} #{issue['number']}",
            "text": status,
        }
    def replace_issue_tags(self, msg):
        debug_print("Updating message!")
        links = []

        for match in re.finditer(RE_MATCH, msg['msg']):
        # First replace all the full links that rocket.chat has detected with tags
        if 'urls' in msg and msg['urls']:
            urls_to_keep = []
            for url in msg['urls']:
                debug_print(f"URL: {url['url']}")
                if f'github.com/{GITHUB_PROJECT}' in url['url']:
                    match = re.search(RE_URL_PROG, url['url'])
                    repository = match.group(2)
                    issue = int(match.group(4))
                    tag = f'{repository}#{issue}'
                    debug_print(f"Replacing url {url['url']} with {tag}")
                    msg['msg'] = msg['msg'].replace(url['url'], tag)
                    continue
                urls_to_keep.append(url)
            msg['urls'] = urls_to_keep

        # Then we replace all of the part-urls with tags as well
        for match in re.finditer(RE_URL_PROG, msg['msg']):
            project = match.group(1)
            repository = match.group(2)
            issue = match.group(4)
            tag = f'{repository}#{issue}'
            if project == GITHUB_PROJECT:
                print('DEBUG: ' + match.group(0))
                msg['msg'] = msg['msg'].replace(match.group(0), tag)

        # Then finally add the metadata for all our tags
        debug_print("Scanning message for tags")
        for match in re.finditer(RE_TAG_PROG, msg['msg']):
            repository = match.group(1)
            issue = int(match.group(2))
@@ -102,108 +239,10 @@ class Bot:
            debug_print(f"Message contains issue for {repository}")
            headers = { 'User-Agent': 'Godot Issuebot by hpvb', }
            url = f"https://api.github.com/repos/{GITHUB_PROJECT}/{repository}/issues/{issue}"
            debug_print(f"GitHub API request: {url}")
            link = self.format_issue(repository, issue)
            if link:
                links.append(link)

            r = requests.get(url, headers=headers, auth=(GITHUB_USERNAME, GITHUB_TOKEN))
            if r.status_code != 200:
                debug_print(f"Github API returned an error {r.status_code}")
                debug_print(r.content)
                continue

            issue = r.json()

            avatar_url = DEFAULT_AVATAR_URL
            if 'avatar_url' in issue['user'] and issue['user']['avatar_url']:
                avatar_url = issue['user']['avatar_url']
            if 'gravatar_id' in issue['user'] and issue['user']['gravatar_id']:
                avatar_url = f"https://www.gravatar.com/avatar/{issue['user']['gravatar_id']}"

            is_pr = False
            pr_mergeable = None
            pr_merged = None
            pr_merged_by = None
            pr_draft = False
            pr_reviewers = None
            status = None
            closed_by = None

            if 'pull_request' in issue and issue['pull_request']:
                is_pr = True
                debug_print(f"GitHub API request: {issue['pull_request']['url']}")
                prr = requests.get(issue['pull_request']['url'], headers=headers, auth=(GITHUB_USERNAME, GITHUB_TOKEN))
                if prr.status_code == 200:
                    pr = prr.json()
                    status = pr['state']
                    if 'merged_by' in pr and pr['merged_by']:
                        pr_merged_by = pr['merged_by']['login']
                    if 'mergeable' in pr:
                        pr_mergeable = pr['mergeable']
                    if 'merged' in pr:
                        pr_merged = pr['merged']
                    if 'draft' in pr:
                        pr_draft = pr['draft']
                    if 'requested_reviewers' in pr and pr['requested_reviewers']:
                        reviewers = []
                        for reviewer in pr['requested_reviewers']:
                            reviewers.append(reviewer['login'])
                        pr_reviewers = ', '.join(reviewers)
                    if 'requested_teams' in pr and pr['requested_teams']:
                        teams = []
                        for team in pr['requested_teams']:
                            teams.append(f"team:{team['name']}")
                        if pr_reviewers:
                            pr_reviewers += ' and '
                            pr_reviewers += ', '.join(teams)
                        else:
                            pr_reviewers = ', '.join(teams)
            else:
                status = issue['state']
                if status == 'closed':
                    if 'closed_by' in issue and issue['closed_by']:
                        closed_by = issue['closed_by']['login']

            issue_type = None
            if is_pr:
                issue_type = "Pull Request"
                if pr_merged:
                    status = "PR merged"
                    if pr_merged_by:
                        status += f" by {pr_merged_by}"
                elif status == 'closed':
                    status = "PR closed"
                elif not pr_merged:
                    status = "PR open"
                if pr_draft:
                    status += " [draft]"
                if pr_mergeable != None:
                    if pr_mergeable:
                        status += " [mergeable]"
                    else:
                        status += " [needs rebase]"
                if pr_reviewers:
                    status += f" reviews required from {pr_reviewers}"
            else:
                issue_type = "Issue"
                status = f"Status: {status}"

            if not pr_merged and closed_by and status == 'closed':
                status += f" by {closed_by}"

            links.append({
                "author_icon": avatar_url,
                "author_link": issue['html_url'],
                "author_name": f"{repository.title()} [{issue_type}]: {issue['title']} #{issue['number']}",
                "text": status,
                "ts": msg['ts'],
            })

        if not len(links):
            return
@@ -222,6 +261,10 @@ class Bot:
        # Hack Hack, the clients won't update without a change to this field. Even if we add or remove attachments.
        msg['msg'] = msg['msg'] + " "

        # Add timestamp to all attachments. These are visible in the mobile client.
        for link in links:
            link['ts'] = msg['ts']

        [msg['attachments'].append(x) for x in links if x not in msg['attachments']]

        update_msg = {
@@ -265,9 +308,9 @@ class Bot:
            for chat_msg in decoded_msg['fields']['args']:
                if 'editedBy' in chat_msg and chat_msg['editedBy']['_id'] == self.id:
                    continue

                if re.match(RE_MATCH, chat_msg['msg']):
                if re.search(RE_TAG_PROG, chat_msg['msg']) or re.search(RE_URL_PROG, chat_msg['msg']):
                    debug_print("Sending message to be updated")
                    self.update_msg(chat_msg)
                    self.replace_issue_tags(chat_msg)

    def on_error(self, ws, error):
        debug_print(error)
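For illustration only (nothing below is part of the commit): an attachment dict as returned by the new format_issue() might look roughly like this for a hypothetical merged pull request. Every value is invented; only the keys come from the code above.

# Hypothetical return value of format_issue('godot', 12345) for a merged PR.
link = {
    "author_icon": "https://avatars.githubusercontent.com/u/0",  # or a Gravatar URL
    "author_link": "https://github.com/godotengine/godot/pull/12345",
    "author_name": "Godot [Pull Request]: Fix a hypothetical crash #12345",
    "text": "PR merged by some-maintainer",
}

replace_issue_tags() appends such dicts to the message's attachments, adding the message timestamp ('ts') to each one before sending the update.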


@@ -6,7 +6,7 @@ def makeurl(issue, repo = ''):
    if not repo:
        repo = 'godot'
    return f'https://github.com/{repo}/issues/{issue}'
    return f'https://github.com/godotengine/{repo}/issues/{issue}'

tests = [
    { 'text': '#100', 'results' : [ makeurl(100) ] },
@@ -26,19 +26,28 @@ tests = [
    { 'text': '(#100) text', 'results' : [ makeurl(100) ] },
    { 'text': '(repo#100) text', 'results' : [ makeurl(100, 'repo') ] },
    { 'text': 'https://github.com/godotengine/issue-bot/issues/2', 'results': [ makeurl(2, 'issue-bot') ] },
    { 'text': 'https://github.com/godotengine/godot/pulls/100', 'results': [ makeurl(100) ] },
    { 'text': 'https://github.com/godotengine/godot/pulls/100#issuecomment-1', 'results': [ makeurl(100) ] },
    { 'text': 'a long line of text with an url https://github.com/godotengine/godot/issues/100 and some tags #102 repo#103', 'results': [ makeurl(102), makeurl(103, 'repo'), makeurl(100) ] },
    { 'text': 'just a bunch of text', 'results' : [ ] },
    { 'text': 'Bunch of ## nonsense ##sdf $$', 'results' : [ ] },
]
prog = re.compile('([A-Za-z0-9_.-]+)?#(\d+)')
tag_prog = re.compile('([A-Za-z0-9_.-]+)?#(\d+)')
url_prog = re.compile('github.com/([A-Za-z0-9_.-]+)/([A-Za-z0-9_.-]+)/(issues|pulls)/(\d+)\S*')
for test in tests:
    text = test['text']
    result = []
    print(re.match(prog, text))
    for match in re.finditer(prog, text):
    for match in re.finditer(tag_prog, text):
        result.append(makeurl(match.group(2), match.group(1)))
    for match in re.finditer(url_prog, text):
        result.append(makeurl(match.group(4), match.group(2)))

    if test['results'] != result:
        print(f'FAILED for {text}: expected {test["results"]} got: {result}')
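As a rough illustration of how the new url_prog pattern behaves in this harness, one more entry could be appended to the tests list above (hypothetical, not part of the commit). Note that url_prog itself does not check the project name; that filtering happens in bot.py against GITHUB_PROJECT.

# Hypothetical extra case: a URL under a different project still produces a tag
# here, because url_prog only captures the project segment without validating it.
{ 'text': 'see github.com/someuser/somerepo/issues/5', 'results': [ makeurl(5, 'somerepo') ] },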