diff --git a/scripts/release_issue_status/main.py b/scripts/release_issue_status/main.py
index 9f9e5a35145d..6bdf38279c4f 100644
--- a/scripts/release_issue_status/main.py
+++ b/scripts/release_issue_status/main.py
@@ -5,12 +5,15 @@
 from datetime import date, datetime
 import subprocess as sp
 from azure.storage.blob import BlobClient
+import reply_generator as rg
+from update_issue_body import update_issue_body, find_readme_link
+import traceback
 
 _NULL = ' '
 _FILE_OUT = 'release_issue_status.csv'
+_FILE_OUT_PYTHON = 'release_python_status.md'
 _PYTHON_SDK_ADMINISTRATORS = {'msyyc', 'RAY-316', 'BigCat20196'}
-
 
 
 def my_print(cmd):
     print('==' + cmd + ' ==\n')
@@ -20,6 +23,20 @@ def print_check(cmd):
     sp.check_call(cmd, shell=True)
 
 
+def output_python_md(issue_status_python):
+    with open(_FILE_OUT_PYTHON, 'w') as file_out:
+        file_out.write('| issue | author | package | assignee | bot advice | created date of issue | delay from created date |\n')
+        file_out.write('| ------ | ------ | ------ | ------ | ------ | ------ | :-----: |\n')
+        file_out.writelines([item.output_python() for item in sorted(issue_status_python, key=_key_select)])
+
+
+def output_csv(issue_status):
+    with open(_FILE_OUT, 'w') as file_out:
+        file_out.write('language,issue,author,package,created date,delay from created date,latest update time,'
+                       'delay from latest update,status,bot advice\n')
+        file_out.writelines([item.output() for item in sorted(issue_status, key=_key_select)])
+
+
 class IssueStatus:
     link = _NULL
     author = _NULL
@@ -36,6 +53,7 @@ class IssueStatus:
     whether_author_comment = True
     issue_object = _NULL
     labels = _NULL
+    assignee = _NULL
 
     def output(self):
         return '{},{},{},{},{},{},{},{},{},{}\n'.format(self.language, self.link, self.author,
@@ -46,6 +64,13 @@ def output(self):
                                                         self.delay_from_latest_update, self.status,
                                                         self.bot_advice)
+
+    def output_python(self):
+        return '| [#{}]({}) | {} | {} | {} | {} | {} | {} |\n'.format(self.link.split('/')[-1], self.link, self.author,
+                                                                      self.package, self.assignee, self.bot_advice,
+                                                                      str(date.fromtimestamp(self.create_date)),
+                                                                      self.delay_from_create_date)
+
 
 
 def _extract(str_list, key_word):
     for item in str_list:
@@ -86,8 +111,7 @@ def _extract_author_latest_comment(comments):
 def _whether_author_comment(comments):
     q = set(comment.user.login for comment in comments)
     diff = q.difference(_PYTHON_SDK_ADMINISTRATORS)
-
-    return len(diff) > 0
+    return len(diff) > 0
 
 def _latest_comment_time(comments, delay_from_create_date):
     q = [(comment.updated_at.timestamp(), comment.user.login)
@@ -96,13 +120,50 @@
     return delay_from_create_date if not q else int((time.time() - q[-1][0]) / 3600 / 24)
 
+
+def auto_reply(item, sdk_repo, rest_repo, duplicated_issue):
+    print("==========new issue number: {}".format(item.issue_object.number))
+    if 'auto-link' not in item.labels:
+        try:
+            package_name, readme_link = update_issue_body(sdk_repo, rest_repo, item.issue_object.number)
+            print("pkname, readme", package_name, readme_link)
+            item.package = package_name
+            key = ('Python', item.package)
+            duplicated_issue[key] = duplicated_issue.get(key, 0) + 1
+        except Exception as e:
+            item.bot_advice = 'failed to modify the body of the new issue. Please modify manually'
+            item.labels.append('attention')
+            print(e)
+            raise
+        item.labels.append('auto-link')
+        item.issue_object.set_labels(*item.labels)
+    else:
+        try:
+            readme_link = find_readme_link(sdk_repo, item.issue_object.number)
+        except Exception as e:
+            print('Issue: {} updates body failed'.format(item.issue_object.number))
+            item.bot_advice = 'failed to find Readme link, Please check !!'
+            item.labels.append('attention')
+            raise
+    try:
+        reply = rg.begin_reply_generate(item=item, rest_repo=rest_repo, readme_link=readme_link)
+    except Exception as e:
+        item.bot_advice = 'auto reply failed, Please intervene manually !!'
+        print('Error from auto reply ========================')
+        print('Issue:{}'.format(item.issue_object.number))
+        print(traceback.format_exc())
+        print('==============================================')
+
+
 def main():
     # get latest issue status
     g = Github(os.getenv('TOKEN'))  # please fill user_token
-    repo = g.get_repo('Azure/sdk-release-request')
-    label1 = repo.get_label('ManagementPlane')
-    open_issues = repo.get_issues(state='open', labels=[label1])
+    sdk_repo = g.get_repo('Azure/sdk-release-request')
+    rest_repo = g.get_repo('Azure/azure-rest-api-specs')
+    label1 = sdk_repo.get_label('ManagementPlane')
+    open_issues = sdk_repo.get_issues(state='open', labels=[label1])
     issue_status = []
+    issue_status_python = []
     duplicated_issue = dict()
     start_time = time.time()
     for item in open_issues:
@@ -124,7 +185,9 @@ def main():
         issue.issue_object = item
         issue.labels = [label.name for label in item.labels]
         issue.days_from_latest_commit = _latest_comment_time(item.get_comments(), issue.delay_from_create_date)
-
+        if item.assignee:
+            issue.assignee = item.assignee.login
+
         issue_status.append(issue)
         key = (issue.language, issue.package)
         duplicated_issue[key] = duplicated_issue.get(key, 0) + 1
@@ -135,18 +198,23 @@ def main():
     # rule2: if latest comment is from author, need response asap
     # rule3: if comment num is 0, it is new issue, better to deal with it asap
     # rule4: if delay from latest update is over 7 days, better to deal with it soon.
-    # rule5: if delay from created date is over 30 days and owner never reply, close it.
-    # rule6: if delay from created date is over 15 days and owner never reply, remind owner to handle it.
+    # rule5: if delay from created date is over 30 days, better to close.
+    # rule6: if delay from created date is over 30 days and owner never reply, close it.
+    # rule7: if delay from created date is over 15 days and owner never reply, remind owner to handle it.
     for item in issue_status:
         if item.status == 'release':
             item.bot_advice = 'better to release asap.'
-        elif item.author == item.author_latest_comment:
-            item.bot_advice = 'new comment for author.'
-        elif item.comment_num == 0:
+        elif item.comment_num == 0 and 'Python' in item.labels:
            item.bot_advice = 'new issue and better to confirm quickly.'
+            try:
+                auto_reply(item, sdk_repo, rest_repo, duplicated_issue)
+            except Exception as e:
+                continue
+        elif not item.author_latest_comment in _PYTHON_SDK_ADMINISTRATORS:
+            item.bot_advice = 'new comment for author.'
        elif item.delay_from_latest_update >= 7:
             item.bot_advice = 'delay for a long time and better to handle now.'
-
+
         if item.days_from_latest_commit >= 30 and item.language == 'Python' and '30days attention' not in item.labels:
             item.labels.append('30days attention')
             item.issue_object.set_labels(*item.labels)
@@ -157,17 +225,17 @@ def main():
                                              ' please deal with it ASAP. We will close the issue if there is still no response after 15 days!')
             item.labels.append('15days attention')
             item.issue_object.set_labels(*item.labels)
-
-
+
         # judge whether there is duplicated issue for same package
         if item.package != _NULL and duplicated_issue.get((item.language, item.package)) > 1:
             item.bot_advice = f'Warning:There is duplicated issue for {item.package}. ' + item.bot_advice
+
+        if item.language == 'Python':
+            issue_status_python.append(item)
 
     # output result
-    with open(_FILE_OUT, 'w') as file_out:
-        file_out.write('language,issue,author,package,created date,delay from created date,latest update time,'
-                       'delay from latest update,status,bot advice\n')
-        file_out.writelines([item.output() for item in sorted(issue_status, key=_key_select)])
+    output_python_md(issue_status_python)
+    output_csv(issue_status)
 
     # commit to github
     print_check('git add .')
@@ -179,7 +247,7 @@ def main():
                       blob_name=_FILE_OUT)
     with open(_FILE_OUT, 'rb') as data:
         blob.upload_blob(data, overwrite=True)
-
+
 
 if __name__ == '__main__':
     main()
diff --git a/scripts/release_issue_status/reply_generator.py b/scripts/release_issue_status/reply_generator.py
new file mode 100644
index 000000000000..dbe6494bd7d1
--- /dev/null
+++ b/scripts/release_issue_status/reply_generator.py
@@ -0,0 +1,111 @@
+import re
+
+issue_object_rg = None
+
+
+def weather_change_readme(rest_repo, link_dict, labels):
+    # to see whether need change readme
+    contents = str(rest_repo.get_contents(link_dict['readme_path']).decoded_content)
+    pattern_tag = re.compile(r'tag: package-[\w+-.]+')
+    package_tag = pattern_tag.search(contents).group()
+    package_tag = package_tag.split(':')[1].strip()
+    readme_python_contents = str(rest_repo.get_contents(link_dict['readme_python_path']).decoded_content)
+    whether_multi_api = 'multi-api' in readme_python_contents
+    whether_same_tag = package_tag == link_dict['readme_tag']
+    whether_change_readme = not whether_same_tag or whether_multi_api and not 'MultiAPI' in labels
+    return whether_change_readme
+
+
+# parse owner's comment and get links
+def get_links(readme_link):
+    link_dict = {}
+    comment_body = issue_object_rg.body
+    pattern_readme = re.compile(r'/specification/([\w-]+/)+readme.md')
+    pattern_resource_manager = re.compile(r'/specification/([\w-]+/)+resource-manager')
+    pattern_tag = re.compile(r'package-[\w+-.]+')
+    readme_path = pattern_readme.search(readme_link).group()
+    readme_tag = pattern_tag.search(comment_body).group()
+    resource_manager = pattern_resource_manager.search(readme_link).group()
+    link_dict['readme_path'] = readme_path
+    link_dict['readme_python_path'] = readme_path[:readme_path.rfind('/')] + '/readme.python.md'
+    link_dict['readme_tag'] = readme_tag
+    link_dict['resource_manager'] = resource_manager
+    return link_dict
+
+
+def get_latest_pr_from_readme(rest_repo, link_dict):
+    commits = rest_repo.get_commits(path=link_dict['resource_manager'])
+    latest_commit = [commit for commit in commits][0]
+    latest_pr_brief = latest_commit.commit.message
+    latest_pr_number = re.findall('\(\#[0-9]+\)', latest_pr_brief)
+    latest_pr_number_int = []
+    for number in latest_pr_number:
+        number = int(re.search('\d+', number).group())
+        latest_pr_number_int.append(number)
+    latest_pr_number_int.sort()
+
+    return latest_pr_number_int[-1]
+
+
+def latest_pr_parse(rest_repo, latest_pr_number):
+    latest_pr = rest_repo.get_issue(latest_pr_number)
+    latest_pr_comments = latest_pr.get_comments()
+    b = [i for i in latest_pr_comments]
+    for comment in latest_pr_comments:
+        if 'Swagger Generation Artifacts' in comment.body:
+            return swagger_generator_parse(comment.body, latest_pr_number)
+
+
+def swagger_generator_parse(context, latest_pr_number):
+    track1_info_model = ''
+    try:
+        if ' azure-sdk-for-python' in context:
+            pattern_python_t1 = re.compile(' azure-sdk-for-python.+?', re.DOTALL)
+            python_t1 = re.search(pattern_python_t1, context).group()
+            prttern_python_track1 = re.compile('