This repository has been archived by the owner on Jan 25, 2024. It is now read-only.

Fix issue with exceeding rate limits #1892

Merged
merged 2 commits on Aug 6, 2023
Changes from 1 commit
Refactor and format with black
JosephTLyons committed Aug 6, 2023
commit 2c9867b88665e94e6c12e6e80b4167dcecf66ed1
105 changes: 71 additions & 34 deletions scripts/update_top_ranking_issues/main.py
@@ -12,7 +12,7 @@
    "design",
    "documentation",
    "enhancement",
    "panic / crash"
    "panic / crash",
]
CORE_LABEL_NAMES_SET = set(CORE_LABEL_NAMES_LIST)
IGNORED_LABEL_NAMES_LIST = [
@@ -42,7 +42,9 @@ def main():
    github_access_token = os.getenv("GITHUB_TOKEN")

    if not github_access_token:
        raise CommandLineArgumentException('A GitHub access token must be provided in the env as: "GITHUB_TOKEN"')
        raise CommandLineArgumentException(
            'A GitHub access token must be provided in the env as: "GITHUB_TOKEN"'
        )

    prod_mode = False

@@ -81,18 +83,55 @@ def main():
    print(f"Remaining requests: {remaining_requests}")


# TODO: Refactor this at some point
def get_issue_maps(github, repository):
    label_name_to_issue_list_map = get_label_name_to_issue_list_map(github, repository)
    label_name_to_issue_data_list_map = get_label_name_to_issue_data_list_map(
        label_name_to_issue_list_map
    )

    error_message_to_erroneous_issue_list_map = (
        get_error_message_to_erroneous_issue_list_map(github, repository)
    )
    error_message_to_erroneous_issue_data_list_map = (
        get_error_message_to_erroneous_issue_data_list_map(
            error_message_to_erroneous_issue_list_map
        )
    )

    # Create a new dictionary with labels ordered by the summation of likes on the associated issues
    label_names = list(label_name_to_issue_data_list_map.keys())

    label_names.sort(
        key=lambda label_name: sum(
            issue_data.like_count
            for issue_data in label_name_to_issue_data_list_map[label_name]
        ),
        reverse=True,
    )

    label_name_to_issue_data_list_map = {
        label_name: label_name_to_issue_data_list_map[label_name]
        for label_name in label_names
    }

    return (
        label_name_to_issue_data_list_map,
        error_message_to_erroneous_issue_data_list_map,
    )


def get_label_name_to_issue_list_map(github, repository):
    label_name_to_issue_list_map = defaultdict(list)
    error_message_to_erroneous_issue_list_map = defaultdict(list)

    for label in CORE_LABEL_NAMES_SET:
        query_string = f'repo:{repository.full_name} is:open is:issue label:"{label}" sort:reactions-+1-desc'

        issue_count = 0

        for issue in github.search_issues(query_string):
            labels_on_issue_set = set(label["name"] for label in issue._rawData["labels"])
            labels_on_issue_set = set(
                label["name"] for label in issue._rawData["labels"]
            )
            ignored_labels_on_issue_set = labels_on_issue_set & IGNORED_LABEL_NAMES_SET

            if ignored_labels_on_issue_set:
@@ -105,13 +144,10 @@ def get_issue_maps(github, repository):
            if issue_count >= ISSUES_PER_LABEL:
                break

    a = CORE_LABEL_NAMES_SET.union(IGNORED_LABEL_NAMES_SET)
    x = " ".join([f'-label:"{label}"' for label in a])
    query_string = f'repo:{repository.full_name} is:open is:issue {x}'
    return label_name_to_issue_list_map

    for issue in github.search_issues(query_string):
        error_message_to_erroneous_issue_list_map["missing core label"].append(issue)

def get_label_name_to_issue_data_list_map(label_name_to_issue_list_map):
    label_name_to_issue_data_list_map = {}

    for label_name in label_name_to_issue_list_map:
Expand All @@ -127,33 +163,35 @@ def get_issue_maps(github, repository):
if issue_data_list:
label_name_to_issue_data_list_map[label_name] = issue_data_list

return label_name_to_issue_data_list_map


def get_error_message_to_erroneous_issue_list_map(github, repository):
error_message_to_erroneous_issue_list_map = defaultdict(list)

filter_labels = CORE_LABEL_NAMES_SET.union(IGNORED_LABEL_NAMES_SET)
filter_labels_string = " ".join([f'-label:"{label}"' for label in filter_labels])
query_string = (
f"repo:{repository.full_name} is:open is:issue {filter_labels_string}"
)

for issue in github.search_issues(query_string):
error_message_to_erroneous_issue_list_map["missing core label"].append(issue)

return error_message_to_erroneous_issue_list_map


def get_error_message_to_erroneous_issue_data_list_map(
error_message_to_erroneous_issue_list_map,
):
error_message_to_erroneous_issue_data_list_map = {}

for label_name in error_message_to_erroneous_issue_list_map:
issue_list = error_message_to_erroneous_issue_list_map[label_name]
issue_data_list = [IssueData(issue) for issue in issue_list]
error_message_to_erroneous_issue_data_list_map[label_name] = issue_data_list

# Create a new dictionary with labels ordered by the summation the of likes on the associated issues
label_names = list(label_name_to_issue_data_list_map.keys())

label_names.sort(
key=lambda label_name: sum(
issue_data.like_count
for issue_data in label_name_to_issue_data_list_map[label_name]
),
reverse=True,
)

label_name_to_issue_data_list_map = {
label_name: label_name_to_issue_data_list_map[label_name]
for label_name in label_names
}

return (
label_name_to_issue_data_list_map,
error_message_to_erroneous_issue_data_list_map,
)
return error_message_to_erroneous_issue_data_list_map


def get_issue_text(
@@ -217,7 +255,9 @@ def get_highest_ranking_issues_lines(label_name_to_issue_data_list_dictionary):
        highest_ranking_issues_lines.append(f"\n## {label}\n")

        for issue_data in issue_data_list:
            markdown_bullet_point = f"{issue_data.url} ({issue_data.like_count} :thumbsup:)"
            markdown_bullet_point = (
                f"{issue_data.url} ({issue_data.like_count} :thumbsup:)"
            )
            markdown_bullet_point = f"- {markdown_bullet_point}"
            highest_ranking_issues_lines.append(markdown_bullet_point)

@@ -245,6 +285,3 @@ def get_erroneous_issues_lines(error_message_to_erroneous_issue_data_list_map):
main()
run_duration = datetime.now() - start_time
print(run_duration)

# TODO: Progress prints
# - "Gathering issues..."