Skip to content

Commit

Permalink
Avoid fetching thousands of pages from GitLab API
Browse files Browse the repository at this point in the history
When you comment on an issue in a project that has thousands of issues,
or operate on a merge request in a project with thousands of MRs, did
would attempt to fetch them all to identify the ID of the one you
interacted with.

This is OK for projects that are small in size, but becomes a huge issue
for large projects such as https://gitlab.com/gitlab-org/gitlab, which
has 218k issues.

Check for the number of pages that would be fetched and avoid loading
them all if there are more than 20 pages. This will lead to issue and
merge request IDs missing from the report, but it will allow the report
to finish within your lifetime.

Signed-off-by: Clemens Lang <cllang@redhat.com>
  • Loading branch information
neverpanic committed Oct 18, 2024
1 parent bb23ea5 commit 85d998f
Showing 1 changed file with 40 additions and 6 deletions.
46 changes: 40 additions & 6 deletions did/plugins/gitlab.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
# Retry fetching: number of attempts and the delay between them
# (presumably seconds — confirm against the retry call site)
GITLAB_ATTEMPTS = 5
GITLAB_INTERVAL = 5
# Maximum number of API result pages to fetch when listing a project's
# issues or merge requests; larger projects are skipped entirely
GITLAB_MAX_PAGE_LIST = 20

# Identifier padding
PADDING = 3
Expand Down Expand Up @@ -143,6 +144,17 @@ def get_project_mr(self, project_id, mr_id):
def get_project_mrs(self, project_id):
    """ Fetch and cache all merge requests of the given project.

    Returns the cached list on repeated calls. When the project has
    more than GITLAB_MAX_PAGE_LIST pages of merge requests, an empty
    list is cached and returned instead, so that huge projects do not
    stall the report by paging through the whole API result set.
    """
    if project_id not in self.project_mrs:
        query = 'projects/{0}/merge_requests'.format(project_id)

        # Check that this will not return more than
        # GITLAB_MAX_PAGE_LIST pages; if it does, skip rather than
        # spend a large amount of time querying all of the results.
        result = self._get_gitlab_api(query)
        result.raise_for_status()
        # Read the header once; a missing header counts as "too many
        # pages" so we err on the side of skipping.
        pages = int(result.headers.get(
            'x-total-pages', GITLAB_MAX_PAGE_LIST + 1))
        log.debug("Page count for %s: %s", query, pages)
        if pages > GITLAB_MAX_PAGE_LIST:
            self.project_mrs[project_id] = []
            return []

        self.project_mrs[project_id] = self._get_gitlab_api_list(
            query, get_all_results=True)
    return self.project_mrs[project_id]
Expand All @@ -155,6 +167,17 @@ def get_project_issue(self, project_id, issue_id):
def get_project_issues(self, project_id):
    """ Fetch and cache all issues of the given project.

    Returns the cached list on repeated calls. When the project has
    more than GITLAB_MAX_PAGE_LIST pages of issues, an empty list is
    cached and returned instead, so that huge projects do not stall
    the report by paging through the whole API result set.
    """
    if project_id not in self.project_issues:
        query = 'projects/{0}/issues'.format(project_id)

        # Check that this will not return more than
        # GITLAB_MAX_PAGE_LIST pages; if it does, skip rather than
        # spend a large amount of time querying all of the results.
        result = self._get_gitlab_api(query)
        result.raise_for_status()
        # Read the header once; a missing header counts as "too many
        # pages" so we err on the side of skipping.
        pages = int(result.headers.get(
            'x-total-pages', GITLAB_MAX_PAGE_LIST + 1))
        log.debug("Page count for %s: %s", query, pages)
        if pages > GITLAB_MAX_PAGE_LIST:
            self.project_issues[project_id] = []
            return []

        self.project_issues[project_id] = self._get_gitlab_api_list(
            query, get_all_results=True)
    return self.project_issues[project_id]
Expand Down Expand Up @@ -200,8 +223,12 @@ def __init__(self, data, parent):
self.title = data['target_title']

def iid(self):
    """ Return the project-local issue id, or "unknown" if missing.

    The lookup may come back empty when the parent project was too
    large to list, in which case the placeholder string is returned.
    """
    found = self.gitlabapi.get_project_issue(
        self.data['project_id'], self.data['target_id'])
    return found['iid'] if found is not None else "unknown"

def __str__(self):
""" String representation """
Expand All @@ -226,8 +253,12 @@ def __str__(self):
class MergeRequest(Issue):

    def iid(self):
        """ Return the project-local merge request id, or "unknown".

        The lookup may come back empty when the parent project was too
        large to list, in which case the placeholder string is
        returned.
        """
        found = self.gitlabapi.get_project_mr(
            self.data['project_id'], self.data['target_id'])
        return found['iid'] if found is not None else "unknown"


class Note(Issue):
Expand All @@ -244,9 +275,12 @@ def iid(self):
return issue['iid']
return 'unknown'
elif self.data['note']['noteable_type'] == 'MergeRequest':
return self.gitlabapi.get_project_mr(
merge_request = self.gitlabapi.get_project_mr(
self.data['project_id'],
self.data['note']['noteable_id'])['iid']
self.data['note']['noteable_id'])

if merge_request is not None:
return merge_request['iid']
else:
return "unknown"

Expand Down

0 comments on commit 85d998f

Please sign in to comment.