@@ -132,32 +132,32 @@ async def _run(
             base_ref = compare_match.group(3)
             # Group 4 is the separator (.. or ...) - not used, we always use ... for APIs
             target_ref = compare_match.group(5).removesuffix('.patch')  # Remove .patch if present
-
+
             # Construct repository URL
             repo_url = f"https://{parsed.netloc}/{owner}/{repo}.git"
-
+
             # Determine if this is GitHub or GitLab based on the URL pattern
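             # (GitLab web URLs include a '/-/' separator, e.g. /owner/repo/-/compare/...; GitHub URLs do not)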
             is_github = '/-/' not in parsed.path
-
+
             # Fetch compare information to get the list of commits
             if is_github:
                 # GitHub API - note: GitHub uses ... for compare
                 api_url = f"https://api.github.com/repos/{owner}/{repo}/compare/{base_ref}...{target_ref}"
             else:
                 # GitLab API
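                 # GitLab's API takes the URL-encoded project path (owner%2Frepo) as the project ID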
                 api_url = f"https://{parsed.netloc}/api/v4/projects/{owner}%2F{repo}/repository/compare?from={base_ref}&to={target_ref}"
-
+
             headers = {
                 'Accept': 'application/json',
                 'User-Agent': 'RHEL-Backport-Agent'
             }
-
+
             try:
                 async with aiohttp.ClientSession() as session:
                     async with session.get(api_url, headers=headers, timeout=aiohttp.ClientTimeout(total=15)) as response:
                         response.raise_for_status()
                         data = await response.json()
-
+
                         # Extract commits from API response
                         if is_github:
                             # GitHub: commits are in 'commits' array (oldest first)
@@ -167,16 +167,16 @@ async def _run(
                             commits = [commit['id'] for commit in data.get('commits', [])]
                             # Reverse to get oldest first
                             commits = list(reversed(commits))
-
+
                 # Use the last commit (newest) as the default commit_hash
                 commit_hash = commits[-1] if commits else target_ref
-
+
             except (aiohttp.ClientError, KeyError) as e:
                 # If API fails, fall back to using target_ref as commit_hash
                 # This allows the tool to still work even if API is unavailable
                 commit_hash = target_ref
                 commits = []
-
+
             # Return with compare information
             return ExtractUpstreamRepositoryOutput(
                 result=UpstreamRepository(
@@ -197,7 +197,7 @@ async def _run(
         if commit_match:
             repo_path = commit_match.group(1).strip('/')
             commit_hash = commit_match.group(2)
-
+
             # Construct clone URL
             scheme = parsed.scheme or 'https'
             repo_url = f"{scheme}://{parsed.netloc}/{repo_path}"
@@ -220,16 +220,16 @@ async def _run(
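         # cgit puts the commit hash in the 'id=' query parameter, gitweb in 'h='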
         query_match = re.search(r'(?:id|h)=([a-f0-9]{7,40})', parsed.query)
         if query_match:
             commit_hash = query_match.group(1)
-
+
         # Extract repo from ?p= parameter
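         # (gitweb separates query parameters with ';', so the value ends at ';' or '&')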
         repo_path = None
         repo_query_match = re.search(r'[?&]p=([^;&]+)', parsed.query)
         if repo_query_match:
             repo_path = repo_query_match.group(1)
-
+
         if not repo_path:
             raise ToolError(f"Could not extract repository path from URL: {tool_input.upstream_fix_url}")
-
+
         # Construct clone URL
         scheme = parsed.scheme or 'https'
         repo_url = f"{scheme}://{parsed.netloc}/{repo_path}"