Mirror of https://github.com/davidhalter/typeshed.git, synced 2025-12-08 13:04:46 +08:00.
Add more checks in parse_metadata.py for the upstream_repository field (#10513)
This commit is contained in:
async def get_github_repo_info(session: aiohttp.ClientSession, stub_info: StubInfo) -> GithubInfo | None:
    """Return GitHub tag information for the project described by *stub_info*.

    If ``stub_info.upstream_repository`` points at a repository hosted on
    github.com, query the GitHub tags API and return a ``GithubInfo`` holding
    the ``owner/repo`` path and the decoded list of tag objects.

    Else, return None.

    NOTE(review): this signature is reconstructed from a truncated diff hunk
    header (``... stub_info: StubIn``) — confirm the parameter and return
    annotations against the full file.
    """
    if stub_info.upstream_repository:
        # We have various sanity checks for the upstream_repository field in
        # tests/parse_metadata.py, so no need to repeat all of them here
        split_url = urllib.parse.urlsplit(stub_info.upstream_repository)
        if split_url.netloc == "github.com":
            url_path = split_url.path.strip("/")
            # parse_metadata.py already guarantees the path is exactly
            # "owner/repo", so an assert (not a silent fall-through) is used.
            assert len(Path(url_path).parts) == 2
            github_tags_info_url = f"https://api.github.com/repos/{url_path}/tags"
            async with session.get(github_tags_info_url, headers=get_github_api_headers()) as response:
                # Best-effort: any non-200 status (rate limit, deleted repo,
                # ...) falls through to the "no info" return below.
                if response.status == 200:
                    tags: list[dict[str, Any]] = await response.json()
                    assert isinstance(tags, list)
                    return GithubInfo(repo_path=url_path, tags=tags)
    return None
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user