perf: use shared HTTP client pool in pypi_download_file

Mondo Diaz
2026-02-04 09:51:05 -06:00
parent 79dd7b833e
commit 8fdb73901e

@@ -642,6 +642,8 @@ async def pypi_download_file(
     upstream: Optional[str] = None,
     db: Session = Depends(get_db),
     storage: S3Storage = Depends(get_storage),
+    http_client: HttpClientManager = Depends(get_http_client),
+    cache: CacheService = Depends(get_cache),
 ):
     """
     Download a package file, caching it in Orchard.
@@ -723,7 +725,9 @@ async def pypi_download_file(
     headers.update(_build_auth_headers(matched_source))
     auth = _get_basic_auth(matched_source) if matched_source else None
-    timeout = httpx.Timeout(300.0, connect=PROXY_CONNECT_TIMEOUT)  # 5 minutes for large files
+    # Use shared HTTP client from pool with longer timeout for file downloads
+    client = http_client.get_client()
+    download_timeout = httpx.Timeout(connect=30.0, read=300.0, write=300.0, pool=30.0)

     # Initialize extracted dependencies list
     extracted_deps = []
@@ -731,11 +735,11 @@ async def pypi_download_file(
# Fetch the file # Fetch the file
logger.info(f"PyPI proxy: fetching {filename} from {upstream_url}") logger.info(f"PyPI proxy: fetching {filename} from {upstream_url}")
async with httpx.AsyncClient(timeout=timeout, follow_redirects=False) as client:
response = await client.get( response = await client.get(
upstream_url, upstream_url,
headers=headers, headers=headers,
auth=auth, auth=auth,
timeout=download_timeout,
) )
# Handle redirects manually # Handle redirects manually
@@ -762,6 +766,7 @@ async def pypi_download_file(
         headers=redirect_headers,
         auth=redirect_auth,
         follow_redirects=False,
+        timeout=download_timeout,
     )
     redirect_count += 1
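
Note: the HttpClientManager and get_http_client names used by the new dependency are referenced but not defined in this diff. Below is a minimal sketch, assuming the manager owns one long-lived httpx.AsyncClient that is shared across requests, with per-request timeouts supplied at call time; the class body, the aclose() helper, and the pool limits are illustrative assumptions, not code from this commit.

# Sketch of the assumed HttpClientManager / get_http_client pair (not part of this
# commit): a single httpx.AsyncClient is created once and reused for every proxied
# request, so connections are pooled instead of being re-established per download.
import httpx


class HttpClientManager:
    """Owns one shared httpx.AsyncClient for the app's lifetime (assumed design)."""

    def __init__(self) -> None:
        self._client: httpx.AsyncClient | None = None

    def get_client(self) -> httpx.AsyncClient:
        # Lazily create the shared client; follow_redirects stays off because
        # pypi_download_file handles redirects manually.
        if self._client is None:
            self._client = httpx.AsyncClient(
                timeout=httpx.Timeout(30.0),
                follow_redirects=False,
                limits=httpx.Limits(max_connections=100, max_keepalive_connections=20),
            )
        return self._client

    async def aclose(self) -> None:
        # Called on application shutdown (assumed hook) to release pooled connections.
        if self._client is not None:
            await self._client.aclose()
            self._client = None


_manager = HttpClientManager()


def get_http_client() -> HttpClientManager:
    # FastAPI dependency referenced in the route signature above.
    return _manager

Passing timeout=download_timeout on each client.get() keeps the long read window scoped to file downloads, while the shared client retains its shorter default timeout and pooled connections for other proxy calls.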