fix: filter platform-specific and extra dependencies in PyPI proxy
The dependency parser was stripping environment markers but not checking whether
they indicated optional or platform-specific packages. This caused packages like
jaraco.path to pull in pyobjc (324 sub-packages) even on non-macOS systems.

Changes:
- Filter dependencies with 'extra ==' markers (optional extras)
- Filter dependencies with 'sys_platform' or 'platform_system' markers
- Add diagnostic logging for depth exceeded errors
- Add unit tests for dependency filtering

Fixes tensorflow dependency resolution exceeding max depth.
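The unit tests mentioned above are not included in the excerpt below. As a rough illustration of the intended filtering behavior, here is a pytest-style sketch; the import path "pypi_proxy" and the test name are assumptions, not taken from this commit, and the expected values follow the docstring examples of _parse_requires_dist.

    import pytest

    # Illustrative sketch only; the module path is an assumption.
    from pypi_proxy import _parse_requires_dist

    @pytest.mark.parametrize(
        "requires_dist, expected",
        [
            ("requests (>=2.25.0)", ("requests", ">=2.25.0")),        # kept, with constraint
            ("certifi", ("certifi", None)),                            # kept, no constraint
            ("typing-extensions; python_version < '3.8'",
             ("typing-extensions", None)),                             # marker stripped, dependency kept
            ("pytest; extra == 'test'", (None, None)),                 # filtered: optional extra
            ("pyobjc; sys_platform == 'darwin'", (None, None)),        # filtered: platform-specific
            ("pywin32; platform_system == 'Windows'", (None, None)),   # filtered: platform-specific
        ],
    )
    def test_parse_requires_dist_filters_markers(requires_dist, expected):
        assert _parse_requires_dist(requires_dist) == expected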
@@ -730,6 +730,8 @@ def resolve_dependencies(
    current_path: Dict[str, str] = {}
    # Resolution order (topological)
    resolution_order: List[str] = []
    # Track resolution path for debugging
    resolution_path_sync: List[str] = []

    def _resolve_recursive(
        artifact_id: str,
@@ -741,12 +743,16 @@ def resolve_dependencies(
        depth: int = 0,
    ):
        """Recursively resolve dependencies with cycle/conflict detection."""
        pkg_key = f"{proj_name}/{pkg_name}"

        # Safety limit: prevent DoS through deeply nested dependencies
        if depth > MAX_DEPENDENCY_DEPTH:
            logger.error(
                f"Dependency depth exceeded at {pkg_key} (depth={depth}). "
                f"Resolution path: {' -> '.join(resolution_path_sync[-20:])}"
            )
            raise DependencyDepthExceededError(MAX_DEPENDENCY_DEPTH)

        pkg_key = f"{proj_name}/{pkg_name}"

        # Cycle detection (at artifact level)
        if artifact_id in visiting:
            # Build cycle path from current_path
@@ -770,6 +776,9 @@ def resolve_dependencies(
        if artifact_id in visited:
            return

        # Track path for debugging (only after early-return checks)
        resolution_path_sync.append(f"{pkg_key}@{version_or_tag}")

        visiting.add(artifact_id)
        current_path[artifact_id] = pkg_key

@@ -838,6 +847,7 @@ def resolve_dependencies(
        visiting.remove(artifact_id)
        del current_path[artifact_id]
        visited.add(artifact_id)
        resolution_path_sync.pop()

        # Add to resolution order (dependencies before dependents)
        resolution_order.append(artifact_id)
@@ -1086,6 +1096,9 @@ async def resolve_dependencies_with_fetch(
            logger.warning(f"Error fetching {dep_package}: {e}")
            return None

    # Track resolution path for debugging
    resolution_path: List[str] = []

    async def _resolve_recursive_async(
        artifact_id: str,
        proj_name: str,
@@ -1096,11 +1109,15 @@ async def resolve_dependencies_with_fetch(
        depth: int = 0,
    ):
        """Recursively resolve dependencies with fetch capability."""
        if depth > MAX_DEPENDENCY_DEPTH:
            raise DependencyDepthExceededError(MAX_DEPENDENCY_DEPTH)

        pkg_key = f"{proj_name}/{pkg_name}"

        if depth > MAX_DEPENDENCY_DEPTH:
            logger.error(
                f"Dependency depth exceeded at {pkg_key} (depth={depth}). "
                f"Resolution path: {' -> '.join(resolution_path[-20:])}"
            )
            raise DependencyDepthExceededError(MAX_DEPENDENCY_DEPTH)

        # Cycle detection
        if artifact_id in visiting:
            cycle_start = current_path.get(artifact_id, pkg_key)
@@ -1123,6 +1140,9 @@ async def resolve_dependencies_with_fetch(
        if artifact_id in visited:
            return

        # Track path for debugging (only after early-return checks)
        resolution_path.append(f"{pkg_key}@{version_or_tag}")

        visiting.add(artifact_id)
        current_path[artifact_id] = pkg_key

@@ -1235,6 +1255,7 @@ async def resolve_dependencies_with_fetch(
        visiting.remove(artifact_id)
        del current_path[artifact_id]
        visited.add(artifact_id)
        resolution_path.pop()

        resolution_order.append(artifact_id)

@@ -47,17 +47,36 @@ PROXY_READ_TIMEOUT = 60.0
def _parse_requires_dist(requires_dist: str) -> Tuple[str, Optional[str]]:
    """Parse a Requires-Dist line into (package_name, version_constraint).

    Filters out optional/extra dependencies and platform-specific dependencies
    to avoid pulling in unnecessary packages during dependency resolution.

    Examples:
        "requests (>=2.25.0)" -> ("requests", ">=2.25.0")
        "typing-extensions; python_version < '3.8'" -> ("typing-extensions", None)
        "numpy>=1.21.0" -> ("numpy", ">=1.21.0")
        "certifi" -> ("certifi", None)
        "pytest; extra == 'test'" -> (None, None)  # Filtered: extra dependency
        "pyobjc; sys_platform == 'darwin'" -> (None, None)  # Filtered: platform-specific

    Returns:
        Tuple of (normalized_package_name, version_constraint or None)
        Returns (None, None) for dependencies that should be filtered out.
    """
    # Remove any environment markers (after semicolon)
    # Check for and filter environment markers (after semicolon)
    if ';' in requires_dist:
        marker_part = requires_dist.split(';', 1)[1].lower()

        # Filter out extra/optional dependencies - these are not core dependencies
        # Examples: "pytest; extra == 'test'", "sphinx; extra == 'docs'"
        if 'extra' in marker_part:
            return None, None

        # Filter out platform-specific dependencies to avoid cross-platform bloat
        # Examples: "pyobjc; sys_platform == 'darwin'", "pywin32; sys_platform == 'win32'"
        if 'sys_platform' in marker_part or 'platform_system' in marker_part:
            return None, None

        # Strip the marker for remaining dependencies (like python_version constraints)
        requires_dist = requires_dist.split(';')[0].strip()

    # Match patterns like "package (>=1.0)" or "package>=1.0" or "package"
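For context, a minimal sketch of how a caller might consume the (None, None) contract when walking a package's Requires-Dist metadata; the loop and variable names here are assumptions for illustration, not code from this commit.

    dependencies = []
    for line in requires_dist_lines:  # Requires-Dist entries from package metadata (hypothetical variable)
        name, constraint = _parse_requires_dist(line)
        if name is None:
            # Extra or platform-specific dependency: skip it so it never
            # enters dependency resolution.
            continue
        dependencies.append((name, constraint))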