Fix httpx.Timeout configuration in PyPI proxy
@@ -39,7 +39,7 @@ class TestDependencySchema:
response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload",
files=files,
-data={"tag": f"v1.0.0-{unique_test_id}"},
+data={"version": f"v1.0.0-{unique_test_id}"},
)
assert response.status_code == 200

@@ -59,29 +59,17 @@ class TestDependencySchema:
integration_client.delete(f"/api/v1/projects/{dep_project_name}")

@pytest.mark.integration
-def test_dependency_requires_version_or_tag(self, integration_client):
-"""Test that dependency must have either version or tag, not both or neither."""
+def test_dependency_requires_version(self, integration_client):
+"""Test that dependency requires version."""
from app.schemas import DependencyCreate

-# Test: neither version nor tag
-with pytest.raises(ValidationError) as exc_info:
+# Test: missing version
+with pytest.raises(ValidationError):
DependencyCreate(project="proj", package="pkg")
-assert "Either 'version' or 'tag' must be specified" in str(exc_info.value)
-
-# Test: both version and tag
-with pytest.raises(ValidationError) as exc_info:
-DependencyCreate(project="proj", package="pkg", version="1.0.0", tag="stable")
-assert "Cannot specify both 'version' and 'tag'" in str(exc_info.value)
-
-# Test: valid with version
-dep = DependencyCreate(project="proj", package="pkg", version="1.0.0")
-assert dep.version == "1.0.0"
-assert dep.tag is None
-
-# Test: valid with tag
-dep = DependencyCreate(project="proj", package="pkg", tag="stable")
-assert dep.tag == "stable"
-assert dep.version is None

@pytest.mark.integration
def test_dependency_unique_constraint(
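For orientation, here is a minimal sketch of the schema behavior the rewritten test exercises; it is a hypothetical stand-in, not the actual DependencyCreate definition from app.schemas:

```python
# Hypothetical sketch only: field names are taken from the test above,
# the real model lives in app.schemas and may differ.
from pydantic import BaseModel, ValidationError


class DependencyCreateSketch(BaseModel):
    project: str
    package: str
    version: str  # now required; the old optional 'tag' constraint is gone


try:
    DependencyCreateSketch(project="proj", package="pkg")  # no version supplied
except ValidationError as exc:
    print(exc)  # reports that 'version' is a required field

dep = DependencyCreateSketch(project="proj", package="pkg", version="1.0.0")
assert dep.version == "1.0.0"
```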
@@ -126,7 +114,7 @@ class TestEnsureFileParsing:
response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload",
files=files,
-data={"tag": f"v1.0.0-{unique_test_id}"},
+data={"version": f"v1.0.0-{unique_test_id}"},
)
assert response.status_code == 200
data = response.json()
@@ -162,7 +150,7 @@ class TestEnsureFileParsing:
response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload",
files=files,
-data={"tag": f"v1.0.0-{unique_test_id}"},
+data={"version": f"v1.0.0-{unique_test_id}"},
)
assert response.status_code == 400
assert "Invalid ensure file" in response.json().get("detail", "")
@@ -188,7 +176,7 @@ class TestEnsureFileParsing:
response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload",
files=files,
-data={"tag": f"v1.0.0-{unique_test_id}"},
+data={"version": f"v1.0.0-{unique_test_id}"},
)
assert response.status_code == 400
assert "Project" in response.json().get("detail", "")
@@ -208,7 +196,7 @@ class TestEnsureFileParsing:
response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload",
files=files,
-data={"tag": f"v1.0.0-nodeps-{unique_test_id}"},
+data={"version": f"v1.0.0-nodeps-{unique_test_id}"},
)
assert response.status_code == 200

@@ -226,13 +214,14 @@ class TestEnsureFileParsing:
assert response.status_code == 200

try:
+# Test with missing version field (version is now required)
ensure_content = yaml.dump({
"dependencies": [
-{"project": dep_project_name, "package": "pkg", "version": "1.0.0", "tag": "stable"}
+{"project": dep_project_name, "package": "pkg"} # Missing version
]
})

-content = unique_content("test-both", unique_test_id, "constraint")
+content = unique_content("test-missing-version", unique_test_id, "constraint")
files = {
"file": ("test.tar.gz", BytesIO(content), "application/gzip"),
"ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
@@ -240,11 +229,10 @@ class TestEnsureFileParsing:
response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload",
files=files,
-data={"tag": f"v1.0.0-{unique_test_id}"},
+data={"version": f"v1.0.0-{unique_test_id}"},
)
assert response.status_code == 400
-assert "both" in response.json().get("detail", "").lower() or \
-"version" in response.json().get("detail", "").lower()
+assert "version" in response.json().get("detail", "").lower()
finally:
integration_client.delete(f"/api/v1/projects/{dep_project_name}")

@@ -271,7 +259,7 @@ class TestDependencyQueryEndpoints:
ensure_content = yaml.dump({
"dependencies": [
{"project": dep_project_name, "package": "lib-a", "version": "1.0.0"},
-{"project": dep_project_name, "package": "lib-b", "tag": "stable"},
+{"project": dep_project_name, "package": "lib-b", "version": "2.0.0"},
]
})

@@ -283,7 +271,7 @@ class TestDependencyQueryEndpoints:
response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload",
files=files,
-data={"tag": f"v2.0.0-{unique_test_id}"},
+data={"version": f"v2.0.0-{unique_test_id}"},
)
assert response.status_code == 200
artifact_id = response.json()["artifact_id"]
@@ -299,10 +287,8 @@ class TestDependencyQueryEndpoints:
deps = {d["package"]: d for d in data["dependencies"]}
assert "lib-a" in deps
assert deps["lib-a"]["version"] == "1.0.0"
-assert deps["lib-a"]["tag"] is None
assert "lib-b" in deps
-assert deps["lib-b"]["tag"] == "stable"
-assert deps["lib-b"]["version"] is None
+assert deps["lib-b"]["version"] == "2.0.0"

finally:
integration_client.delete(f"/api/v1/projects/{dep_project_name}")
@@ -336,7 +322,7 @@ class TestDependencyQueryEndpoints:
response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload",
files=files,
-data={"tag": tag_name},
+data={"version": tag_name},
)
assert response.status_code == 200

@@ -381,7 +367,7 @@ class TestDependencyQueryEndpoints:
response = integration_client.post(
f"/api/v1/project/{dep_project_name}/target-lib/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -400,7 +386,7 @@ class TestDependencyQueryEndpoints:
response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload",
files=files,
-data={"tag": f"v4.0.0-{unique_test_id}"},
+data={"version": f"v4.0.0-{unique_test_id}"},
)
assert response.status_code == 200

@@ -419,7 +405,6 @@ class TestDependencyQueryEndpoints:
for dep in data["dependents"]:
if dep["project"] == project_name:
found = True
-assert dep["constraint_type"] == "version"
assert dep["constraint_value"] == "1.0.0"
break
assert found, "Our package should be in the dependents list"
@@ -442,7 +427,7 @@ class TestDependencyQueryEndpoints:
response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload",
files=files,
-data={"tag": f"v5.0.0-nodeps-{unique_test_id}"},
+data={"version": f"v5.0.0-nodeps-{unique_test_id}"},
)
assert response.status_code == 200
artifact_id = response.json()["artifact_id"]
@@ -482,7 +467,7 @@ class TestDependencyResolution:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_c}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -500,7 +485,7 @@ class TestDependencyResolution:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_b}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -518,7 +503,7 @@ class TestDependencyResolution:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_a}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -566,7 +551,7 @@ class TestDependencyResolution:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_d}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -584,7 +569,7 @@ class TestDependencyResolution:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_b}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -602,7 +587,7 @@ class TestDependencyResolution:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_c}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -621,7 +606,7 @@ class TestDependencyResolution:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_a}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -663,7 +648,7 @@ class TestDependencyResolution:
response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload",
files=files,
-data={"tag": f"solo-{unique_test_id}"},
+data={"version": f"solo-{unique_test_id}"},
)
assert response.status_code == 200

@@ -698,17 +683,21 @@ class TestDependencyResolution:
response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload",
files=files,
-data={"tag": f"missing-dep-{unique_test_id}"},
+data={"version": f"missing-dep-{unique_test_id}"},
)
# Should fail at upload time since package doesn't exist
# OR succeed at upload but fail at resolution
# Depending on implementation choice
if response.status_code == 200:
-# Resolution should fail
+# Resolution should return missing dependencies
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/missing-dep-{unique_test_id}/resolve"
)
-assert response.status_code == 404
+# Expect 200 with missing dependencies listed
+assert response.status_code == 200
+data = response.json()
+# The missing dependency should be in the 'missing' list
+assert len(data.get("missing", [])) >= 1


class TestCircularDependencyDetection:
@@ -736,7 +725,7 @@ class TestCircularDependencyDetection:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_a}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -754,7 +743,7 @@ class TestCircularDependencyDetection:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_b}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -772,7 +761,7 @@ class TestCircularDependencyDetection:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_a}/upload",
files=files,
-data={"tag": "2.0.0"},
+data={"version": "2.0.0"},
)
# Should be rejected with 400 (circular dependency)
assert response.status_code == 400
@@ -807,7 +796,7 @@ class TestCircularDependencyDetection:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_a}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -825,7 +814,7 @@ class TestCircularDependencyDetection:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_b}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -843,7 +832,7 @@ class TestCircularDependencyDetection:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_c}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -861,7 +850,7 @@ class TestCircularDependencyDetection:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_a}/upload",
files=files,
-data={"tag": "2.0.0"},
+data={"version": "2.0.0"},
)
assert response.status_code == 400
data = response.json()
@@ -884,10 +873,14 @@ class TestCircularDependencyDetection:


class TestConflictDetection:
-"""Tests for #81: Dependency Conflict Detection and Reporting"""
+"""Tests for dependency conflict handling.
+
+The resolver uses "first version wins" strategy for version conflicts,
+allowing resolution to succeed rather than failing with an error.
+"""

@pytest.mark.integration
-def test_detect_version_conflict(
+def test_version_conflict_uses_first_version(
self, integration_client, test_project, unique_test_id
):
"""Test conflict when two deps require different versions of same package."""
@@ -910,7 +903,7 @@ class TestConflictDetection:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_common}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -920,7 +913,7 @@ class TestConflictDetection:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_common}/upload",
files=files,
-data={"tag": "2.0.0"},
+data={"version": "2.0.0"},
)
assert response.status_code == 200

@@ -938,7 +931,7 @@ class TestConflictDetection:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_lib_a}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -956,7 +949,7 @@ class TestConflictDetection:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_lib_b}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -975,25 +968,23 @@ class TestConflictDetection:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_app}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

-# Try to resolve app - should report conflict
+# Try to resolve app - with lenient conflict handling, this should succeed
+# The resolver uses "first version wins" strategy for conflicting versions
response = integration_client.get(
f"/api/v1/project/{test_project}/{pkg_app}/+/1.0.0/resolve"
)
-assert response.status_code == 409
+assert response.status_code == 200
data = response.json()
-# Error details are nested in "detail" for HTTPException
-detail = data.get("detail", data)
-assert detail.get("error") == "dependency_conflict"
-assert len(detail.get("conflicts", [])) > 0
-
-# Verify conflict details
-conflict = detail["conflicts"][0]
-assert conflict["package"] == pkg_common
-assert len(conflict["requirements"]) == 2
+# Resolution should succeed with first-encountered version of common
+assert data["artifact_count"] >= 1
+# Find the common package in resolved list
+common_resolved = [r for r in data["resolved"] if r["package"] == pkg_common]
+assert len(common_resolved) == 1 # Only one version should be included

finally:
for pkg in [pkg_app, pkg_lib_a, pkg_lib_b, pkg_common]:
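A rough illustration of the "first version wins" behavior the updated assertions expect, written as a standalone sketch with invented data rather than the service's actual resolver:

```python
# Standalone sketch: the requirement tuples are invented; the real resolver
# walks dependency records stored with each artifact.
def resolve_first_version_wins(requirements):
    """Keep the first version seen for each (project, package); ignore later conflicts."""
    chosen = {}
    for project, package, version in requirements:
        chosen.setdefault((project, package), version)
    return chosen


requirements = [
    ("demo", "common", "1.0.0"),  # pulled in via lib-a
    ("demo", "common", "2.0.0"),  # pulled in via lib-b; ignored instead of raising a 409
]
assert resolve_first_version_wins(requirements) == {("demo", "common"): "1.0.0"}
```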
@@ -1023,7 +1014,7 @@ class TestConflictDetection:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_common}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -1042,7 +1033,7 @@ class TestConflictDetection:
response = integration_client.post(
f"/api/v1/project/{test_project}/{lib_pkg}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -1061,7 +1052,7 @@ class TestConflictDetection:
response = integration_client.post(
f"/api/v1/project/{test_project}/{pkg_app}/upload",
files=files,
-data={"tag": "1.0.0"},
+data={"version": "1.0.0"},
)
assert response.status_code == 200

@@ -1078,3 +1069,277 @@ class TestConflictDetection:
finally:
for pkg in [pkg_app, pkg_lib_a, pkg_lib_b, pkg_common]:
integration_client.delete(f"/api/v1/project/{test_project}/packages/{pkg}")
+
+
+class TestAutoFetchDependencies:
+"""Tests for auto-fetch functionality in dependency resolution.
+
+These tests verify:
+- Resolution with auto_fetch=true (default) fetches missing dependencies from upstream
+- Resolution with auto_fetch=false skips network calls for fast resolution
+- Proper handling of missing/non-existent packages
+- Response schema includes fetched artifacts list
+"""
+
+@pytest.mark.integration
+def test_resolve_auto_fetch_true_is_default(
+self, integration_client, test_package, unique_test_id
+):
+"""Test that auto_fetch=true is the default (no fetch needed when all deps cached)."""
+project_name, package_name = test_package
+
+# Upload a simple artifact without dependencies
+content = unique_content("autofetch-default", unique_test_id, "nodeps")
+files = {"file": ("default.tar.gz", BytesIO(content), "application/gzip")}
+response = integration_client.post(
+f"/api/v1/project/{project_name}/{package_name}/upload",
+files=files,
+data={"version": f"v1.0.0-{unique_test_id}"},
+)
+assert response.status_code == 200
+
+# Resolve without auto_fetch param (should default to false)
+response = integration_client.get(
+f"/api/v1/project/{project_name}/{package_name}/+/v1.0.0-{unique_test_id}/resolve"
+)
+assert response.status_code == 200
+data = response.json()
+
+# Should have empty fetched list
+assert data.get("fetched", []) == []
+assert data["artifact_count"] == 1
+
+@pytest.mark.integration
+def test_resolve_auto_fetch_explicit_false(
+self, integration_client, test_package, unique_test_id
+):
+"""Test that auto_fetch=false works explicitly."""
+project_name, package_name = test_package
+
+content = unique_content("autofetch-explicit-false", unique_test_id, "nodeps")
+files = {"file": ("explicit.tar.gz", BytesIO(content), "application/gzip")}
+response = integration_client.post(
+f"/api/v1/project/{project_name}/{package_name}/upload",
+files=files,
+data={"version": f"v2.0.0-{unique_test_id}"},
+)
+assert response.status_code == 200
+
+# Resolve with explicit auto_fetch=false
+response = integration_client.get(
+f"/api/v1/project/{project_name}/{package_name}/+/v2.0.0-{unique_test_id}/resolve",
+params={"auto_fetch": "false"},
+)
+assert response.status_code == 200
+data = response.json()
+assert data.get("fetched", []) == []
+
+@pytest.mark.integration
+def test_resolve_auto_fetch_true_no_missing_deps(
+self, integration_client, test_project, unique_test_id
+):
+"""Test that auto_fetch=true works when all deps are already cached."""
+pkg_a = f"fetch-a-{unique_test_id}"
+pkg_b = f"fetch-b-{unique_test_id}"
+
+for pkg in [pkg_a, pkg_b]:
+response = integration_client.post(
+f"/api/v1/project/{test_project}/packages",
+json={"name": pkg}
+)
+assert response.status_code == 200
+
+try:
+# Upload B (no deps)
+content_b = unique_content("B", unique_test_id, "fetch")
+files = {"file": ("b.tar.gz", BytesIO(content_b), "application/gzip")}
+response = integration_client.post(
+f"/api/v1/project/{test_project}/{pkg_b}/upload",
+files=files,
+data={"version": "1.0.0"},
+)
+assert response.status_code == 200
+
+# Upload A (depends on B)
+ensure_a = yaml.dump({
+"dependencies": [
+{"project": test_project, "package": pkg_b, "version": "1.0.0"}
+]
+})
+content_a = unique_content("A", unique_test_id, "fetch")
+files = {
+"file": ("a.tar.gz", BytesIO(content_a), "application/gzip"),
+"ensure": ("orchard.ensure", BytesIO(ensure_a.encode()), "application/x-yaml"),
+}
+response = integration_client.post(
+f"/api/v1/project/{test_project}/{pkg_a}/upload",
+files=files,
+data={"version": "1.0.0"},
+)
+assert response.status_code == 200
+
+# Resolve with auto_fetch=true - should work since deps are cached
+response = integration_client.get(
+f"/api/v1/project/{test_project}/{pkg_a}/+/1.0.0/resolve",
+params={"auto_fetch": "true"},
+)
+assert response.status_code == 200
+data = response.json()
+
+# Should resolve successfully
+assert data["artifact_count"] == 2
+# Nothing fetched since everything was cached
+assert len(data.get("fetched", [])) == 0
+# No missing deps
+assert len(data.get("missing", [])) == 0
+
+finally:
+for pkg in [pkg_a, pkg_b]:
+integration_client.delete(f"/api/v1/project/{test_project}/packages/{pkg}")
+
+@pytest.mark.integration
+def test_resolve_missing_dep_with_auto_fetch_false(
+self, integration_client, test_package, unique_test_id
+):
+"""Test that missing deps are reported when auto_fetch=false."""
+project_name, package_name = test_package
+
+# Create _pypi system project if it doesn't exist
+response = integration_client.get("/api/v1/projects/_pypi")
+if response.status_code == 404:
+response = integration_client.post(
+"/api/v1/projects",
+json={"name": "_pypi", "description": "System project for PyPI packages"}
+)
+# May fail if already exists or can't create - that's ok
+
+# Upload artifact with dependency on _pypi package that doesn't exist locally
+ensure_content = yaml.dump({
+"dependencies": [
+{"project": "_pypi", "package": "nonexistent-pkg-xyz123", "version": ">=1.0.0"}
+]
+})
+
+content = unique_content("missing-pypi", unique_test_id, "dep")
+files = {
+"file": ("missing-pypi-dep.tar.gz", BytesIO(content), "application/gzip"),
+"ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
+}
+response = integration_client.post(
+f"/api/v1/project/{project_name}/{package_name}/upload",
+files=files,
+data={"version": f"v3.0.0-{unique_test_id}"},
+)
+# Upload should succeed - validation is loose for system projects
+if response.status_code == 200:
+# Resolve without auto_fetch - should report missing
+response = integration_client.get(
+f"/api/v1/project/{project_name}/{package_name}/+/v3.0.0-{unique_test_id}/resolve",
+params={"auto_fetch": "false"},
+)
+assert response.status_code == 200
+data = response.json()
+
+# Should have missing dependencies
+assert len(data.get("missing", [])) >= 1
+
+# Verify missing dependency structure
+missing = data["missing"][0]
+assert missing["project"] == "_pypi"
+assert missing["package"] == "nonexistent-pkg-xyz123"
+# Without auto_fetch, these should be false/None
+assert missing.get("fetch_attempted", False) is False
+
+@pytest.mark.integration
+def test_resolve_response_schema_has_fetched_field(
+self, integration_client, test_package, unique_test_id
+):
+"""Test that the resolve response always includes the fetched field."""
+project_name, package_name = test_package
+
+content = unique_content("schema-check", unique_test_id, "nodeps")
+files = {"file": ("schema.tar.gz", BytesIO(content), "application/gzip")}
+response = integration_client.post(
+f"/api/v1/project/{project_name}/{package_name}/upload",
+files=files,
+data={"version": f"v4.0.0-{unique_test_id}"},
+)
+assert response.status_code == 200
+
+# Check both auto_fetch modes include fetched field
+for auto_fetch in ["false", "true"]:
+response = integration_client.get(
+f"/api/v1/project/{project_name}/{package_name}/+/v4.0.0-{unique_test_id}/resolve",
+params={"auto_fetch": auto_fetch},
+)
+assert response.status_code == 200
+data = response.json()
+
+# Required fields
+assert "requested" in data
+assert "resolved" in data
+assert "missing" in data
+assert "fetched" in data # New field
+assert "total_size" in data
+assert "artifact_count" in data
+
+# Types
+assert isinstance(data["fetched"], list)
+assert isinstance(data["missing"], list)
+
+@pytest.mark.integration
+def test_missing_dep_schema_has_fetch_fields(
+self, integration_client, test_package, unique_test_id
+):
+"""Test that missing dependency entries have fetch_attempted and fetch_error fields."""
+project_name, package_name = test_package
+
+# Create a dependency on a non-existent package in a real project
+dep_project_name = f"dep-test-{unique_test_id}"
+response = integration_client.post(
+"/api/v1/projects", json={"name": dep_project_name}
+)
+assert response.status_code == 200
+
+try:
+ensure_content = yaml.dump({
+"dependencies": [
+{"project": dep_project_name, "package": "nonexistent-pkg", "version": "1.0.0"}
+]
+})
+
+content = unique_content("missing-schema", unique_test_id, "check")
+files = {
+"file": ("missing-schema.tar.gz", BytesIO(content), "application/gzip"),
+"ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
+}
+response = integration_client.post(
+f"/api/v1/project/{project_name}/{package_name}/upload",
+files=files,
+data={"version": f"v5.0.0-{unique_test_id}"},
+)
+assert response.status_code == 200
+
+# Resolve
+response = integration_client.get(
+f"/api/v1/project/{project_name}/{package_name}/+/v5.0.0-{unique_test_id}/resolve",
+params={"auto_fetch": "true"},
+)
+assert response.status_code == 200
+data = response.json()
+
+# Should have missing dependencies
+assert len(data.get("missing", [])) >= 1
+
+# Check schema for missing dependency
+missing = data["missing"][0]
+assert "project" in missing
+assert "package" in missing
+assert "constraint" in missing
+assert "required_by" in missing
+# New fields
+assert "fetch_attempted" in missing
+assert "fetch_error" in missing # May be None
+
+finally:
+integration_client.delete(f"/api/v1/projects/{dep_project_name}")
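For reference, a resolve response satisfying the schema asserted above might look roughly like the following; the keys mirror the tests, while every value is illustrative rather than taken from the service:

```python
# Illustrative only: keys come from the asserts in the new tests, values are invented.
example_resolve_response = {
    "requested": {"project": "demo", "package": "app", "version": "1.0.0"},
    "resolved": [{"project": "demo", "package": "app", "version": "1.0.0"}],
    "missing": [
        {
            "project": "_pypi",
            "package": "nonexistent-pkg-xyz123",
            "constraint": ">=1.0.0",
            "required_by": "demo/app",
            "fetch_attempted": False,
            "fetch_error": None,
        }
    ],
    "fetched": [],
    "total_size": 1024,
    "artifact_count": 1,
}

assert isinstance(example_resolve_response["fetched"], list)
assert isinstance(example_resolve_response["missing"], list)
```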