"""Tests for artifact dependency management.
|
|
|
|
Tests cover:
|
|
- #76: Database Schema for Artifact Dependencies
|
|
- #77: Ensure File Parsing and Storage on Upload
|
|
- #78: Dependency Query API Endpoints
|
|
- #79: Server-Side Dependency Resolution
|
|
- #80: Circular Dependency Detection
|
|
- #81: Dependency Conflict Detection and Reporting
|
|
"""
|
|
|
|
import pytest
|
|
import yaml
|
|
from uuid import uuid4
|
|
from io import BytesIO
|
|
|
|
# For schema validation tests
|
|
from pydantic import ValidationError
|
|
|
|
|
|
def unique_content(base: str, test_id: str, extra: str = "") -> bytes:
    """Return unique payload bytes so artifact hashes never collide across tests.

    The result is ``base-test_id-extra-<8 hex chars>`` encoded as UTF-8,
    where the trailing hex suffix comes from a fresh UUID on every call.
    """
    nonce = uuid4().hex[:8]
    return "-".join((base, test_id, extra, nonce)).encode()
|
|
|
|
|
|
class TestDependencySchema:
    """Tests for #76: Database Schema for Artifact Dependencies"""

    @pytest.mark.integration
    def test_create_dependency_with_version(
        self, integration_client, test_package, unique_test_id
    ):
        """Test creating a dependency with version constraint."""
        project_name, package_name = test_package

        # Upload an artifact to attach dependencies to.
        payload = unique_content("test-deps", unique_test_id, "schema1")
        upload_resp = integration_client.post(
            f"/api/v1/project/{project_name}/{package_name}/upload",
            files={"file": ("test.tar.gz", BytesIO(payload), "application/gzip")},
            data={"version": f"v1.0.0-{unique_test_id}"},
        )
        assert upload_resp.status_code == 200

        # Create a second project to act as the dependency target.
        dep_project_name = f"dep-project-{uuid4().hex[:8]}"
        create_resp = integration_client.post(
            "/api/v1/projects", json={"name": dep_project_name}
        )
        assert create_resp.status_code == 200

        try:
            # Dependency creation via the API is exercised elsewhere once
            # the ensure-file upload path exists; this placeholder reserves
            # coverage for the DB-level schema constraints.
            pass
        finally:
            # Cleanup the dependency project regardless of outcome.
            integration_client.delete(f"/api/v1/projects/{dep_project_name}")

    @pytest.mark.integration
    def test_dependency_requires_version(self, integration_client):
        """Test that dependency requires version."""
        from app.schemas import DependencyCreate

        # A dependency entry without a version must fail schema validation.
        with pytest.raises(ValidationError):
            DependencyCreate(project="proj", package="pkg")

        # Supplying a version makes the model valid.
        created = DependencyCreate(project="proj", package="pkg", version="1.0.0")
        assert created.version == "1.0.0"

    @pytest.mark.integration
    def test_dependency_unique_constraint(
        self, integration_client, test_package
    ):
        """Test that an artifact can only have one dependency per project/package."""
        # This will be tested once the upload with ensure file is implemented
        pass
|
|
class TestEnsureFileParsing:
    """Tests for #77: Ensure File Parsing and Storage on Upload.

    Each test uploads an artifact with an ``orchard.ensure`` YAML document in
    the multipart payload and checks how the server parses, validates, and
    stores the declared dependencies.
    """

    @pytest.mark.integration
    def test_upload_with_valid_ensure_file(
        self, integration_client, test_package, unique_test_id
    ):
        """Test uploading an artifact with a valid orchard.ensure file."""
        project_name, package_name = test_package

        # Create the dependency project so the ensure file references a real one.
        dep_project_name = f"dep-project-{unique_test_id}"
        response = integration_client.post(
            "/api/v1/projects", json={"name": dep_project_name}
        )
        assert response.status_code == 200

        try:
            # Well-formed ensure file: one dependency with an exact version.
            ensure_content = yaml.dump({
                "dependencies": [
                    {"project": dep_project_name, "package": "some-pkg", "version": "1.0.0"}
                ]
            })

            # Upload artifact with ensure file - use unique content to avoid conflicts
            content = unique_content("test-ensure", unique_test_id, "valid")
            files = {
                "file": ("test-artifact.tar.gz", BytesIO(content), "application/gzip"),
                "ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
            }
            response = integration_client.post(
                f"/api/v1/project/{project_name}/{package_name}/upload",
                files=files,
                data={"version": f"v1.0.0-{unique_test_id}"},
            )
            assert response.status_code == 200
            data = response.json()
            artifact_id = data["artifact_id"]

            # Verify the parsed dependencies were persisted and are queryable.
            response = integration_client.get(
                f"/api/v1/artifact/{artifact_id}/dependencies"
            )
            assert response.status_code == 200
            deps = response.json()
            assert len(deps["dependencies"]) == 1
            assert deps["dependencies"][0]["project"] == dep_project_name
            assert deps["dependencies"][0]["package"] == "some-pkg"
            assert deps["dependencies"][0]["version"] == "1.0.0"

        finally:
            integration_client.delete(f"/api/v1/projects/{dep_project_name}")

    @pytest.mark.integration
    def test_upload_with_invalid_ensure_file(
        self, integration_client, test_package, unique_test_id
    ):
        """Test uploading with invalid YAML ensure file."""
        project_name, package_name = test_package

        # Malformed YAML in the ensure part: the upload must be rejected
        # with a 400 and an explanatory detail message.
        content = unique_content("test-invalid", unique_test_id, "yaml")
        files = {
            "file": ("test-artifact.tar.gz", BytesIO(content), "application/gzip"),
            "ensure": ("orchard.ensure", BytesIO(b"invalid: yaml: content: ["), "application/x-yaml"),
        }
        response = integration_client.post(
            f"/api/v1/project/{project_name}/{package_name}/upload",
            files=files,
            data={"version": f"v1.0.0-{unique_test_id}"},
        )
        assert response.status_code == 400
        assert "Invalid ensure file" in response.json().get("detail", "")

    @pytest.mark.integration
    def test_upload_with_missing_dependency_project(
        self, integration_client, test_package, unique_test_id
    ):
        """Test uploading with ensure file referencing non-existent project."""
        project_name, package_name = test_package

        # Syntactically valid ensure file, but the referenced project
        # does not exist — the server should reject at upload time.
        ensure_content = yaml.dump({
            "dependencies": [
                {"project": "nonexistent-project-xyz", "package": "some-pkg", "version": "1.0.0"}
            ]
        })

        content = unique_content("test-missing", unique_test_id, "project")
        files = {
            "file": ("test-artifact.tar.gz", BytesIO(content), "application/gzip"),
            "ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
        }
        response = integration_client.post(
            f"/api/v1/project/{project_name}/{package_name}/upload",
            files=files,
            data={"version": f"v1.0.0-{unique_test_id}"},
        )
        assert response.status_code == 400
        assert "Project" in response.json().get("detail", "")
        assert "not found" in response.json().get("detail", "").lower()

    @pytest.mark.integration
    def test_upload_without_ensure_file(
        self, integration_client, test_package, unique_test_id
    ):
        """Test normal upload without ensure file still works."""
        project_name, package_name = test_package

        # The ensure part is optional; a plain upload must keep succeeding.
        content = unique_content("test-nodeps", unique_test_id, "upload")
        files = {
            "file": ("test-artifact.tar.gz", BytesIO(content), "application/gzip"),
        }
        response = integration_client.post(
            f"/api/v1/project/{project_name}/{package_name}/upload",
            files=files,
            data={"version": f"v1.0.0-nodeps-{unique_test_id}"},
        )
        assert response.status_code == 200

    @pytest.mark.integration
    def test_upload_ensure_file_both_version_and_tag(
        self, integration_client, test_package, unique_test_id
    ):
        """Test that an ensure-file dependency missing the required version field is rejected.

        NOTE: the method name is historical (it predates the schema change that
        made ``version`` mandatory); it is kept to preserve stable test IDs.
        """
        project_name, package_name = test_package

        dep_project_name = f"dep-project-{unique_test_id}"
        response = integration_client.post(
            "/api/v1/projects", json={"name": dep_project_name}
        )
        assert response.status_code == 200

        try:
            # Test with missing version field (version is now required)
            ensure_content = yaml.dump({
                "dependencies": [
                    {"project": dep_project_name, "package": "pkg"}  # Missing version
                ]
            })

            content = unique_content("test-missing-version", unique_test_id, "constraint")
            files = {
                "file": ("test.tar.gz", BytesIO(content), "application/gzip"),
                "ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
            }
            response = integration_client.post(
                f"/api/v1/project/{project_name}/{package_name}/upload",
                files=files,
                data={"version": f"v1.0.0-{unique_test_id}"},
            )
            assert response.status_code == 400
            assert "version" in response.json().get("detail", "").lower()
        finally:
            integration_client.delete(f"/api/v1/projects/{dep_project_name}")
|
class TestDependencyQueryEndpoints:
    """Tests for #78: Dependency Query API Endpoints.

    Covers the three read endpoints: dependencies by artifact id,
    dependencies by project/package/ref, and reverse dependencies.
    """

    @pytest.mark.integration
    def test_get_artifact_dependencies(
        self, integration_client, test_package, unique_test_id
    ):
        """Test GET /api/v1/artifact/{artifact_id}/dependencies"""
        project_name, package_name = test_package

        # Create dependency project
        dep_project_name = f"dep-project-{unique_test_id}"
        response = integration_client.post(
            "/api/v1/projects", json={"name": dep_project_name}
        )
        assert response.status_code == 200

        try:
            # Upload artifact with two dependencies on distinct packages
            ensure_content = yaml.dump({
                "dependencies": [
                    {"project": dep_project_name, "package": "lib-a", "version": "1.0.0"},
                    {"project": dep_project_name, "package": "lib-b", "version": "2.0.0"},
                ]
            })

            content = unique_content("test-deps", unique_test_id, "query")
            files = {
                "file": ("artifact.tar.gz", BytesIO(content), "application/gzip"),
                "ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
            }
            response = integration_client.post(
                f"/api/v1/project/{project_name}/{package_name}/upload",
                files=files,
                data={"version": f"v2.0.0-{unique_test_id}"},
            )
            assert response.status_code == 200
            artifact_id = response.json()["artifact_id"]

            # Get dependencies by artifact id
            response = integration_client.get(f"/api/v1/artifact/{artifact_id}/dependencies")
            assert response.status_code == 200
            data = response.json()
            assert data["artifact_id"] == artifact_id
            assert len(data["dependencies"]) == 2

            # Verify both dependencies; index by package name since response
            # ordering is not guaranteed
            deps = {d["package"]: d for d in data["dependencies"]}
            assert "lib-a" in deps
            assert deps["lib-a"]["version"] == "1.0.0"
            assert "lib-b" in deps
            assert deps["lib-b"]["version"] == "2.0.0"

        finally:
            integration_client.delete(f"/api/v1/projects/{dep_project_name}")

    @pytest.mark.integration
    def test_get_dependencies_by_ref(
        self, integration_client, test_package, unique_test_id
    ):
        """Test GET /api/v1/project/{proj}/{pkg}/+/{ref}/dependencies"""
        project_name, package_name = test_package

        dep_project_name = f"dep-project-{unique_test_id}"
        response = integration_client.post(
            "/api/v1/projects", json={"name": dep_project_name}
        )
        assert response.status_code == 200

        try:
            ensure_content = yaml.dump({
                "dependencies": [
                    {"project": dep_project_name, "package": "lib-c", "version": "2.0.0"},
                ]
            })

            # Upload under a known tag so we can query by ref instead of id
            tag_name = f"v3.0.0-{unique_test_id}"
            content = unique_content("test-ref", unique_test_id, "deps")
            files = {
                "file": ("artifact.tar.gz", BytesIO(content), "application/gzip"),
                "ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
            }
            response = integration_client.post(
                f"/api/v1/project/{project_name}/{package_name}/upload",
                files=files,
                data={"version": tag_name},
            )
            assert response.status_code == 200

            # Get dependencies by tag
            response = integration_client.get(
                f"/api/v1/project/{project_name}/{package_name}/+/{tag_name}/dependencies"
            )
            assert response.status_code == 200
            data = response.json()
            assert len(data["dependencies"]) == 1
            assert data["dependencies"][0]["package"] == "lib-c"

        finally:
            integration_client.delete(f"/api/v1/projects/{dep_project_name}")

    @pytest.mark.integration
    def test_get_reverse_dependencies(
        self, integration_client, test_package, unique_test_id
    ):
        """Test GET /api/v1/project/{proj}/{pkg}/reverse-dependencies"""
        project_name, package_name = test_package

        # Create the dependency target project/package
        dep_project_name = f"lib-project-{unique_test_id}"
        response = integration_client.post(
            "/api/v1/projects", json={"name": dep_project_name}
        )
        assert response.status_code == 200

        try:
            # Create the target package with an artifact
            response = integration_client.post(
                f"/api/v1/project/{dep_project_name}/packages",
                json={"name": "target-lib"}
            )
            assert response.status_code == 200

            content = unique_content("lib", unique_test_id, "target")
            files = {
                "file": ("lib.tar.gz", BytesIO(content), "application/gzip"),
            }
            response = integration_client.post(
                f"/api/v1/project/{dep_project_name}/target-lib/upload",
                files=files,
                data={"version": "1.0.0"},
            )
            assert response.status_code == 200

            # Now upload an artifact that depends on the target
            ensure_content = yaml.dump({
                "dependencies": [
                    {"project": dep_project_name, "package": "target-lib", "version": "1.0.0"},
                ]
            })

            content = unique_content("app", unique_test_id, "reverse")
            files = {
                "file": ("app.tar.gz", BytesIO(content), "application/gzip"),
                "ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
            }
            response = integration_client.post(
                f"/api/v1/project/{project_name}/{package_name}/upload",
                files=files,
                data={"version": f"v4.0.0-{unique_test_id}"},
            )
            assert response.status_code == 200

            # Check reverse dependencies of the target package
            response = integration_client.get(
                f"/api/v1/project/{dep_project_name}/target-lib/reverse-dependencies"
            )
            assert response.status_code == 200
            data = response.json()
            assert data["project"] == dep_project_name
            assert data["package"] == "target-lib"
            # >= 1 because other tests may also depend on this package
            assert len(data["dependents"]) >= 1

            # Find our dependent among the (possibly shared) dependents list
            found = False
            for dep in data["dependents"]:
                if dep["project"] == project_name:
                    found = True
                    assert dep["constraint_value"] == "1.0.0"
                    break
            assert found, "Our package should be in the dependents list"

        finally:
            integration_client.delete(f"/api/v1/projects/{dep_project_name}")

    @pytest.mark.integration
    def test_get_dependencies_empty(
        self, integration_client, test_package, unique_test_id
    ):
        """Test getting dependencies for artifact with no deps."""
        project_name, package_name = test_package

        # Upload without ensure file
        content = unique_content("nodeps", unique_test_id, "empty")
        files = {
            "file": ("nodeps.tar.gz", BytesIO(content), "application/gzip"),
        }
        response = integration_client.post(
            f"/api/v1/project/{project_name}/{package_name}/upload",
            files=files,
            data={"version": f"v5.0.0-nodeps-{unique_test_id}"},
        )
        assert response.status_code == 200
        artifact_id = response.json()["artifact_id"]

        # The endpoint should return an empty list, not an error
        response = integration_client.get(f"/api/v1/artifact/{artifact_id}/dependencies")
        assert response.status_code == 200
        data = response.json()
        assert data["artifact_id"] == artifact_id
        assert len(data["dependencies"]) == 0
|
|
class TestDependencyResolution:
    """Tests for #79: Server-Side Dependency Resolution.

    Builds small dependency graphs by uploading artifacts bottom-up
    (leaves first, since every ensure file must reference an existing
    target), then checks the /resolve endpoint's topological output.
    """

    @pytest.mark.integration
    def test_resolve_simple_chain(
        self, integration_client, test_project, unique_test_id
    ):
        """Test resolving A -> B -> C dependency chain."""
        # Create packages A, B, C
        pkg_a = f"pkg-a-{unique_test_id}"
        pkg_b = f"pkg-b-{unique_test_id}"
        pkg_c = f"pkg-c-{unique_test_id}"

        # Create all packages
        for pkg in [pkg_a, pkg_b, pkg_c]:
            response = integration_client.post(
                f"/api/v1/project/{test_project}/packages",
                json={"name": pkg}
            )
            assert response.status_code == 200

        try:
            # Upload C (no deps) — the leaf must exist before B can depend on it
            content_c = unique_content("pkg-c", unique_test_id, "chain")
            files = {"file": ("c.tar.gz", BytesIO(content_c), "application/gzip")}
            response = integration_client.post(
                f"/api/v1/project/{test_project}/{pkg_c}/upload",
                files=files,
                data={"version": "1.0.0"},
            )
            assert response.status_code == 200

            # Upload B (depends on C)
            ensure_b = yaml.dump({
                "dependencies": [
                    {"project": test_project, "package": pkg_c, "version": "1.0.0"}
                ]
            })
            content_b = unique_content("pkg-b", unique_test_id, "chain")
            files = {
                "file": ("b.tar.gz", BytesIO(content_b), "application/gzip"),
                "ensure": ("orchard.ensure", BytesIO(ensure_b.encode()), "application/x-yaml"),
            }
            response = integration_client.post(
                f"/api/v1/project/{test_project}/{pkg_b}/upload",
                files=files,
                data={"version": "1.0.0"},
            )
            assert response.status_code == 200

            # Upload A (depends on B)
            ensure_a = yaml.dump({
                "dependencies": [
                    {"project": test_project, "package": pkg_b, "version": "1.0.0"}
                ]
            })
            content_a = unique_content("pkg-a", unique_test_id, "chain")
            files = {
                "file": ("a.tar.gz", BytesIO(content_a), "application/gzip"),
                "ensure": ("orchard.ensure", BytesIO(ensure_a.encode()), "application/x-yaml"),
            }
            response = integration_client.post(
                f"/api/v1/project/{test_project}/{pkg_a}/upload",
                files=files,
                data={"version": "1.0.0"},
            )
            assert response.status_code == 200

            # Resolve dependencies for A
            response = integration_client.get(
                f"/api/v1/project/{test_project}/{pkg_a}/+/1.0.0/resolve"
            )
            assert response.status_code == 200
            data = response.json()

            # Should have 3 artifacts: C, B, A (in topological order)
            assert data["artifact_count"] == 3
            packages = [r["package"] for r in data["resolved"]]

            # C should come before B, B should come before A
            # (dependencies precede their dependents)
            assert packages.index(pkg_c) < packages.index(pkg_b)
            assert packages.index(pkg_b) < packages.index(pkg_a)

        finally:
            # Cleanup packages
            for pkg in [pkg_a, pkg_b, pkg_c]:
                integration_client.delete(f"/api/v1/project/{test_project}/packages/{pkg}")

    @pytest.mark.integration
    def test_resolve_diamond_dependency(
        self, integration_client, test_project, unique_test_id
    ):
        """Test resolving diamond: A -> B -> D, A -> C -> D (D appears once)."""
        pkg_a = f"diamond-a-{unique_test_id}"
        pkg_b = f"diamond-b-{unique_test_id}"
        pkg_c = f"diamond-c-{unique_test_id}"
        pkg_d = f"diamond-d-{unique_test_id}"

        for pkg in [pkg_a, pkg_b, pkg_c, pkg_d]:
            response = integration_client.post(
                f"/api/v1/project/{test_project}/packages",
                json={"name": pkg}
            )
            assert response.status_code == 200

        try:
            # Upload D (no deps) — shared leaf of the diamond
            content_d = unique_content("pkg-d", unique_test_id, "diamond")
            files = {"file": ("d.tar.gz", BytesIO(content_d), "application/gzip")}
            response = integration_client.post(
                f"/api/v1/project/{test_project}/{pkg_d}/upload",
                files=files,
                data={"version": "1.0.0"},
            )
            assert response.status_code == 200

            # Upload B (depends on D)
            ensure_b = yaml.dump({
                "dependencies": [
                    {"project": test_project, "package": pkg_d, "version": "1.0.0"}
                ]
            })
            content_b = unique_content("pkg-b", unique_test_id, "diamond")
            files = {
                "file": ("b.tar.gz", BytesIO(content_b), "application/gzip"),
                "ensure": ("orchard.ensure", BytesIO(ensure_b.encode()), "application/x-yaml"),
            }
            response = integration_client.post(
                f"/api/v1/project/{test_project}/{pkg_b}/upload",
                files=files,
                data={"version": "1.0.0"},
            )
            assert response.status_code == 200

            # Upload C (also depends on D)
            ensure_c = yaml.dump({
                "dependencies": [
                    {"project": test_project, "package": pkg_d, "version": "1.0.0"}
                ]
            })
            content_c = unique_content("pkg-c", unique_test_id, "diamond")
            files = {
                "file": ("c.tar.gz", BytesIO(content_c), "application/gzip"),
                "ensure": ("orchard.ensure", BytesIO(ensure_c.encode()), "application/x-yaml"),
            }
            response = integration_client.post(
                f"/api/v1/project/{test_project}/{pkg_c}/upload",
                files=files,
                data={"version": "1.0.0"},
            )
            assert response.status_code == 200

            # Upload A (depends on B and C)
            ensure_a = yaml.dump({
                "dependencies": [
                    {"project": test_project, "package": pkg_b, "version": "1.0.0"},
                    {"project": test_project, "package": pkg_c, "version": "1.0.0"},
                ]
            })
            content_a = unique_content("pkg-a", unique_test_id, "diamond")
            files = {
                "file": ("a.tar.gz", BytesIO(content_a), "application/gzip"),
                "ensure": ("orchard.ensure", BytesIO(ensure_a.encode()), "application/x-yaml"),
            }
            response = integration_client.post(
                f"/api/v1/project/{test_project}/{pkg_a}/upload",
                files=files,
                data={"version": "1.0.0"},
            )
            assert response.status_code == 200

            # Resolve A
            response = integration_client.get(
                f"/api/v1/project/{test_project}/{pkg_a}/+/1.0.0/resolve"
            )
            assert response.status_code == 200
            data = response.json()

            # Should have 4 artifacts, D appears only once (deduplicated)
            assert data["artifact_count"] == 4
            packages = [r["package"] for r in data["resolved"]]
            assert packages.count(pkg_d) == 1  # D only once

            # D should come before B and C; both before A (topological order)
            d_idx = packages.index(pkg_d)
            b_idx = packages.index(pkg_b)
            c_idx = packages.index(pkg_c)
            a_idx = packages.index(pkg_a)
            assert d_idx < b_idx
            assert d_idx < c_idx
            assert b_idx < a_idx
            assert c_idx < a_idx

        finally:
            for pkg in [pkg_a, pkg_b, pkg_c, pkg_d]:
                integration_client.delete(f"/api/v1/project/{test_project}/packages/{pkg}")

    @pytest.mark.integration
    def test_resolve_no_dependencies(
        self, integration_client, test_package, unique_test_id
    ):
        """Test resolving artifact with no dependencies."""
        project_name, package_name = test_package

        content = unique_content("solo", unique_test_id, "nodeps")
        files = {"file": ("solo.tar.gz", BytesIO(content), "application/gzip")}
        response = integration_client.post(
            f"/api/v1/project/{project_name}/{package_name}/upload",
            files=files,
            data={"version": f"solo-{unique_test_id}"},
        )
        assert response.status_code == 200

        # Resolution of a leaf artifact yields just the artifact itself
        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/+/solo-{unique_test_id}/resolve"
        )
        assert response.status_code == 200
        data = response.json()
        assert data["artifact_count"] == 1
        assert data["resolved"][0]["package"] == package_name

    @pytest.mark.integration
    def test_resolve_missing_dependency(
        self, integration_client, test_package, unique_test_id
    ):
        """Test resolution fails when dependency doesn't exist."""
        project_name, package_name = test_package

        ensure_content = yaml.dump({
            "dependencies": [
                {"project": project_name, "package": "nonexistent-pkg-xyz", "version": "1.0.0"}
            ]
        })

        # First we need a project that exists to reference
        # Upload artifact with dependency on nonexistent package version
        content = unique_content("missing", unique_test_id, "dep")
        files = {
            "file": ("missing-dep.tar.gz", BytesIO(content), "application/gzip"),
            "ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
        }
        response = integration_client.post(
            f"/api/v1/project/{project_name}/{package_name}/upload",
            files=files,
            data={"version": f"missing-dep-{unique_test_id}"},
        )
        # Should fail at upload time since package doesn't exist
        # OR succeed at upload but fail at resolution
        # Depending on implementation choice
        if response.status_code == 200:
            # Resolution should return missing dependencies
            response = integration_client.get(
                f"/api/v1/project/{project_name}/{package_name}/+/missing-dep-{unique_test_id}/resolve"
            )
            # Expect 200 with missing dependencies listed
            assert response.status_code == 200
            data = response.json()
            # The missing dependency should be in the 'missing' list
            assert len(data.get("missing", [])) >= 1
|
|
|
class TestCircularDependencyDetection:
    """Tests for #80: Circular Dependency Detection.

    Cycles are created by first uploading a version of A with no deps,
    then uploading B (or B and C) depending on A, then attempting a new
    version of A that depends back on the chain — which must be rejected.
    """

    @pytest.mark.integration
    def test_detect_direct_cycle(
        self, integration_client, test_project, unique_test_id
    ):
        """Test detection of direct cycle: A -> B -> A"""
        pkg_a = f"cycle-a-{unique_test_id}"
        pkg_b = f"cycle-b-{unique_test_id}"

        for pkg in [pkg_a, pkg_b]:
            response = integration_client.post(
                f"/api/v1/project/{test_project}/packages",
                json={"name": pkg}
            )
            assert response.status_code == 200

        try:
            # Upload A (no deps initially)
            content_a1 = unique_content("A-v1", unique_test_id, "cycle")
            files = {"file": ("a.tar.gz", BytesIO(content_a1), "application/gzip")}
            response = integration_client.post(
                f"/api/v1/project/{test_project}/{pkg_a}/upload",
                files=files,
                data={"version": "1.0.0"},
            )
            assert response.status_code == 200

            # Upload B (depends on A)
            ensure_b = yaml.dump({
                "dependencies": [
                    {"project": test_project, "package": pkg_a, "version": "1.0.0"}
                ]
            })
            content_b = unique_content("B", unique_test_id, "cycle")
            files = {
                "file": ("b.tar.gz", BytesIO(content_b), "application/gzip"),
                "ensure": ("orchard.ensure", BytesIO(ensure_b.encode()), "application/x-yaml"),
            }
            response = integration_client.post(
                f"/api/v1/project/{test_project}/{pkg_b}/upload",
                files=files,
                data={"version": "1.0.0"},
            )
            assert response.status_code == 200

            # Try to upload A v2 that depends on B (creating cycle)
            ensure_a2 = yaml.dump({
                "dependencies": [
                    {"project": test_project, "package": pkg_b, "version": "1.0.0"}
                ]
            })
            content_a2 = unique_content("A-v2", unique_test_id, "cycle")
            files = {
                "file": ("a2.tar.gz", BytesIO(content_a2), "application/gzip"),
                "ensure": ("orchard.ensure", BytesIO(ensure_a2.encode()), "application/x-yaml"),
            }
            response = integration_client.post(
                f"/api/v1/project/{test_project}/{pkg_a}/upload",
                files=files,
                data={"version": "2.0.0"},
            )
            # Should be rejected with 400 (circular dependency)
            # Accept either error-reporting shape: a "circular" detail
            # message or a structured error code
            assert response.status_code == 400
            data = response.json()
            assert "circular" in data.get("detail", "").lower() or \
                data.get("error") == "circular_dependency"

        finally:
            for pkg in [pkg_a, pkg_b]:
                integration_client.delete(f"/api/v1/project/{test_project}/packages/{pkg}")

    @pytest.mark.integration
    def test_detect_indirect_cycle(
        self, integration_client, test_project, unique_test_id
    ):
        """Test detection of indirect cycle: A -> B -> C -> A"""
        pkg_a = f"icycle-a-{unique_test_id}"
        pkg_b = f"icycle-b-{unique_test_id}"
        pkg_c = f"icycle-c-{unique_test_id}"

        for pkg in [pkg_a, pkg_b, pkg_c]:
            response = integration_client.post(
                f"/api/v1/project/{test_project}/packages",
                json={"name": pkg}
            )
            assert response.status_code == 200

        try:
            # Upload A v1 (no deps)
            content_a1 = unique_content("A-v1", unique_test_id, "icycle")
            files = {"file": ("a.tar.gz", BytesIO(content_a1), "application/gzip")}
            response = integration_client.post(
                f"/api/v1/project/{test_project}/{pkg_a}/upload",
                files=files,
                data={"version": "1.0.0"},
            )
            assert response.status_code == 200

            # Upload B (depends on A)
            ensure_b = yaml.dump({
                "dependencies": [
                    {"project": test_project, "package": pkg_a, "version": "1.0.0"}
                ]
            })
            content_b = unique_content("B", unique_test_id, "icycle")
            files = {
                "file": ("b.tar.gz", BytesIO(content_b), "application/gzip"),
                "ensure": ("orchard.ensure", BytesIO(ensure_b.encode()), "application/x-yaml"),
            }
            response = integration_client.post(
                f"/api/v1/project/{test_project}/{pkg_b}/upload",
                files=files,
                data={"version": "1.0.0"},
            )
            assert response.status_code == 200

            # Upload C (depends on B)
            ensure_c = yaml.dump({
                "dependencies": [
                    {"project": test_project, "package": pkg_b, "version": "1.0.0"}
                ]
            })
            content_c = unique_content("C", unique_test_id, "icycle")
            files = {
                "file": ("c.tar.gz", BytesIO(content_c), "application/gzip"),
                "ensure": ("orchard.ensure", BytesIO(ensure_c.encode()), "application/x-yaml"),
            }
            response = integration_client.post(
                f"/api/v1/project/{test_project}/{pkg_c}/upload",
                files=files,
                data={"version": "1.0.0"},
            )
            assert response.status_code == 200

            # Try to upload A v2 that depends on C (creating cycle A -> C -> B -> A)
            ensure_a2 = yaml.dump({
                "dependencies": [
                    {"project": test_project, "package": pkg_c, "version": "1.0.0"}
                ]
            })
            content_a2 = unique_content("A-v2", unique_test_id, "icycle")
            files = {
                "file": ("a2.tar.gz", BytesIO(content_a2), "application/gzip"),
                "ensure": ("orchard.ensure", BytesIO(ensure_a2.encode()), "application/x-yaml"),
            }
            response = integration_client.post(
                f"/api/v1/project/{test_project}/{pkg_a}/upload",
                files=files,
                data={"version": "2.0.0"},
            )
            # The transitive cycle must be rejected exactly like a direct one
            assert response.status_code == 400
            data = response.json()
            assert "circular" in data.get("detail", "").lower() or \
                data.get("error") == "circular_dependency"

        finally:
            for pkg in [pkg_a, pkg_b, pkg_c]:
                integration_client.delete(f"/api/v1/project/{test_project}/packages/{pkg}")

    @pytest.mark.integration
    def test_diamond_is_not_cycle(
        self, integration_client, test_project, unique_test_id
    ):
        """Test that diamond dependency is allowed (not a cycle)."""
        # Diamond: A -> B -> D, A -> C -> D
        # This is already tested in test_resolve_diamond_dependency
        # Just verify it doesn't trigger cycle detection
        pass  # Covered by TestDependencyResolution.test_resolve_diamond_dependency
|
|
|
|
class TestConflictDetection:
    """Tests for dependency conflict handling.

    The resolver uses "first version wins" strategy for version conflicts,
    allowing resolution to succeed rather than failing with an error.
    """

    # --- private helpers (underscore-prefixed so pytest does not collect them) ---

    def _create_packages(self, integration_client, test_project, packages):
        """Create each named package under *test_project*, asserting success."""
        for pkg in packages:
            response = integration_client.post(
                f"/api/v1/project/{test_project}/packages",
                json={"name": pkg},
            )
            assert response.status_code == 200

    def _delete_packages(self, integration_client, test_project, packages):
        """Best-effort cleanup: delete each named package (status ignored)."""
        for pkg in packages:
            integration_client.delete(
                f"/api/v1/project/{test_project}/packages/{pkg}"
            )

    def _upload(
        self,
        integration_client,
        test_project,
        package,
        filename,
        content,
        version,
        dependencies=None,
    ):
        """Upload *content* as *version* of *package*, asserting success.

        When *dependencies* is given, an ``orchard.ensure`` YAML file
        declaring them is attached to the multipart upload, matching the
        format the server parses on upload.
        """
        files = {"file": (filename, BytesIO(content), "application/gzip")}
        if dependencies is not None:
            ensure = yaml.dump({"dependencies": dependencies})
            files["ensure"] = (
                "orchard.ensure",
                BytesIO(ensure.encode()),
                "application/x-yaml",
            )
        response = integration_client.post(
            f"/api/v1/project/{test_project}/{package}/upload",
            files=files,
            data={"version": version},
        )
        assert response.status_code == 200

    # --- tests ---

    @pytest.mark.integration
    def test_version_conflict_uses_first_version(
        self, integration_client, test_project, unique_test_id
    ):
        """Test conflict when two deps require different versions of same package."""
        pkg_app = f"conflict-app-{unique_test_id}"
        pkg_lib_a = f"conflict-lib-a-{unique_test_id}"
        pkg_lib_b = f"conflict-lib-b-{unique_test_id}"
        pkg_common = f"conflict-common-{unique_test_id}"
        all_pkgs = [pkg_app, pkg_lib_a, pkg_lib_b, pkg_common]

        self._create_packages(integration_client, test_project, all_pkgs)

        try:
            # Two distinct versions of the shared dependency.
            self._upload(
                integration_client, test_project, pkg_common, "common1.tar.gz",
                unique_content("common-v1", unique_test_id, "conflict"), "1.0.0",
            )
            self._upload(
                integration_client, test_project, pkg_common, "common2.tar.gz",
                unique_content("common-v2", unique_test_id, "conflict"), "2.0.0",
            )

            # lib-a pins common@1.0.0 while lib-b pins common@2.0.0.
            self._upload(
                integration_client, test_project, pkg_lib_a, "lib-a.tar.gz",
                unique_content("lib-a", unique_test_id, "conflict"), "1.0.0",
                dependencies=[
                    {"project": test_project, "package": pkg_common, "version": "1.0.0"}
                ],
            )
            self._upload(
                integration_client, test_project, pkg_lib_b, "lib-b.tar.gz",
                unique_content("lib-b", unique_test_id, "conflict"), "1.0.0",
                dependencies=[
                    {"project": test_project, "package": pkg_common, "version": "2.0.0"}
                ],
            )

            # app depends on both libs, producing the version conflict on common.
            self._upload(
                integration_client, test_project, pkg_app, "app.tar.gz",
                unique_content("app", unique_test_id, "conflict"), "1.0.0",
                dependencies=[
                    {"project": test_project, "package": pkg_lib_a, "version": "1.0.0"},
                    {"project": test_project, "package": pkg_lib_b, "version": "1.0.0"},
                ],
            )

            # With lenient conflict handling resolution succeeds; the resolver
            # keeps the first-encountered version of the conflicting package.
            response = integration_client.get(
                f"/api/v1/project/{test_project}/{pkg_app}/+/1.0.0/resolve"
            )
            assert response.status_code == 200
            data = response.json()

            # Resolution should succeed with first-encountered version of common.
            assert data["artifact_count"] >= 1
            # Find the common package in resolved list.
            common_resolved = [r for r in data["resolved"] if r["package"] == pkg_common]
            assert len(common_resolved) == 1  # Only one version should be included

        finally:
            self._delete_packages(integration_client, test_project, all_pkgs)

    @pytest.mark.integration
    def test_no_conflict_same_version(
        self, integration_client, test_project, unique_test_id
    ):
        """Test no conflict when multiple deps require same version."""
        pkg_app = f"noconflict-app-{unique_test_id}"
        pkg_lib_a = f"noconflict-lib-a-{unique_test_id}"
        pkg_lib_b = f"noconflict-lib-b-{unique_test_id}"
        pkg_common = f"noconflict-common-{unique_test_id}"
        all_pkgs = [pkg_app, pkg_lib_a, pkg_lib_b, pkg_common]

        self._create_packages(integration_client, test_project, all_pkgs)

        try:
            # A single version of common that everything agrees on.
            self._upload(
                integration_client, test_project, pkg_common, "common.tar.gz",
                unique_content("common", unique_test_id, "noconflict"), "1.0.0",
            )

            # Both lib-a and lib-b depend on common@1.0.0 — no conflict possible.
            for lib_name, lib_pkg in [("lib-a", pkg_lib_a), ("lib-b", pkg_lib_b)]:
                self._upload(
                    integration_client, test_project, lib_pkg, f"{lib_name}.tar.gz",
                    unique_content(lib_name, unique_test_id, "noconflict"), "1.0.0",
                    dependencies=[
                        {"project": test_project, "package": pkg_common, "version": "1.0.0"}
                    ],
                )

            # App depends on both libs.
            self._upload(
                integration_client, test_project, pkg_app, "app.tar.gz",
                unique_content("app", unique_test_id, "noconflict"), "1.0.0",
                dependencies=[
                    {"project": test_project, "package": pkg_lib_a, "version": "1.0.0"},
                    {"project": test_project, "package": pkg_lib_b, "version": "1.0.0"},
                ],
            )

            # Resolution should succeed (no conflict).
            response = integration_client.get(
                f"/api/v1/project/{test_project}/{pkg_app}/+/1.0.0/resolve"
            )
            assert response.status_code == 200
            data = response.json()
            # common should appear only once in the resolved set.
            packages = [r["package"] for r in data["resolved"]]
            assert packages.count(pkg_common) == 1

        finally:
            self._delete_packages(integration_client, test_project, all_pkgs)
|
|
|
|
|
|
class TestAutoFetchDependencies:
|
|
"""Tests for auto-fetch functionality in dependency resolution.
|
|
|
|
These tests verify:
|
|
- Resolution with auto_fetch=true (default) fetches missing dependencies from upstream
|
|
- Resolution with auto_fetch=false skips network calls for fast resolution
|
|
- Proper handling of missing/non-existent packages
|
|
- Response schema includes fetched artifacts list
|
|
"""
|
|
|
|
@pytest.mark.integration
|
|
def test_resolve_auto_fetch_true_is_default(
|
|
self, integration_client, test_package, unique_test_id
|
|
):
|
|
"""Test that auto_fetch=true is the default (no fetch needed when all deps cached)."""
|
|
project_name, package_name = test_package
|
|
|
|
# Upload a simple artifact without dependencies
|
|
content = unique_content("autofetch-default", unique_test_id, "nodeps")
|
|
files = {"file": ("default.tar.gz", BytesIO(content), "application/gzip")}
|
|
response = integration_client.post(
|
|
f"/api/v1/project/{project_name}/{package_name}/upload",
|
|
files=files,
|
|
data={"version": f"v1.0.0-{unique_test_id}"},
|
|
)
|
|
assert response.status_code == 200
|
|
|
|
# Resolve without auto_fetch param (should default to false)
|
|
response = integration_client.get(
|
|
f"/api/v1/project/{project_name}/{package_name}/+/v1.0.0-{unique_test_id}/resolve"
|
|
)
|
|
assert response.status_code == 200
|
|
data = response.json()
|
|
|
|
# Should have empty fetched list
|
|
assert data.get("fetched", []) == []
|
|
assert data["artifact_count"] == 1
|
|
|
|
@pytest.mark.integration
|
|
def test_resolve_auto_fetch_explicit_false(
|
|
self, integration_client, test_package, unique_test_id
|
|
):
|
|
"""Test that auto_fetch=false works explicitly."""
|
|
project_name, package_name = test_package
|
|
|
|
content = unique_content("autofetch-explicit-false", unique_test_id, "nodeps")
|
|
files = {"file": ("explicit.tar.gz", BytesIO(content), "application/gzip")}
|
|
response = integration_client.post(
|
|
f"/api/v1/project/{project_name}/{package_name}/upload",
|
|
files=files,
|
|
data={"version": f"v2.0.0-{unique_test_id}"},
|
|
)
|
|
assert response.status_code == 200
|
|
|
|
# Resolve with explicit auto_fetch=false
|
|
response = integration_client.get(
|
|
f"/api/v1/project/{project_name}/{package_name}/+/v2.0.0-{unique_test_id}/resolve",
|
|
params={"auto_fetch": "false"},
|
|
)
|
|
assert response.status_code == 200
|
|
data = response.json()
|
|
assert data.get("fetched", []) == []
|
|
|
|
@pytest.mark.integration
|
|
def test_resolve_auto_fetch_true_no_missing_deps(
|
|
self, integration_client, test_project, unique_test_id
|
|
):
|
|
"""Test that auto_fetch=true works when all deps are already cached."""
|
|
pkg_a = f"fetch-a-{unique_test_id}"
|
|
pkg_b = f"fetch-b-{unique_test_id}"
|
|
|
|
for pkg in [pkg_a, pkg_b]:
|
|
response = integration_client.post(
|
|
f"/api/v1/project/{test_project}/packages",
|
|
json={"name": pkg}
|
|
)
|
|
assert response.status_code == 200
|
|
|
|
try:
|
|
# Upload B (no deps)
|
|
content_b = unique_content("B", unique_test_id, "fetch")
|
|
files = {"file": ("b.tar.gz", BytesIO(content_b), "application/gzip")}
|
|
response = integration_client.post(
|
|
f"/api/v1/project/{test_project}/{pkg_b}/upload",
|
|
files=files,
|
|
data={"version": "1.0.0"},
|
|
)
|
|
assert response.status_code == 200
|
|
|
|
# Upload A (depends on B)
|
|
ensure_a = yaml.dump({
|
|
"dependencies": [
|
|
{"project": test_project, "package": pkg_b, "version": "1.0.0"}
|
|
]
|
|
})
|
|
content_a = unique_content("A", unique_test_id, "fetch")
|
|
files = {
|
|
"file": ("a.tar.gz", BytesIO(content_a), "application/gzip"),
|
|
"ensure": ("orchard.ensure", BytesIO(ensure_a.encode()), "application/x-yaml"),
|
|
}
|
|
response = integration_client.post(
|
|
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
|
files=files,
|
|
data={"version": "1.0.0"},
|
|
)
|
|
assert response.status_code == 200
|
|
|
|
# Resolve with auto_fetch=true - should work since deps are cached
|
|
response = integration_client.get(
|
|
f"/api/v1/project/{test_project}/{pkg_a}/+/1.0.0/resolve",
|
|
params={"auto_fetch": "true"},
|
|
)
|
|
assert response.status_code == 200
|
|
data = response.json()
|
|
|
|
# Should resolve successfully
|
|
assert data["artifact_count"] == 2
|
|
# Nothing fetched since everything was cached
|
|
assert len(data.get("fetched", [])) == 0
|
|
# No missing deps
|
|
assert len(data.get("missing", [])) == 0
|
|
|
|
finally:
|
|
for pkg in [pkg_a, pkg_b]:
|
|
integration_client.delete(f"/api/v1/project/{test_project}/packages/{pkg}")
|
|
|
|
@pytest.mark.integration
|
|
def test_resolve_missing_dep_with_auto_fetch_false(
|
|
self, integration_client, test_package, unique_test_id
|
|
):
|
|
"""Test that missing deps are reported when auto_fetch=false."""
|
|
project_name, package_name = test_package
|
|
|
|
# Create _pypi system project if it doesn't exist
|
|
response = integration_client.get("/api/v1/projects/_pypi")
|
|
if response.status_code == 404:
|
|
response = integration_client.post(
|
|
"/api/v1/projects",
|
|
json={"name": "_pypi", "description": "System project for PyPI packages"}
|
|
)
|
|
# May fail if already exists or can't create - that's ok
|
|
|
|
# Upload artifact with dependency on _pypi package that doesn't exist locally
|
|
ensure_content = yaml.dump({
|
|
"dependencies": [
|
|
{"project": "_pypi", "package": "nonexistent-pkg-xyz123", "version": ">=1.0.0"}
|
|
]
|
|
})
|
|
|
|
content = unique_content("missing-pypi", unique_test_id, "dep")
|
|
files = {
|
|
"file": ("missing-pypi-dep.tar.gz", BytesIO(content), "application/gzip"),
|
|
"ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
|
|
}
|
|
response = integration_client.post(
|
|
f"/api/v1/project/{project_name}/{package_name}/upload",
|
|
files=files,
|
|
data={"version": f"v3.0.0-{unique_test_id}"},
|
|
)
|
|
# Upload should succeed - validation is loose for system projects
|
|
if response.status_code == 200:
|
|
# Resolve without auto_fetch - should report missing
|
|
response = integration_client.get(
|
|
f"/api/v1/project/{project_name}/{package_name}/+/v3.0.0-{unique_test_id}/resolve",
|
|
params={"auto_fetch": "false"},
|
|
)
|
|
assert response.status_code == 200
|
|
data = response.json()
|
|
|
|
# Should have missing dependencies
|
|
assert len(data.get("missing", [])) >= 1
|
|
|
|
# Verify missing dependency structure
|
|
missing = data["missing"][0]
|
|
assert missing["project"] == "_pypi"
|
|
assert missing["package"] == "nonexistent-pkg-xyz123"
|
|
# Without auto_fetch, these should be false/None
|
|
assert missing.get("fetch_attempted", False) is False
|
|
|
|
@pytest.mark.integration
|
|
def test_resolve_response_schema_has_fetched_field(
|
|
self, integration_client, test_package, unique_test_id
|
|
):
|
|
"""Test that the resolve response always includes the fetched field."""
|
|
project_name, package_name = test_package
|
|
|
|
content = unique_content("schema-check", unique_test_id, "nodeps")
|
|
files = {"file": ("schema.tar.gz", BytesIO(content), "application/gzip")}
|
|
response = integration_client.post(
|
|
f"/api/v1/project/{project_name}/{package_name}/upload",
|
|
files=files,
|
|
data={"version": f"v4.0.0-{unique_test_id}"},
|
|
)
|
|
assert response.status_code == 200
|
|
|
|
# Check both auto_fetch modes include fetched field
|
|
for auto_fetch in ["false", "true"]:
|
|
response = integration_client.get(
|
|
f"/api/v1/project/{project_name}/{package_name}/+/v4.0.0-{unique_test_id}/resolve",
|
|
params={"auto_fetch": auto_fetch},
|
|
)
|
|
assert response.status_code == 200
|
|
data = response.json()
|
|
|
|
# Required fields
|
|
assert "requested" in data
|
|
assert "resolved" in data
|
|
assert "missing" in data
|
|
assert "fetched" in data # New field
|
|
assert "total_size" in data
|
|
assert "artifact_count" in data
|
|
|
|
# Types
|
|
assert isinstance(data["fetched"], list)
|
|
assert isinstance(data["missing"], list)
|
|
|
|
@pytest.mark.integration
|
|
def test_missing_dep_schema_has_fetch_fields(
|
|
self, integration_client, test_package, unique_test_id
|
|
):
|
|
"""Test that missing dependency entries have fetch_attempted and fetch_error fields."""
|
|
project_name, package_name = test_package
|
|
|
|
# Create a dependency on a non-existent package in a real project
|
|
dep_project_name = f"dep-test-{unique_test_id}"
|
|
response = integration_client.post(
|
|
"/api/v1/projects", json={"name": dep_project_name}
|
|
)
|
|
assert response.status_code == 200
|
|
|
|
try:
|
|
ensure_content = yaml.dump({
|
|
"dependencies": [
|
|
{"project": dep_project_name, "package": "nonexistent-pkg", "version": "1.0.0"}
|
|
]
|
|
})
|
|
|
|
content = unique_content("missing-schema", unique_test_id, "check")
|
|
files = {
|
|
"file": ("missing-schema.tar.gz", BytesIO(content), "application/gzip"),
|
|
"ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
|
|
}
|
|
response = integration_client.post(
|
|
f"/api/v1/project/{project_name}/{package_name}/upload",
|
|
files=files,
|
|
data={"version": f"v5.0.0-{unique_test_id}"},
|
|
)
|
|
assert response.status_code == 200
|
|
|
|
# Resolve
|
|
response = integration_client.get(
|
|
f"/api/v1/project/{project_name}/{package_name}/+/v5.0.0-{unique_test_id}/resolve",
|
|
params={"auto_fetch": "true"},
|
|
)
|
|
assert response.status_code == 200
|
|
data = response.json()
|
|
|
|
# Should have missing dependencies
|
|
assert len(data.get("missing", [])) >= 1
|
|
|
|
# Check schema for missing dependency
|
|
missing = data["missing"][0]
|
|
assert "project" in missing
|
|
assert "package" in missing
|
|
assert "constraint" in missing
|
|
assert "required_by" in missing
|
|
# New fields
|
|
assert "fetch_attempted" in missing
|
|
assert "fetch_error" in missing # May be None
|
|
|
|
finally:
|
|
integration_client.delete(f"/api/v1/projects/{dep_project_name}")
|