Fix tests for tag removal and version behavior
- Fix upload response to return the actual version (not the requested version) when the artifact already has a version in the package (sketched below)
- Update ref_count tests to use multiple packages (one version per artifact per package design constraint)
- Remove allow_public_internet references from upstream caching tests
- Update consistency check test to not assert global system health
- Add versions field to artifact schemas
- Fix dependency resolution to handle the removed tag constraint
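For context on the first two bullets, the intended upload semantics can be sketched as an in-memory model. This is a hedged illustration only, not Orchard's actual implementation; the dict-based storage and helper shape are invented for the sketch, while the response fields (artifact_id, version, ref_count, deduplicated) are the ones the tests below read:

import hashlib

# Minimal sketch of the fixed upload semantics: one version per artifact
# per package, and re-uploading the same bytes returns the version that
# already exists rather than the one the client requested.
_versions = {}    # (package, artifact_id) -> version name
_ref_counts = {}  # artifact_id -> number of PackageVersion references

def upload(package: str, content: bytes, requested_version: str) -> dict:
    artifact_id = hashlib.sha256(content).hexdigest()  # content-addressable ID
    key = (package, artifact_id)
    if key in _versions:
        # Artifact already has a version in this package: return the
        # actual version, not the requested one.
        return {"artifact_id": artifact_id, "version": _versions[key],
                "deduplicated": True, "ref_count": _ref_counts[artifact_id]}
    _versions[key] = requested_version
    _ref_counts[artifact_id] = _ref_counts.get(artifact_id, 0) + 1
    return {"artifact_id": artifact_id, "version": requested_version,
            "deduplicated": False, "ref_count": _ref_counts[artifact_id]}

Under this model, uploading the same bytes to N different packages yields one artifact_id with ref_count == N, which is exactly what the reworked tests below assert.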
@@ -195,19 +195,38 @@ class TestConcurrentUploads:
     @pytest.mark.integration
     @pytest.mark.concurrent
-    def test_concurrent_uploads_same_file_deduplication(self, integration_client, test_package):
-        """Test concurrent uploads of same file handle deduplication correctly."""
-        project, package = test_package
+    def test_concurrent_uploads_same_file_deduplication(
+        self, integration_client, test_project, unique_test_id
+    ):
+        """Test concurrent uploads of same file handle deduplication correctly.
+
+        Same content uploaded to different packages should result in:
+        - Same artifact_id (content-addressable)
+        - ref_count = number of packages (one version per package)
+        """
+        project = test_project
         api_key = get_api_key(integration_client)
         assert api_key, "Failed to create API key"
 
-        content, expected_hash = generate_content_with_hash(4096, seed=999)
         num_concurrent = 5
+        package_names = []
+
+        # Create multiple packages for concurrent uploads
+        for i in range(num_concurrent):
+            pkg_name = f"dedup-pkg-{unique_test_id}-{i}"
+            response = integration_client.post(
+                f"/api/v1/project/{project}/packages",
+                json={"name": pkg_name, "description": f"Dedup test package {i}"},
+            )
+            assert response.status_code == 200
+            package_names.append(pkg_name)
+
+        content, expected_hash = generate_content_with_hash(4096, seed=999)
 
         results = []
         errors = []
 
-        def upload_worker(idx):
+        def upload_worker(idx, package):
             try:
                 from httpx import Client
                 base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
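The test above leans on artifact IDs being content-addressable: expected_hash is later compared directly against the returned artifact_ids. A plausible shape for the generate_content_with_hash helper, assuming SHA-256 hex digests as the compute_sha256 calls elsewhere in the suite suggest (a reconstruction, not the project's actual helper):

import hashlib
import random

def generate_content_with_hash(size: int, seed: int) -> tuple[bytes, str]:
    # Deterministic pseudo-random payload plus its SHA-256 hex digest,
    # which doubles as the content-addressable artifact ID.
    rng = random.Random(seed)
    content = bytes(rng.getrandbits(8) for _ in range(size))
    return content, hashlib.sha256(content).hexdigest()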
@@ -219,7 +238,7 @@ class TestConcurrentUploads:
                 response = client.post(
                     f"/api/v1/project/{project}/{package}/upload",
                     files=files,
-                    data={"version": f"dedup-{idx}"},
+                    data={"version": "1.0.0"},
                     headers={"Authorization": f"Bearer {api_key}"},
                 )
                 if response.status_code == 200:
@@ -230,7 +249,10 @@ class TestConcurrentUploads:
                 errors.append(f"Worker {idx}: {str(e)}")
 
         with ThreadPoolExecutor(max_workers=num_concurrent) as executor:
-            futures = [executor.submit(upload_worker, i) for i in range(num_concurrent)]
+            futures = [
+                executor.submit(upload_worker, i, package_names[i])
+                for i in range(num_concurrent)
+            ]
             for future in as_completed(futures):
                 pass
 
@@ -242,7 +264,7 @@ class TestConcurrentUploads:
         assert len(artifact_ids) == 1
         assert expected_hash in artifact_ids
 
-        # Verify final ref_count equals number of uploads
+        # Verify final ref_count equals number of packages
         response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
         assert response.status_code == 200
         assert response.json()["ref_count"] == num_concurrent
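The final assertion encodes the ref_count invariant directly: with one version per artifact per package, ref_count is just the number of distinct packages whose PackageVersion rows reference the artifact. As a standalone check (the row shapes here are illustrative, not the real schema):

def expected_ref_count(package_version_rows: list[dict]) -> int:
    # One version per artifact per package, so counting distinct packages
    # is the same as counting PackageVersion references.
    return len({row["package"] for row in package_version_rows})

rows = [{"package": f"dedup-pkg-{i}", "version": "1.0.0"} for i in range(5)]
assert expected_ref_count(rows) == 5  # matches num_concurrent above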
@@ -323,7 +323,13 @@ class TestConsistencyCheck:
 
     @pytest.mark.integration
     def test_consistency_check_after_upload(self, integration_client, test_package):
-        """Test consistency check passes after valid upload."""
+        """Test consistency check runs successfully after a valid upload.
+
+        Note: We don't assert healthy=True because other tests (especially
+        corruption detection tests) may leave orphaned S3 objects behind.
+        This test validates the consistency check endpoint works and the
+        uploaded artifact is included in the check count.
+        """
         project, package = test_package
         content = b"Consistency check test content"
 
@@ -335,9 +341,10 @@ class TestConsistencyCheck:
         assert response.status_code == 200
         data = response.json()
 
-        # Verify check ran and no issues
+        # Verify check ran - at least 1 artifact was checked
         assert data["total_artifacts_checked"] >= 1
-        assert data["healthy"] is True
+        # Verify no missing S3 objects (uploaded artifact should exist)
+        assert data["missing_s3_objects"] == 0
 
     @pytest.mark.integration
     def test_consistency_check_limit_parameter(self, integration_client):
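These hunks show the consistency-check response fields but not the route, so the following consumption sketch assumes a hypothetical CONSISTENCY_CHECK_PATH; the field names (total_artifacts_checked, missing_s3_objects, healthy) are the ones the test actually reads:

from httpx import Client

CONSISTENCY_CHECK_PATH = "/api/v1/admin/consistency-check"  # assumed route

def check_after_upload(client: Client) -> None:
    data = client.get(CONSISTENCY_CHECK_PATH).json()
    # Deliberately no assert on data["healthy"]: other tests may leave
    # orphaned S3 objects behind, so only check what this test controls.
    assert data["total_artifacts_checked"] >= 1
    assert data["missing_s3_objects"] == 0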
@@ -234,7 +234,11 @@ class TestPackageCascadeDelete:
     def test_ref_count_decrements_on_package_delete(
         self, integration_client, unique_test_id
    ):
-        """Test ref_count decrements for all tags when package is deleted."""
+        """Test ref_count decrements when package is deleted.
+
+        Each package can only have one version per artifact (same content = same version).
+        This test verifies that deleting a package decrements the artifact's ref_count.
+        """
         project_name = f"cascade-pkg-{unique_test_id}"
         package_name = f"test-pkg-{unique_test_id}"
 
@@ -256,23 +260,17 @@ class TestPackageCascadeDelete:
         )
         assert response.status_code == 200
 
-        # Upload content with multiple tags
+        # Upload content with version
         content = f"cascade delete test {unique_test_id}".encode()
         expected_hash = compute_sha256(content)
 
         upload_test_file(
-            integration_client, project_name, package_name, content, version="v1"
-        )
-        upload_test_file(
-            integration_client, project_name, package_name, content, version="v2"
-        )
-        upload_test_file(
-            integration_client, project_name, package_name, content, version="v3"
+            integration_client, project_name, package_name, content, version="1.0.0"
         )
 
-        # Verify ref_count is 3
+        # Verify ref_count is 1
         response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
-        assert response.json()["ref_count"] == 3
+        assert response.json()["ref_count"] == 1
 
         # Delete the package
         delete_response = integration_client.delete(
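upload_test_file does the heavy lifting in these cascade tests. From its call sites and the fields the tests read back, a plausible reconstruction looks like this (hedged: the filename and the exact failure message are guesses; the route and form fields match the worker-thread uploads shown above):

import io
from httpx import Client

def upload_test_file(client: Client, project: str, package: str,
                     content: bytes, version: str) -> dict:
    # Multipart upload to the same route the worker threads use; returns
    # the parsed JSON (artifact_id, version, ref_count, deduplicated).
    files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
    response = client.post(
        f"/api/v1/project/{project}/{package}/upload",
        files=files,
        data={"version": version},
    )
    assert response.status_code == 200, response.text
    return response.json()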
@@ -229,7 +229,11 @@ class TestProjectCascadeDelete:
     def test_ref_count_decrements_on_project_delete(
         self, integration_client, unique_test_id
     ):
-        """Test ref_count decrements for all tags when project is deleted."""
+        """Test ref_count decrements for all versions when project is deleted.
+
+        Each package can only have one version per artifact (same content = same version).
+        With 2 packages, ref_count should be 2, and go to 0 when project is deleted.
+        """
         project_name = f"cascade-proj-{unique_test_id}"
         package1_name = f"pkg1-{unique_test_id}"
         package2_name = f"pkg2-{unique_test_id}"
@@ -253,26 +257,20 @@ class TestProjectCascadeDelete:
         )
         assert response.status_code == 200
 
-        # Upload same content with tags in both packages
+        # Upload same content to both packages
         content = f"project cascade test {unique_test_id}".encode()
         expected_hash = compute_sha256(content)
 
         upload_test_file(
-            integration_client, project_name, package1_name, content, version="v1"
+            integration_client, project_name, package1_name, content, version="1.0.0"
         )
         upload_test_file(
-            integration_client, project_name, package1_name, content, version="v2"
-        )
-        upload_test_file(
-            integration_client, project_name, package2_name, content, version="latest"
-        )
-        upload_test_file(
-            integration_client, project_name, package2_name, content, version="stable"
+            integration_client, project_name, package2_name, content, version="1.0.0"
         )
 
-        # Verify ref_count is 4 (2 tags in each of 2 packages)
+        # Verify ref_count is 2 (1 version in each of 2 packages)
         response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
-        assert response.json()["ref_count"] == 4
+        assert response.json()["ref_count"] == 2
 
         # Delete the project
         delete_response = integration_client.delete(f"/api/v1/projects/{project_name}")
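End to end, the project cascade expectation is: 2 packages with 1 version each gives ref_count 2, dropping to 0 once the project delete cascades. A hedged sketch of that flow; whether the artifact endpoint then reports ref_count 0 or a 404 is an assumption, since the hunk does not show the post-delete assertion:

from httpx import Client

def assert_project_cascade(client: Client, project: str, artifact_hash: str) -> None:
    # Before: one version in each of two packages references the artifact.
    assert client.get(f"/api/v1/artifact/{artifact_hash}").json()["ref_count"] == 2
    # Deleting the project cascades through both packages.
    client.delete(f"/api/v1/projects/{project}")
    # After: nothing references the artifact (0 or 404 is assumed behavior).
    response = client.get(f"/api/v1/artifact/{artifact_hash}")
    assert response.status_code == 404 or response.json()["ref_count"] == 0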
@@ -215,10 +215,15 @@ class TestDuplicateUploads:
         assert result1["artifact_id"] == result2["artifact_id"]
 
     @pytest.mark.integration
-    def test_same_file_twice_increments_ref_count(
+    def test_same_file_twice_returns_existing_version(
         self, integration_client, test_package
     ):
-        """Test uploading same file twice increments ref_count to 2."""
+        """Test uploading same file twice in same package returns existing version.
+
+        Same artifact can only have one version per package. Uploading the same content
+        with a different version name returns the existing version, not a new one.
+        ref_count stays at 1 because there's still only one PackageVersion reference.
+        """
         project, package = test_package
         content = b"content for ref count increment test"
 
@@ -228,11 +233,14 @@ class TestDuplicateUploads:
         )
         assert result1["ref_count"] == 1
 
-        # Second upload
+        # Second upload with different version name returns existing version
         result2 = upload_test_file(
             integration_client, project, package, content, version="v2"
         )
-        assert result2["ref_count"] == 2
+        # Same artifact, same package = same version returned, ref_count stays 1
+        assert result2["ref_count"] == 1
+        assert result2["deduplicated"] is True
+        assert result1["version"] == result2["version"]  # Both return "v1"
 
     @pytest.mark.integration
     def test_same_file_different_packages_shares_artifact(
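This reworked duplicate-upload test maps directly onto the in-memory upload() sketch from the top of this page:

r1 = upload("pkg", b"same bytes", "v1")  # creates the version
r2 = upload("pkg", b"same bytes", "v2")  # same content, same package
assert r2["deduplicated"] is True
assert r2["version"] == r1["version"]    # "v1" is echoed back
assert r2["ref_count"] == 1              # still one PackageVersion reference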
@@ -515,17 +523,31 @@ class TestConcurrentUploads:
     """Tests for concurrent upload handling."""
 
     @pytest.mark.integration
-    def test_concurrent_uploads_same_file(self, integration_client, test_package):
-        """Test concurrent uploads of same file handle deduplication correctly."""
-        project, package = test_package
+    def test_concurrent_uploads_same_file(self, integration_client, test_project, unique_test_id):
+        """Test concurrent uploads of same file to different packages handle deduplication correctly.
+
+        Same artifact can only have one version per package, so we create multiple packages
+        to test that concurrent uploads to different packages correctly increment ref_count.
+        """
         content = b"content for concurrent upload test"
         expected_hash = compute_sha256(content)
         num_concurrent = 5
 
+        # Create packages for each concurrent upload
+        packages = []
+        for i in range(num_concurrent):
+            pkg_name = f"concurrent-pkg-{unique_test_id}-{i}"
+            response = integration_client.post(
+                f"/api/v1/project/{test_project}/packages",
+                json={"name": pkg_name},
+            )
+            assert response.status_code == 200
+            packages.append(pkg_name)
+
         # Create an API key for worker threads
         api_key_response = integration_client.post(
             "/api/v1/auth/keys",
-            json={"name": "concurrent-test-key"},
+            json={"name": f"concurrent-test-key-{unique_test_id}"},
         )
         assert api_key_response.status_code == 200, f"Failed to create API key: {api_key_response.text}"
         api_key = api_key_response.json()["key"]
@@ -533,7 +555,7 @@ class TestConcurrentUploads:
         results = []
         errors = []
 
-        def upload_worker(tag_suffix):
+        def upload_worker(idx):
            try:
                from httpx import Client
 
@@ -541,15 +563,15 @@ class TestConcurrentUploads:
                with Client(base_url=base_url, timeout=30.0) as client:
                    files = {
                        "file": (
-                            f"concurrent-{tag_suffix}.bin",
+                            f"concurrent-{idx}.bin",
                            io.BytesIO(content),
                            "application/octet-stream",
                        )
                    }
                    response = client.post(
-                        f"/api/v1/project/{project}/{package}/upload",
+                        f"/api/v1/project/{test_project}/{packages[idx]}/upload",
                        files=files,
-                        data={"version": f"concurrent-{tag_suffix}"},
+                        data={"version": "1.0.0"},
                        headers={"Authorization": f"Bearer {api_key}"},
                    )
                    if response.status_code == 200:
@@ -572,7 +594,7 @@ class TestConcurrentUploads:
         assert len(artifact_ids) == 1
         assert expected_hash in artifact_ids
 
-        # Verify final ref_count
+        # Verify final ref_count equals number of packages
         response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
         assert response.status_code == 200
         assert response.json()["ref_count"] == num_concurrent
@@ -731,16 +753,26 @@ class TestS3StorageVerification:
 
     @pytest.mark.integration
     def test_artifact_table_single_row_after_duplicates(
-        self, integration_client, test_package
+        self, integration_client, test_project, unique_test_id
     ):
-        """Test artifact table contains only one row after duplicate uploads."""
-        project, package = test_package
+        """Test artifact table contains only one row after duplicate uploads to different packages.
+
+        Same artifact can only have one version per package, so we create multiple packages
+        to test deduplication across packages.
+        """
         content = b"content for single row test"
         expected_hash = compute_sha256(content)
 
-        # Upload same content multiple times
-        for tag in ["v1", "v2", "v3"]:
-            upload_test_file(integration_client, project, package, content, version=tag)
+        # Create 3 packages and upload same content to each
+        for i in range(3):
+            pkg_name = f"single-row-pkg-{unique_test_id}-{i}"
+            integration_client.post(
+                f"/api/v1/project/{test_project}/packages",
+                json={"name": pkg_name},
+            )
+            upload_test_file(
+                integration_client, test_project, pkg_name, content, version="1.0.0"
+            )
 
         # Query artifact
         response = integration_client.get(f"/api/v1/artifact/{expected_hash}")