From 87f30ea8989c3cb0d90d40b8217ff34ac06655a5 Mon Sep 17 00:00:00 2001
From: Mondo Diaz
Date: Tue, 3 Feb 2026 12:45:44 -0600
Subject: [PATCH] Update tests for tag removal

- Remove Tag/TagHistory model tests from unit tests
- Update CacheSettings tests to remove allow_public_internet field
- Replace tag= with version= in upload_test_file calls
- Update test assertions to use versions instead of tags
- Remove tests for tag: prefix downloads (now uses version:)
- Update dependency tests for version-only schema
---
 backend/tests/factories.py                     |   4 -
 .../tests/integration/test_artifacts_api.py    |  16 +-
 .../integration/test_concurrent_operations.py  |  26 +--
 .../tests/integration/test_error_handling.py   |   6 +-
 .../test_integrity_verification.py             |  40 ++---
 .../tests/integration/test_large_uploads.py    |  28 +--
 .../tests/integration/test_packages_api.py     |   6 +-
 .../tests/integration/test_projects_api.py     |   8 +-
 .../tests/integration/test_size_boundary.py    |  44 ++---
 .../integration/test_streaming_download.py     |  54 +++---
 .../integration/test_upload_download_api.py    | 164 +++++++-----------
 backend/tests/integration/test_version_api.py  |  46 +----
 .../tests/integration/test_versions_api.py     |  92 +---------
 backend/tests/test_dependencies.py             | 104 +++++------
 backend/tests/test_download_verification.py    |  36 ++--
 backend/tests/test_upstream_caching.py         |  11 +-
 backend/tests/unit/test_models.py              |  48 -----
 17 files changed, 247 insertions(+), 486 deletions(-)

diff --git a/backend/tests/factories.py b/backend/tests/factories.py
index 50112ea..245fdab 100644
--- a/backend/tests/factories.py
+++ b/backend/tests/factories.py
@@ -96,7 +96,6 @@ def upload_test_file(
     project: str,
     package: str,
     content: bytes,
     filename: str = "test.bin",
-    tag: Optional[str] = None,
     version: Optional[str] = None,
 ) -> dict:
     """
@@ -108,7 +107,6 @@ def upload_test_file(
         package: Package name
         content: File content as bytes
         filename: Original filename
-        tag: Optional tag to assign
         version: Optional version to assign
 
     Returns:
@@ -116,8 +114,6 @@
     """
     files = {"file": (filename, io.BytesIO(content), "application/octet-stream")}
     data = {}
-    if tag:
-        data["tag"] = tag
     if version:
         data["version"] = version
 
diff --git a/backend/tests/integration/test_artifacts_api.py b/backend/tests/integration/test_artifacts_api.py
index f9b0841..e7018e0 100644
--- a/backend/tests/integration/test_artifacts_api.py
+++ b/backend/tests/integration/test_artifacts_api.py
@@ -25,7 +25,7 @@ class TestArtifactRetrieval:
         expected_hash = compute_sha256(content)
 
         upload_test_file(
-            integration_client, project_name, package_name, content, tag="v1"
+            integration_client, project_name, package_name, content, version="v1"
         )
 
         response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
@@ -53,7 +53,7 @@ class TestArtifactRetrieval:
         expected_hash = compute_sha256(content)
 
         upload_test_file(
-            integration_client, project_name, package_name, content, tag="tagged-v1"
+            integration_client, project_name, package_name, content, version="tagged-v1"
         )
 
         response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
@@ -82,7 +82,7 @@ class TestArtifactStats:
         expected_hash = compute_sha256(content)
 
         upload_test_file(
-            integration_client, project, package, content, tag=f"art-{unique_test_id}"
+            integration_client, project, package, content, version=f"art-{unique_test_id}"
         )
 
         response = integration_client.get(f"/api/v1/artifact/{expected_hash}/stats")
@@ -136,8 +136,8 @@ class TestArtifactStats:
         )
 
         # Upload same content to both projects
upload_test_file(integration_client, proj1, "pkg", content, tag="v1") - upload_test_file(integration_client, proj2, "pkg", content, tag="v1") + upload_test_file(integration_client, proj1, "pkg", content, version="v1") + upload_test_file(integration_client, proj2, "pkg", content, version="v1") # Check artifact stats response = integration_client.get(f"/api/v1/artifact/{expected_hash}/stats") @@ -315,7 +315,7 @@ class TestOrphanedArtifacts: expected_hash = compute_sha256(content) # Upload with tag - upload_test_file(integration_client, project, package, content, tag="temp-tag") + upload_test_file(integration_client, project, package, content, version="temp-tag") # Verify not in orphaned list response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1000") @@ -357,7 +357,7 @@ class TestGarbageCollection: expected_hash = compute_sha256(content) # Upload and delete tag to create orphan - upload_test_file(integration_client, project, package, content, tag="dry-run") + upload_test_file(integration_client, project, package, content, version="dry-run") integration_client.delete(f"/api/v1/project/{project}/{package}/tags/dry-run") # Verify artifact exists @@ -385,7 +385,7 @@ class TestGarbageCollection: expected_hash = compute_sha256(content) # Upload with tag (ref_count=1) - upload_test_file(integration_client, project, package, content, tag="keep-this") + upload_test_file(integration_client, project, package, content, version="keep-this") # Verify artifact exists with ref_count=1 response = integration_client.get(f"/api/v1/artifact/{expected_hash}") diff --git a/backend/tests/integration/test_concurrent_operations.py b/backend/tests/integration/test_concurrent_operations.py index 4237cf4..fc4b0d0 100644 --- a/backend/tests/integration/test_concurrent_operations.py +++ b/backend/tests/integration/test_concurrent_operations.py @@ -63,7 +63,7 @@ class TestConcurrentUploads: response = client.post( f"/api/v1/project/{project}/{package}/upload", files=files, - data={"tag": f"concurrent-{idx}"}, + data={"version": f"concurrent-{idx}"}, headers={"Authorization": f"Bearer {api_key}"}, ) if response.status_code == 200: @@ -117,7 +117,7 @@ class TestConcurrentUploads: response = client.post( f"/api/v1/project/{project}/{package}/upload", files=files, - data={"tag": f"concurrent5-{idx}"}, + data={"version": f"concurrent5-{idx}"}, headers={"Authorization": f"Bearer {api_key}"}, ) if response.status_code == 200: @@ -171,7 +171,7 @@ class TestConcurrentUploads: response = client.post( f"/api/v1/project/{project}/{package}/upload", files=files, - data={"tag": f"concurrent10-{idx}"}, + data={"version": f"concurrent10-{idx}"}, headers={"Authorization": f"Bearer {api_key}"}, ) if response.status_code == 200: @@ -219,7 +219,7 @@ class TestConcurrentUploads: response = client.post( f"/api/v1/project/{project}/{package}/upload", files=files, - data={"tag": f"dedup-{idx}"}, + data={"version": f"dedup-{idx}"}, headers={"Authorization": f"Bearer {api_key}"}, ) if response.status_code == 200: @@ -287,7 +287,7 @@ class TestConcurrentUploads: response = client.post( f"/api/v1/project/{project}/{package}/upload", files=files, - data={"tag": "latest"}, + data={"version": "latest"}, headers={"Authorization": f"Bearer {api_key}"}, ) if response.status_code == 200: @@ -321,7 +321,7 @@ class TestConcurrentDownloads: content, expected_hash = generate_content_with_hash(2048, seed=400) # Upload first - upload_test_file(integration_client, project, package, content, tag="download-test") + 
upload_test_file(integration_client, project, package, content, version="download-test") results = [] errors = [] @@ -362,7 +362,7 @@ class TestConcurrentDownloads: project, package = test_package content, expected_hash = generate_content_with_hash(4096, seed=500) - upload_test_file(integration_client, project, package, content, tag="download5-test") + upload_test_file(integration_client, project, package, content, version="download5-test") num_downloads = 5 results = [] @@ -403,7 +403,7 @@ class TestConcurrentDownloads: project, package = test_package content, expected_hash = generate_content_with_hash(8192, seed=600) - upload_test_file(integration_client, project, package, content, tag="download10-test") + upload_test_file(integration_client, project, package, content, version="download10-test") num_downloads = 10 results = [] @@ -502,7 +502,7 @@ class TestMixedConcurrentOperations: # Upload initial content content1, hash1 = generate_content_with_hash(10240, seed=800) # 10KB - upload_test_file(integration_client, project, package, content1, tag="initial") + upload_test_file(integration_client, project, package, content1, version="initial") # New content for upload during download content2, hash2 = generate_content_with_hash(10240, seed=801) @@ -539,7 +539,7 @@ class TestMixedConcurrentOperations: response = client.post( f"/api/v1/project/{project}/{package}/upload", files=files, - data={"tag": "during-download"}, + data={"version": "during-download"}, headers={"Authorization": f"Bearer {api_key}"}, ) if response.status_code == 200: @@ -579,7 +579,7 @@ class TestMixedConcurrentOperations: existing_files = [] for i in range(3): content, hash = generate_content_with_hash(2048, seed=900 + i) - upload_test_file(integration_client, project, package, content, tag=f"existing-{i}") + upload_test_file(integration_client, project, package, content, version=f"existing-{i}") existing_files.append((f"existing-{i}", content)) # New files for uploading @@ -619,7 +619,7 @@ class TestMixedConcurrentOperations: response = client.post( f"/api/v1/project/{project}/{package}/upload", files=files, - data={"tag": f"new-{idx}"}, + data={"version": f"new-{idx}"}, headers={"Authorization": f"Bearer {api_key}"}, ) if response.status_code == 200: @@ -689,7 +689,7 @@ class TestMixedConcurrentOperations: upload_resp = client.post( f"/api/v1/project/{project}/{package}/upload", files=files, - data={"tag": f"pattern-{idx}"}, + data={"version": f"pattern-{idx}"}, headers={"Authorization": f"Bearer {api_key}"}, ) if upload_resp.status_code != 200: diff --git a/backend/tests/integration/test_error_handling.py b/backend/tests/integration/test_error_handling.py index ce1f767..caba857 100644 --- a/backend/tests/integration/test_error_handling.py +++ b/backend/tests/integration/test_error_handling.py @@ -68,7 +68,7 @@ class TestUploadErrorHandling: response = integration_client.post( f"/api/v1/project/{project}/{package}/upload", - data={"tag": "no-file-provided"}, + data={"version": "no-file-provided"}, ) assert response.status_code == 422 @@ -200,7 +200,7 @@ class TestTimeoutBehavior: start_time = time.time() result = upload_test_file( - integration_client, project, package, content, tag="timeout-test" + integration_client, project, package, content, version="timeout-test" ) elapsed = time.time() - start_time @@ -219,7 +219,7 @@ class TestTimeoutBehavior: # First upload upload_test_file( - integration_client, project, package, content, tag="download-timeout-test" + integration_client, project, package, content, 
version="download-timeout-test" ) # Then download and time it diff --git a/backend/tests/integration/test_integrity_verification.py b/backend/tests/integration/test_integrity_verification.py index 504bc8c..5065478 100644 --- a/backend/tests/integration/test_integrity_verification.py +++ b/backend/tests/integration/test_integrity_verification.py @@ -41,7 +41,7 @@ class TestRoundTripVerification: # Upload and capture returned hash result = upload_test_file( - integration_client, project, package, content, tag="roundtrip" + integration_client, project, package, content, version="roundtrip" ) uploaded_hash = result["artifact_id"] @@ -84,7 +84,7 @@ class TestRoundTripVerification: expected_hash = compute_sha256(content) upload_test_file( - integration_client, project, package, content, tag="header-check" + integration_client, project, package, content, version="header-check" ) response = integration_client.get( @@ -102,7 +102,7 @@ class TestRoundTripVerification: expected_hash = compute_sha256(content) upload_test_file( - integration_client, project, package, content, tag="etag-check" + integration_client, project, package, content, version="etag-check" ) response = integration_client.get( @@ -186,7 +186,7 @@ class TestClientSideVerificationWorkflow: content = b"Client post-download verification" upload_test_file( - integration_client, project, package, content, tag="verify-after" + integration_client, project, package, content, version="verify-after" ) response = integration_client.get( @@ -215,7 +215,7 @@ class TestIntegritySizeVariants: content, expected_hash = sized_content(SIZE_1KB, seed=100) result = upload_test_file( - integration_client, project, package, content, tag="int-1kb" + integration_client, project, package, content, version="int-1kb" ) assert result["artifact_id"] == expected_hash @@ -234,7 +234,7 @@ class TestIntegritySizeVariants: content, expected_hash = sized_content(SIZE_100KB, seed=101) result = upload_test_file( - integration_client, project, package, content, tag="int-100kb" + integration_client, project, package, content, version="int-100kb" ) assert result["artifact_id"] == expected_hash @@ -253,7 +253,7 @@ class TestIntegritySizeVariants: content, expected_hash = sized_content(SIZE_1MB, seed=102) result = upload_test_file( - integration_client, project, package, content, tag="int-1mb" + integration_client, project, package, content, version="int-1mb" ) assert result["artifact_id"] == expected_hash @@ -273,7 +273,7 @@ class TestIntegritySizeVariants: content, expected_hash = sized_content(SIZE_10MB, seed=103) result = upload_test_file( - integration_client, project, package, content, tag="int-10mb" + integration_client, project, package, content, version="int-10mb" ) assert result["artifact_id"] == expected_hash @@ -366,7 +366,7 @@ class TestDigestHeader: expected_hash = compute_sha256(content) upload_test_file( - integration_client, project, package, content, tag="digest-test" + integration_client, project, package, content, version="digest-test" ) response = integration_client.get( @@ -390,7 +390,7 @@ class TestDigestHeader: expected_hash = compute_sha256(content) upload_test_file( - integration_client, project, package, content, tag="digest-b64" + integration_client, project, package, content, version="digest-b64" ) response = integration_client.get( @@ -420,7 +420,7 @@ class TestVerificationModes: content = b"Pre-verification mode test" upload_test_file( - integration_client, project, package, content, tag="pre-verify" + integration_client, project, package, 
content, version="pre-verify" ) response = integration_client.get( @@ -440,7 +440,7 @@ class TestVerificationModes: content = b"Stream verification mode test" upload_test_file( - integration_client, project, package, content, tag="stream-verify" + integration_client, project, package, content, version="stream-verify" ) response = integration_client.get( @@ -477,7 +477,7 @@ class TestArtifactIntegrityEndpoint: expected_size = len(content) upload_test_file( - integration_client, project, package, content, tag="content-len" + integration_client, project, package, content, version="content-len" ) response = integration_client.get( @@ -513,7 +513,7 @@ class TestCorruptionDetection: # Upload original content result = upload_test_file( - integration_client, project, package, content, tag="corrupt-test" + integration_client, project, package, content, version="corrupt-test" ) assert result["artifact_id"] == expected_hash @@ -555,7 +555,7 @@ class TestCorruptionDetection: expected_hash = compute_sha256(content) result = upload_test_file( - integration_client, project, package, content, tag="bitflip-test" + integration_client, project, package, content, version="bitflip-test" ) assert result["artifact_id"] == expected_hash @@ -592,7 +592,7 @@ class TestCorruptionDetection: expected_hash = compute_sha256(content) result = upload_test_file( - integration_client, project, package, content, tag="truncate-test" + integration_client, project, package, content, version="truncate-test" ) assert result["artifact_id"] == expected_hash @@ -627,7 +627,7 @@ class TestCorruptionDetection: expected_hash = compute_sha256(content) result = upload_test_file( - integration_client, project, package, content, tag="append-test" + integration_client, project, package, content, version="append-test" ) assert result["artifact_id"] == expected_hash @@ -670,7 +670,7 @@ class TestCorruptionDetection: expected_hash = compute_sha256(content) result = upload_test_file( - integration_client, project, package, content, tag="client-detect" + integration_client, project, package, content, version="client-detect" ) # Corrupt the S3 object @@ -713,7 +713,7 @@ class TestCorruptionDetection: expected_hash = compute_sha256(content) result = upload_test_file( - integration_client, project, package, content, tag="size-mismatch" + integration_client, project, package, content, version="size-mismatch" ) # Modify S3 object to have different size @@ -747,7 +747,7 @@ class TestCorruptionDetection: expected_hash = compute_sha256(content) result = upload_test_file( - integration_client, project, package, content, tag="missing-s3" + integration_client, project, package, content, version="missing-s3" ) # Delete the S3 object diff --git a/backend/tests/integration/test_large_uploads.py b/backend/tests/integration/test_large_uploads.py index e18c7fc..9b85f11 100644 --- a/backend/tests/integration/test_large_uploads.py +++ b/backend/tests/integration/test_large_uploads.py @@ -41,7 +41,7 @@ class TestUploadMetrics: content = b"duration test content" result = upload_test_file( - integration_client, project, package, content, tag="duration-test" + integration_client, project, package, content, version="duration-test" ) assert "duration_ms" in result @@ -55,7 +55,7 @@ class TestUploadMetrics: content = b"throughput test content" result = upload_test_file( - integration_client, project, package, content, tag="throughput-test" + integration_client, project, package, content, version="throughput-test" ) assert "throughput_mbps" in result @@ -72,7 +72,7 @@ 
class TestUploadMetrics: start = time.time() result = upload_test_file( - integration_client, project, package, content, tag="duration-check" + integration_client, project, package, content, version="duration-check" ) actual_duration = (time.time() - start) * 1000 # ms @@ -92,7 +92,7 @@ class TestLargeFileUploads: content, expected_hash = sized_content(SIZE_10MB, seed=200) result = upload_test_file( - integration_client, project, package, content, tag="large-10mb" + integration_client, project, package, content, version="large-10mb" ) assert result["artifact_id"] == expected_hash @@ -109,7 +109,7 @@ class TestLargeFileUploads: content, expected_hash = sized_content(SIZE_100MB, seed=300) result = upload_test_file( - integration_client, project, package, content, tag="large-100mb" + integration_client, project, package, content, version="large-100mb" ) assert result["artifact_id"] == expected_hash @@ -126,7 +126,7 @@ class TestLargeFileUploads: content, expected_hash = sized_content(SIZE_1GB, seed=400) result = upload_test_file( - integration_client, project, package, content, tag="large-1gb" + integration_client, project, package, content, version="large-1gb" ) assert result["artifact_id"] == expected_hash @@ -147,14 +147,14 @@ class TestLargeFileUploads: # First upload result1 = upload_test_file( - integration_client, project, package, content, tag=f"dedup-{unique_test_id}-1" + integration_client, project, package, content, version=f"dedup-{unique_test_id}-1" ) # Note: may be True if previous test uploaded same content first_dedupe = result1["deduplicated"] # Second upload of same content result2 = upload_test_file( - integration_client, project, package, content, tag=f"dedup-{unique_test_id}-2" + integration_client, project, package, content, version=f"dedup-{unique_test_id}-2" ) assert result2["artifact_id"] == expected_hash # Second upload MUST be deduplicated @@ -277,7 +277,7 @@ class TestUploadSizeLimits: content = b"X" result = upload_test_file( - integration_client, project, package, content, tag="min-size" + integration_client, project, package, content, version="min-size" ) assert result["size"] == 1 @@ -289,7 +289,7 @@ class TestUploadSizeLimits: content = b"content length verification test" result = upload_test_file( - integration_client, project, package, content, tag="content-length-test" + integration_client, project, package, content, version="content-length-test" ) # Size in response should match actual content length @@ -336,7 +336,7 @@ class TestUploadErrorHandling: response = integration_client.post( f"/api/v1/project/{project}/{package}/upload", - data={"tag": "no-file"}, + data={"version": "no-file"}, ) assert response.status_code == 422 @@ -459,7 +459,7 @@ class TestUploadTimeout: # httpx client should handle this quickly result = upload_test_file( - integration_client, project, package, content, tag="timeout-small" + integration_client, project, package, content, version="timeout-small" ) assert result["artifact_id"] is not None @@ -474,7 +474,7 @@ class TestUploadTimeout: start = time.time() result = upload_test_file( - integration_client, project, package, content, tag="timeout-check" + integration_client, project, package, content, version="timeout-check" ) duration = time.time() - start @@ -525,7 +525,7 @@ class TestConcurrentUploads: response = client.post( f"/api/v1/project/{project}/{package}/upload", files=files, - data={"tag": f"concurrent-diff-{idx}"}, + data={"version": f"concurrent-diff-{idx}"}, headers={"Authorization": f"Bearer {api_key}"}, ) if 
response.status_code == 200: diff --git a/backend/tests/integration/test_packages_api.py b/backend/tests/integration/test_packages_api.py index 60af55a..cd4b50c 100644 --- a/backend/tests/integration/test_packages_api.py +++ b/backend/tests/integration/test_packages_api.py @@ -261,13 +261,13 @@ class TestPackageCascadeDelete: expected_hash = compute_sha256(content) upload_test_file( - integration_client, project_name, package_name, content, tag="v1" + integration_client, project_name, package_name, content, version="v1" ) upload_test_file( - integration_client, project_name, package_name, content, tag="v2" + integration_client, project_name, package_name, content, version="v2" ) upload_test_file( - integration_client, project_name, package_name, content, tag="v3" + integration_client, project_name, package_name, content, version="v3" ) # Verify ref_count is 3 diff --git a/backend/tests/integration/test_projects_api.py b/backend/tests/integration/test_projects_api.py index 02504aa..8922878 100644 --- a/backend/tests/integration/test_projects_api.py +++ b/backend/tests/integration/test_projects_api.py @@ -258,16 +258,16 @@ class TestProjectCascadeDelete: expected_hash = compute_sha256(content) upload_test_file( - integration_client, project_name, package1_name, content, tag="v1" + integration_client, project_name, package1_name, content, version="v1" ) upload_test_file( - integration_client, project_name, package1_name, content, tag="v2" + integration_client, project_name, package1_name, content, version="v2" ) upload_test_file( - integration_client, project_name, package2_name, content, tag="latest" + integration_client, project_name, package2_name, content, version="latest" ) upload_test_file( - integration_client, project_name, package2_name, content, tag="stable" + integration_client, project_name, package2_name, content, version="stable" ) # Verify ref_count is 4 (2 tags in each of 2 packages) diff --git a/backend/tests/integration/test_size_boundary.py b/backend/tests/integration/test_size_boundary.py index 49ed3d2..c354280 100644 --- a/backend/tests/integration/test_size_boundary.py +++ b/backend/tests/integration/test_size_boundary.py @@ -48,7 +48,7 @@ class TestSmallFileSizes: result = upload_test_file( integration_client, project, package, content, - filename="1byte.bin", tag="1byte" + filename="1byte.bin", version="1byte" ) assert result["artifact_id"] == expected_hash assert result["size"] == SIZE_1B @@ -70,7 +70,7 @@ class TestSmallFileSizes: result = upload_test_file( integration_client, project, package, content, - filename="1kb.bin", tag="1kb" + filename="1kb.bin", version="1kb" ) assert result["artifact_id"] == expected_hash assert result["size"] == SIZE_1KB @@ -90,7 +90,7 @@ class TestSmallFileSizes: result = upload_test_file( integration_client, project, package, content, - filename="10kb.bin", tag="10kb" + filename="10kb.bin", version="10kb" ) assert result["artifact_id"] == expected_hash assert result["size"] == SIZE_10KB @@ -110,7 +110,7 @@ class TestSmallFileSizes: result = upload_test_file( integration_client, project, package, content, - filename="100kb.bin", tag="100kb" + filename="100kb.bin", version="100kb" ) assert result["artifact_id"] == expected_hash assert result["size"] == SIZE_100KB @@ -134,7 +134,7 @@ class TestMediumFileSizes: result = upload_test_file( integration_client, project, package, content, - filename="1mb.bin", tag="1mb" + filename="1mb.bin", version="1mb" ) assert result["artifact_id"] == expected_hash assert result["size"] == SIZE_1MB @@ 
-155,7 +155,7 @@ class TestMediumFileSizes: result = upload_test_file( integration_client, project, package, content, - filename="5mb.bin", tag="5mb" + filename="5mb.bin", version="5mb" ) assert result["artifact_id"] == expected_hash assert result["size"] == SIZE_5MB @@ -177,7 +177,7 @@ class TestMediumFileSizes: result = upload_test_file( integration_client, project, package, content, - filename="10mb.bin", tag="10mb" + filename="10mb.bin", version="10mb" ) assert result["artifact_id"] == expected_hash assert result["size"] == SIZE_10MB @@ -200,7 +200,7 @@ class TestMediumFileSizes: start_time = time.time() result = upload_test_file( integration_client, project, package, content, - filename="50mb.bin", tag="50mb" + filename="50mb.bin", version="50mb" ) upload_time = time.time() - start_time @@ -240,7 +240,7 @@ class TestLargeFileSizes: start_time = time.time() result = upload_test_file( integration_client, project, package, content, - filename="100mb.bin", tag="100mb" + filename="100mb.bin", version="100mb" ) upload_time = time.time() - start_time @@ -271,7 +271,7 @@ class TestLargeFileSizes: start_time = time.time() result = upload_test_file( integration_client, project, package, content, - filename="250mb.bin", tag="250mb" + filename="250mb.bin", version="250mb" ) upload_time = time.time() - start_time @@ -302,7 +302,7 @@ class TestLargeFileSizes: start_time = time.time() result = upload_test_file( integration_client, project, package, content, - filename="500mb.bin", tag="500mb" + filename="500mb.bin", version="500mb" ) upload_time = time.time() - start_time @@ -336,7 +336,7 @@ class TestLargeFileSizes: start_time = time.time() result = upload_test_file( integration_client, project, package, content, - filename="1gb.bin", tag="1gb" + filename="1gb.bin", version="1gb" ) upload_time = time.time() - start_time @@ -368,7 +368,7 @@ class TestChunkBoundaries: result = upload_test_file( integration_client, project, package, content, - filename="chunk.bin", tag="chunk-exact" + filename="chunk.bin", version="chunk-exact" ) assert result["artifact_id"] == expected_hash assert result["size"] == CHUNK_SIZE @@ -389,7 +389,7 @@ class TestChunkBoundaries: result = upload_test_file( integration_client, project, package, content, - filename="chunk_plus.bin", tag="chunk-plus" + filename="chunk_plus.bin", version="chunk-plus" ) assert result["artifact_id"] == expected_hash assert result["size"] == size @@ -410,7 +410,7 @@ class TestChunkBoundaries: result = upload_test_file( integration_client, project, package, content, - filename="chunk_minus.bin", tag="chunk-minus" + filename="chunk_minus.bin", version="chunk-minus" ) assert result["artifact_id"] == expected_hash assert result["size"] == size @@ -431,7 +431,7 @@ class TestChunkBoundaries: result = upload_test_file( integration_client, project, package, content, - filename="multi_chunk.bin", tag="multi-chunk" + filename="multi_chunk.bin", version="multi-chunk" ) assert result["artifact_id"] == expected_hash assert result["size"] == size @@ -457,7 +457,7 @@ class TestDataIntegrity: result = upload_test_file( integration_client, project, package, content, - filename="binary.bin", tag="binary" + filename="binary.bin", version="binary" ) assert result["artifact_id"] == expected_hash @@ -477,7 +477,7 @@ class TestDataIntegrity: result = upload_test_file( integration_client, project, package, content, - filename="text.txt", tag="text" + filename="text.txt", version="text" ) assert result["artifact_id"] == expected_hash @@ -498,7 +498,7 @@ class 
TestDataIntegrity: result = upload_test_file( integration_client, project, package, content, - filename="nulls.bin", tag="nulls" + filename="nulls.bin", version="nulls" ) assert result["artifact_id"] == expected_hash @@ -519,7 +519,7 @@ class TestDataIntegrity: result = upload_test_file( integration_client, project, package, content, - filename="文件名.txt", tag="unicode-name" + filename="文件名.txt", version="unicode-name" ) assert result["artifact_id"] == expected_hash assert result["original_name"] == "文件名.txt" @@ -543,7 +543,7 @@ class TestDataIntegrity: result = upload_test_file( integration_client, project, package, content, - filename="data.gz", tag="compressed" + filename="data.gz", version="compressed" ) assert result["artifact_id"] == expected_hash @@ -568,7 +568,7 @@ class TestDataIntegrity: result = upload_test_file( integration_client, project, package, content, - filename=f"hash_test_{size}.bin", tag=f"hash-{size}" + filename=f"hash_test_{size}.bin", version=f"hash-{size}" ) # Verify artifact_id matches expected hash diff --git a/backend/tests/integration/test_streaming_download.py b/backend/tests/integration/test_streaming_download.py index b6163ad..6d11731 100644 --- a/backend/tests/integration/test_streaming_download.py +++ b/backend/tests/integration/test_streaming_download.py @@ -32,7 +32,7 @@ class TestRangeRequests: """Test range request for first N bytes.""" project, package = test_package content = b"0123456789" * 100 # 1000 bytes - upload_test_file(integration_client, project, package, content, tag="range-test") + upload_test_file(integration_client, project, package, content, version="range-test") # Request first 10 bytes response = integration_client.get( @@ -50,7 +50,7 @@ class TestRangeRequests: """Test range request for bytes in the middle.""" project, package = test_package content = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" - upload_test_file(integration_client, project, package, content, tag="range-mid") + upload_test_file(integration_client, project, package, content, version="range-mid") # Request bytes 10-19 (KLMNOPQRST) response = integration_client.get( @@ -66,7 +66,7 @@ class TestRangeRequests: """Test range request for last N bytes (suffix range).""" project, package = test_package content = b"0123456789ABCDEF" # 16 bytes - upload_test_file(integration_client, project, package, content, tag="range-suffix") + upload_test_file(integration_client, project, package, content, version="range-suffix") # Request last 4 bytes response = integration_client.get( @@ -82,7 +82,7 @@ class TestRangeRequests: """Test range request from offset to end.""" project, package = test_package content = b"0123456789" - upload_test_file(integration_client, project, package, content, tag="range-open") + upload_test_file(integration_client, project, package, content, version="range-open") # Request from byte 5 to end response = integration_client.get( @@ -100,7 +100,7 @@ class TestRangeRequests: """Test that range requests include Accept-Ranges header.""" project, package = test_package content = b"test content" - upload_test_file(integration_client, project, package, content, tag="accept-ranges") + upload_test_file(integration_client, project, package, content, version="accept-ranges") response = integration_client.get( f"/api/v1/project/{project}/{package}/+/accept-ranges", @@ -117,7 +117,7 @@ class TestRangeRequests: """Test that full downloads advertise range support.""" project, package = test_package content = b"test content" - upload_test_file(integration_client, project, package, content, 
tag="full-accept") + upload_test_file(integration_client, project, package, content, version="full-accept") response = integration_client.get( f"/api/v1/project/{project}/{package}/+/full-accept", @@ -136,7 +136,7 @@ class TestConditionalRequests: project, package = test_package content = b"conditional request test content" expected_hash = compute_sha256(content) - upload_test_file(integration_client, project, package, content, tag="cond-etag") + upload_test_file(integration_client, project, package, content, version="cond-etag") # Request with matching ETag response = integration_client.get( @@ -153,7 +153,7 @@ class TestConditionalRequests: project, package = test_package content = b"etag no quotes test" expected_hash = compute_sha256(content) - upload_test_file(integration_client, project, package, content, tag="cond-noquote") + upload_test_file(integration_client, project, package, content, version="cond-noquote") # Request with ETag without quotes response = integration_client.get( @@ -168,7 +168,7 @@ class TestConditionalRequests: """Test If-None-Match with non-matching ETag returns 200.""" project, package = test_package content = b"etag mismatch test" - upload_test_file(integration_client, project, package, content, tag="cond-mismatch") + upload_test_file(integration_client, project, package, content, version="cond-mismatch") # Request with different ETag response = integration_client.get( @@ -184,7 +184,7 @@ class TestConditionalRequests: """Test If-Modified-Since with future date returns 304.""" project, package = test_package content = b"modified since test" - upload_test_file(integration_client, project, package, content, tag="cond-modified") + upload_test_file(integration_client, project, package, content, version="cond-modified") # Request with future date (artifact was definitely created before this) future_date = formatdate(time.time() + 86400, usegmt=True) # Tomorrow @@ -202,7 +202,7 @@ class TestConditionalRequests: """Test If-Modified-Since with old date returns 200.""" project, package = test_package content = b"old date test" - upload_test_file(integration_client, project, package, content, tag="cond-old") + upload_test_file(integration_client, project, package, content, version="cond-old") # Request with old date (2020-01-01) old_date = "Wed, 01 Jan 2020 00:00:00 GMT" @@ -220,7 +220,7 @@ class TestConditionalRequests: project, package = test_package content = b"304 etag test" expected_hash = compute_sha256(content) - upload_test_file(integration_client, project, package, content, tag="304-etag") + upload_test_file(integration_client, project, package, content, version="304-etag") response = integration_client.get( f"/api/v1/project/{project}/{package}/+/304-etag", @@ -236,7 +236,7 @@ class TestConditionalRequests: project, package = test_package content = b"304 cache test" expected_hash = compute_sha256(content) - upload_test_file(integration_client, project, package, content, tag="304-cache") + upload_test_file(integration_client, project, package, content, version="304-cache") response = integration_client.get( f"/api/v1/project/{project}/{package}/+/304-cache", @@ -255,7 +255,7 @@ class TestCachingHeaders: """Test download response includes Cache-Control header.""" project, package = test_package content = b"cache control test" - upload_test_file(integration_client, project, package, content, tag="cache-ctl") + upload_test_file(integration_client, project, package, content, version="cache-ctl") response = integration_client.get( 
f"/api/v1/project/{project}/{package}/+/cache-ctl", @@ -272,7 +272,7 @@ class TestCachingHeaders: """Test download response includes Last-Modified header.""" project, package = test_package content = b"last modified test" - upload_test_file(integration_client, project, package, content, tag="last-mod") + upload_test_file(integration_client, project, package, content, version="last-mod") response = integration_client.get( f"/api/v1/project/{project}/{package}/+/last-mod", @@ -290,7 +290,7 @@ class TestCachingHeaders: project, package = test_package content = b"etag header test" expected_hash = compute_sha256(content) - upload_test_file(integration_client, project, package, content, tag="etag-hdr") + upload_test_file(integration_client, project, package, content, version="etag-hdr") response = integration_client.get( f"/api/v1/project/{project}/{package}/+/etag-hdr", @@ -308,7 +308,7 @@ class TestDownloadResume: """Test resuming download from where it left off.""" project, package = test_package content = b"ABCDEFGHIJ" * 100 # 1000 bytes - upload_test_file(integration_client, project, package, content, tag="resume-test") + upload_test_file(integration_client, project, package, content, version="resume-test") # Simulate partial download (first 500 bytes) response1 = integration_client.get( @@ -340,7 +340,7 @@ class TestDownloadResume: project, package = test_package content = b"resume etag verification test content" expected_hash = compute_sha256(content) - upload_test_file(integration_client, project, package, content, tag="resume-etag") + upload_test_file(integration_client, project, package, content, version="resume-etag") # Get ETag from first request response1 = integration_client.get( @@ -373,7 +373,7 @@ class TestLargeFileStreaming: project, package = test_package content, expected_hash = sized_content(SIZE_1MB, seed=500) - upload_test_file(integration_client, project, package, content, tag="stream-1mb") + upload_test_file(integration_client, project, package, content, version="stream-1mb") response = integration_client.get( f"/api/v1/project/{project}/{package}/+/stream-1mb", @@ -391,7 +391,7 @@ class TestLargeFileStreaming: project, package = test_package content, expected_hash = sized_content(SIZE_100KB, seed=501) - upload_test_file(integration_client, project, package, content, tag="stream-hdr") + upload_test_file(integration_client, project, package, content, version="stream-hdr") response = integration_client.get( f"/api/v1/project/{project}/{package}/+/stream-hdr", @@ -410,7 +410,7 @@ class TestLargeFileStreaming: project, package = test_package content, _ = sized_content(SIZE_100KB, seed=502) - upload_test_file(integration_client, project, package, content, tag="range-large") + upload_test_file(integration_client, project, package, content, version="range-large") # Request a slice from the middle start = 50000 @@ -433,7 +433,7 @@ class TestDownloadModes: """Test proxy mode streams content through backend.""" project, package = test_package content = b"proxy mode test content" - upload_test_file(integration_client, project, package, content, tag="mode-proxy") + upload_test_file(integration_client, project, package, content, version="mode-proxy") response = integration_client.get( f"/api/v1/project/{project}/{package}/+/mode-proxy", @@ -447,7 +447,7 @@ class TestDownloadModes: """Test presigned mode returns JSON with URL.""" project, package = test_package content = b"presigned mode test" - upload_test_file(integration_client, project, package, content, tag="mode-presign") + 
upload_test_file(integration_client, project, package, content, version="mode-presign") response = integration_client.get( f"/api/v1/project/{project}/{package}/+/mode-presign", @@ -464,7 +464,7 @@ class TestDownloadModes: """Test redirect mode returns 302 to presigned URL.""" project, package = test_package content = b"redirect mode test" - upload_test_file(integration_client, project, package, content, tag="mode-redir") + upload_test_file(integration_client, project, package, content, version="mode-redir") response = integration_client.get( f"/api/v1/project/{project}/{package}/+/mode-redir", @@ -484,7 +484,7 @@ class TestIntegrityDuringStreaming: project, package = test_package content = b"integrity check content" expected_hash = compute_sha256(content) - upload_test_file(integration_client, project, package, content, tag="integrity") + upload_test_file(integration_client, project, package, content, version="integrity") response = integration_client.get( f"/api/v1/project/{project}/{package}/+/integrity", @@ -505,7 +505,7 @@ class TestIntegrityDuringStreaming: project, package = test_package content = b"etag integrity test" expected_hash = compute_sha256(content) - upload_test_file(integration_client, project, package, content, tag="etag-int") + upload_test_file(integration_client, project, package, content, version="etag-int") response = integration_client.get( f"/api/v1/project/{project}/{package}/+/etag-int", @@ -524,7 +524,7 @@ class TestIntegrityDuringStreaming: """Test Digest header is present in RFC 3230 format.""" project, package = test_package content = b"digest header test" - upload_test_file(integration_client, project, package, content, tag="digest") + upload_test_file(integration_client, project, package, content, version="digest") response = integration_client.get( f"/api/v1/project/{project}/{package}/+/digest", diff --git a/backend/tests/integration/test_upload_download_api.py b/backend/tests/integration/test_upload_download_api.py index 936a4ca..d84866e 100644 --- a/backend/tests/integration/test_upload_download_api.py +++ b/backend/tests/integration/test_upload_download_api.py @@ -47,7 +47,7 @@ class TestUploadBasics: expected_hash = compute_sha256(content) result = upload_test_file( - integration_client, project_name, package_name, content, tag="v1" + integration_client, project_name, package_name, content, version="v1" ) assert result["artifact_id"] == expected_hash @@ -116,31 +116,23 @@ class TestUploadBasics: assert result["created_at"] is not None @pytest.mark.integration - def test_upload_without_tag_succeeds(self, integration_client, test_package): - """Test upload without tag succeeds (no tag created).""" + def test_upload_without_version_succeeds(self, integration_client, test_package): + """Test upload without version succeeds (no version created).""" project, package = test_package - content = b"upload without tag test" + content = b"upload without version test" expected_hash = compute_sha256(content) - files = {"file": ("no_tag.bin", io.BytesIO(content), "application/octet-stream")} + files = {"file": ("no_version.bin", io.BytesIO(content), "application/octet-stream")} response = integration_client.post( f"/api/v1/project/{project}/{package}/upload", files=files, - # No tag parameter + # No version parameter ) assert response.status_code == 200 result = response.json() assert result["artifact_id"] == expected_hash - - # Verify no tag was created - list tags and check - tags_response = integration_client.get( - f"/api/v1/project/{project}/{package}/tags" - 
) - assert tags_response.status_code == 200 - tags = tags_response.json() - # Filter for tags pointing to this artifact - artifact_tags = [t for t in tags.get("items", tags) if t.get("artifact_id") == expected_hash] - assert len(artifact_tags) == 0, "Tag should not be created when not specified" + # Version should be None when not specified + assert result.get("version") is None @pytest.mark.integration def test_upload_creates_artifact_in_database(self, integration_client, test_package): @@ -172,25 +164,29 @@ class TestUploadBasics: assert s3_object_exists(expected_hash), "S3 object should exist after upload" @pytest.mark.integration - def test_upload_with_tag_creates_tag_record(self, integration_client, test_package): - """Test upload with tag creates tag record.""" + def test_upload_with_version_creates_version_record(self, integration_client, test_package): + """Test upload with version creates version record.""" project, package = test_package - content = b"tag creation test" + content = b"version creation test" expected_hash = compute_sha256(content) - tag_name = "my-tag-v1" + version_name = "1.0.0" - upload_test_file( - integration_client, project, package, content, tag=tag_name + result = upload_test_file( + integration_client, project, package, content, version=version_name ) - # Verify tag exists - tags_response = integration_client.get( - f"/api/v1/project/{project}/{package}/tags" + # Verify version was created + assert result.get("version") == version_name + assert result["artifact_id"] == expected_hash + + # Verify version exists in versions list + versions_response = integration_client.get( + f"/api/v1/project/{project}/{package}/versions" ) - assert tags_response.status_code == 200 - tags = tags_response.json() - tag_names = [t["name"] for t in tags.get("items", tags)] - assert tag_name in tag_names + assert versions_response.status_code == 200 + versions = versions_response.json() + version_names = [v["version"] for v in versions.get("items", [])] + assert version_name in version_names class TestDuplicateUploads: @@ -207,13 +203,13 @@ class TestDuplicateUploads: # First upload result1 = upload_test_file( - integration_client, project, package, content, tag="first" + integration_client, project, package, content, version="first" ) assert result1["artifact_id"] == expected_hash # Second upload result2 = upload_test_file( - integration_client, project, package, content, tag="second" + integration_client, project, package, content, version="second" ) assert result2["artifact_id"] == expected_hash assert result1["artifact_id"] == result2["artifact_id"] @@ -228,13 +224,13 @@ class TestDuplicateUploads: # First upload result1 = upload_test_file( - integration_client, project, package, content, tag="v1" + integration_client, project, package, content, version="v1" ) assert result1["ref_count"] == 1 # Second upload result2 = upload_test_file( - integration_client, project, package, content, tag="v2" + integration_client, project, package, content, version="v2" ) assert result2["ref_count"] == 2 @@ -261,12 +257,12 @@ class TestDuplicateUploads: ) # Upload to first package - result1 = upload_test_file(integration_client, project, pkg1, content, tag="v1") + result1 = upload_test_file(integration_client, project, pkg1, content, version="v1") assert result1["artifact_id"] == expected_hash assert result1["deduplicated"] is False # Upload to second package - result2 = upload_test_file(integration_client, project, pkg2, content, tag="v1") + result2 = upload_test_file(integration_client, 
project, pkg2, content, version="v1") assert result2["artifact_id"] == expected_hash assert result2["deduplicated"] is True @@ -307,17 +303,17 @@ class TestDownload: """Tests for download functionality.""" @pytest.mark.integration - def test_download_by_tag(self, integration_client, test_package): - """Test downloading artifact by tag name.""" + def test_download_by_version(self, integration_client, test_package): + """Test downloading artifact by version.""" project, package = test_package - original_content = b"download by tag test" + original_content = b"download by version test" upload_test_file( - integration_client, project, package, original_content, tag="download-tag" + integration_client, project, package, original_content, version="1.0.0" ) response = integration_client.get( - f"/api/v1/project/{project}/{package}/+/download-tag", + f"/api/v1/project/{project}/{package}/+/1.0.0", params={"mode": "proxy"}, ) assert response.status_code == 200 @@ -340,29 +336,29 @@ class TestDownload: assert response.content == original_content @pytest.mark.integration - def test_download_by_tag_prefix(self, integration_client, test_package): - """Test downloading artifact using tag: prefix.""" + def test_download_by_version_prefix(self, integration_client, test_package): + """Test downloading artifact using version: prefix.""" project, package = test_package - original_content = b"download by tag prefix test" + original_content = b"download by version prefix test" upload_test_file( - integration_client, project, package, original_content, tag="prefix-tag" + integration_client, project, package, original_content, version="2.0.0" ) response = integration_client.get( - f"/api/v1/project/{project}/{package}/+/tag:prefix-tag", + f"/api/v1/project/{project}/{package}/+/version:2.0.0", params={"mode": "proxy"}, ) assert response.status_code == 200 assert response.content == original_content @pytest.mark.integration - def test_download_nonexistent_tag(self, integration_client, test_package): - """Test downloading nonexistent tag returns 404.""" + def test_download_nonexistent_version(self, integration_client, test_package): + """Test downloading nonexistent version returns 404.""" project, package = test_package response = integration_client.get( - f"/api/v1/project/{project}/{package}/+/nonexistent-tag" + f"/api/v1/project/{project}/{package}/+/nonexistent-version" ) assert response.status_code == 404 @@ -400,7 +396,7 @@ class TestDownload: original_content = b"exact content verification test data 12345" upload_test_file( - integration_client, project, package, original_content, tag="verify" + integration_client, project, package, original_content, version="verify" ) response = integration_client.get( @@ -421,7 +417,7 @@ class TestDownloadHeaders: upload_test_file( integration_client, project, package, content, - filename="test.txt", tag="content-type-test" + filename="test.txt", version="content-type-test" ) response = integration_client.get( @@ -440,7 +436,7 @@ class TestDownloadHeaders: expected_length = len(content) upload_test_file( - integration_client, project, package, content, tag="content-length-test" + integration_client, project, package, content, version="content-length-test" ) response = integration_client.get( @@ -460,7 +456,7 @@ class TestDownloadHeaders: upload_test_file( integration_client, project, package, content, - filename=filename, tag="disposition-test" + filename=filename, version="disposition-test" ) response = integration_client.get( @@ -481,7 +477,7 @@ class 
TestDownloadHeaders: expected_hash = compute_sha256(content) upload_test_file( - integration_client, project, package, content, tag="checksum-headers" + integration_client, project, package, content, version="checksum-headers" ) response = integration_client.get( @@ -501,7 +497,7 @@ class TestDownloadHeaders: expected_hash = compute_sha256(content) upload_test_file( - integration_client, project, package, content, tag="etag-test" + integration_client, project, package, content, version="etag-test" ) response = integration_client.get( @@ -553,7 +549,7 @@ class TestConcurrentUploads: response = client.post( f"/api/v1/project/{project}/{package}/upload", files=files, - data={"tag": f"concurrent-{tag_suffix}"}, + data={"version": f"concurrent-{tag_suffix}"}, headers={"Authorization": f"Bearer {api_key}"}, ) if response.status_code == 200: @@ -605,7 +601,7 @@ class TestFileSizeValidation: content = b"X" result = upload_test_file( - integration_client, project, package, content, tag="tiny" + integration_client, project, package, content, version="tiny" ) assert result["artifact_id"] is not None @@ -621,7 +617,7 @@ class TestFileSizeValidation: expected_size = len(content) result = upload_test_file( - integration_client, project, package, content, tag="size-test" + integration_client, project, package, content, version="size-test" ) assert result["size"] == expected_size @@ -649,7 +645,7 @@ class TestUploadFailureCleanup: response = integration_client.post( f"/api/v1/project/nonexistent-project-{unique_test_id}/nonexistent-pkg/upload", files=files, - data={"tag": "test"}, + data={"version": "test"}, ) assert response.status_code == 404 @@ -672,7 +668,7 @@ class TestUploadFailureCleanup: response = integration_client.post( f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload", files=files, - data={"tag": "test"}, + data={"version": "test"}, ) assert response.status_code == 404 @@ -693,7 +689,7 @@ class TestUploadFailureCleanup: response = integration_client.post( f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload", files=files, - data={"tag": "test"}, + data={"version": "test"}, ) assert response.status_code == 404 @@ -719,7 +715,7 @@ class TestS3StorageVerification: # Upload same content multiple times for tag in ["s3test1", "s3test2", "s3test3"]: - upload_test_file(integration_client, project, package, content, tag=tag) + upload_test_file(integration_client, project, package, content, version=tag) # Verify only one S3 object exists s3_objects = list_s3_objects_by_hash(expected_hash) @@ -744,7 +740,7 @@ class TestS3StorageVerification: # Upload same content multiple times for tag in ["v1", "v2", "v3"]: - upload_test_file(integration_client, project, package, content, tag=tag) + upload_test_file(integration_client, project, package, content, version=tag) # Query artifact response = integration_client.get(f"/api/v1/artifact/{expected_hash}") @@ -783,7 +779,7 @@ class TestSecurityPathTraversal: response = integration_client.post( f"/api/v1/project/{project}/{package}/upload", files=files, - data={"tag": "traversal-test"}, + data={"version": "traversal-test"}, ) assert response.status_code == 200 result = response.json() @@ -801,48 +797,16 @@ class TestSecurityPathTraversal: assert response.status_code in [400, 404, 422] @pytest.mark.integration - def test_path_traversal_in_tag_name(self, integration_client, test_package): - """Test tag names with path traversal are handled safely.""" + def test_path_traversal_in_version_name(self, integration_client, 
test_package): + """Test version names with path traversal are handled safely.""" project, package = test_package - content = b"tag traversal test" + content = b"version traversal test" files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")} response = integration_client.post( f"/api/v1/project/{project}/{package}/upload", files=files, - data={"tag": "../../../etc/passwd"}, - ) - assert response.status_code in [200, 400, 422] - - @pytest.mark.integration - def test_download_path_traversal_in_ref(self, integration_client, test_package): - """Test download ref with path traversal is rejected.""" - project, package = test_package - - response = integration_client.get( - f"/api/v1/project/{project}/{package}/+/../../../etc/passwd" - ) - assert response.status_code in [400, 404, 422] - - @pytest.mark.integration - def test_path_traversal_in_package_name(self, integration_client, test_project): - """Test package names with path traversal sequences are rejected.""" - response = integration_client.get( - f"/api/v1/project/{test_project}/packages/../../../etc/passwd" - ) - assert response.status_code in [400, 404, 422] - - @pytest.mark.integration - def test_path_traversal_in_tag_name(self, integration_client, test_package): - """Test tag names with path traversal are rejected or handled safely.""" - project, package = test_package - content = b"tag traversal test" - - files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")} - response = integration_client.post( - f"/api/v1/project/{project}/{package}/upload", - files=files, - data={"tag": "../../../etc/passwd"}, + data={"version": "../../../etc/passwd"}, ) assert response.status_code in [200, 400, 422] @@ -867,7 +831,7 @@ class TestSecurityMalformedRequests: response = integration_client.post( f"/api/v1/project/{project}/{package}/upload", - data={"tag": "no-file"}, + data={"version": "no-file"}, ) assert response.status_code == 422 diff --git a/backend/tests/integration/test_version_api.py b/backend/tests/integration/test_version_api.py index 42b63f2..d7dc285 100644 --- a/backend/tests/integration/test_version_api.py +++ b/backend/tests/integration/test_version_api.py @@ -39,31 +39,6 @@ class TestVersionCreation: assert result.get("version") == "1.0.0" assert result.get("version_source") == "explicit" - @pytest.mark.integration - def test_upload_with_version_and_tag(self, integration_client, test_package): - """Test upload with both version and tag creates both records.""" - project, package = test_package - content = b"version and tag test" - - files = {"file": ("app.tar.gz", io.BytesIO(content), "application/octet-stream")} - response = integration_client.post( - f"/api/v1/project/{project}/{package}/upload", - files=files, - data={"version": "2.0.0", "tag": "latest"}, - ) - assert response.status_code == 200 - result = response.json() - assert result.get("version") == "2.0.0" - - # Verify tag was also created - tags_response = integration_client.get( - f"/api/v1/project/{project}/{package}/tags" - ) - assert tags_response.status_code == 200 - tags = tags_response.json() - tag_names = [t["name"] for t in tags.get("items", tags)] - assert "latest" in tag_names - @pytest.mark.integration def test_duplicate_version_same_content_succeeds(self, integration_client, test_package): """Test uploading same version with same content succeeds (deduplication).""" @@ -262,11 +237,10 @@ class TestDownloadByVersion: assert response.status_code == 404 @pytest.mark.integration - def 
test_version_resolution_priority(self, integration_client, test_package): - """Test that version: prefix explicitly resolves to version, not tag.""" + def test_version_resolution_with_prefix(self, integration_client, test_package): + """Test that version: prefix explicitly resolves to version.""" project, package = test_package version_content = b"this is the version content" - tag_content = b"this is the tag content" # Create a version 6.0.0 files1 = {"file": ("app-v.tar.gz", io.BytesIO(version_content), "application/octet-stream")} @@ -276,14 +250,6 @@ class TestDownloadByVersion: data={"version": "6.0.0"}, ) - # Create a tag named "6.0.0" pointing to different content - files2 = {"file": ("app-t.tar.gz", io.BytesIO(tag_content), "application/octet-stream")} - integration_client.post( - f"/api/v1/project/{project}/{package}/upload", - files=files2, - data={"tag": "6.0.0"}, - ) - # Download with version: prefix should get version content response = integration_client.get( f"/api/v1/project/{project}/{package}/+/version:6.0.0", @@ -292,14 +258,6 @@ class TestDownloadByVersion: assert response.status_code == 200 assert response.content == version_content - # Download with tag: prefix should get tag content - response2 = integration_client.get( - f"/api/v1/project/{project}/{package}/+/tag:6.0.0", - params={"mode": "proxy"}, - ) - assert response2.status_code == 200 - assert response2.content == tag_content - class TestVersionDeletion: """Tests for deleting versions.""" diff --git a/backend/tests/integration/test_versions_api.py b/backend/tests/integration/test_versions_api.py index 89365a1..41fbe7d 100644 --- a/backend/tests/integration/test_versions_api.py +++ b/backend/tests/integration/test_versions_api.py @@ -27,11 +27,9 @@ class TestVersionCreation: project_name, package_name, b"version create test", - tag="latest", version="1.0.0", ) - assert result["tag"] == "latest" assert result["version"] == "1.0.0" assert result["version_source"] == "explicit" assert result["artifact_id"] @@ -149,7 +147,6 @@ class TestVersionCRUD: package_name, b"version with info", version="1.0.0", - tag="release", ) response = integration_client.get( @@ -166,8 +163,6 @@ class TestVersionCRUD: assert version_item is not None assert "size" in version_item assert "artifact_id" in version_item - assert "tags" in version_item - assert "release" in version_item["tags"] @pytest.mark.integration def test_get_version(self, integration_client, test_package): @@ -272,94 +267,9 @@ class TestVersionDownload: follow_redirects=False, ) - # Should resolve version first (before tag) + # Should resolve version assert response.status_code in [200, 302, 307] - @pytest.mark.integration - def test_version_takes_precedence_over_tag(self, integration_client, test_package): - """Test that version is checked before tag when resolving refs.""" - project_name, package_name = test_package - - # Upload with version "1.0" - version_result = upload_test_file( - integration_client, - project_name, - package_name, - b"version content", - version="1.0", - ) - - # Create a tag with the same name "1.0" pointing to different artifact - tag_result = upload_test_file( - integration_client, - project_name, - package_name, - b"tag content different", - tag="1.0", - ) - - # Download by "1.0" should resolve to version, not tag - # Since version:1.0 artifact was uploaded first - response = integration_client.get( - f"/api/v1/project/{project_name}/{package_name}/+/1.0", - follow_redirects=False, - ) - - assert response.status_code in [200, 302, 307] - - 
-class TestTagVersionEnrichment: - """Tests for tag responses including version information.""" - - @pytest.mark.integration - def test_tag_response_includes_version(self, integration_client, test_package): - """Test that tag responses include version of the artifact.""" - project_name, package_name = test_package - - # Upload with both version and tag - upload_test_file( - integration_client, - project_name, - package_name, - b"enriched tag test", - version="7.0.0", - tag="stable", - ) - - # Get tag and check version field - response = integration_client.get( - f"/api/v1/project/{project_name}/{package_name}/tags/stable" - ) - assert response.status_code == 200 - - data = response.json() - assert data["name"] == "stable" - assert data["version"] == "7.0.0" - - @pytest.mark.integration - def test_tag_list_includes_versions(self, integration_client, test_package): - """Test that tag list responses include version for each tag.""" - project_name, package_name = test_package - - upload_test_file( - integration_client, - project_name, - package_name, - b"list version test", - version="8.0.0", - tag="latest", - ) - - response = integration_client.get( - f"/api/v1/project/{project_name}/{package_name}/tags" - ) - assert response.status_code == 200 - - data = response.json() - tag_item = next((t for t in data["items"] if t["name"] == "latest"), None) - assert tag_item is not None - assert tag_item.get("version") == "8.0.0" - class TestVersionPagination: """Tests for version listing pagination and sorting.""" diff --git a/backend/tests/test_dependencies.py b/backend/tests/test_dependencies.py index 5da2dcd..ce7bd9d 100644 --- a/backend/tests/test_dependencies.py +++ b/backend/tests/test_dependencies.py @@ -39,7 +39,7 @@ class TestDependencySchema: response = integration_client.post( f"/api/v1/project/{project_name}/{package_name}/upload", files=files, - data={"tag": f"v1.0.0-{unique_test_id}"}, + data={"version": f"v1.0.0-{unique_test_id}"}, ) assert response.status_code == 200 @@ -59,29 +59,17 @@ class TestDependencySchema: integration_client.delete(f"/api/v1/projects/{dep_project_name}") @pytest.mark.integration - def test_dependency_requires_version_or_tag(self, integration_client): - """Test that dependency must have either version or tag, not both or neither.""" + def test_dependency_requires_version(self, integration_client): + """Test that dependency requires version.""" from app.schemas import DependencyCreate - # Test: neither version nor tag - with pytest.raises(ValidationError) as exc_info: + # Test: missing version + with pytest.raises(ValidationError): DependencyCreate(project="proj", package="pkg") - assert "Either 'version' or 'tag' must be specified" in str(exc_info.value) - - # Test: both version and tag - with pytest.raises(ValidationError) as exc_info: - DependencyCreate(project="proj", package="pkg", version="1.0.0", tag="stable") - assert "Cannot specify both 'version' and 'tag'" in str(exc_info.value) # Test: valid with version dep = DependencyCreate(project="proj", package="pkg", version="1.0.0") assert dep.version == "1.0.0" - assert dep.tag is None - - # Test: valid with tag - dep = DependencyCreate(project="proj", package="pkg", tag="stable") - assert dep.tag == "stable" - assert dep.version is None @pytest.mark.integration def test_dependency_unique_constraint( @@ -126,7 +114,7 @@ class TestEnsureFileParsing: response = integration_client.post( f"/api/v1/project/{project_name}/{package_name}/upload", files=files, - data={"tag": f"v1.0.0-{unique_test_id}"}, + 
data={"version": f"v1.0.0-{unique_test_id}"}, ) assert response.status_code == 200 data = response.json() @@ -162,7 +150,7 @@ class TestEnsureFileParsing: response = integration_client.post( f"/api/v1/project/{project_name}/{package_name}/upload", files=files, - data={"tag": f"v1.0.0-{unique_test_id}"}, + data={"version": f"v1.0.0-{unique_test_id}"}, ) assert response.status_code == 400 assert "Invalid ensure file" in response.json().get("detail", "") @@ -188,7 +176,7 @@ class TestEnsureFileParsing: response = integration_client.post( f"/api/v1/project/{project_name}/{package_name}/upload", files=files, - data={"tag": f"v1.0.0-{unique_test_id}"}, + data={"version": f"v1.0.0-{unique_test_id}"}, ) assert response.status_code == 400 assert "Project" in response.json().get("detail", "") @@ -208,7 +196,7 @@ class TestEnsureFileParsing: response = integration_client.post( f"/api/v1/project/{project_name}/{package_name}/upload", files=files, - data={"tag": f"v1.0.0-nodeps-{unique_test_id}"}, + data={"version": f"v1.0.0-nodeps-{unique_test_id}"}, ) assert response.status_code == 200 @@ -226,13 +214,14 @@ class TestEnsureFileParsing: assert response.status_code == 200 try: + # Test with missing version field (version is now required) ensure_content = yaml.dump({ "dependencies": [ - {"project": dep_project_name, "package": "pkg", "version": "1.0.0", "tag": "stable"} + {"project": dep_project_name, "package": "pkg"} # Missing version ] }) - content = unique_content("test-both", unique_test_id, "constraint") + content = unique_content("test-missing-version", unique_test_id, "constraint") files = { "file": ("test.tar.gz", BytesIO(content), "application/gzip"), "ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"), @@ -240,11 +229,10 @@ class TestEnsureFileParsing: response = integration_client.post( f"/api/v1/project/{project_name}/{package_name}/upload", files=files, - data={"tag": f"v1.0.0-{unique_test_id}"}, + data={"version": f"v1.0.0-{unique_test_id}"}, ) assert response.status_code == 400 - assert "both" in response.json().get("detail", "").lower() or \ - "version" in response.json().get("detail", "").lower() + assert "version" in response.json().get("detail", "").lower() finally: integration_client.delete(f"/api/v1/projects/{dep_project_name}") @@ -271,7 +259,7 @@ class TestDependencyQueryEndpoints: ensure_content = yaml.dump({ "dependencies": [ {"project": dep_project_name, "package": "lib-a", "version": "1.0.0"}, - {"project": dep_project_name, "package": "lib-b", "tag": "stable"}, + {"project": dep_project_name, "package": "lib-b", "version": "2.0.0"}, ] }) @@ -283,7 +271,7 @@ class TestDependencyQueryEndpoints: response = integration_client.post( f"/api/v1/project/{project_name}/{package_name}/upload", files=files, - data={"tag": f"v2.0.0-{unique_test_id}"}, + data={"version": f"v2.0.0-{unique_test_id}"}, ) assert response.status_code == 200 artifact_id = response.json()["artifact_id"] @@ -299,10 +287,8 @@ class TestDependencyQueryEndpoints: deps = {d["package"]: d for d in data["dependencies"]} assert "lib-a" in deps assert deps["lib-a"]["version"] == "1.0.0" - assert deps["lib-a"]["tag"] is None assert "lib-b" in deps - assert deps["lib-b"]["tag"] == "stable" - assert deps["lib-b"]["version"] is None + assert deps["lib-b"]["version"] == "2.0.0" finally: integration_client.delete(f"/api/v1/projects/{dep_project_name}") @@ -336,7 +322,7 @@ class TestDependencyQueryEndpoints: response = integration_client.post( 
f"/api/v1/project/{project_name}/{package_name}/upload", files=files, - data={"tag": tag_name}, + data={"version": tag_name}, ) assert response.status_code == 200 @@ -381,7 +367,7 @@ class TestDependencyQueryEndpoints: response = integration_client.post( f"/api/v1/project/{dep_project_name}/target-lib/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -400,7 +386,7 @@ class TestDependencyQueryEndpoints: response = integration_client.post( f"/api/v1/project/{project_name}/{package_name}/upload", files=files, - data={"tag": f"v4.0.0-{unique_test_id}"}, + data={"version": f"v4.0.0-{unique_test_id}"}, ) assert response.status_code == 200 @@ -442,7 +428,7 @@ class TestDependencyQueryEndpoints: response = integration_client.post( f"/api/v1/project/{project_name}/{package_name}/upload", files=files, - data={"tag": f"v5.0.0-nodeps-{unique_test_id}"}, + data={"version": f"v5.0.0-nodeps-{unique_test_id}"}, ) assert response.status_code == 200 artifact_id = response.json()["artifact_id"] @@ -482,7 +468,7 @@ class TestDependencyResolution: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_c}/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -500,7 +486,7 @@ class TestDependencyResolution: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_b}/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -518,7 +504,7 @@ class TestDependencyResolution: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_a}/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -566,7 +552,7 @@ class TestDependencyResolution: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_d}/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -584,7 +570,7 @@ class TestDependencyResolution: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_b}/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -602,7 +588,7 @@ class TestDependencyResolution: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_c}/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -621,7 +607,7 @@ class TestDependencyResolution: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_a}/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -663,7 +649,7 @@ class TestDependencyResolution: response = integration_client.post( f"/api/v1/project/{project_name}/{package_name}/upload", files=files, - data={"tag": f"solo-{unique_test_id}"}, + data={"version": f"solo-{unique_test_id}"}, ) assert response.status_code == 200 @@ -698,7 +684,7 @@ class TestDependencyResolution: response = integration_client.post( f"/api/v1/project/{project_name}/{package_name}/upload", files=files, - data={"tag": f"missing-dep-{unique_test_id}"}, + data={"version": f"missing-dep-{unique_test_id}"}, ) # Should fail at upload time since package doesn't exist # OR succeed at upload but fail at resolution @@ -736,7 +722,7 @@ class TestCircularDependencyDetection: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_a}/upload", 
files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -754,7 +740,7 @@ class TestCircularDependencyDetection: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_b}/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -772,7 +758,7 @@ class TestCircularDependencyDetection: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_a}/upload", files=files, - data={"tag": "2.0.0"}, + data={"version": "2.0.0"}, ) # Should be rejected with 400 (circular dependency) assert response.status_code == 400 @@ -807,7 +793,7 @@ class TestCircularDependencyDetection: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_a}/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -825,7 +811,7 @@ class TestCircularDependencyDetection: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_b}/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -843,7 +829,7 @@ class TestCircularDependencyDetection: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_c}/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -861,7 +847,7 @@ class TestCircularDependencyDetection: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_a}/upload", files=files, - data={"tag": "2.0.0"}, + data={"version": "2.0.0"}, ) assert response.status_code == 400 data = response.json() @@ -910,7 +896,7 @@ class TestConflictDetection: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_common}/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -920,7 +906,7 @@ class TestConflictDetection: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_common}/upload", files=files, - data={"tag": "2.0.0"}, + data={"version": "2.0.0"}, ) assert response.status_code == 200 @@ -938,7 +924,7 @@ class TestConflictDetection: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_lib_a}/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -956,7 +942,7 @@ class TestConflictDetection: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_lib_b}/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -975,7 +961,7 @@ class TestConflictDetection: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_app}/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -1023,7 +1009,7 @@ class TestConflictDetection: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_common}/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -1042,7 +1028,7 @@ class TestConflictDetection: response = integration_client.post( f"/api/v1/project/{test_project}/{lib_pkg}/upload", files=files, - data={"tag": "1.0.0"}, + data={"version": "1.0.0"}, ) assert response.status_code == 200 @@ -1061,7 +1047,7 @@ class TestConflictDetection: response = integration_client.post( f"/api/v1/project/{test_project}/{pkg_app}/upload", files=files, - data={"tag": "1.0.0"}, 
+ data={"version": "1.0.0"}, ) assert response.status_code == 200 diff --git a/backend/tests/test_download_verification.py b/backend/tests/test_download_verification.py index ddec899..f6cc12e 100644 --- a/backend/tests/test_download_verification.py +++ b/backend/tests/test_download_verification.py @@ -26,7 +26,7 @@ def upload_test_file(integration_client): Factory fixture to upload a test file and return its artifact ID. Usage: - artifact_id = upload_test_file(project, package, content, tag="v1.0") + artifact_id = upload_test_file(project, package, content, version="v1.0") """ def _upload(project_name: str, package_name: str, content: bytes, tag: str = None): @@ -66,7 +66,7 @@ class TestDownloadChecksumHeaders: # Upload file artifact_id = upload_test_file( - project_name, package_name, content, tag="sha256-header-test" + project_name, package_name, content, version="sha256-header-test" ) # Download with proxy mode @@ -88,7 +88,7 @@ class TestDownloadChecksumHeaders: content = b"Content for ETag header test" artifact_id = upload_test_file( - project_name, package_name, content, tag="etag-test" + project_name, package_name, content, version="etag-test" ) response = integration_client.get( @@ -110,7 +110,7 @@ class TestDownloadChecksumHeaders: content = b"Content for Digest header test" sha256 = hashlib.sha256(content).hexdigest() - upload_test_file(project_name, package_name, content, tag="digest-test") + upload_test_file(project_name, package_name, content, version="digest-test") response = integration_client.get( f"/api/v1/project/{project_name}/{package_name}/+/digest-test", @@ -137,7 +137,7 @@ class TestDownloadChecksumHeaders: project_name, package_name = test_package content = b"Content for X-Content-Length test" - upload_test_file(project_name, package_name, content, tag="content-length-test") + upload_test_file(project_name, package_name, content, version="content-length-test") response = integration_client.get( f"/api/v1/project/{project_name}/{package_name}/+/content-length-test", @@ -156,7 +156,7 @@ class TestDownloadChecksumHeaders: project_name, package_name = test_package content = b"Content for X-Verified false test" - upload_test_file(project_name, package_name, content, tag="verified-false-test") + upload_test_file(project_name, package_name, content, version="verified-false-test") response = integration_client.get( f"/api/v1/project/{project_name}/{package_name}/+/verified-false-test", @@ -184,7 +184,7 @@ class TestPreVerificationMode: project_name, package_name = test_package content = b"Content for pre-verification success test" - upload_test_file(project_name, package_name, content, tag="pre-verify-success") + upload_test_file(project_name, package_name, content, version="pre-verify-success") response = integration_client.get( f"/api/v1/project/{project_name}/{package_name}/+/pre-verify-success", @@ -205,7 +205,7 @@ class TestPreVerificationMode: # Use binary content to verify no corruption content = bytes(range(256)) * 10 # 2560 bytes of all byte values - upload_test_file(project_name, package_name, content, tag="pre-verify-content") + upload_test_file(project_name, package_name, content, version="pre-verify-content") response = integration_client.get( f"/api/v1/project/{project_name}/{package_name}/+/pre-verify-content", @@ -233,7 +233,7 @@ class TestStreamingVerificationMode: content = b"Content for streaming verification success test" upload_test_file( - project_name, package_name, content, tag="stream-verify-success" + project_name, package_name, content, 
version="stream-verify-success" ) response = integration_client.get( @@ -255,7 +255,7 @@ class TestStreamingVerificationMode: # 100KB of content content = b"x" * (100 * 1024) - upload_test_file(project_name, package_name, content, tag="stream-verify-large") + upload_test_file(project_name, package_name, content, version="stream-verify-large") response = integration_client.get( f"/api/v1/project/{project_name}/{package_name}/+/stream-verify-large", @@ -283,7 +283,7 @@ class TestHeadRequestHeaders: content = b"Content for HEAD SHA256 test" artifact_id = upload_test_file( - project_name, package_name, content, tag="head-sha256-test" + project_name, package_name, content, version="head-sha256-test" ) response = integration_client.head( @@ -303,7 +303,7 @@ class TestHeadRequestHeaders: content = b"Content for HEAD ETag test" artifact_id = upload_test_file( - project_name, package_name, content, tag="head-etag-test" + project_name, package_name, content, version="head-etag-test" ) response = integration_client.head( @@ -322,7 +322,7 @@ class TestHeadRequestHeaders: project_name, package_name = test_package content = b"Content for HEAD Digest test" - upload_test_file(project_name, package_name, content, tag="head-digest-test") + upload_test_file(project_name, package_name, content, version="head-digest-test") response = integration_client.head( f"/api/v1/project/{project_name}/{package_name}/+/head-digest-test" @@ -340,7 +340,7 @@ class TestHeadRequestHeaders: project_name, package_name = test_package content = b"Content for HEAD Content-Length test" - upload_test_file(project_name, package_name, content, tag="head-length-test") + upload_test_file(project_name, package_name, content, version="head-length-test") response = integration_client.head( f"/api/v1/project/{project_name}/{package_name}/+/head-length-test" @@ -356,7 +356,7 @@ class TestHeadRequestHeaders: project_name, package_name = test_package content = b"Content for HEAD no-body test" - upload_test_file(project_name, package_name, content, tag="head-no-body-test") + upload_test_file(project_name, package_name, content, version="head-no-body-test") response = integration_client.head( f"/api/v1/project/{project_name}/{package_name}/+/head-no-body-test" @@ -382,7 +382,7 @@ class TestRangeRequestHeaders: project_name, package_name = test_package content = b"Content for range request checksum header test" - upload_test_file(project_name, package_name, content, tag="range-checksum-test") + upload_test_file(project_name, package_name, content, version="range-checksum-test") response = integration_client.get( f"/api/v1/project/{project_name}/{package_name}/+/range-checksum-test", @@ -412,7 +412,7 @@ class TestClientSideVerification: project_name, package_name = test_package content = b"Content for client-side verification test" - upload_test_file(project_name, package_name, content, tag="client-verify-test") + upload_test_file(project_name, package_name, content, version="client-verify-test") response = integration_client.get( f"/api/v1/project/{project_name}/{package_name}/+/client-verify-test", @@ -438,7 +438,7 @@ class TestClientSideVerification: project_name, package_name = test_package content = b"Content for Digest header verification" - upload_test_file(project_name, package_name, content, tag="digest-verify-test") + upload_test_file(project_name, package_name, content, version="digest-verify-test") response = integration_client.get( f"/api/v1/project/{project_name}/{package_name}/+/digest-verify-test", diff --git 
a/backend/tests/test_upstream_caching.py b/backend/tests/test_upstream_caching.py index 49b105d..cfd6d10 100644 --- a/backend/tests/test_upstream_caching.py +++ b/backend/tests/test_upstream_caching.py @@ -192,7 +192,6 @@ class TestCacheSettingsModel: settings = CacheSettings() assert hasattr(settings, 'id') - assert hasattr(settings, 'allow_public_internet') assert hasattr(settings, 'auto_create_system_projects') def test_model_with_values(self): @@ -201,11 +200,9 @@ class TestCacheSettingsModel: settings = CacheSettings( id=1, - allow_public_internet=False, auto_create_system_projects=True, ) assert settings.id == 1 - assert settings.allow_public_internet is False assert settings.auto_create_system_projects is True @@ -365,16 +362,14 @@ class TestCacheSettingsSchemas: from app.schemas import CacheSettingsUpdate update = CacheSettingsUpdate() - assert update.allow_public_internet is None assert update.auto_create_system_projects is None def test_update_schema_partial(self): """Test CacheSettingsUpdate with partial fields.""" from app.schemas import CacheSettingsUpdate - update = CacheSettingsUpdate(allow_public_internet=False) - assert update.allow_public_internet is False - assert update.auto_create_system_projects is None + update = CacheSettingsUpdate(auto_create_system_projects=True) + assert update.auto_create_system_projects is True class TestCacheRequestSchemas: @@ -388,7 +383,7 @@ class TestCacheRequestSchemas: url="https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", source_type="npm", package_name="lodash", - tag="4.17.21", + version="4.17.21", ) assert request.url == "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz" assert request.source_type == "npm" diff --git a/backend/tests/unit/test_models.py b/backend/tests/unit/test_models.py index ae85605..343d93d 100644 --- a/backend/tests/unit/test_models.py +++ b/backend/tests/unit/test_models.py @@ -145,54 +145,6 @@ class TestPackageModel: assert platform_col.default.arg == "any" -class TestTagModel: - """Tests for the Tag model.""" - - @pytest.mark.unit - def test_tag_requires_package_id(self): - """Test tag requires package_id.""" - from app.models import Tag - - tag = Tag( - name="v1.0.0", - package_id=uuid.uuid4(), - artifact_id="f" * 64, - created_by="test-user", - ) - - assert tag.package_id is not None - assert tag.artifact_id == "f" * 64 - - -class TestTagHistoryModel: - """Tests for the TagHistory model.""" - - @pytest.mark.unit - def test_tag_history_default_change_type(self): - """Test tag history change_type column has default value of 'update'.""" - from app.models import TagHistory - - # Check the column definition has the right default - change_type_col = TagHistory.__table__.columns["change_type"] - assert change_type_col.default is not None - assert change_type_col.default.arg == "update" - - @pytest.mark.unit - def test_tag_history_allows_null_old_artifact(self): - """Test tag history allows null old_artifact_id (for create events).""" - from app.models import TagHistory - - history = TagHistory( - tag_id=uuid.uuid4(), - old_artifact_id=None, - new_artifact_id="h" * 64, - change_type="create", - changed_by="test-user", - ) - - assert history.old_artifact_id is None - - class TestUploadModel: """Tests for the Upload model."""
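
Note for reviewers (not part of the patch itself): the reworked tests in
backend/tests/test_dependencies.py treat "version" as the only way to pin a
dependency, expecting DependencyCreate(project=..., package=...) without a
version to raise ValidationError. The real schema lives in app/schemas.py and
is not shown in this diff; the following is only a minimal sketch of the
version-only shape these tests assume:

    # Hypothetical sketch -- mirrors what the updated tests expect, not the
    # actual app.schemas.DependencyCreate definition (which this diff does
    # not touch).
    from pydantic import BaseModel

    class DependencyCreate(BaseModel):
        project: str
        package: str
        version: str  # required; omitting it raises ValidationError
                      # (the old optional 'tag' field no longer exists)

    # Matches the assertions in test_dependency_requires_version:
    #   DependencyCreate(project="proj", package="pkg")        -> ValidationError
    #   DependencyCreate(project="proj", package="pkg",
    #                    version="1.0.0").version == "1.0.0"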