Update tests for tag removal
- Remove Tag/TagHistory model tests from unit tests
- Update CacheSettings tests to remove allow_public_internet field
- Replace tag= with version= in upload_test_file calls
- Update test assertions to use versions instead of tags
- Remove tests for tag: prefix downloads (now uses version:)
- Update dependency tests for version-only schema
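For context, a minimal before/after sketch of the call-site change this commit makes (helper and parameter names are taken from the diff below; the surrounding fixtures are assumed):

    # Before: the helper accepted an optional tag and posted data={"tag": ...}
    upload_test_file(integration_client, project, package, content, tag="v1")

    # After: only a version is accepted; the helper posts data={"version": ...}
    # to /api/v1/project/{project}/{package}/upload
    upload_test_file(integration_client, project, package, content, version="v1")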
@@ -96,7 +96,6 @@ def upload_test_file(
 package: str,
 content: bytes,
 filename: str = "test.bin",
-tag: Optional[str] = None,
 version: Optional[str] = None,
 ) -> dict:
 """
@@ -108,7 +107,6 @@ def upload_test_file(
 package: Package name
 content: File content as bytes
 filename: Original filename
-tag: Optional tag to assign
 version: Optional version to assign

 Returns:
@@ -116,8 +114,6 @@ def upload_test_file(
 """
 files = {"file": (filename, io.BytesIO(content), "application/octet-stream")}
 data = {}
-if tag:
-data["tag"] = tag
 if version:
 data["version"] = version

@@ -25,7 +25,7 @@ class TestArtifactRetrieval:
 expected_hash = compute_sha256(content)

 upload_test_file(
-integration_client, project_name, package_name, content, tag="v1"
+integration_client, project_name, package_name, content, version="v1"
 )

 response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
@@ -53,7 +53,7 @@ class TestArtifactRetrieval:
 expected_hash = compute_sha256(content)

 upload_test_file(
-integration_client, project_name, package_name, content, tag="tagged-v1"
+integration_client, project_name, package_name, content, version="tagged-v1"
 )

 response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
@@ -82,7 +82,7 @@ class TestArtifactStats:
 expected_hash = compute_sha256(content)

 upload_test_file(
-integration_client, project, package, content, tag=f"art-{unique_test_id}"
+integration_client, project, package, content, version=f"art-{unique_test_id}"
 )

 response = integration_client.get(f"/api/v1/artifact/{expected_hash}/stats")
@@ -136,8 +136,8 @@ class TestArtifactStats:
 )

 # Upload same content to both projects
-upload_test_file(integration_client, proj1, "pkg", content, tag="v1")
+upload_test_file(integration_client, proj1, "pkg", content, version="v1")
-upload_test_file(integration_client, proj2, "pkg", content, tag="v1")
+upload_test_file(integration_client, proj2, "pkg", content, version="v1")

 # Check artifact stats
 response = integration_client.get(f"/api/v1/artifact/{expected_hash}/stats")
@@ -315,7 +315,7 @@ class TestOrphanedArtifacts:
 expected_hash = compute_sha256(content)

 # Upload with tag
-upload_test_file(integration_client, project, package, content, tag="temp-tag")
+upload_test_file(integration_client, project, package, content, version="temp-tag")

 # Verify not in orphaned list
 response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1000")
@@ -357,7 +357,7 @@ class TestGarbageCollection:
 expected_hash = compute_sha256(content)

 # Upload and delete tag to create orphan
-upload_test_file(integration_client, project, package, content, tag="dry-run")
+upload_test_file(integration_client, project, package, content, version="dry-run")
 integration_client.delete(f"/api/v1/project/{project}/{package}/tags/dry-run")

 # Verify artifact exists
@@ -385,7 +385,7 @@ class TestGarbageCollection:
 expected_hash = compute_sha256(content)

 # Upload with tag (ref_count=1)
-upload_test_file(integration_client, project, package, content, tag="keep-this")
+upload_test_file(integration_client, project, package, content, version="keep-this")

 # Verify artifact exists with ref_count=1
 response = integration_client.get(f"/api/v1/artifact/{expected_hash}")

@@ -63,7 +63,7 @@ class TestConcurrentUploads:
 response = client.post(
 f"/api/v1/project/{project}/{package}/upload",
 files=files,
-data={"tag": f"concurrent-{idx}"},
+data={"version": f"concurrent-{idx}"},
 headers={"Authorization": f"Bearer {api_key}"},
 )
 if response.status_code == 200:
@@ -117,7 +117,7 @@ class TestConcurrentUploads:
 response = client.post(
 f"/api/v1/project/{project}/{package}/upload",
 files=files,
-data={"tag": f"concurrent5-{idx}"},
+data={"version": f"concurrent5-{idx}"},
 headers={"Authorization": f"Bearer {api_key}"},
 )
 if response.status_code == 200:
@@ -171,7 +171,7 @@ class TestConcurrentUploads:
 response = client.post(
 f"/api/v1/project/{project}/{package}/upload",
 files=files,
-data={"tag": f"concurrent10-{idx}"},
+data={"version": f"concurrent10-{idx}"},
 headers={"Authorization": f"Bearer {api_key}"},
 )
 if response.status_code == 200:
@@ -219,7 +219,7 @@ class TestConcurrentUploads:
 response = client.post(
 f"/api/v1/project/{project}/{package}/upload",
 files=files,
-data={"tag": f"dedup-{idx}"},
+data={"version": f"dedup-{idx}"},
 headers={"Authorization": f"Bearer {api_key}"},
 )
 if response.status_code == 200:
@@ -287,7 +287,7 @@ class TestConcurrentUploads:
 response = client.post(
 f"/api/v1/project/{project}/{package}/upload",
 files=files,
-data={"tag": "latest"},
+data={"version": "latest"},
 headers={"Authorization": f"Bearer {api_key}"},
 )
 if response.status_code == 200:
@@ -321,7 +321,7 @@ class TestConcurrentDownloads:
 content, expected_hash = generate_content_with_hash(2048, seed=400)

 # Upload first
-upload_test_file(integration_client, project, package, content, tag="download-test")
+upload_test_file(integration_client, project, package, content, version="download-test")

 results = []
 errors = []
@@ -362,7 +362,7 @@ class TestConcurrentDownloads:
 project, package = test_package
 content, expected_hash = generate_content_with_hash(4096, seed=500)

-upload_test_file(integration_client, project, package, content, tag="download5-test")
+upload_test_file(integration_client, project, package, content, version="download5-test")

 num_downloads = 5
 results = []
@@ -403,7 +403,7 @@ class TestConcurrentDownloads:
 project, package = test_package
 content, expected_hash = generate_content_with_hash(8192, seed=600)

-upload_test_file(integration_client, project, package, content, tag="download10-test")
+upload_test_file(integration_client, project, package, content, version="download10-test")

 num_downloads = 10
 results = []
@@ -502,7 +502,7 @@ class TestMixedConcurrentOperations:

 # Upload initial content
 content1, hash1 = generate_content_with_hash(10240, seed=800) # 10KB
-upload_test_file(integration_client, project, package, content1, tag="initial")
+upload_test_file(integration_client, project, package, content1, version="initial")

 # New content for upload during download
 content2, hash2 = generate_content_with_hash(10240, seed=801)
@@ -539,7 +539,7 @@ class TestMixedConcurrentOperations:
 response = client.post(
 f"/api/v1/project/{project}/{package}/upload",
 files=files,
-data={"tag": "during-download"},
+data={"version": "during-download"},
 headers={"Authorization": f"Bearer {api_key}"},
 )
 if response.status_code == 200:
@@ -579,7 +579,7 @@ class TestMixedConcurrentOperations:
 existing_files = []
 for i in range(3):
 content, hash = generate_content_with_hash(2048, seed=900 + i)
-upload_test_file(integration_client, project, package, content, tag=f"existing-{i}")
+upload_test_file(integration_client, project, package, content, version=f"existing-{i}")
 existing_files.append((f"existing-{i}", content))

 # New files for uploading
@@ -619,7 +619,7 @@ class TestMixedConcurrentOperations:
 response = client.post(
 f"/api/v1/project/{project}/{package}/upload",
 files=files,
-data={"tag": f"new-{idx}"},
+data={"version": f"new-{idx}"},
 headers={"Authorization": f"Bearer {api_key}"},
 )
 if response.status_code == 200:
@@ -689,7 +689,7 @@ class TestMixedConcurrentOperations:
 upload_resp = client.post(
 f"/api/v1/project/{project}/{package}/upload",
 files=files,
-data={"tag": f"pattern-{idx}"},
+data={"version": f"pattern-{idx}"},
 headers={"Authorization": f"Bearer {api_key}"},
 )
 if upload_resp.status_code != 200:

@@ -68,7 +68,7 @@ class TestUploadErrorHandling:

 response = integration_client.post(
 f"/api/v1/project/{project}/{package}/upload",
-data={"tag": "no-file-provided"},
+data={"version": "no-file-provided"},
 )
 assert response.status_code == 422

@@ -200,7 +200,7 @@ class TestTimeoutBehavior:

 start_time = time.time()
 result = upload_test_file(
-integration_client, project, package, content, tag="timeout-test"
+integration_client, project, package, content, version="timeout-test"
 )
 elapsed = time.time() - start_time

@@ -219,7 +219,7 @@ class TestTimeoutBehavior:

 # First upload
 upload_test_file(
-integration_client, project, package, content, tag="download-timeout-test"
+integration_client, project, package, content, version="download-timeout-test"
 )

 # Then download and time it

@@ -41,7 +41,7 @@ class TestRoundTripVerification:

 # Upload and capture returned hash
 result = upload_test_file(
-integration_client, project, package, content, tag="roundtrip"
+integration_client, project, package, content, version="roundtrip"
 )
 uploaded_hash = result["artifact_id"]

@@ -84,7 +84,7 @@ class TestRoundTripVerification:
 expected_hash = compute_sha256(content)

 upload_test_file(
-integration_client, project, package, content, tag="header-check"
+integration_client, project, package, content, version="header-check"
 )

 response = integration_client.get(
@@ -102,7 +102,7 @@ class TestRoundTripVerification:
 expected_hash = compute_sha256(content)

 upload_test_file(
-integration_client, project, package, content, tag="etag-check"
+integration_client, project, package, content, version="etag-check"
 )

 response = integration_client.get(
@@ -186,7 +186,7 @@ class TestClientSideVerificationWorkflow:
 content = b"Client post-download verification"

 upload_test_file(
-integration_client, project, package, content, tag="verify-after"
+integration_client, project, package, content, version="verify-after"
 )

 response = integration_client.get(
@@ -215,7 +215,7 @@ class TestIntegritySizeVariants:
 content, expected_hash = sized_content(SIZE_1KB, seed=100)

 result = upload_test_file(
-integration_client, project, package, content, tag="int-1kb"
+integration_client, project, package, content, version="int-1kb"
 )
 assert result["artifact_id"] == expected_hash

@@ -234,7 +234,7 @@ class TestIntegritySizeVariants:
 content, expected_hash = sized_content(SIZE_100KB, seed=101)

 result = upload_test_file(
-integration_client, project, package, content, tag="int-100kb"
+integration_client, project, package, content, version="int-100kb"
 )
 assert result["artifact_id"] == expected_hash

@@ -253,7 +253,7 @@ class TestIntegritySizeVariants:
 content, expected_hash = sized_content(SIZE_1MB, seed=102)

 result = upload_test_file(
-integration_client, project, package, content, tag="int-1mb"
+integration_client, project, package, content, version="int-1mb"
 )
 assert result["artifact_id"] == expected_hash

@@ -273,7 +273,7 @@ class TestIntegritySizeVariants:
 content, expected_hash = sized_content(SIZE_10MB, seed=103)

 result = upload_test_file(
-integration_client, project, package, content, tag="int-10mb"
+integration_client, project, package, content, version="int-10mb"
 )
 assert result["artifact_id"] == expected_hash

@@ -366,7 +366,7 @@ class TestDigestHeader:
 expected_hash = compute_sha256(content)

 upload_test_file(
-integration_client, project, package, content, tag="digest-test"
+integration_client, project, package, content, version="digest-test"
 )

 response = integration_client.get(
@@ -390,7 +390,7 @@ class TestDigestHeader:
 expected_hash = compute_sha256(content)

 upload_test_file(
-integration_client, project, package, content, tag="digest-b64"
+integration_client, project, package, content, version="digest-b64"
 )

 response = integration_client.get(
@@ -420,7 +420,7 @@ class TestVerificationModes:
 content = b"Pre-verification mode test"

 upload_test_file(
-integration_client, project, package, content, tag="pre-verify"
+integration_client, project, package, content, version="pre-verify"
 )

 response = integration_client.get(
@@ -440,7 +440,7 @@ class TestVerificationModes:
 content = b"Stream verification mode test"

 upload_test_file(
-integration_client, project, package, content, tag="stream-verify"
+integration_client, project, package, content, version="stream-verify"
 )

 response = integration_client.get(
@@ -477,7 +477,7 @@ class TestArtifactIntegrityEndpoint:
 expected_size = len(content)

 upload_test_file(
-integration_client, project, package, content, tag="content-len"
+integration_client, project, package, content, version="content-len"
 )

 response = integration_client.get(
@@ -513,7 +513,7 @@ class TestCorruptionDetection:

 # Upload original content
 result = upload_test_file(
-integration_client, project, package, content, tag="corrupt-test"
+integration_client, project, package, content, version="corrupt-test"
 )
 assert result["artifact_id"] == expected_hash

@@ -555,7 +555,7 @@ class TestCorruptionDetection:
 expected_hash = compute_sha256(content)

 result = upload_test_file(
-integration_client, project, package, content, tag="bitflip-test"
+integration_client, project, package, content, version="bitflip-test"
 )
 assert result["artifact_id"] == expected_hash

@@ -592,7 +592,7 @@ class TestCorruptionDetection:
 expected_hash = compute_sha256(content)

 result = upload_test_file(
-integration_client, project, package, content, tag="truncate-test"
+integration_client, project, package, content, version="truncate-test"
 )
 assert result["artifact_id"] == expected_hash

@@ -627,7 +627,7 @@ class TestCorruptionDetection:
 expected_hash = compute_sha256(content)

 result = upload_test_file(
-integration_client, project, package, content, tag="append-test"
+integration_client, project, package, content, version="append-test"
 )
 assert result["artifact_id"] == expected_hash

@@ -670,7 +670,7 @@ class TestCorruptionDetection:
 expected_hash = compute_sha256(content)

 result = upload_test_file(
-integration_client, project, package, content, tag="client-detect"
+integration_client, project, package, content, version="client-detect"
 )

 # Corrupt the S3 object
@@ -713,7 +713,7 @@ class TestCorruptionDetection:
 expected_hash = compute_sha256(content)

 result = upload_test_file(
-integration_client, project, package, content, tag="size-mismatch"
+integration_client, project, package, content, version="size-mismatch"
 )

 # Modify S3 object to have different size
@@ -747,7 +747,7 @@ class TestCorruptionDetection:
 expected_hash = compute_sha256(content)

 result = upload_test_file(
-integration_client, project, package, content, tag="missing-s3"
+integration_client, project, package, content, version="missing-s3"
 )

 # Delete the S3 object

@@ -41,7 +41,7 @@ class TestUploadMetrics:
 content = b"duration test content"

 result = upload_test_file(
-integration_client, project, package, content, tag="duration-test"
+integration_client, project, package, content, version="duration-test"
 )

 assert "duration_ms" in result
@@ -55,7 +55,7 @@ class TestUploadMetrics:
 content = b"throughput test content"

 result = upload_test_file(
-integration_client, project, package, content, tag="throughput-test"
+integration_client, project, package, content, version="throughput-test"
 )

 assert "throughput_mbps" in result
@@ -72,7 +72,7 @@ class TestUploadMetrics:

 start = time.time()
 result = upload_test_file(
-integration_client, project, package, content, tag="duration-check"
+integration_client, project, package, content, version="duration-check"
 )
 actual_duration = (time.time() - start) * 1000 # ms

@@ -92,7 +92,7 @@ class TestLargeFileUploads:
 content, expected_hash = sized_content(SIZE_10MB, seed=200)

 result = upload_test_file(
-integration_client, project, package, content, tag="large-10mb"
+integration_client, project, package, content, version="large-10mb"
 )

 assert result["artifact_id"] == expected_hash
@@ -109,7 +109,7 @@ class TestLargeFileUploads:
 content, expected_hash = sized_content(SIZE_100MB, seed=300)

 result = upload_test_file(
-integration_client, project, package, content, tag="large-100mb"
+integration_client, project, package, content, version="large-100mb"
 )

 assert result["artifact_id"] == expected_hash
@@ -126,7 +126,7 @@ class TestLargeFileUploads:
 content, expected_hash = sized_content(SIZE_1GB, seed=400)

 result = upload_test_file(
-integration_client, project, package, content, tag="large-1gb"
+integration_client, project, package, content, version="large-1gb"
 )

 assert result["artifact_id"] == expected_hash
@@ -147,14 +147,14 @@ class TestLargeFileUploads:

 # First upload
 result1 = upload_test_file(
-integration_client, project, package, content, tag=f"dedup-{unique_test_id}-1"
+integration_client, project, package, content, version=f"dedup-{unique_test_id}-1"
 )
 # Note: may be True if previous test uploaded same content
 first_dedupe = result1["deduplicated"]

 # Second upload of same content
 result2 = upload_test_file(
-integration_client, project, package, content, tag=f"dedup-{unique_test_id}-2"
+integration_client, project, package, content, version=f"dedup-{unique_test_id}-2"
 )
 assert result2["artifact_id"] == expected_hash
 # Second upload MUST be deduplicated
@@ -277,7 +277,7 @@ class TestUploadSizeLimits:
 content = b"X"

 result = upload_test_file(
-integration_client, project, package, content, tag="min-size"
+integration_client, project, package, content, version="min-size"
 )

 assert result["size"] == 1
@@ -289,7 +289,7 @@ class TestUploadSizeLimits:
 content = b"content length verification test"

 result = upload_test_file(
-integration_client, project, package, content, tag="content-length-test"
+integration_client, project, package, content, version="content-length-test"
 )

 # Size in response should match actual content length
@@ -336,7 +336,7 @@ class TestUploadErrorHandling:

 response = integration_client.post(
 f"/api/v1/project/{project}/{package}/upload",
-data={"tag": "no-file"},
+data={"version": "no-file"},
 )

 assert response.status_code == 422
@@ -459,7 +459,7 @@ class TestUploadTimeout:

 # httpx client should handle this quickly
 result = upload_test_file(
-integration_client, project, package, content, tag="timeout-small"
+integration_client, project, package, content, version="timeout-small"
 )

 assert result["artifact_id"] is not None
@@ -474,7 +474,7 @@ class TestUploadTimeout:

 start = time.time()
 result = upload_test_file(
-integration_client, project, package, content, tag="timeout-check"
+integration_client, project, package, content, version="timeout-check"
 )
 duration = time.time() - start

@@ -525,7 +525,7 @@ class TestConcurrentUploads:
 response = client.post(
 f"/api/v1/project/{project}/{package}/upload",
 files=files,
-data={"tag": f"concurrent-diff-{idx}"},
+data={"version": f"concurrent-diff-{idx}"},
 headers={"Authorization": f"Bearer {api_key}"},
 )
 if response.status_code == 200:

@@ -261,13 +261,13 @@ class TestPackageCascadeDelete:
 expected_hash = compute_sha256(content)

 upload_test_file(
-integration_client, project_name, package_name, content, tag="v1"
+integration_client, project_name, package_name, content, version="v1"
 )
 upload_test_file(
-integration_client, project_name, package_name, content, tag="v2"
+integration_client, project_name, package_name, content, version="v2"
 )
 upload_test_file(
-integration_client, project_name, package_name, content, tag="v3"
+integration_client, project_name, package_name, content, version="v3"
 )

 # Verify ref_count is 3

@@ -258,16 +258,16 @@ class TestProjectCascadeDelete:
 expected_hash = compute_sha256(content)

 upload_test_file(
-integration_client, project_name, package1_name, content, tag="v1"
+integration_client, project_name, package1_name, content, version="v1"
 )
 upload_test_file(
-integration_client, project_name, package1_name, content, tag="v2"
+integration_client, project_name, package1_name, content, version="v2"
 )
 upload_test_file(
-integration_client, project_name, package2_name, content, tag="latest"
+integration_client, project_name, package2_name, content, version="latest"
 )
 upload_test_file(
-integration_client, project_name, package2_name, content, tag="stable"
+integration_client, project_name, package2_name, content, version="stable"
 )

 # Verify ref_count is 4 (2 tags in each of 2 packages)

@@ -48,7 +48,7 @@ class TestSmallFileSizes:

 result = upload_test_file(
 integration_client, project, package, content,
-filename="1byte.bin", tag="1byte"
+filename="1byte.bin", version="1byte"
 )
 assert result["artifact_id"] == expected_hash
 assert result["size"] == SIZE_1B
@@ -70,7 +70,7 @@ class TestSmallFileSizes:

 result = upload_test_file(
 integration_client, project, package, content,
-filename="1kb.bin", tag="1kb"
+filename="1kb.bin", version="1kb"
 )
 assert result["artifact_id"] == expected_hash
 assert result["size"] == SIZE_1KB
@@ -90,7 +90,7 @@ class TestSmallFileSizes:

 result = upload_test_file(
 integration_client, project, package, content,
-filename="10kb.bin", tag="10kb"
+filename="10kb.bin", version="10kb"
 )
 assert result["artifact_id"] == expected_hash
 assert result["size"] == SIZE_10KB
@@ -110,7 +110,7 @@ class TestSmallFileSizes:

 result = upload_test_file(
 integration_client, project, package, content,
-filename="100kb.bin", tag="100kb"
+filename="100kb.bin", version="100kb"
 )
 assert result["artifact_id"] == expected_hash
 assert result["size"] == SIZE_100KB
@@ -134,7 +134,7 @@ class TestMediumFileSizes:

 result = upload_test_file(
 integration_client, project, package, content,
-filename="1mb.bin", tag="1mb"
+filename="1mb.bin", version="1mb"
 )
 assert result["artifact_id"] == expected_hash
 assert result["size"] == SIZE_1MB
@@ -155,7 +155,7 @@ class TestMediumFileSizes:

 result = upload_test_file(
 integration_client, project, package, content,
-filename="5mb.bin", tag="5mb"
+filename="5mb.bin", version="5mb"
 )
 assert result["artifact_id"] == expected_hash
 assert result["size"] == SIZE_5MB
@@ -177,7 +177,7 @@ class TestMediumFileSizes:

 result = upload_test_file(
 integration_client, project, package, content,
-filename="10mb.bin", tag="10mb"
+filename="10mb.bin", version="10mb"
 )
 assert result["artifact_id"] == expected_hash
 assert result["size"] == SIZE_10MB
@@ -200,7 +200,7 @@ class TestMediumFileSizes:
 start_time = time.time()
 result = upload_test_file(
 integration_client, project, package, content,
-filename="50mb.bin", tag="50mb"
+filename="50mb.bin", version="50mb"
 )
 upload_time = time.time() - start_time

@@ -240,7 +240,7 @@ class TestLargeFileSizes:
 start_time = time.time()
 result = upload_test_file(
 integration_client, project, package, content,
-filename="100mb.bin", tag="100mb"
+filename="100mb.bin", version="100mb"
 )
 upload_time = time.time() - start_time

@@ -271,7 +271,7 @@ class TestLargeFileSizes:
 start_time = time.time()
 result = upload_test_file(
 integration_client, project, package, content,
-filename="250mb.bin", tag="250mb"
+filename="250mb.bin", version="250mb"
 )
 upload_time = time.time() - start_time

@@ -302,7 +302,7 @@ class TestLargeFileSizes:
 start_time = time.time()
 result = upload_test_file(
 integration_client, project, package, content,
-filename="500mb.bin", tag="500mb"
+filename="500mb.bin", version="500mb"
 )
 upload_time = time.time() - start_time

@@ -336,7 +336,7 @@ class TestLargeFileSizes:
 start_time = time.time()
 result = upload_test_file(
 integration_client, project, package, content,
-filename="1gb.bin", tag="1gb"
+filename="1gb.bin", version="1gb"
 )
 upload_time = time.time() - start_time

@@ -368,7 +368,7 @@ class TestChunkBoundaries:

 result = upload_test_file(
 integration_client, project, package, content,
-filename="chunk.bin", tag="chunk-exact"
+filename="chunk.bin", version="chunk-exact"
 )
 assert result["artifact_id"] == expected_hash
 assert result["size"] == CHUNK_SIZE
@@ -389,7 +389,7 @@ class TestChunkBoundaries:

 result = upload_test_file(
 integration_client, project, package, content,
-filename="chunk_plus.bin", tag="chunk-plus"
+filename="chunk_plus.bin", version="chunk-plus"
 )
 assert result["artifact_id"] == expected_hash
 assert result["size"] == size
@@ -410,7 +410,7 @@ class TestChunkBoundaries:

 result = upload_test_file(
 integration_client, project, package, content,
-filename="chunk_minus.bin", tag="chunk-minus"
+filename="chunk_minus.bin", version="chunk-minus"
 )
 assert result["artifact_id"] == expected_hash
 assert result["size"] == size
@@ -431,7 +431,7 @@ class TestChunkBoundaries:

 result = upload_test_file(
 integration_client, project, package, content,
-filename="multi_chunk.bin", tag="multi-chunk"
+filename="multi_chunk.bin", version="multi-chunk"
 )
 assert result["artifact_id"] == expected_hash
 assert result["size"] == size
@@ -457,7 +457,7 @@ class TestDataIntegrity:

 result = upload_test_file(
 integration_client, project, package, content,
-filename="binary.bin", tag="binary"
+filename="binary.bin", version="binary"
 )
 assert result["artifact_id"] == expected_hash

@@ -477,7 +477,7 @@ class TestDataIntegrity:

 result = upload_test_file(
 integration_client, project, package, content,
-filename="text.txt", tag="text"
+filename="text.txt", version="text"
 )
 assert result["artifact_id"] == expected_hash

@@ -498,7 +498,7 @@ class TestDataIntegrity:

 result = upload_test_file(
 integration_client, project, package, content,
-filename="nulls.bin", tag="nulls"
+filename="nulls.bin", version="nulls"
 )
 assert result["artifact_id"] == expected_hash

@@ -519,7 +519,7 @@ class TestDataIntegrity:

 result = upload_test_file(
 integration_client, project, package, content,
-filename="文件名.txt", tag="unicode-name"
+filename="文件名.txt", version="unicode-name"
 )
 assert result["artifact_id"] == expected_hash
 assert result["original_name"] == "文件名.txt"
@@ -543,7 +543,7 @@ class TestDataIntegrity:

 result = upload_test_file(
 integration_client, project, package, content,
-filename="data.gz", tag="compressed"
+filename="data.gz", version="compressed"
 )
 assert result["artifact_id"] == expected_hash

@@ -568,7 +568,7 @@ class TestDataIntegrity:

 result = upload_test_file(
 integration_client, project, package, content,
-filename=f"hash_test_{size}.bin", tag=f"hash-{size}"
+filename=f"hash_test_{size}.bin", version=f"hash-{size}"
 )

 # Verify artifact_id matches expected hash

@@ -32,7 +32,7 @@ class TestRangeRequests:
 """Test range request for first N bytes."""
 project, package = test_package
 content = b"0123456789" * 100 # 1000 bytes
-upload_test_file(integration_client, project, package, content, tag="range-test")
+upload_test_file(integration_client, project, package, content, version="range-test")

 # Request first 10 bytes
 response = integration_client.get(
@@ -50,7 +50,7 @@ class TestRangeRequests:
 """Test range request for bytes in the middle."""
 project, package = test_package
 content = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
-upload_test_file(integration_client, project, package, content, tag="range-mid")
+upload_test_file(integration_client, project, package, content, version="range-mid")

 # Request bytes 10-19 (KLMNOPQRST)
 response = integration_client.get(
@@ -66,7 +66,7 @@ class TestRangeRequests:
 """Test range request for last N bytes (suffix range)."""
 project, package = test_package
 content = b"0123456789ABCDEF" # 16 bytes
-upload_test_file(integration_client, project, package, content, tag="range-suffix")
+upload_test_file(integration_client, project, package, content, version="range-suffix")

 # Request last 4 bytes
 response = integration_client.get(
@@ -82,7 +82,7 @@ class TestRangeRequests:
 """Test range request from offset to end."""
 project, package = test_package
 content = b"0123456789"
-upload_test_file(integration_client, project, package, content, tag="range-open")
+upload_test_file(integration_client, project, package, content, version="range-open")

 # Request from byte 5 to end
 response = integration_client.get(
@@ -100,7 +100,7 @@ class TestRangeRequests:
 """Test that range requests include Accept-Ranges header."""
 project, package = test_package
 content = b"test content"
-upload_test_file(integration_client, project, package, content, tag="accept-ranges")
+upload_test_file(integration_client, project, package, content, version="accept-ranges")

 response = integration_client.get(
 f"/api/v1/project/{project}/{package}/+/accept-ranges",
@@ -117,7 +117,7 @@ class TestRangeRequests:
 """Test that full downloads advertise range support."""
 project, package = test_package
 content = b"test content"
-upload_test_file(integration_client, project, package, content, tag="full-accept")
+upload_test_file(integration_client, project, package, content, version="full-accept")

 response = integration_client.get(
 f"/api/v1/project/{project}/{package}/+/full-accept",
@@ -136,7 +136,7 @@ class TestConditionalRequests:
 project, package = test_package
 content = b"conditional request test content"
 expected_hash = compute_sha256(content)
-upload_test_file(integration_client, project, package, content, tag="cond-etag")
+upload_test_file(integration_client, project, package, content, version="cond-etag")

 # Request with matching ETag
 response = integration_client.get(
@@ -153,7 +153,7 @@ class TestConditionalRequests:
 project, package = test_package
 content = b"etag no quotes test"
 expected_hash = compute_sha256(content)
-upload_test_file(integration_client, project, package, content, tag="cond-noquote")
+upload_test_file(integration_client, project, package, content, version="cond-noquote")

 # Request with ETag without quotes
 response = integration_client.get(
@@ -168,7 +168,7 @@ class TestConditionalRequests:
 """Test If-None-Match with non-matching ETag returns 200."""
 project, package = test_package
 content = b"etag mismatch test"
-upload_test_file(integration_client, project, package, content, tag="cond-mismatch")
+upload_test_file(integration_client, project, package, content, version="cond-mismatch")

 # Request with different ETag
 response = integration_client.get(
@@ -184,7 +184,7 @@ class TestConditionalRequests:
 """Test If-Modified-Since with future date returns 304."""
 project, package = test_package
 content = b"modified since test"
-upload_test_file(integration_client, project, package, content, tag="cond-modified")
+upload_test_file(integration_client, project, package, content, version="cond-modified")

 # Request with future date (artifact was definitely created before this)
 future_date = formatdate(time.time() + 86400, usegmt=True) # Tomorrow
@@ -202,7 +202,7 @@ class TestConditionalRequests:
 """Test If-Modified-Since with old date returns 200."""
 project, package = test_package
 content = b"old date test"
-upload_test_file(integration_client, project, package, content, tag="cond-old")
+upload_test_file(integration_client, project, package, content, version="cond-old")

 # Request with old date (2020-01-01)
 old_date = "Wed, 01 Jan 2020 00:00:00 GMT"
@@ -220,7 +220,7 @@ class TestConditionalRequests:
 project, package = test_package
 content = b"304 etag test"
 expected_hash = compute_sha256(content)
-upload_test_file(integration_client, project, package, content, tag="304-etag")
+upload_test_file(integration_client, project, package, content, version="304-etag")

 response = integration_client.get(
 f"/api/v1/project/{project}/{package}/+/304-etag",
@@ -236,7 +236,7 @@ class TestConditionalRequests:
 project, package = test_package
 content = b"304 cache test"
 expected_hash = compute_sha256(content)
-upload_test_file(integration_client, project, package, content, tag="304-cache")
+upload_test_file(integration_client, project, package, content, version="304-cache")

 response = integration_client.get(
 f"/api/v1/project/{project}/{package}/+/304-cache",
@@ -255,7 +255,7 @@ class TestCachingHeaders:
 """Test download response includes Cache-Control header."""
 project, package = test_package
 content = b"cache control test"
-upload_test_file(integration_client, project, package, content, tag="cache-ctl")
+upload_test_file(integration_client, project, package, content, version="cache-ctl")

 response = integration_client.get(
 f"/api/v1/project/{project}/{package}/+/cache-ctl",
@@ -272,7 +272,7 @@ class TestCachingHeaders:
 """Test download response includes Last-Modified header."""
 project, package = test_package
 content = b"last modified test"
-upload_test_file(integration_client, project, package, content, tag="last-mod")
+upload_test_file(integration_client, project, package, content, version="last-mod")

 response = integration_client.get(
 f"/api/v1/project/{project}/{package}/+/last-mod",
@@ -290,7 +290,7 @@ class TestCachingHeaders:
 project, package = test_package
 content = b"etag header test"
 expected_hash = compute_sha256(content)
-upload_test_file(integration_client, project, package, content, tag="etag-hdr")
+upload_test_file(integration_client, project, package, content, version="etag-hdr")

 response = integration_client.get(
 f"/api/v1/project/{project}/{package}/+/etag-hdr",
@@ -308,7 +308,7 @@ class TestDownloadResume:
 """Test resuming download from where it left off."""
 project, package = test_package
 content = b"ABCDEFGHIJ" * 100 # 1000 bytes
-upload_test_file(integration_client, project, package, content, tag="resume-test")
+upload_test_file(integration_client, project, package, content, version="resume-test")

 # Simulate partial download (first 500 bytes)
 response1 = integration_client.get(
@@ -340,7 +340,7 @@ class TestDownloadResume:
 project, package = test_package
 content = b"resume etag verification test content"
 expected_hash = compute_sha256(content)
-upload_test_file(integration_client, project, package, content, tag="resume-etag")
+upload_test_file(integration_client, project, package, content, version="resume-etag")

 # Get ETag from first request
 response1 = integration_client.get(
@@ -373,7 +373,7 @@ class TestLargeFileStreaming:
 project, package = test_package
 content, expected_hash = sized_content(SIZE_1MB, seed=500)

-upload_test_file(integration_client, project, package, content, tag="stream-1mb")
+upload_test_file(integration_client, project, package, content, version="stream-1mb")

 response = integration_client.get(
 f"/api/v1/project/{project}/{package}/+/stream-1mb",
@@ -391,7 +391,7 @@ class TestLargeFileStreaming:
 project, package = test_package
 content, expected_hash = sized_content(SIZE_100KB, seed=501)

-upload_test_file(integration_client, project, package, content, tag="stream-hdr")
+upload_test_file(integration_client, project, package, content, version="stream-hdr")

 response = integration_client.get(
 f"/api/v1/project/{project}/{package}/+/stream-hdr",
@@ -410,7 +410,7 @@ class TestLargeFileStreaming:
 project, package = test_package
 content, _ = sized_content(SIZE_100KB, seed=502)

-upload_test_file(integration_client, project, package, content, tag="range-large")
+upload_test_file(integration_client, project, package, content, version="range-large")

 # Request a slice from the middle
 start = 50000
@@ -433,7 +433,7 @@ class TestDownloadModes:
 """Test proxy mode streams content through backend."""
 project, package = test_package
 content = b"proxy mode test content"
-upload_test_file(integration_client, project, package, content, tag="mode-proxy")
+upload_test_file(integration_client, project, package, content, version="mode-proxy")

 response = integration_client.get(
 f"/api/v1/project/{project}/{package}/+/mode-proxy",
@@ -447,7 +447,7 @@ class TestDownloadModes:
 """Test presigned mode returns JSON with URL."""
 project, package = test_package
 content = b"presigned mode test"
-upload_test_file(integration_client, project, package, content, tag="mode-presign")
+upload_test_file(integration_client, project, package, content, version="mode-presign")

 response = integration_client.get(
 f"/api/v1/project/{project}/{package}/+/mode-presign",
@@ -464,7 +464,7 @@ class TestDownloadModes:
 """Test redirect mode returns 302 to presigned URL."""
 project, package = test_package
|
||||||
content = b"redirect mode test"
|
content = b"redirect mode test"
|
||||||
upload_test_file(integration_client, project, package, content, tag="mode-redir")
|
upload_test_file(integration_client, project, package, content, version="mode-redir")
|
||||||
|
|
||||||
response = integration_client.get(
|
response = integration_client.get(
|
||||||
f"/api/v1/project/{project}/{package}/+/mode-redir",
|
f"/api/v1/project/{project}/{package}/+/mode-redir",
|
||||||
@@ -484,7 +484,7 @@ class TestIntegrityDuringStreaming:
|
|||||||
project, package = test_package
|
project, package = test_package
|
||||||
content = b"integrity check content"
|
content = b"integrity check content"
|
||||||
expected_hash = compute_sha256(content)
|
expected_hash = compute_sha256(content)
|
||||||
upload_test_file(integration_client, project, package, content, tag="integrity")
|
upload_test_file(integration_client, project, package, content, version="integrity")
|
||||||
|
|
||||||
response = integration_client.get(
|
response = integration_client.get(
|
||||||
f"/api/v1/project/{project}/{package}/+/integrity",
|
f"/api/v1/project/{project}/{package}/+/integrity",
|
||||||
@@ -505,7 +505,7 @@ class TestIntegrityDuringStreaming:
|
|||||||
project, package = test_package
|
project, package = test_package
|
||||||
content = b"etag integrity test"
|
content = b"etag integrity test"
|
||||||
expected_hash = compute_sha256(content)
|
expected_hash = compute_sha256(content)
|
||||||
upload_test_file(integration_client, project, package, content, tag="etag-int")
|
upload_test_file(integration_client, project, package, content, version="etag-int")
|
||||||
|
|
||||||
response = integration_client.get(
|
response = integration_client.get(
|
||||||
f"/api/v1/project/{project}/{package}/+/etag-int",
|
f"/api/v1/project/{project}/{package}/+/etag-int",
|
||||||
@@ -524,7 +524,7 @@ class TestIntegrityDuringStreaming:
|
|||||||
"""Test Digest header is present in RFC 3230 format."""
|
"""Test Digest header is present in RFC 3230 format."""
|
||||||
project, package = test_package
|
project, package = test_package
|
||||||
content = b"digest header test"
|
content = b"digest header test"
|
||||||
upload_test_file(integration_client, project, package, content, tag="digest")
|
upload_test_file(integration_client, project, package, content, version="digest")
|
||||||
|
|
||||||
response = integration_client.get(
|
response = integration_client.get(
|
||||||
f"/api/v1/project/{project}/{package}/+/digest",
|
f"/api/v1/project/{project}/{package}/+/digest",
|
||||||
|
|||||||
@@ -47,7 +47,7 @@ class TestUploadBasics:
expected_hash = compute_sha256(content)

result = upload_test_file(
-integration_client, project_name, package_name, content, tag="v1"
+integration_client, project_name, package_name, content, version="v1"
)

assert result["artifact_id"] == expected_hash
@@ -116,31 +116,23 @@ class TestUploadBasics:
assert result["created_at"] is not None

@pytest.mark.integration
-def test_upload_without_tag_succeeds(self, integration_client, test_package):
+def test_upload_without_version_succeeds(self, integration_client, test_package):
-"""Test upload without tag succeeds (no tag created)."""
+"""Test upload without version succeeds (no version created)."""
project, package = test_package
-content = b"upload without tag test"
+content = b"upload without version test"
expected_hash = compute_sha256(content)

-files = {"file": ("no_tag.bin", io.BytesIO(content), "application/octet-stream")}
+files = {"file": ("no_version.bin", io.BytesIO(content), "application/octet-stream")}
response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files,
-# No tag parameter
+# No version parameter
)
assert response.status_code == 200
result = response.json()
assert result["artifact_id"] == expected_hash
+# Version should be None when not specified
-# Verify no tag was created - list tags and check
+assert result.get("version") is None
-tags_response = integration_client.get(
-f"/api/v1/project/{project}/{package}/tags"
-)
-assert tags_response.status_code == 200
-tags = tags_response.json()
-# Filter for tags pointing to this artifact
-artifact_tags = [t for t in tags.get("items", tags) if t.get("artifact_id") == expected_hash]
-assert len(artifact_tags) == 0, "Tag should not be created when not specified"

@pytest.mark.integration
def test_upload_creates_artifact_in_database(self, integration_client, test_package):
@@ -172,25 +164,29 @@ class TestUploadBasics:
assert s3_object_exists(expected_hash), "S3 object should exist after upload"

@pytest.mark.integration
-def test_upload_with_tag_creates_tag_record(self, integration_client, test_package):
+def test_upload_with_version_creates_version_record(self, integration_client, test_package):
-"""Test upload with tag creates tag record."""
+"""Test upload with version creates version record."""
project, package = test_package
-content = b"tag creation test"
+content = b"version creation test"
expected_hash = compute_sha256(content)
-tag_name = "my-tag-v1"
+version_name = "1.0.0"

-upload_test_file(
+result = upload_test_file(
-integration_client, project, package, content, tag=tag_name
+integration_client, project, package, content, version=version_name
)

-# Verify tag exists
+# Verify version was created
-tags_response = integration_client.get(
+assert result.get("version") == version_name
-f"/api/v1/project/{project}/{package}/tags"
+assert result["artifact_id"] == expected_hash

+# Verify version exists in versions list
+versions_response = integration_client.get(
+f"/api/v1/project/{project}/{package}/versions"
)
-assert tags_response.status_code == 200
+assert versions_response.status_code == 200
-tags = tags_response.json()
+versions = versions_response.json()
-tag_names = [t["name"] for t in tags.get("items", tags)]
+version_names = [v["version"] for v in versions.get("items", [])]
-assert tag_name in tag_names
+assert version_name in version_names


class TestDuplicateUploads:
@@ -207,13 +203,13 @@ class TestDuplicateUploads:

# First upload
result1 = upload_test_file(
-integration_client, project, package, content, tag="first"
+integration_client, project, package, content, version="first"
)
assert result1["artifact_id"] == expected_hash

# Second upload
result2 = upload_test_file(
-integration_client, project, package, content, tag="second"
+integration_client, project, package, content, version="second"
)
assert result2["artifact_id"] == expected_hash
assert result1["artifact_id"] == result2["artifact_id"]
@@ -228,13 +224,13 @@ class TestDuplicateUploads:

# First upload
result1 = upload_test_file(
-integration_client, project, package, content, tag="v1"
+integration_client, project, package, content, version="v1"
)
assert result1["ref_count"] == 1

# Second upload
result2 = upload_test_file(
-integration_client, project, package, content, tag="v2"
+integration_client, project, package, content, version="v2"
)
assert result2["ref_count"] == 2

@@ -261,12 +257,12 @@ class TestDuplicateUploads:
)

# Upload to first package
-result1 = upload_test_file(integration_client, project, pkg1, content, tag="v1")
+result1 = upload_test_file(integration_client, project, pkg1, content, version="v1")
assert result1["artifact_id"] == expected_hash
assert result1["deduplicated"] is False

# Upload to second package
-result2 = upload_test_file(integration_client, project, pkg2, content, tag="v1")
+result2 = upload_test_file(integration_client, project, pkg2, content, version="v1")
assert result2["artifact_id"] == expected_hash
assert result2["deduplicated"] is True

@@ -307,17 +303,17 @@ class TestDownload:
"""Tests for download functionality."""

@pytest.mark.integration
-def test_download_by_tag(self, integration_client, test_package):
+def test_download_by_version(self, integration_client, test_package):
-"""Test downloading artifact by tag name."""
+"""Test downloading artifact by version."""
project, package = test_package
-original_content = b"download by tag test"
+original_content = b"download by version test"

upload_test_file(
-integration_client, project, package, original_content, tag="download-tag"
+integration_client, project, package, original_content, version="1.0.0"
)

response = integration_client.get(
-f"/api/v1/project/{project}/{package}/+/download-tag",
+f"/api/v1/project/{project}/{package}/+/1.0.0",
params={"mode": "proxy"},
)
assert response.status_code == 200
@@ -340,29 +336,29 @@ class TestDownload:
assert response.content == original_content

@pytest.mark.integration
-def test_download_by_tag_prefix(self, integration_client, test_package):
+def test_download_by_version_prefix(self, integration_client, test_package):
-"""Test downloading artifact using tag: prefix."""
+"""Test downloading artifact using version: prefix."""
project, package = test_package
-original_content = b"download by tag prefix test"
+original_content = b"download by version prefix test"

upload_test_file(
-integration_client, project, package, original_content, tag="prefix-tag"
+integration_client, project, package, original_content, version="2.0.0"
)

response = integration_client.get(
-f"/api/v1/project/{project}/{package}/+/tag:prefix-tag",
+f"/api/v1/project/{project}/{package}/+/version:2.0.0",
params={"mode": "proxy"},
)
assert response.status_code == 200
assert response.content == original_content

@pytest.mark.integration
-def test_download_nonexistent_tag(self, integration_client, test_package):
+def test_download_nonexistent_version(self, integration_client, test_package):
-"""Test downloading nonexistent tag returns 404."""
+"""Test downloading nonexistent version returns 404."""
project, package = test_package

response = integration_client.get(
-f"/api/v1/project/{project}/{package}/+/nonexistent-tag"
+f"/api/v1/project/{project}/{package}/+/nonexistent-version"
)
assert response.status_code == 404

@@ -400,7 +396,7 @@ class TestDownload:
original_content = b"exact content verification test data 12345"

upload_test_file(
-integration_client, project, package, original_content, tag="verify"
+integration_client, project, package, original_content, version="verify"
)

response = integration_client.get(
@@ -421,7 +417,7 @@ class TestDownloadHeaders:

upload_test_file(
integration_client, project, package, content,
-filename="test.txt", tag="content-type-test"
+filename="test.txt", version="content-type-test"
)

response = integration_client.get(
@@ -440,7 +436,7 @@ class TestDownloadHeaders:
expected_length = len(content)

upload_test_file(
-integration_client, project, package, content, tag="content-length-test"
+integration_client, project, package, content, version="content-length-test"
)

response = integration_client.get(
@@ -460,7 +456,7 @@ class TestDownloadHeaders:

upload_test_file(
integration_client, project, package, content,
-filename=filename, tag="disposition-test"
+filename=filename, version="disposition-test"
)

response = integration_client.get(
@@ -481,7 +477,7 @@ class TestDownloadHeaders:
expected_hash = compute_sha256(content)

upload_test_file(
-integration_client, project, package, content, tag="checksum-headers"
+integration_client, project, package, content, version="checksum-headers"
)

response = integration_client.get(
@@ -501,7 +497,7 @@ class TestDownloadHeaders:
expected_hash = compute_sha256(content)

upload_test_file(
-integration_client, project, package, content, tag="etag-test"
+integration_client, project, package, content, version="etag-test"
)

response = integration_client.get(
@@ -553,7 +549,7 @@ class TestConcurrentUploads:
response = client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files,
-data={"tag": f"concurrent-{tag_suffix}"},
+data={"version": f"concurrent-{tag_suffix}"},
headers={"Authorization": f"Bearer {api_key}"},
)
if response.status_code == 200:
@@ -605,7 +601,7 @@ class TestFileSizeValidation:
content = b"X"

result = upload_test_file(
-integration_client, project, package, content, tag="tiny"
+integration_client, project, package, content, version="tiny"
)

assert result["artifact_id"] is not None
@@ -621,7 +617,7 @@ class TestFileSizeValidation:
expected_size = len(content)

result = upload_test_file(
-integration_client, project, package, content, tag="size-test"
+integration_client, project, package, content, version="size-test"
)

assert result["size"] == expected_size
@@ -649,7 +645,7 @@ class TestUploadFailureCleanup:
response = integration_client.post(
f"/api/v1/project/nonexistent-project-{unique_test_id}/nonexistent-pkg/upload",
files=files,
-data={"tag": "test"},
+data={"version": "test"},
)

assert response.status_code == 404
@@ -672,7 +668,7 @@ class TestUploadFailureCleanup:
response = integration_client.post(
f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload",
files=files,
-data={"tag": "test"},
+data={"version": "test"},
)

assert response.status_code == 404
@@ -693,7 +689,7 @@ class TestUploadFailureCleanup:
response = integration_client.post(
f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload",
files=files,
-data={"tag": "test"},
+data={"version": "test"},
)

assert response.status_code == 404
@@ -719,7 +715,7 @@ class TestS3StorageVerification:

# Upload same content multiple times
for tag in ["s3test1", "s3test2", "s3test3"]:
-upload_test_file(integration_client, project, package, content, tag=tag)
+upload_test_file(integration_client, project, package, content, version=tag)

# Verify only one S3 object exists
s3_objects = list_s3_objects_by_hash(expected_hash)
@@ -744,7 +740,7 @@ class TestS3StorageVerification:

# Upload same content multiple times
for tag in ["v1", "v2", "v3"]:
-upload_test_file(integration_client, project, package, content, tag=tag)
+upload_test_file(integration_client, project, package, content, version=tag)

# Query artifact
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
@@ -783,7 +779,7 @@ class TestSecurityPathTraversal:
response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files,
-data={"tag": "traversal-test"},
+data={"version": "traversal-test"},
)
assert response.status_code == 200
result = response.json()
@@ -801,48 +797,16 @@ class TestSecurityPathTraversal:
assert response.status_code in [400, 404, 422]

@pytest.mark.integration
-def test_path_traversal_in_tag_name(self, integration_client, test_package):
+def test_path_traversal_in_version_name(self, integration_client, test_package):
-"""Test tag names with path traversal are handled safely."""
+"""Test version names with path traversal are handled safely."""
project, package = test_package
-content = b"tag traversal test"
+content = b"version traversal test"

files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files,
-data={"tag": "../../../etc/passwd"},
+data={"version": "../../../etc/passwd"},
-)
-assert response.status_code in [200, 400, 422]

-@pytest.mark.integration
-def test_download_path_traversal_in_ref(self, integration_client, test_package):
-"""Test download ref with path traversal is rejected."""
-project, package = test_package

-response = integration_client.get(
-f"/api/v1/project/{project}/{package}/+/../../../etc/passwd"
-)
-assert response.status_code in [400, 404, 422]

-@pytest.mark.integration
-def test_path_traversal_in_package_name(self, integration_client, test_project):
-"""Test package names with path traversal sequences are rejected."""
-response = integration_client.get(
-f"/api/v1/project/{test_project}/packages/../../../etc/passwd"
-)
-assert response.status_code in [400, 404, 422]

-@pytest.mark.integration
-def test_path_traversal_in_tag_name(self, integration_client, test_package):
-"""Test tag names with path traversal are rejected or handled safely."""
-project, package = test_package
-content = b"tag traversal test"

-files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
-response = integration_client.post(
-f"/api/v1/project/{project}/{package}/upload",
-files=files,
-data={"tag": "../../../etc/passwd"},
)
assert response.status_code in [200, 400, 422]

@@ -867,7 +831,7 @@ class TestSecurityMalformedRequests:

response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload",
-data={"tag": "no-file"},
+data={"version": "no-file"},
)
assert response.status_code == 422

@@ -39,31 +39,6 @@ class TestVersionCreation:
assert result.get("version") == "1.0.0"
assert result.get("version_source") == "explicit"

-@pytest.mark.integration
-def test_upload_with_version_and_tag(self, integration_client, test_package):
-"""Test upload with both version and tag creates both records."""
-project, package = test_package
-content = b"version and tag test"

-files = {"file": ("app.tar.gz", io.BytesIO(content), "application/octet-stream")}
-response = integration_client.post(
-f"/api/v1/project/{project}/{package}/upload",
-files=files,
-data={"version": "2.0.0", "tag": "latest"},
-)
-assert response.status_code == 200
-result = response.json()
-assert result.get("version") == "2.0.0"

-# Verify tag was also created
-tags_response = integration_client.get(
-f"/api/v1/project/{project}/{package}/tags"
-)
-assert tags_response.status_code == 200
-tags = tags_response.json()
-tag_names = [t["name"] for t in tags.get("items", tags)]
-assert "latest" in tag_names

@pytest.mark.integration
def test_duplicate_version_same_content_succeeds(self, integration_client, test_package):
"""Test uploading same version with same content succeeds (deduplication)."""
@@ -262,11 +237,10 @@ class TestDownloadByVersion:
assert response.status_code == 404

@pytest.mark.integration
-def test_version_resolution_priority(self, integration_client, test_package):
+def test_version_resolution_with_prefix(self, integration_client, test_package):
-"""Test that version: prefix explicitly resolves to version, not tag."""
+"""Test that version: prefix explicitly resolves to version."""
project, package = test_package
version_content = b"this is the version content"
-tag_content = b"this is the tag content"

# Create a version 6.0.0
files1 = {"file": ("app-v.tar.gz", io.BytesIO(version_content), "application/octet-stream")}
@@ -276,14 +250,6 @@ class TestDownloadByVersion:
data={"version": "6.0.0"},
)

-# Create a tag named "6.0.0" pointing to different content
-files2 = {"file": ("app-t.tar.gz", io.BytesIO(tag_content), "application/octet-stream")}
-integration_client.post(
-f"/api/v1/project/{project}/{package}/upload",
-files=files2,
-data={"tag": "6.0.0"},
-)

# Download with version: prefix should get version content
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/version:6.0.0",
@@ -292,14 +258,6 @@ class TestDownloadByVersion:
assert response.status_code == 200
assert response.content == version_content

-# Download with tag: prefix should get tag content
-response2 = integration_client.get(
-f"/api/v1/project/{project}/{package}/+/tag:6.0.0",
-params={"mode": "proxy"},
-)
-assert response2.status_code == 200
-assert response2.content == tag_content


class TestVersionDeletion:
"""Tests for deleting versions."""
@@ -27,11 +27,9 @@ class TestVersionCreation:
project_name,
package_name,
b"version create test",
-tag="latest",
version="1.0.0",
)

-assert result["tag"] == "latest"
assert result["version"] == "1.0.0"
assert result["version_source"] == "explicit"
assert result["artifact_id"]
@@ -149,7 +147,6 @@ class TestVersionCRUD:
package_name,
b"version with info",
version="1.0.0",
-tag="release",
)

response = integration_client.get(
@@ -166,8 +163,6 @@ class TestVersionCRUD:
assert version_item is not None
assert "size" in version_item
assert "artifact_id" in version_item
-assert "tags" in version_item
-assert "release" in version_item["tags"]

@pytest.mark.integration
def test_get_version(self, integration_client, test_package):
@@ -272,94 +267,9 @@ class TestVersionDownload:
follow_redirects=False,
)

-# Should resolve version first (before tag)
+# Should resolve version
assert response.status_code in [200, 302, 307]

-@pytest.mark.integration
-def test_version_takes_precedence_over_tag(self, integration_client, test_package):
-"""Test that version is checked before tag when resolving refs."""
-project_name, package_name = test_package

-# Upload with version "1.0"
-version_result = upload_test_file(
-integration_client,
-project_name,
-package_name,
-b"version content",
-version="1.0",
-)

-# Create a tag with the same name "1.0" pointing to different artifact
-tag_result = upload_test_file(
-integration_client,
-project_name,
-package_name,
-b"tag content different",
-tag="1.0",
-)

-# Download by "1.0" should resolve to version, not tag
-# Since version:1.0 artifact was uploaded first
-response = integration_client.get(
-f"/api/v1/project/{project_name}/{package_name}/+/1.0",
-follow_redirects=False,
-)

-assert response.status_code in [200, 302, 307]


-class TestTagVersionEnrichment:
-"""Tests for tag responses including version information."""

-@pytest.mark.integration
-def test_tag_response_includes_version(self, integration_client, test_package):
-"""Test that tag responses include version of the artifact."""
-project_name, package_name = test_package

-# Upload with both version and tag
-upload_test_file(
-integration_client,
-project_name,
-package_name,
-b"enriched tag test",
-version="7.0.0",
-tag="stable",
-)

-# Get tag and check version field
-response = integration_client.get(
-f"/api/v1/project/{project_name}/{package_name}/tags/stable"
-)
-assert response.status_code == 200

-data = response.json()
-assert data["name"] == "stable"
-assert data["version"] == "7.0.0"

-@pytest.mark.integration
-def test_tag_list_includes_versions(self, integration_client, test_package):
-"""Test that tag list responses include version for each tag."""
-project_name, package_name = test_package

-upload_test_file(
-integration_client,
-project_name,
-package_name,
-b"list version test",
-version="8.0.0",
-tag="latest",
-)

-response = integration_client.get(
-f"/api/v1/project/{project_name}/{package_name}/tags"
-)
-assert response.status_code == 200

-data = response.json()
-tag_item = next((t for t in data["items"] if t["name"] == "latest"), None)
-assert tag_item is not None
-assert tag_item.get("version") == "8.0.0"


class TestVersionPagination:
"""Tests for version listing pagination and sorting."""
@@ -39,7 +39,7 @@ class TestDependencySchema:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": f"v1.0.0-{unique_test_id}"},
|
data={"version": f"v1.0.0-{unique_test_id}"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -59,29 +59,17 @@ class TestDependencySchema:
|
|||||||
integration_client.delete(f"/api/v1/projects/{dep_project_name}")
|
integration_client.delete(f"/api/v1/projects/{dep_project_name}")
|
||||||
|
|
||||||
@pytest.mark.integration
|
@pytest.mark.integration
|
||||||
def test_dependency_requires_version_or_tag(self, integration_client):
|
def test_dependency_requires_version(self, integration_client):
|
||||||
"""Test that dependency must have either version or tag, not both or neither."""
|
"""Test that dependency requires version."""
|
||||||
from app.schemas import DependencyCreate
|
from app.schemas import DependencyCreate
|
||||||
|
|
||||||
# Test: neither version nor tag
|
# Test: missing version
|
||||||
with pytest.raises(ValidationError) as exc_info:
|
with pytest.raises(ValidationError):
|
||||||
DependencyCreate(project="proj", package="pkg")
|
DependencyCreate(project="proj", package="pkg")
|
||||||
assert "Either 'version' or 'tag' must be specified" in str(exc_info.value)
|
|
||||||
|
|
||||||
# Test: both version and tag
|
|
||||||
with pytest.raises(ValidationError) as exc_info:
|
|
||||||
DependencyCreate(project="proj", package="pkg", version="1.0.0", tag="stable")
|
|
||||||
assert "Cannot specify both 'version' and 'tag'" in str(exc_info.value)
|
|
||||||
|
|
||||||
# Test: valid with version
|
# Test: valid with version
|
||||||
dep = DependencyCreate(project="proj", package="pkg", version="1.0.0")
|
dep = DependencyCreate(project="proj", package="pkg", version="1.0.0")
|
||||||
assert dep.version == "1.0.0"
|
assert dep.version == "1.0.0"
|
||||||
assert dep.tag is None
|
|
||||||
|
|
||||||
# Test: valid with tag
|
|
||||||
dep = DependencyCreate(project="proj", package="pkg", tag="stable")
|
|
||||||
assert dep.tag == "stable"
|
|
||||||
assert dep.version is None
|
|
||||||
|
|
||||||
@pytest.mark.integration
|
@pytest.mark.integration
|
||||||
def test_dependency_unique_constraint(
|
def test_dependency_unique_constraint(
|
||||||
@@ -126,7 +114,7 @@ class TestEnsureFileParsing:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": f"v1.0.0-{unique_test_id}"},
|
data={"version": f"v1.0.0-{unique_test_id}"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
data = response.json()
|
data = response.json()
|
||||||
@@ -162,7 +150,7 @@ class TestEnsureFileParsing:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": f"v1.0.0-{unique_test_id}"},
|
data={"version": f"v1.0.0-{unique_test_id}"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 400
|
assert response.status_code == 400
|
||||||
assert "Invalid ensure file" in response.json().get("detail", "")
|
assert "Invalid ensure file" in response.json().get("detail", "")
|
||||||
@@ -188,7 +176,7 @@ class TestEnsureFileParsing:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": f"v1.0.0-{unique_test_id}"},
|
data={"version": f"v1.0.0-{unique_test_id}"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 400
|
assert response.status_code == 400
|
||||||
assert "Project" in response.json().get("detail", "")
|
assert "Project" in response.json().get("detail", "")
|
||||||
@@ -208,7 +196,7 @@ class TestEnsureFileParsing:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": f"v1.0.0-nodeps-{unique_test_id}"},
|
data={"version": f"v1.0.0-nodeps-{unique_test_id}"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -226,13 +214,14 @@ class TestEnsureFileParsing:
|
|||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
# Test with missing version field (version is now required)
|
||||||
ensure_content = yaml.dump({
|
ensure_content = yaml.dump({
|
||||||
"dependencies": [
|
"dependencies": [
|
||||||
{"project": dep_project_name, "package": "pkg", "version": "1.0.0", "tag": "stable"}
|
{"project": dep_project_name, "package": "pkg"} # Missing version
|
||||||
]
|
]
|
||||||
})
|
})
|
||||||
|
|
||||||
content = unique_content("test-both", unique_test_id, "constraint")
|
content = unique_content("test-missing-version", unique_test_id, "constraint")
|
||||||
files = {
|
files = {
|
||||||
"file": ("test.tar.gz", BytesIO(content), "application/gzip"),
|
"file": ("test.tar.gz", BytesIO(content), "application/gzip"),
|
||||||
"ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
|
"ensure": ("orchard.ensure", BytesIO(ensure_content.encode()), "application/x-yaml"),
|
||||||
@@ -240,11 +229,10 @@ class TestEnsureFileParsing:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": f"v1.0.0-{unique_test_id}"},
|
data={"version": f"v1.0.0-{unique_test_id}"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 400
|
assert response.status_code == 400
|
||||||
assert "both" in response.json().get("detail", "").lower() or \
|
assert "version" in response.json().get("detail", "").lower()
|
||||||
"version" in response.json().get("detail", "").lower()
|
|
||||||
finally:
|
finally:
|
||||||
integration_client.delete(f"/api/v1/projects/{dep_project_name}")
|
integration_client.delete(f"/api/v1/projects/{dep_project_name}")
|
||||||
|
|
||||||
@@ -271,7 +259,7 @@ class TestDependencyQueryEndpoints:
|
|||||||
ensure_content = yaml.dump({
|
ensure_content = yaml.dump({
|
||||||
"dependencies": [
|
"dependencies": [
|
||||||
{"project": dep_project_name, "package": "lib-a", "version": "1.0.0"},
|
{"project": dep_project_name, "package": "lib-a", "version": "1.0.0"},
|
||||||
{"project": dep_project_name, "package": "lib-b", "tag": "stable"},
|
{"project": dep_project_name, "package": "lib-b", "version": "2.0.0"},
|
||||||
]
|
]
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -283,7 +271,7 @@ class TestDependencyQueryEndpoints:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": f"v2.0.0-{unique_test_id}"},
|
data={"version": f"v2.0.0-{unique_test_id}"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
artifact_id = response.json()["artifact_id"]
|
artifact_id = response.json()["artifact_id"]
|
||||||
@@ -299,10 +287,8 @@ class TestDependencyQueryEndpoints:
|
|||||||
deps = {d["package"]: d for d in data["dependencies"]}
|
deps = {d["package"]: d for d in data["dependencies"]}
|
||||||
assert "lib-a" in deps
|
assert "lib-a" in deps
|
||||||
assert deps["lib-a"]["version"] == "1.0.0"
|
assert deps["lib-a"]["version"] == "1.0.0"
|
||||||
assert deps["lib-a"]["tag"] is None
|
|
||||||
assert "lib-b" in deps
|
assert "lib-b" in deps
|
||||||
assert deps["lib-b"]["tag"] == "stable"
|
assert deps["lib-b"]["version"] == "2.0.0"
|
||||||
assert deps["lib-b"]["version"] is None
|
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
integration_client.delete(f"/api/v1/projects/{dep_project_name}")
|
integration_client.delete(f"/api/v1/projects/{dep_project_name}")
|
||||||
@@ -336,7 +322,7 @@ class TestDependencyQueryEndpoints:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": tag_name},
|
data={"version": tag_name},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -381,7 +367,7 @@ class TestDependencyQueryEndpoints:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{dep_project_name}/target-lib/upload",
|
f"/api/v1/project/{dep_project_name}/target-lib/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": "1.0.0"},
|
data={"version": "1.0.0"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -400,7 +386,7 @@ class TestDependencyQueryEndpoints:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": f"v4.0.0-{unique_test_id}"},
|
data={"version": f"v4.0.0-{unique_test_id}"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -442,7 +428,7 @@ class TestDependencyQueryEndpoints:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": f"v5.0.0-nodeps-{unique_test_id}"},
|
data={"version": f"v5.0.0-nodeps-{unique_test_id}"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
artifact_id = response.json()["artifact_id"]
|
artifact_id = response.json()["artifact_id"]
|
||||||
@@ -482,7 +468,7 @@ class TestDependencyResolution:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{test_project}/{pkg_c}/upload",
|
f"/api/v1/project/{test_project}/{pkg_c}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": "1.0.0"},
|
data={"version": "1.0.0"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -500,7 +486,7 @@ class TestDependencyResolution:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{test_project}/{pkg_b}/upload",
|
f"/api/v1/project/{test_project}/{pkg_b}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": "1.0.0"},
|
data={"version": "1.0.0"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -518,7 +504,7 @@ class TestDependencyResolution:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": "1.0.0"},
|
data={"version": "1.0.0"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -566,7 +552,7 @@ class TestDependencyResolution:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{test_project}/{pkg_d}/upload",
|
f"/api/v1/project/{test_project}/{pkg_d}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": "1.0.0"},
|
data={"version": "1.0.0"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -584,7 +570,7 @@ class TestDependencyResolution:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{test_project}/{pkg_b}/upload",
|
f"/api/v1/project/{test_project}/{pkg_b}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": "1.0.0"},
|
data={"version": "1.0.0"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -602,7 +588,7 @@ class TestDependencyResolution:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{test_project}/{pkg_c}/upload",
|
f"/api/v1/project/{test_project}/{pkg_c}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": "1.0.0"},
|
data={"version": "1.0.0"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -621,7 +607,7 @@ class TestDependencyResolution:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": "1.0.0"},
|
data={"version": "1.0.0"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -663,7 +649,7 @@ class TestDependencyResolution:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": f"solo-{unique_test_id}"},
|
data={"version": f"solo-{unique_test_id}"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -698,7 +684,7 @@ class TestDependencyResolution:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{project_name}/{package_name}/upload",
|
f"/api/v1/project/{project_name}/{package_name}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": f"missing-dep-{unique_test_id}"},
|
data={"version": f"missing-dep-{unique_test_id}"},
|
||||||
)
|
)
|
||||||
# Should fail at upload time since package doesn't exist
|
# Should fail at upload time since package doesn't exist
|
||||||
# OR succeed at upload but fail at resolution
|
# OR succeed at upload but fail at resolution
|
||||||
@@ -736,7 +722,7 @@ class TestCircularDependencyDetection:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": "1.0.0"},
|
data={"version": "1.0.0"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -754,7 +740,7 @@ class TestCircularDependencyDetection:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{test_project}/{pkg_b}/upload",
|
f"/api/v1/project/{test_project}/{pkg_b}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": "1.0.0"},
|
data={"version": "1.0.0"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -772,7 +758,7 @@ class TestCircularDependencyDetection:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": "2.0.0"},
|
data={"version": "2.0.0"},
|
||||||
)
|
)
|
||||||
# Should be rejected with 400 (circular dependency)
|
# Should be rejected with 400 (circular dependency)
|
||||||
assert response.status_code == 400
|
assert response.status_code == 400
|
||||||
@@ -807,7 +793,7 @@ class TestCircularDependencyDetection:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": "1.0.0"},
|
data={"version": "1.0.0"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -825,7 +811,7 @@ class TestCircularDependencyDetection:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{test_project}/{pkg_b}/upload",
|
f"/api/v1/project/{test_project}/{pkg_b}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": "1.0.0"},
|
data={"version": "1.0.0"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -843,7 +829,7 @@ class TestCircularDependencyDetection:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{test_project}/{pkg_c}/upload",
|
f"/api/v1/project/{test_project}/{pkg_c}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": "1.0.0"},
|
data={"version": "1.0.0"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
@@ -861,7 +847,7 @@ class TestCircularDependencyDetection:
|
|||||||
response = integration_client.post(
|
response = integration_client.post(
|
||||||
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
f"/api/v1/project/{test_project}/{pkg_a}/upload",
|
||||||
files=files,
|
files=files,
|
||||||
data={"tag": "2.0.0"},
|
data={"version": "2.0.0"},
|
||||||
)
|
)
|
||||||
assert response.status_code == 400
|
assert response.status_code == 400
|
||||||
data = response.json()
|
data = response.json()
|
||||||
@@ -910,7 +896,7 @@ class TestConflictDetection:
        response = integration_client.post(
            f"/api/v1/project/{test_project}/{pkg_common}/upload",
            files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
        )
        assert response.status_code == 200

@@ -920,7 +906,7 @@ class TestConflictDetection:
        response = integration_client.post(
            f"/api/v1/project/{test_project}/{pkg_common}/upload",
            files=files,
-            data={"tag": "2.0.0"},
+            data={"version": "2.0.0"},
        )
        assert response.status_code == 200

@@ -938,7 +924,7 @@ class TestConflictDetection:
        response = integration_client.post(
            f"/api/v1/project/{test_project}/{pkg_lib_a}/upload",
            files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
        )
        assert response.status_code == 200

@@ -956,7 +942,7 @@ class TestConflictDetection:
        response = integration_client.post(
            f"/api/v1/project/{test_project}/{pkg_lib_b}/upload",
            files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
        )
        assert response.status_code == 200

@@ -975,7 +961,7 @@ class TestConflictDetection:
        response = integration_client.post(
            f"/api/v1/project/{test_project}/{pkg_app}/upload",
            files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
        )
        assert response.status_code == 200

@@ -1023,7 +1009,7 @@ class TestConflictDetection:
        response = integration_client.post(
            f"/api/v1/project/{test_project}/{pkg_common}/upload",
            files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
        )
        assert response.status_code == 200

@@ -1042,7 +1028,7 @@ class TestConflictDetection:
        response = integration_client.post(
            f"/api/v1/project/{test_project}/{lib_pkg}/upload",
            files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
        )
        assert response.status_code == 200

@@ -1061,7 +1047,7 @@ class TestConflictDetection:
        response = integration_client.post(
            f"/api/v1/project/{test_project}/{pkg_app}/upload",
            files=files,
-            data={"tag": "1.0.0"},
+            data={"version": "1.0.0"},
        )
        assert response.status_code == 200


@@ -26,7 +26,7 @@ def upload_test_file(integration_client):
    Factory fixture to upload a test file and return its artifact ID.

    Usage:
-        artifact_id = upload_test_file(project, package, content, tag="v1.0")
+        artifact_id = upload_test_file(project, package, content, version="v1.0")
    """

    def _upload(project_name: str, package_name: str, content: bytes, tag: str = None):
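Note that the hunk above only touches the docstring; the `_upload` context line still shows a `tag: str = None` parameter. A minimal sketch of how the factory fixture could forward `version` instead, assuming the same multipart endpoint — an illustration, not the repository's actual implementation:

import io
from typing import Optional

import pytest


@pytest.fixture
def upload_test_file(integration_client):
    def _upload(
        project_name: str,
        package_name: str,
        content: bytes,
        version: Optional[str] = None,
    ) -> str:
        # Post the content with an optional version form field.
        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        data = {"version": version} if version else {}
        response = integration_client.post(
            f"/api/v1/project/{project_name}/{package_name}/upload",
            files=files,
            data=data,
        )
        assert response.status_code == 200
        # The "artifact_id" response key is assumed; the real payload shape
        # is not shown in this diff.
        return response.json()["artifact_id"]

    return _upload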
@@ -66,7 +66,7 @@ class TestDownloadChecksumHeaders:

        # Upload file
        artifact_id = upload_test_file(
-            project_name, package_name, content, tag="sha256-header-test"
+            project_name, package_name, content, version="sha256-header-test"
        )

        # Download with proxy mode
@@ -88,7 +88,7 @@ class TestDownloadChecksumHeaders:
        content = b"Content for ETag header test"

        artifact_id = upload_test_file(
-            project_name, package_name, content, tag="etag-test"
+            project_name, package_name, content, version="etag-test"
        )

        response = integration_client.get(
@@ -110,7 +110,7 @@ class TestDownloadChecksumHeaders:
        content = b"Content for Digest header test"
        sha256 = hashlib.sha256(content).hexdigest()

-        upload_test_file(project_name, package_name, content, tag="digest-test")
+        upload_test_file(project_name, package_name, content, version="digest-test")

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/+/digest-test",
@@ -137,7 +137,7 @@ class TestDownloadChecksumHeaders:
        project_name, package_name = test_package
        content = b"Content for X-Content-Length test"

-        upload_test_file(project_name, package_name, content, tag="content-length-test")
+        upload_test_file(project_name, package_name, content, version="content-length-test")

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/+/content-length-test",
@@ -156,7 +156,7 @@ class TestDownloadChecksumHeaders:
        project_name, package_name = test_package
        content = b"Content for X-Verified false test"

-        upload_test_file(project_name, package_name, content, tag="verified-false-test")
+        upload_test_file(project_name, package_name, content, version="verified-false-test")

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/+/verified-false-test",
@@ -184,7 +184,7 @@ class TestPreVerificationMode:
        project_name, package_name = test_package
        content = b"Content for pre-verification success test"

-        upload_test_file(project_name, package_name, content, tag="pre-verify-success")
+        upload_test_file(project_name, package_name, content, version="pre-verify-success")

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/+/pre-verify-success",
@@ -205,7 +205,7 @@ class TestPreVerificationMode:
        # Use binary content to verify no corruption
        content = bytes(range(256)) * 10  # 2560 bytes of all byte values

-        upload_test_file(project_name, package_name, content, tag="pre-verify-content")
+        upload_test_file(project_name, package_name, content, version="pre-verify-content")

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/+/pre-verify-content",
@@ -233,7 +233,7 @@ class TestStreamingVerificationMode:
        content = b"Content for streaming verification success test"

        upload_test_file(
-            project_name, package_name, content, tag="stream-verify-success"
+            project_name, package_name, content, version="stream-verify-success"
        )

        response = integration_client.get(
@@ -255,7 +255,7 @@ class TestStreamingVerificationMode:
        # 100KB of content
        content = b"x" * (100 * 1024)

-        upload_test_file(project_name, package_name, content, tag="stream-verify-large")
+        upload_test_file(project_name, package_name, content, version="stream-verify-large")

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/+/stream-verify-large",
@@ -283,7 +283,7 @@ class TestHeadRequestHeaders:
        content = b"Content for HEAD SHA256 test"

        artifact_id = upload_test_file(
-            project_name, package_name, content, tag="head-sha256-test"
+            project_name, package_name, content, version="head-sha256-test"
        )

        response = integration_client.head(
@@ -303,7 +303,7 @@ class TestHeadRequestHeaders:
        content = b"Content for HEAD ETag test"

        artifact_id = upload_test_file(
-            project_name, package_name, content, tag="head-etag-test"
+            project_name, package_name, content, version="head-etag-test"
        )

        response = integration_client.head(
@@ -322,7 +322,7 @@ class TestHeadRequestHeaders:
        project_name, package_name = test_package
        content = b"Content for HEAD Digest test"

-        upload_test_file(project_name, package_name, content, tag="head-digest-test")
+        upload_test_file(project_name, package_name, content, version="head-digest-test")

        response = integration_client.head(
            f"/api/v1/project/{project_name}/{package_name}/+/head-digest-test"
@@ -340,7 +340,7 @@ class TestHeadRequestHeaders:
        project_name, package_name = test_package
        content = b"Content for HEAD Content-Length test"

-        upload_test_file(project_name, package_name, content, tag="head-length-test")
+        upload_test_file(project_name, package_name, content, version="head-length-test")

        response = integration_client.head(
            f"/api/v1/project/{project_name}/{package_name}/+/head-length-test"
@@ -356,7 +356,7 @@ class TestHeadRequestHeaders:
        project_name, package_name = test_package
        content = b"Content for HEAD no-body test"

-        upload_test_file(project_name, package_name, content, tag="head-no-body-test")
+        upload_test_file(project_name, package_name, content, version="head-no-body-test")

        response = integration_client.head(
            f"/api/v1/project/{project_name}/{package_name}/+/head-no-body-test"
@@ -382,7 +382,7 @@ class TestRangeRequestHeaders:
        project_name, package_name = test_package
        content = b"Content for range request checksum header test"

-        upload_test_file(project_name, package_name, content, tag="range-checksum-test")
+        upload_test_file(project_name, package_name, content, version="range-checksum-test")

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/+/range-checksum-test",
@@ -412,7 +412,7 @@ class TestClientSideVerification:
        project_name, package_name = test_package
        content = b"Content for client-side verification test"

-        upload_test_file(project_name, package_name, content, tag="client-verify-test")
+        upload_test_file(project_name, package_name, content, version="client-verify-test")

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/+/client-verify-test",
@@ -438,7 +438,7 @@ class TestClientSideVerification:
        project_name, package_name = test_package
        content = b"Content for Digest header verification"

-        upload_test_file(project_name, package_name, content, tag="digest-verify-test")
+        upload_test_file(project_name, package_name, content, version="digest-verify-test")

        response = integration_client.get(
            f"/api/v1/project/{project_name}/{package_name}/+/digest-verify-test",
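The client-side verification tests boil down to: upload, download through the `+/<version>` path, and compare a locally computed SHA-256 against the checksum the server advertises. A hedged sketch follows; the `X-Checksum-SHA256` header name is an assumption, since the diff shows only the test names, not the exact headers:

import hashlib


def test_client_can_verify_downloaded_content(
    integration_client, test_package, upload_test_file
):
    project_name, package_name = test_package
    content = b"Content for client-side verification test"
    upload_test_file(project_name, package_name, content, version="client-verify-test")

    response = integration_client.get(
        f"/api/v1/project/{project_name}/{package_name}/+/client-verify-test"
    )
    assert response.status_code == 200

    # Recompute the digest locally and compare it with the advertised checksum.
    local_sha256 = hashlib.sha256(response.content).hexdigest()
    advertised = response.headers.get("X-Checksum-SHA256")  # header name assumed
    if advertised is not None:
        assert advertised == local_sha256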

@@ -192,7 +192,6 @@ class TestCacheSettingsModel:

        settings = CacheSettings()
        assert hasattr(settings, 'id')
-        assert hasattr(settings, 'allow_public_internet')
        assert hasattr(settings, 'auto_create_system_projects')

    def test_model_with_values(self):
@@ -201,11 +200,9 @@ class TestCacheSettingsModel:

        settings = CacheSettings(
            id=1,
-            allow_public_internet=False,
            auto_create_system_projects=True,
        )
        assert settings.id == 1
-        assert settings.allow_public_internet is False
        assert settings.auto_create_system_projects is True


@@ -365,16 +362,14 @@ class TestCacheSettingsSchemas:
        from app.schemas import CacheSettingsUpdate

        update = CacheSettingsUpdate()
-        assert update.allow_public_internet is None
        assert update.auto_create_system_projects is None

    def test_update_schema_partial(self):
        """Test CacheSettingsUpdate with partial fields."""
        from app.schemas import CacheSettingsUpdate

-        update = CacheSettingsUpdate(allow_public_internet=False)
-        assert update.allow_public_internet is False
-        assert update.auto_create_system_projects is None
+        update = CacheSettingsUpdate(auto_create_system_projects=True)
+        assert update.auto_create_system_projects is True


class TestCacheRequestSchemas:
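The assertions above imply a `CacheSettingsUpdate` schema with `allow_public_internet` gone and the remaining fields optional. A minimal sketch consistent with those assertions, assuming Pydantic (suggested by the `app.schemas` import but not confirmed by the diff):

from typing import Optional

from pydantic import BaseModel


class CacheSettingsUpdate(BaseModel):
    # Only the field exercised by the updated tests is modeled here;
    # the real schema may carry additional settings.
    auto_create_system_projects: Optional[bool] = None


# Matches the updated expectations:
update = CacheSettingsUpdate()
assert update.auto_create_system_projects is None

update = CacheSettingsUpdate(auto_create_system_projects=True)
assert update.auto_create_system_projects is True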
@@ -388,7 +383,7 @@ class TestCacheRequestSchemas:
            url="https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
            source_type="npm",
            package_name="lodash",
-            tag="4.17.21",
+            version="4.17.21",
        )
        assert request.url == "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz"
        assert request.source_type == "npm"
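The cache-request schema now takes `version` where it previously took `tag`. A sketch of a request model that would satisfy the assertions above — the class name `CacheRequest` and the Pydantic base are assumptions, and the real schema likely has more fields:

from typing import Optional

from pydantic import BaseModel


class CacheRequest(BaseModel):
    url: str
    source_type: str
    package_name: str
    version: Optional[str] = None


request = CacheRequest(
    url="https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
    source_type="npm",
    package_name="lodash",
    version="4.17.21",
)
assert request.source_type == "npm"
assert request.version == "4.17.21"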

@@ -145,54 +145,6 @@ class TestPackageModel:
        assert platform_col.default.arg == "any"


-class TestTagModel:
-    """Tests for the Tag model."""
-
-    @pytest.mark.unit
-    def test_tag_requires_package_id(self):
-        """Test tag requires package_id."""
-        from app.models import Tag
-
-        tag = Tag(
-            name="v1.0.0",
-            package_id=uuid.uuid4(),
-            artifact_id="f" * 64,
-            created_by="test-user",
-        )
-
-        assert tag.package_id is not None
-        assert tag.artifact_id == "f" * 64
-
-
-class TestTagHistoryModel:
-    """Tests for the TagHistory model."""
-
-    @pytest.mark.unit
-    def test_tag_history_default_change_type(self):
-        """Test tag history change_type column has default value of 'update'."""
-        from app.models import TagHistory
-
-        # Check the column definition has the right default
-        change_type_col = TagHistory.__table__.columns["change_type"]
-        assert change_type_col.default is not None
-        assert change_type_col.default.arg == "update"
-
-    @pytest.mark.unit
-    def test_tag_history_allows_null_old_artifact(self):
-        """Test tag history allows null old_artifact_id (for create events)."""
-        from app.models import TagHistory
-
-        history = TagHistory(
-            tag_id=uuid.uuid4(),
-            old_artifact_id=None,
-            new_artifact_id="h" * 64,
-            change_type="create",
-            changed_by="test-user",
-        )
-
-        assert history.old_artifact_id is None
-
-
class TestUploadModel:
    """Tests for the Upload model."""
