Add comprehensive upload/download API tests and error handling tests

- Add upload API tests: upload without tag, artifact creation, S3 storage
- Add download tests: tag: prefix, Content-Type/Length/Disposition headers
- Add download tests: 404 for nonexistent project/package/artifact
- Add checksum header tests: ETag, X-Checksum-SHA256
- Add error handling tests: timeout behavior, checksum validation
- Add resource cleanup tests: verify no orphans on failed uploads
- Add graceful error response tests: JSON format, detail messages
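
The new tests import `compute_sha256`, `upload_test_file`, and `generate_content_with_hash` from `tests.factories`, which is not shown in this excerpt. As a rough sketch only, with signatures, the seeded-content behavior, and the returned JSON shape inferred from how the tests call them (the real helpers may differ):

# Hypothetical sketch of the tests.factories helpers used below; not the committed implementation.
import hashlib
import io
import random


def compute_sha256(content: bytes) -> str:
    """Return the hex SHA-256 digest of the given bytes."""
    return hashlib.sha256(content).hexdigest()


def generate_content_with_hash(size: int, seed: int = 0) -> tuple[bytes, str]:
    """Generate deterministic pseudo-random content of `size` bytes plus its hash."""
    content = random.Random(seed).randbytes(size)  # assumption: seeded for reproducibility
    return content, compute_sha256(content)


def upload_test_file(client, project, package, content, filename="test.bin", tag=None):
    """Upload `content` through the API and return the parsed JSON response."""
    files = {"file": (filename, io.BytesIO(content), "application/octet-stream")}
    data = {"tag": tag} if tag is not None else {}
    response = client.post(
        f"/api/v1/project/{project}/{package}/upload", files=files, data=data
    )
    assert response.status_code == 200, response.text
    return response.json()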
@@ -14,6 +14,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Added `@pytest.mark.large` and `@pytest.mark.concurrent` test markers
- Added `generate_content()` and `generate_content_with_hash()` test helpers
- Added `sized_content` fixture for generating test content of specific sizes
- Added upload API tests: upload without tag, artifact creation verification, S3 object creation
- Added download API tests: tag: prefix resolution, 404 for nonexistent project/package/artifact
- Added download header tests: Content-Type, Content-Length, Content-Disposition, ETag, X-Checksum-SHA256
- Added error handling tests: timeout behavior, checksum validation, resource cleanup, graceful error responses
- Added production deployment job triggered by semantic version tags (v1.0.0) with manual approval gate (#63)
- Added production Helm values file with persistence enabled (20Gi PostgreSQL, 100Gi MinIO) (#63)
- Added integration tests for production deployment (#63)
backend/tests/integration/test_error_handling.py (new file, 322 lines added)
@@ -0,0 +1,322 @@
"""
Integration tests for error handling in upload and download operations.

Tests cover:
- Timeout handling
- Invalid request handling
- Resource cleanup on failures
- Graceful error responses
"""

import pytest
import io
import time
from tests.factories import (
    compute_sha256,
    upload_test_file,
    generate_content_with_hash,
)


class TestUploadErrorHandling:
    """Tests for upload error handling."""

    @pytest.mark.integration
    def test_upload_to_nonexistent_project_returns_404(
        self, integration_client, unique_test_id
    ):
        """Test upload to nonexistent project returns 404."""
        content = b"test content for nonexistent project"

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/nonexistent-project-{unique_test_id}/nonexistent-pkg/upload",
            files=files,
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_upload_to_nonexistent_package_returns_404(
        self, integration_client, test_project, unique_test_id
    ):
        """Test upload to nonexistent package returns 404."""
        content = b"test content for nonexistent package"

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload",
            files=files,
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_upload_empty_file_rejected(self, integration_client, test_package):
        """Test empty file upload is rejected."""
        project, package = test_package

        files = {"file": ("empty.bin", io.BytesIO(b""), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
        )
        assert response.status_code in [400, 422]

    @pytest.mark.integration
    def test_upload_missing_file_returns_422(self, integration_client, test_package):
        """Test upload without file field returns 422."""
        project, package = test_package

        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            data={"tag": "no-file-provided"},
        )
        assert response.status_code == 422

    @pytest.mark.integration
    def test_upload_invalid_checksum_format_returns_400(
        self, integration_client, test_package
    ):
        """Test upload with invalid checksum format returns 400."""
        project, package = test_package
        content = b"checksum format test"

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": "invalid-hash-format"},
        )
        assert response.status_code == 400

    @pytest.mark.integration
    def test_upload_checksum_mismatch_returns_422(
        self, integration_client, test_package
    ):
        """Test upload with mismatched checksum returns 422."""
        project, package = test_package
        content = b"checksum mismatch test"
        wrong_hash = "0" * 64  # Valid format but wrong hash

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": wrong_hash},
        )
        assert response.status_code == 422

    @pytest.mark.integration
    def test_upload_with_correct_checksum_succeeds(
        self, integration_client, test_package
    ):
        """Test upload with correct checksum succeeds."""
        project, package = test_package
        content = b"correct checksum test"
        correct_hash = compute_sha256(content)

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": correct_hash},
        )
        assert response.status_code == 200
        assert response.json()["artifact_id"] == correct_hash


class TestDownloadErrorHandling:
    """Tests for download error handling."""

    @pytest.mark.integration
    def test_download_nonexistent_tag_returns_404(
        self, integration_client, test_package
    ):
        """Test download of nonexistent tag returns 404."""
        project, package = test_package

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/nonexistent-tag-xyz"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_nonexistent_artifact_returns_404(
        self, integration_client, test_package
    ):
        """Test download of nonexistent artifact ID returns 404."""
        project, package = test_package
        fake_hash = "a" * 64

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/artifact:{fake_hash}"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_invalid_artifact_id_format(
        self, integration_client, test_package
    ):
        """Test download with invalid artifact ID format."""
        project, package = test_package

        # Too short
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/artifact:abc123"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_from_nonexistent_project_returns_404(
        self, integration_client, unique_test_id
    ):
        """Test download from nonexistent project returns 404."""
        response = integration_client.get(
            f"/api/v1/project/nonexistent-{unique_test_id}/pkg/+/tag"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_from_nonexistent_package_returns_404(
        self, integration_client, test_project, unique_test_id
    ):
        """Test download from nonexistent package returns 404."""
        response = integration_client.get(
            f"/api/v1/project/{test_project}/nonexistent-{unique_test_id}/+/tag"
        )
        assert response.status_code == 404


class TestTimeoutBehavior:
    """Tests for timeout behavior (integration level)."""

    @pytest.mark.integration
    @pytest.mark.slow
    def test_large_upload_completes_within_reasonable_time(
        self, integration_client, test_package, sized_content
    ):
        """Test that a 10MB upload completes within reasonable time."""
        project, package = test_package
        content, expected_hash = sized_content(10 * 1024 * 1024, seed=999)  # 10MB

        start_time = time.time()
        result = upload_test_file(
            integration_client, project, package, content, tag="timeout-test"
        )
        elapsed = time.time() - start_time

        assert result["artifact_id"] == expected_hash
        # Should complete within 60 seconds for 10MB on local docker
        assert elapsed < 60, f"Upload took too long: {elapsed:.2f}s"

    @pytest.mark.integration
    @pytest.mark.slow
    def test_large_download_completes_within_reasonable_time(
        self, integration_client, test_package, sized_content
    ):
        """Test that a 10MB download completes within reasonable time."""
        project, package = test_package
        content, expected_hash = sized_content(10 * 1024 * 1024, seed=998)  # 10MB

        # First upload
        upload_test_file(
            integration_client, project, package, content, tag="download-timeout-test"
        )

        # Then download and time it
        start_time = time.time()
        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/download-timeout-test",
            params={"mode": "proxy"},
        )
        elapsed = time.time() - start_time

        assert response.status_code == 200
        assert len(response.content) == len(content)
        # Should complete within 60 seconds for 10MB on local docker
        assert elapsed < 60, f"Download took too long: {elapsed:.2f}s"


class TestResourceCleanup:
    """Tests for proper resource cleanup on failures.

    Note: More comprehensive cleanup tests are in test_upload_download_api.py
    (TestUploadFailureCleanup class) including S3 object cleanup verification.
    """

    @pytest.mark.integration
    def test_checksum_mismatch_no_orphaned_artifact(
        self, integration_client, test_package, unique_test_id
    ):
        """Test checksum mismatch doesn't leave orphaned artifact."""
        project, package = test_package
        # Use unique content to ensure artifact doesn't exist from prior tests
        content = f"checksum mismatch orphan test {unique_test_id}".encode()
        wrong_hash = "0" * 64
        actual_hash = compute_sha256(content)

        # Verify artifact doesn't exist before test
        pre_check = integration_client.get(f"/api/v1/artifact/{actual_hash}")
        assert pre_check.status_code == 404, "Artifact should not exist before test"

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": wrong_hash},
        )
        assert response.status_code == 422

        # Verify no artifact was created with either hash
        response1 = integration_client.get(f"/api/v1/artifact/{wrong_hash}")
        response2 = integration_client.get(f"/api/v1/artifact/{actual_hash}")
        assert response1.status_code == 404
        assert response2.status_code == 404


class TestGracefulErrorResponses:
    """Tests for graceful and informative error responses."""

    @pytest.mark.integration
    def test_404_response_has_detail_message(
        self, integration_client, test_package
    ):
        """Test 404 responses include a detail message."""
        project, package = test_package

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/nonexistent-tag"
        )
        assert response.status_code == 404
        data = response.json()
        assert "detail" in data
        assert len(data["detail"]) > 0

    @pytest.mark.integration
    def test_422_response_has_detail_message(self, integration_client, test_package):
        """Test 422 responses include a detail message."""
        project, package = test_package

        # Upload with mismatched checksum
        content = b"detail message test"
        wrong_hash = "0" * 64

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            headers={"X-Checksum-SHA256": wrong_hash},
        )
        assert response.status_code == 422
        data = response.json()
        assert "detail" in data

    @pytest.mark.integration
    def test_error_response_is_json(self, integration_client, unique_test_id):
        """Test error responses are valid JSON."""
        response = integration_client.get(
            f"/api/v1/project/nonexistent-{unique_test_id}/pkg/+/tag"
        )
        assert response.status_code == 404
        # Should not raise exception - valid JSON
        data = response.json()
        assert isinstance(data, dict)
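For context, the timeout tests above use the `sized_content` fixture added in this commit (see the changelog entry); the fixture itself is not part of this excerpt. A plausible sketch, assuming it simply wraps `generate_content_with_hash` from `tests.factories`:

# Hypothetical conftest.py fixture; the committed implementation may differ.
import pytest

from tests.factories import generate_content_with_hash


@pytest.fixture
def sized_content():
    """Return a callable that builds deterministic content of a given size.

    Usage: content, expected_hash = sized_content(10 * 1024 * 1024, seed=999)
    """
    def _make(size: int, seed: int = 0):
        return generate_content_with_hash(size, seed=seed)

    return _make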
@@ -25,6 +25,19 @@ from tests.factories import (
class TestUploadBasics:
    """Tests for basic upload functionality."""

    @pytest.mark.integration
    def test_upload_returns_200(self, integration_client, test_package):
        """Test upload with valid file returns 200."""
        project, package = test_package
        content = b"valid file upload test"

        files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
        )
        assert response.status_code == 200

    @pytest.mark.integration
    def test_upload_returns_artifact_id(self, integration_client, test_package):
        """Test upload returns the artifact ID (SHA256 hash)."""
@@ -101,6 +114,82 @@ class TestUploadBasics:
        assert "created_at" in result
        assert result["created_at"] is not None

    @pytest.mark.integration
    def test_upload_without_tag_succeeds(self, integration_client, test_package):
        """Test upload without tag succeeds (no tag created)."""
        project, package = test_package
        content = b"upload without tag test"
        expected_hash = compute_sha256(content)

        files = {"file": ("no_tag.bin", io.BytesIO(content), "application/octet-stream")}
        response = integration_client.post(
            f"/api/v1/project/{project}/{package}/upload",
            files=files,
            # No tag parameter
        )
        assert response.status_code == 200
        result = response.json()
        assert result["artifact_id"] == expected_hash

        # Verify no tag was created - list tags and check
        tags_response = integration_client.get(
            f"/api/v1/project/{project}/{package}/tags"
        )
        assert tags_response.status_code == 200
        tags = tags_response.json()
        # Filter for tags pointing to this artifact
        artifact_tags = [t for t in tags.get("items", tags) if t.get("artifact_id") == expected_hash]
        assert len(artifact_tags) == 0, "Tag should not be created when not specified"

    @pytest.mark.integration
    def test_upload_creates_artifact_in_database(self, integration_client, test_package):
        """Test upload creates artifact record in database."""
        project, package = test_package
        content = b"database artifact test"
        expected_hash = compute_sha256(content)

        upload_test_file(integration_client, project, package, content)

        # Verify artifact exists via API
        response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
        assert response.status_code == 200
        artifact = response.json()
        assert artifact["id"] == expected_hash
        assert artifact["size"] == len(content)

    @pytest.mark.integration
    def test_upload_creates_object_in_s3(self, integration_client, test_package):
        """Test upload creates object in S3 storage."""
        project, package = test_package
        content = b"s3 object creation test"
        expected_hash = compute_sha256(content)

        upload_test_file(integration_client, project, package, content)

        # Verify S3 object exists
        assert s3_object_exists(expected_hash), "S3 object should exist after upload"

    @pytest.mark.integration
    def test_upload_with_tag_creates_tag_record(self, integration_client, test_package):
        """Test upload with tag creates tag record."""
        project, package = test_package
        content = b"tag creation test"
        expected_hash = compute_sha256(content)
        tag_name = "my-tag-v1"

        upload_test_file(
            integration_client, project, package, content, tag=tag_name
        )

        # Verify tag exists
        tags_response = integration_client.get(
            f"/api/v1/project/{project}/{package}/tags"
        )
        assert tags_response.status_code == 200
        tags = tags_response.json()
        tag_names = [t["name"] for t in tags.get("items", tags)]
        assert tag_name in tag_names


class TestDuplicateUploads:
    """Tests for duplicate upload deduplication behavior."""
@@ -248,6 +337,23 @@ class TestDownload:
        assert response.status_code == 200
        assert response.content == original_content

    @pytest.mark.integration
    def test_download_by_tag_prefix(self, integration_client, test_package):
        """Test downloading artifact using tag: prefix."""
        project, package = test_package
        original_content = b"download by tag prefix test"

        upload_test_file(
            integration_client, project, package, original_content, tag="prefix-tag"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/tag:prefix-tag",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert response.content == original_content

    @pytest.mark.integration
    def test_download_nonexistent_tag(self, integration_client, test_package):
        """Test downloading nonexistent tag returns 404."""
@@ -258,6 +364,33 @@ class TestDownload:
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_nonexistent_artifact(self, integration_client, test_package):
        """Test downloading nonexistent artifact ID returns 404."""
        project, package = test_package
        fake_hash = "0" * 64

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/artifact:{fake_hash}"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_from_nonexistent_project(self, integration_client, unique_test_id):
        """Test downloading from nonexistent project returns 404."""
        response = integration_client.get(
            f"/api/v1/project/nonexistent-project-{unique_test_id}/somepackage/+/sometag"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_download_from_nonexistent_package(self, integration_client, test_project, unique_test_id):
        """Test downloading from nonexistent package returns 404."""
        response = integration_client.get(
            f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/+/sometag"
        )
        assert response.status_code == 404

    @pytest.mark.integration
    def test_content_matches_original(self, integration_client, test_package):
        """Test downloaded content matches original exactly."""
@@ -275,6 +408,111 @@ class TestDownload:
        assert response.content == original_content


class TestDownloadHeaders:
    """Tests for download response headers."""

    @pytest.mark.integration
    def test_download_content_type_header(self, integration_client, test_package):
        """Test download returns correct Content-Type header."""
        project, package = test_package
        content = b"content type header test"

        upload_test_file(
            integration_client, project, package, content,
            filename="test.txt", tag="content-type-test"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/content-type-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        # Content-Type should be set (either text/plain or application/octet-stream)
        assert "content-type" in response.headers

    @pytest.mark.integration
    def test_download_content_length_header(self, integration_client, test_package):
        """Test download returns correct Content-Length header."""
        project, package = test_package
        content = b"content length header test - exactly 46 bytes!"
        expected_length = len(content)

        upload_test_file(
            integration_client, project, package, content, tag="content-length-test"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/content-length-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert "content-length" in response.headers
        assert int(response.headers["content-length"]) == expected_length

    @pytest.mark.integration
    def test_download_content_disposition_header(self, integration_client, test_package):
        """Test download returns correct Content-Disposition header."""
        project, package = test_package
        content = b"content disposition test"
        filename = "my-test-file.bin"

        upload_test_file(
            integration_client, project, package, content,
            filename=filename, tag="disposition-test"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/disposition-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert "content-disposition" in response.headers
        disposition = response.headers["content-disposition"]
        assert "attachment" in disposition
        assert filename in disposition

    @pytest.mark.integration
    def test_download_checksum_headers(self, integration_client, test_package):
        """Test download returns checksum headers."""
        project, package = test_package
        content = b"checksum header test content"
        expected_hash = compute_sha256(content)

        upload_test_file(
            integration_client, project, package, content, tag="checksum-headers"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/checksum-headers",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        # Check for checksum headers
        assert "x-checksum-sha256" in response.headers
        assert response.headers["x-checksum-sha256"] == expected_hash

    @pytest.mark.integration
    def test_download_etag_header(self, integration_client, test_package):
        """Test download returns ETag header (artifact ID)."""
        project, package = test_package
        content = b"etag header test"
        expected_hash = compute_sha256(content)

        upload_test_file(
            integration_client, project, package, content, tag="etag-test"
        )

        response = integration_client.get(
            f"/api/v1/project/{project}/{package}/+/etag-test",
            params={"mode": "proxy"},
        )
        assert response.status_code == 200
        assert "etag" in response.headers
        # ETag should contain the artifact ID (hash)
        etag = response.headers["etag"].strip('"')
        assert etag == expected_hash


class TestConcurrentUploads:
    """Tests for concurrent upload handling."""
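Note: `test_upload_creates_object_in_s3` above calls `s3_object_exists()`, which is presumably imported elsewhere in the module and is not shown in this excerpt. A rough sketch of such a helper against the MinIO/S3 backend, where the endpoint, credentials, bucket name, and key layout are all assumptions for illustration:

# Hypothetical helper; the real test suite's version may differ.
import os

import boto3
from botocore.exceptions import ClientError


def s3_object_exists(artifact_id: str, bucket: str = "artifacts") -> bool:
    """Return True if an object keyed by the artifact hash exists in S3/MinIO."""
    s3 = boto3.client(
        "s3",
        endpoint_url=os.environ.get("S3_ENDPOINT_URL", "http://localhost:9000"),
        aws_access_key_id=os.environ.get("S3_ACCESS_KEY", "minioadmin"),
        aws_secret_access_key=os.environ.get("S3_SECRET_KEY", "minioadmin"),
    )
    try:
        s3.head_object(Bucket=bucket, Key=artifact_id)
        return True
    except ClientError:
        return False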