Add storage abstraction, stats endpoints, garbage collection, and test infrastructure
- Add StorageBackend protocol for backend-agnostic storage interface (a rough sketch follows below)
- Add health check with storage and database connectivity verification
- Add garbage collection endpoints for orphaned artifacts (ref_count=0)
- Add deduplication statistics endpoints (/api/v1/stats, /stats/storage, /stats/deduplication)
- Add per-project statistics endpoint
- Add verify_integrity method for post-upload hash validation
- Set up pytest infrastructure with mock S3 client
- Add unit tests for hash calculation and duplicate detection
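The StorageBackend protocol itself is not part of this file; as a rough sketch only (every method name and signature here is an assumption, not the actual Orchard interface), a backend-agnostic protocol could look roughly like:

    from typing import BinaryIO, Protocol

    class StorageBackend(Protocol):
        # Hypothetical sketch only; not the interface introduced by this commit.
        def put(self, key: str, data: BinaryIO) -> int:
            """Store an object, returning the number of bytes written."""
            ...

        def get(self, key: str) -> bytes:
            """Return the stored content for key."""
            ...

        def delete(self, key: str) -> None:
            """Remove the object if it exists."""
            ...

        def verify_integrity(self, key: str, expected_sha256: str) -> bool:
            """Re-hash the stored object and compare it to the expected digest."""
            ...

The MockS3Client and mock_storage fixtures in the conftest below exercise the S3-backed side of such an interface without touching real storage.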
backend/tests/conftest.py (new file, 201 lines added)
@@ -0,0 +1,201 @@
"""
Test configuration and fixtures for Orchard backend tests.

This module provides:
- Database fixtures with test isolation
- Mock S3 storage via an in-memory MockS3Client (no real S3/MinIO required)
- Test data factories for common scenarios
"""

import os
import pytest
import hashlib
from typing import Generator, BinaryIO
from unittest.mock import MagicMock, patch
import io

# Set test environment before importing app modules
os.environ["ORCHARD_DATABASE_HOST"] = "localhost"
os.environ["ORCHARD_DATABASE_PORT"] = "5432"
os.environ["ORCHARD_DATABASE_USER"] = "test"
os.environ["ORCHARD_DATABASE_PASSWORD"] = "test"
os.environ["ORCHARD_DATABASE_DBNAME"] = "orchard_test"
os.environ["ORCHARD_S3_ENDPOINT"] = "http://localhost:9000"
os.environ["ORCHARD_S3_BUCKET"] = "test-bucket"
os.environ["ORCHARD_S3_ACCESS_KEY_ID"] = "test"
os.environ["ORCHARD_S3_SECRET_ACCESS_KEY"] = "test"


# =============================================================================
# Test Data Factories
# =============================================================================


def create_test_file(content: bytes = None, size: int = 1024) -> io.BytesIO:
    """
    Create a test file with known content.

    Args:
        content: Specific content to use, or None to generate random content
        size: Size of generated content if content is None

    Returns:
        BytesIO object with the content
    """
    if content is None:
        content = os.urandom(size)
    return io.BytesIO(content)


def compute_sha256(content: bytes) -> str:
    """Compute SHA256 hash of content as lowercase hex string."""
    return hashlib.sha256(content).hexdigest()


def compute_md5(content: bytes) -> str:
    """Compute MD5 hash of content as lowercase hex string."""
    return hashlib.md5(content).hexdigest()


def compute_sha1(content: bytes) -> str:
    """Compute SHA1 hash of content as lowercase hex string."""
    return hashlib.sha1(content).hexdigest()


# Known test data with pre-computed hashes
TEST_CONTENT_HELLO = b"Hello, World!"
TEST_HASH_HELLO = "dffd6021bb2bd5b0af676290809ec3a53191dd81c7f70a4b28688a362182986f"
TEST_MD5_HELLO = "65a8e27d8879283831b664bd8b7f0ad4"
TEST_SHA1_HELLO = "0a0a9f2a6772942557ab5355d76af442f8f65e01"

TEST_CONTENT_EMPTY = b""
# Note: Empty content should be rejected by the storage layer

TEST_CONTENT_BINARY = bytes(range(256))
TEST_HASH_BINARY = compute_sha256(TEST_CONTENT_BINARY)
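
# Sanity check: the pre-computed digests above match the helper functions.
assert compute_sha256(TEST_CONTENT_HELLO) == TEST_HASH_HELLO
assert compute_md5(TEST_CONTENT_HELLO) == TEST_MD5_HELLO
assert compute_sha1(TEST_CONTENT_HELLO) == TEST_SHA1_HELLO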


# =============================================================================
# Mock Storage Fixtures
# =============================================================================


class MockS3Client:
    """Mock S3 client for unit testing without actual S3/MinIO."""

    def __init__(self):
        self.objects = {}  # key -> content
        self.bucket = "test-bucket"

    def put_object(self, Bucket: str, Key: str, Body: bytes) -> dict:
        self.objects[Key] = Body
        return {"ETag": f'"{compute_md5(Body)}"'}

    def get_object(self, Bucket: str, Key: str, **kwargs) -> dict:
        if Key not in self.objects:
            # Mirror boto3's ClientError so storage-layer error handling is exercised.
            from botocore.exceptions import ClientError

            error_response = {"Error": {"Code": "NoSuchKey", "Message": "Not Found"}}
            raise ClientError(error_response, "GetObject")
        content = self.objects[Key]
        return {
            "Body": io.BytesIO(content),
            "ContentLength": len(content),
        }

    def head_object(self, Bucket: str, Key: str) -> dict:
        if Key not in self.objects:
            from botocore.exceptions import ClientError

            error_response = {"Error": {"Code": "404", "Message": "Not Found"}}
            raise ClientError(error_response, "HeadObject")
        content = self.objects[Key]
        return {
            "ContentLength": len(content),
            "ETag": f'"{compute_md5(content)}"',
        }

    def delete_object(self, Bucket: str, Key: str) -> dict:
        if Key in self.objects:
            del self.objects[Key]
        return {}

    def head_bucket(self, Bucket: str) -> dict:
        return {}

    def create_multipart_upload(self, Bucket: str, Key: str) -> dict:
        return {"UploadId": "test-upload-id"}

    def upload_part(
        self, Bucket: str, Key: str, UploadId: str, PartNumber: int, Body: bytes
    ) -> dict:
        return {"ETag": f'"{compute_md5(Body)}"'}

    def complete_multipart_upload(
        self, Bucket: str, Key: str, UploadId: str, MultipartUpload: dict
    ) -> dict:
        return {"ETag": '"test-etag"'}

    def abort_multipart_upload(self, Bucket: str, Key: str, UploadId: str) -> dict:
        return {}

    def generate_presigned_url(
        self, ClientMethod: str, Params: dict, ExpiresIn: int
    ) -> str:
        return f"https://test-bucket.s3.amazonaws.com/{Params['Key']}?presigned=true"


@pytest.fixture
def mock_s3_client() -> MockS3Client:
    """Provide a mock S3 client for unit tests."""
    return MockS3Client()


@pytest.fixture
def mock_storage(mock_s3_client):
    """
    Provide a mock storage instance for unit tests.

    Uses the MockS3Client to avoid actual S3/MinIO calls.
    """
    from app.storage import S3Storage

    # Bypass S3Storage.__init__ so no real S3 connection is created.
    storage = S3Storage.__new__(S3Storage)
    storage.client = mock_s3_client
    storage.bucket = "test-bucket"
    storage._active_uploads = {}

    return storage


# =============================================================================
# Database Fixtures (for integration tests)
# =============================================================================


@pytest.fixture(scope="session")
def test_db_url():
    """Get the test database URL."""
    return (
        f"postgresql://{os.environ['ORCHARD_DATABASE_USER']}:"
        f"{os.environ['ORCHARD_DATABASE_PASSWORD']}@"
        f"{os.environ['ORCHARD_DATABASE_HOST']}:"
        f"{os.environ['ORCHARD_DATABASE_PORT']}/"
        f"{os.environ['ORCHARD_DATABASE_DBNAME']}"
    )


# =============================================================================
# HTTP Client Fixtures (for API tests)
# =============================================================================


@pytest.fixture
def test_app():
    """
    Create a test FastAPI application.

    Note: This requires the database to be available for integration tests.
    For unit tests, use the mock_storage fixture instead.
    """
    from fastapi.testclient import TestClient
    from app.main import app

    return TestClient(app)
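For illustration, a unit test built on these fixtures might look like the sketch below; it relies only on the MockS3Client behaviour defined above, and the test name and key layout are invented for the example:

    import hashlib

    def test_put_and_head_roundtrip(mock_s3_client):
        # Store known content under a content-addressed key, then read back its metadata.
        content = b"Hello, World!"
        key = f"artifacts/{hashlib.sha256(content).hexdigest()}"
        mock_s3_client.put_object(Bucket="test-bucket", Key=key, Body=content)

        head = mock_s3_client.head_object(Bucket="test-bucket", Key=key)
        assert head["ContentLength"] == len(content)
        assert head["ETag"] == f'"{hashlib.md5(content).hexdigest()}"'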