Add storage abstraction, stats endpoints, garbage collection, and test infrastructure

- Add StorageBackend protocol for backend-agnostic storage interface (sketched below)
- Add health check with storage and database connectivity verification
- Add garbage collection endpoints for orphaned artifacts (ref_count=0)
- Add deduplication statistics endpoints (/api/v1/stats, /stats/storage, /stats/deduplication)
- Add per-project statistics endpoint
- Add verify_integrity method for post-upload hash validation
- Set up pytest infrastructure with mock S3 client
- Add unit tests for hash calculation and duplicate detection
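
A minimal sketch of the StorageBackend protocol and verify_integrity method named above (method names other than delete and verify_integrity are assumptions; the committed interface may differ):

from typing import BinaryIO, Protocol


class StorageBackend(Protocol):
    """Backend-agnostic storage interface (hypothetical shape)."""

    def put(self, key: str, data: BinaryIO) -> None:
        """Store an object under the given key."""
        ...

    def delete(self, key: str) -> None:
        """Remove an object; used by the garbage collector below."""
        ...

    def verify_integrity(self, key: str, expected_sha256: str) -> bool:
        """Re-hash the stored object and compare it to the expected digest."""
        ...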
Author: Mondo Diaz
Date:   2026-01-05 11:16:46 -06:00
Parent: dbe78ded2f
Commit: 109677e43a
9 changed files with 1311 additions and 5 deletions


@@ -73,6 +73,11 @@ from .schemas import (
SearchResultPackage,
SearchResultArtifact,
PresignedUrlResponse,
GarbageCollectionResponse,
OrphanedArtifactResponse,
StorageStatsResponse,
DeduplicationStatsResponse,
ProjectStatsResponse,
)
from .metadata import extract_metadata
from .config import get_settings
@@ -246,8 +251,39 @@ def _log_audit(
# Health check
@router.get("/health", response_model=HealthResponse)
def health_check(
db: Session = Depends(get_db),
storage: S3Storage = Depends(get_storage),
):
"""
Health check endpoint verifying database and storage connectivity.
"""
storage_healthy = None
database_healthy = None
# Check database connectivity
try:
db.execute("SELECT 1")
database_healthy = True
except Exception as e:
logger.warning(f"Database health check failed: {e}")
database_healthy = False
# Check storage connectivity with a head_bucket call (lightweight operation)
try:
storage.client.head_bucket(Bucket=storage.bucket)
storage_healthy = True
except Exception as e:
logger.warning(f"Storage health check failed: {e}")
storage_healthy = False
overall_status = "ok" if (storage_healthy and database_healthy) else "degraded"
return HealthResponse(
status=overall_status,
storage_healthy=storage_healthy,
database_healthy=database_healthy,
)
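# Illustrative responses (field names from HealthResponse above):
#   all healthy:    {"status": "ok", "storage_healthy": true, "database_healthy": true}
#   S3 unreachable: {"status": "degraded", "storage_healthy": false, "database_healthy": true}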
# Global search
@@ -2104,3 +2140,365 @@ def get_artifact(artifact_id: str, db: Session = Depends(get_db)):
format_metadata=artifact.format_metadata,
tags=tag_infos,
)
# =============================================================================
# Garbage Collection Endpoints (ISSUE 36)
# =============================================================================
@router.get(
"/api/v1/admin/orphaned-artifacts",
response_model=List[OrphanedArtifactResponse],
)
def list_orphaned_artifacts(
request: Request,
limit: int = Query(
default=100, ge=1, le=1000, description="Max artifacts to return"
),
db: Session = Depends(get_db),
):
"""
List artifacts with ref_count=0, i.e. artifacts no tag references.
Because nothing references them, they can be safely garbage collected.
"""
orphaned = (
db.query(Artifact)
.filter(Artifact.ref_count == 0)
.order_by(Artifact.created_at.asc())
.limit(limit)
.all()
)
return [
OrphanedArtifactResponse(
id=a.id,
size=a.size,
created_at=a.created_at,
created_by=a.created_by,
original_name=a.original_name,
)
for a in orphaned
]
@router.post(
"/api/v1/admin/garbage-collect",
response_model=GarbageCollectionResponse,
)
def garbage_collect(
request: Request,
dry_run: bool = Query(
default=True, description="If true, only report what would be deleted"
),
limit: int = Query(
default=100, ge=1, le=1000, description="Max artifacts to delete per run"
),
db: Session = Depends(get_db),
storage: S3Storage = Depends(get_storage),
):
"""
Clean up orphaned artifacts (ref_count=0) from storage and database.
By default runs in dry-run mode (only reports what would be deleted).
Set dry_run=false to actually delete artifacts.
Returns list of deleted artifact IDs and total bytes freed.
"""
user_id = get_user_id(request)
# Find orphaned artifacts
orphaned = (
db.query(Artifact)
.filter(Artifact.ref_count == 0)
.order_by(Artifact.created_at.asc())
.limit(limit)
.all()
)
deleted_ids = []
bytes_freed = 0
for artifact in orphaned:
if not dry_run:
# Delete from S3
try:
storage.delete(artifact.s3_key)
except Exception as e:
logger.error(f"Failed to delete S3 object {artifact.s3_key}: {e}")
continue
# Delete from database
db.delete(artifact)
logger.info(
f"Garbage collected artifact {artifact.id[:12]}... ({artifact.size} bytes)"
)
deleted_ids.append(artifact.id)
bytes_freed += artifact.size
if not dry_run:
# Audit log
_log_audit(
db,
action="garbage_collect",
resource="artifacts",
user_id=user_id,
source_ip=request.client.host if request.client else None,
details={
"artifacts_deleted": len(deleted_ids),
"bytes_freed": bytes_freed,
"artifact_ids": deleted_ids[:10], # Log first 10 for brevity
},
)
db.commit()
return GarbageCollectionResponse(
artifacts_deleted=len(deleted_ids),
bytes_freed=bytes_freed,
artifact_ids=deleted_ids,
dry_run=dry_run,
)
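# Typical workflow (illustrative):
#   1. GET  /api/v1/admin/orphaned-artifacts              -> review candidates
#   2. POST /api/v1/admin/garbage-collect                 -> dry run (default), inspect counts
#   3. POST /api/v1/admin/garbage-collect?dry_run=false   -> actually delete; audited above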
# =============================================================================
# Statistics Endpoints (ISSUE 34)
# =============================================================================
@router.get("/api/v1/stats", response_model=StorageStatsResponse)
def get_storage_stats(db: Session = Depends(get_db)):
"""
Get global storage statistics including deduplication metrics.
"""
# Total artifacts and size
total_stats = db.query(
func.count(Artifact.id),
func.coalesce(func.sum(Artifact.size), 0),
).first()
total_artifacts = total_stats[0] or 0
total_size_bytes = total_stats[1] or 0
# Unique artifacts (ref_count > 0)
unique_stats = (
db.query(
func.count(Artifact.id),
)
.filter(Artifact.ref_count > 0)
.first()
)
unique_artifacts = unique_stats[0] or 0
# Orphaned artifacts (ref_count = 0)
orphaned_stats = (
db.query(
func.count(Artifact.id),
func.coalesce(func.sum(Artifact.size), 0),
)
.filter(Artifact.ref_count == 0)
.first()
)
orphaned_artifacts = orphaned_stats[0] or 0
orphaned_size_bytes = orphaned_stats[1] or 0
# Total uploads and deduplicated uploads
upload_stats = db.query(
func.count(Upload.id),
func.count(Upload.id).filter(Upload.deduplicated == True),
).first()
total_uploads = upload_stats[0] or 0
deduplicated_uploads = upload_stats[1] or 0
# Calculate deduplication ratio
deduplication_ratio = (
total_uploads / unique_artifacts if unique_artifacts > 0 else 0.0
)
# Calculate storage saved: sum of size * (ref_count - 1) over artifacts with ref_count > 1.
# This is the extra storage duplicate uploads would have consumed without deduplication.
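# e.g. a 100-byte artifact with ref_count=3 was uploaded three times but stored
# once, so 100 * (3 - 1) = 200 bytes were saved.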
saved_query = (
db.query(func.coalesce(func.sum(Artifact.size * (Artifact.ref_count - 1)), 0))
.filter(Artifact.ref_count > 1)
.first()
)
storage_saved_bytes = saved_query[0] or 0
return StorageStatsResponse(
total_artifacts=total_artifacts,
total_size_bytes=total_size_bytes,
unique_artifacts=unique_artifacts,
orphaned_artifacts=orphaned_artifacts,
orphaned_size_bytes=orphaned_size_bytes,
total_uploads=total_uploads,
deduplicated_uploads=deduplicated_uploads,
deduplication_ratio=deduplication_ratio,
storage_saved_bytes=storage_saved_bytes,
)
@router.get("/api/v1/stats/storage", response_model=StorageStatsResponse)
def get_storage_stats_alias(db: Session = Depends(get_db)):
"""Alias for /api/v1/stats - get global storage statistics."""
return get_storage_stats(db)
@router.get("/api/v1/stats/deduplication", response_model=DeduplicationStatsResponse)
def get_deduplication_stats(
top_n: int = Query(
default=10,
ge=1,
le=100,
description="Number of top referenced artifacts to return",
),
db: Session = Depends(get_db),
):
"""
Get detailed deduplication effectiveness statistics.
"""
# Total logical bytes (sum of all upload sizes - what would be stored without dedup)
# We calculate this as: sum(artifact.size * artifact.ref_count) for all artifacts
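# e.g. two artifacts of 100 and 50 bytes with ref_counts 3 and 1:
#   logical  = 100*3 + 50*1 = 350 bytes (what uploads would occupy without dedup)
#   physical = 100 + 50     = 150 bytes (what is actually stored)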
logical_query = db.query(
func.coalesce(func.sum(Artifact.size * Artifact.ref_count), 0)
).first()
total_logical_bytes = logical_query[0] or 0
# Total physical bytes (actual storage used)
physical_query = (
db.query(func.coalesce(func.sum(Artifact.size), 0))
.filter(Artifact.ref_count > 0)
.first()
)
total_physical_bytes = physical_query[0] or 0
# Bytes saved
bytes_saved = total_logical_bytes - total_physical_bytes
# Savings percentage
savings_percentage = (
(bytes_saved / total_logical_bytes * 100) if total_logical_bytes > 0 else 0.0
)
# Upload counts
total_uploads = db.query(func.count(Upload.id)).scalar() or 0
unique_artifacts = (
db.query(func.count(Artifact.id)).filter(Artifact.ref_count > 0).scalar() or 0
)
duplicate_uploads = (
total_uploads - unique_artifacts if total_uploads > unique_artifacts else 0
)
# Average and max ref_count
ref_stats = (
db.query(
func.coalesce(func.avg(Artifact.ref_count), 0),
func.coalesce(func.max(Artifact.ref_count), 0),
)
.filter(Artifact.ref_count > 0)
.first()
)
average_ref_count = float(ref_stats[0] or 0)
max_ref_count = ref_stats[1] or 0
# Top N most referenced artifacts
top_artifacts = (
db.query(Artifact)
.filter(Artifact.ref_count > 1)
.order_by(Artifact.ref_count.desc())
.limit(top_n)
.all()
)
most_referenced = [
{
"id": a.id,
"ref_count": a.ref_count,
"size": a.size,
"storage_saved": a.size * (a.ref_count - 1),
"original_name": a.original_name,
}
for a in top_artifacts
]
return DeduplicationStatsResponse(
total_logical_bytes=total_logical_bytes,
total_physical_bytes=total_physical_bytes,
bytes_saved=bytes_saved,
savings_percentage=savings_percentage,
total_uploads=total_uploads,
unique_artifacts=unique_artifacts,
duplicate_uploads=duplicate_uploads,
average_ref_count=average_ref_count,
max_ref_count=max_ref_count,
most_referenced_artifacts=most_referenced,
)
@router.get(
"/api/v1/projects/{project_name}/stats", response_model=ProjectStatsResponse
)
def get_project_stats(
project_name: str,
db: Session = Depends(get_db),
):
"""
Get statistics for a specific project.
"""
project = db.query(Project).filter(Project.name == project_name).first()
if not project:
raise HTTPException(status_code=404, detail="Project not found")
# Package count
package_count = (
db.query(func.count(Package.id))
.filter(Package.project_id == project.id)
.scalar()
or 0
)
# Get all package IDs for this project
package_ids = (
db.query(Package.id).filter(Package.project_id == project.id).subquery()
)
# Tag count
tag_count = (
db.query(func.count(Tag.id)).filter(Tag.package_id.in_(package_ids)).scalar()
or 0
)
# Unique artifact count and total size (via uploads)
artifact_stats = (
db.query(
func.count(func.distinct(Upload.artifact_id)),
func.coalesce(func.sum(Artifact.size), 0),
)
.join(Artifact, Upload.artifact_id == Artifact.id)
.filter(Upload.package_id.in_(package_ids))
.first()
)
artifact_count = artifact_stats[0] if artifact_stats else 0
total_size_bytes = artifact_stats[1] if artifact_stats else 0
# Upload counts
upload_stats = (
db.query(
func.count(Upload.id),
func.count(Upload.id).filter(Upload.deduplicated == True),
)
.filter(Upload.package_id.in_(package_ids))
.first()
)
upload_count = upload_stats[0] if upload_stats else 0
deduplicated_uploads = upload_stats[1] if upload_stats else 0
return ProjectStatsResponse(
project_id=str(project.id),
project_name=project.name,
package_count=package_count,
tag_count=tag_count,
artifact_count=artifact_count,
total_size_bytes=total_size_bytes,
upload_count=upload_count,
deduplicated_uploads=deduplicated_uploads,
)
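
A minimal sketch of the unit-test style this commit adds for hash calculation and duplicate detection (the helper, fake client, and test names are illustrative, not the committed tests):

import hashlib
import io


def sha256_hex(data: bytes) -> str:
    """Content hash used as the storage key (mirrors the dedup scheme above)."""
    return hashlib.sha256(data).hexdigest()


class FakeS3Client:
    """In-memory stand-in for a boto3 S3 client; only put_object is stubbed."""

    def __init__(self):
        self.objects = {}

    def put_object(self, Bucket, Key, Body):
        self.objects[Key] = Body.read()


def test_hash_is_stable_for_identical_content():
    assert sha256_hex(b"same bytes") == sha256_hex(b"same bytes")


def test_duplicate_upload_maps_to_existing_key():
    client = FakeS3Client()
    payload = b"artifact payload"
    client.put_object(Bucket="artifacts", Key=sha256_hex(payload), Body=io.BytesIO(payload))
    # A re-upload of identical bytes hashes to a key that already exists,
    # so the caller can bump ref_count instead of storing a second copy.
    assert sha256_hex(b"artifact payload") in client.objects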