orchard/backend/app/schemas.py
Mondo Diaz 722e7d2d81 Add large file upload enhancements and tests (#43)
- Add upload duration/throughput metrics (duration_ms, throughput_mbps) to response
- Add upload progress logging for large files (hash computation and multipart upload)
- Add client disconnect handling during uploads with proper cleanup
- Add upload progress tracking endpoint GET /upload/{upload_id}/progress
- Add large file upload tests (10MB, 100MB, 1GB)
- Add upload cancellation and timeout handling tests
- Add API documentation for upload endpoints with curl, Python, JavaScript examples
2026-01-16 19:33:31 +00:00


import re
from datetime import datetime
from typing import Optional, List, Dict, Any, Generic, TypeVar
from uuid import UUID

from pydantic import BaseModel, field_validator

T = TypeVar("T")

# Pagination schemas
class PaginationMeta(BaseModel):
    page: int
    limit: int
    total: int
    total_pages: int
    has_more: bool = False  # True if there are more pages after current page

class PaginatedResponse(BaseModel, Generic[T]):
    items: List[T]
    pagination: PaginationMeta
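
# Illustrative example (not part of the original module): how the pagination
# fields relate, assuming total_pages is a ceiling division and has_more is
# true whenever the current page precedes the last. For limit=25, total=103:
#
#     meta = PaginationMeta(
#         page=2,
#         limit=25,
#         total=103,
#         total_pages=(103 + 25 - 1) // 25,  # ceil(103 / 25) == 5
#         has_more=2 < 5,                    # True: pages 3-5 remain
#     )
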
# Project schemas
class ProjectCreate(BaseModel):
    name: str
    description: Optional[str] = None
    is_public: bool = True

class ProjectResponse(BaseModel):
    id: UUID
    name: str
    description: Optional[str]
    is_public: bool
    created_at: datetime
    updated_at: datetime
    created_by: str

    class Config:
        from_attributes = True

class ProjectUpdate(BaseModel):
    """Schema for updating a project"""

    description: Optional[str] = None
    is_public: Optional[bool] = None

class ProjectWithAccessResponse(ProjectResponse):
    """Project response with user's access level included"""

    access_level: Optional[str] = None  # 'read', 'write', 'admin', or None
    is_owner: bool = False

# Package format and platform enums
PACKAGE_FORMATS = [
"generic",
"npm",
"pypi",
"docker",
"deb",
"rpm",
"maven",
"nuget",
"helm",
]
PACKAGE_PLATFORMS = [
"any",
"linux",
"darwin",
"windows",
"linux-amd64",
"linux-arm64",
"darwin-amd64",
"darwin-arm64",
"windows-amd64",
]
# Package schemas
class PackageCreate(BaseModel):
    name: str
    description: Optional[str] = None
    format: str = "generic"
    platform: str = "any"

class PackageResponse(BaseModel):
    id: UUID
    project_id: UUID
    name: str
    description: Optional[str]
    format: str
    platform: str
    created_at: datetime
    updated_at: datetime

    class Config:
        from_attributes = True

class PackageUpdate(BaseModel):
    """Schema for updating a package"""

    description: Optional[str] = None
    format: Optional[str] = None
    platform: Optional[str] = None

class TagSummary(BaseModel):
    """Lightweight tag info for embedding in package responses"""

    name: str
    artifact_id: str
    created_at: datetime

class PackageDetailResponse(BaseModel):
    """Package with aggregated metadata"""

    id: UUID
    project_id: UUID
    name: str
    description: Optional[str]
    format: str
    platform: str
    created_at: datetime
    updated_at: datetime
    # Aggregated fields
    tag_count: int = 0
    artifact_count: int = 0
    total_size: int = 0
    latest_tag: Optional[str] = None
    latest_upload_at: Optional[datetime] = None
    # Recent tags (limit 5)
    recent_tags: List[TagSummary] = []

    class Config:
        from_attributes = True

# Artifact schemas
class ArtifactResponse(BaseModel):
    id: str
    sha256: str  # Explicit SHA256 field (same as id)
    size: int
    content_type: Optional[str]
    original_name: Optional[str]
    checksum_md5: Optional[str] = None
    checksum_sha1: Optional[str] = None
    s3_etag: Optional[str] = None
    created_at: datetime
    created_by: str
    ref_count: int
    format_metadata: Optional[Dict[str, Any]] = None

    class Config:
        from_attributes = True

# Tag schemas
class TagCreate(BaseModel):
    name: str
    artifact_id: str

class TagResponse(BaseModel):
    id: UUID
    package_id: UUID
    name: str
    artifact_id: str
    created_at: datetime
    created_by: str
    version: Optional[str] = None  # Version of the artifact this tag points to

    class Config:
        from_attributes = True

class TagDetailResponse(BaseModel):
    """Tag with embedded artifact metadata"""

    id: UUID
    package_id: UUID
    name: str
    artifact_id: str
    created_at: datetime
    created_by: str
    version: Optional[str] = None  # Version of the artifact this tag points to
    # Artifact metadata
    artifact_size: int
    artifact_content_type: Optional[str]
    artifact_original_name: Optional[str]
    artifact_created_at: datetime
    artifact_format_metadata: Optional[Dict[str, Any]] = None

    class Config:
        from_attributes = True

class TagHistoryResponse(BaseModel):
    """History entry for tag changes"""

    id: UUID
    tag_id: UUID
    old_artifact_id: Optional[str]
    new_artifact_id: str
    changed_at: datetime
    changed_by: str

    class Config:
        from_attributes = True

class TagHistoryDetailResponse(BaseModel):
    """Tag history with artifact metadata for each version"""

    id: UUID
    tag_id: UUID
    tag_name: str
    old_artifact_id: Optional[str]
    new_artifact_id: str
    changed_at: datetime
    changed_by: str
    # Artifact metadata for new artifact
    artifact_size: int
    artifact_original_name: Optional[str]
    artifact_content_type: Optional[str]

    class Config:
        from_attributes = True

# Audit log schemas
class AuditLogResponse(BaseModel):
    """Audit log entry response"""

    id: UUID
    action: str
    resource: str
    user_id: str
    details: Optional[Dict[str, Any]]
    timestamp: datetime
    source_ip: Optional[str]

    class Config:
        from_attributes = True

# Upload history schemas
class UploadHistoryResponse(BaseModel):
    """Upload event with artifact details"""

    id: UUID
    artifact_id: str
    package_id: UUID
    package_name: str
    project_name: str
    original_name: Optional[str]
    tag_name: Optional[str]
    uploaded_at: datetime
    uploaded_by: str
    source_ip: Optional[str]
    deduplicated: bool
    # Artifact metadata
    artifact_size: int
    artifact_content_type: Optional[str]

    class Config:
        from_attributes = True

# Artifact provenance schemas
class ArtifactProvenanceResponse(BaseModel):
    """Full provenance/history of an artifact"""

    artifact_id: str
    sha256: str
    size: int
    content_type: Optional[str]
    original_name: Optional[str]
    created_at: datetime
    created_by: str
    ref_count: int
    # First upload info
    first_uploaded_at: datetime
    first_uploaded_by: str
    # Usage statistics
    upload_count: int
    # References
    packages: List[Dict[str, Any]]  # List of {project_name, package_name, tag_names}
    tags: List[Dict[str, Any]]  # List of {project_name, package_name, tag_name, created_at}
    # Upload history
    uploads: List[Dict[str, Any]]  # List of upload events

    class Config:
        from_attributes = True
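
# Illustrative example (not from the original module): the untyped dict lists
# above follow the shapes described in the inline comments, e.g.
#
#     packages = [{"project_name": "acme", "package_name": "tools",
#                  "tag_names": ["latest", "v1.2.0"]}]
#     tags = [{"project_name": "acme", "package_name": "tools",
#              "tag_name": "latest", "created_at": "2026-01-16T19:33:31Z"}]
#
# Field names come from the comments; exact value types depend on the
# endpoint that builds the response.
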
class ArtifactTagInfo(BaseModel):
    """Tag info for embedding in artifact responses"""

    id: UUID
    name: str
    package_id: UUID
    package_name: str
    project_name: str

class ArtifactDetailResponse(BaseModel):
    """Artifact with list of tags/packages referencing it"""

    id: str
    sha256: str  # Explicit SHA256 field (same as id)
    size: int
    content_type: Optional[str]
    original_name: Optional[str]
    checksum_md5: Optional[str] = None
    checksum_sha1: Optional[str] = None
    s3_etag: Optional[str] = None
    created_at: datetime
    created_by: str
    ref_count: int
    format_metadata: Optional[Dict[str, Any]] = None
    tags: List[ArtifactTagInfo] = []

    class Config:
        from_attributes = True

class PackageArtifactResponse(BaseModel):
    """Artifact with tags for package artifact listing"""

    id: str
    sha256: str  # Explicit SHA256 field (same as id)
    size: int
    content_type: Optional[str]
    original_name: Optional[str]
    checksum_md5: Optional[str] = None
    checksum_sha1: Optional[str] = None
    s3_etag: Optional[str] = None
    created_at: datetime
    created_by: str
    format_metadata: Optional[Dict[str, Any]] = None
    tags: List[str] = []  # Tag names pointing to this artifact

    class Config:
        from_attributes = True

class GlobalArtifactResponse(BaseModel):
    """Artifact with project/package context for global listing"""

    id: str
    sha256: str
    size: int
    content_type: Optional[str]
    original_name: Optional[str]
    created_at: datetime
    created_by: str
    format_metadata: Optional[Dict[str, Any]] = None
    ref_count: int = 0
    # Context from tags/packages
    projects: List[str] = []  # List of project names containing this artifact
    packages: List[str] = []  # List of "project/package" paths
    tags: List[str] = []  # List of "project/package:tag" references

    class Config:
        from_attributes = True

class GlobalTagResponse(BaseModel):
    """Tag with project/package context for global listing"""

    id: UUID
    name: str
    artifact_id: str
    created_at: datetime
    created_by: str
    project_name: str
    package_name: str
    artifact_size: Optional[int] = None
    artifact_content_type: Optional[str] = None
    version: Optional[str] = None  # Version of the artifact this tag points to

    class Config:
        from_attributes = True

# Upload response
class UploadResponse(BaseModel):
    artifact_id: str
    sha256: str  # Explicit SHA256 field (same as artifact_id)
    size: int
    project: str
    package: str
    tag: Optional[str]
    version: Optional[str] = None  # Version assigned to this artifact
    version_source: Optional[str] = None  # How version was determined: 'explicit', 'filename', 'metadata'
    checksum_md5: Optional[str] = None
    checksum_sha1: Optional[str] = None
    s3_etag: Optional[str] = None
    format_metadata: Optional[Dict[str, Any]] = None
    deduplicated: bool = False
    ref_count: int = 1  # Current reference count after this upload
    # Enhanced metadata (Issue #19)
    upload_id: Optional[UUID] = None  # UUID of the upload record
    content_type: Optional[str] = None
    original_name: Optional[str] = None
    created_at: Optional[datetime] = None
    # Upload metrics (Issue #43)
    duration_ms: Optional[int] = None  # Upload duration in milliseconds
    throughput_mbps: Optional[float] = None  # Upload throughput in MB/s
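
# Illustrative derivation (an assumption, not taken from the server code):
# the two metric fields are consistent with computing throughput from the
# upload size and wall-clock duration:
#
#     size = 104_857_600            # 100 MiB upload
#     duration_ms = 8_000
#     throughput_mbps = (size / 1_000_000) / (duration_ms / 1_000)  # ~13.1 MB/s
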
# Resumable upload schemas
class ResumableUploadInitRequest(BaseModel):
    """Request to initiate a resumable upload"""

    expected_hash: str  # SHA256 hash of the file (client must compute)
    filename: str
    content_type: Optional[str] = None
    size: int
    tag: Optional[str] = None
    version: Optional[str] = None  # Explicit version (auto-detected if not provided)

    @field_validator("expected_hash")
    @classmethod
    def validate_sha256_hash(cls, v: str) -> str:
        """Validate that expected_hash is a valid 64-character lowercase hex SHA256 hash."""
        if not re.match(r"^[a-f0-9]{64}$", v.lower()):
            raise ValueError(
                "expected_hash must be a valid 64-character lowercase hexadecimal SHA256 hash"
            )
        return v.lower()  # Normalize to lowercase
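
# Illustrative usage (not part of the original module): the validator accepts
# 64 hex characters in either case and normalizes to lowercase; anything else
# raises a pydantic ValidationError.
#
#     req = ResumableUploadInitRequest(
#         expected_hash="A3F1...",  # 64 hex chars; uppercase is accepted
#         filename="model.tar.gz",
#         size=1_073_741_824,
#     )
#     req.expected_hash  # "a3f1..." (lowercased)
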
class ResumableUploadInitResponse(BaseModel):
    """Response from initiating a resumable upload"""

    upload_id: Optional[str]  # None if file already exists
    already_exists: bool
    artifact_id: Optional[str] = None  # Set if already_exists is True
    chunk_size: int  # Recommended chunk size for parts

class ResumableUploadPartResponse(BaseModel):
    """Response from uploading a part"""

    part_number: int
    etag: str

class ResumableUploadCompleteRequest(BaseModel):
    """Request to complete a resumable upload"""

    tag: Optional[str] = None

class ResumableUploadCompleteResponse(BaseModel):
    """Response from completing a resumable upload"""

    artifact_id: str
    size: int
    project: str
    package: str
    tag: Optional[str]

class ResumableUploadStatusResponse(BaseModel):
    """Status of a resumable upload"""

    upload_id: str
    uploaded_parts: List[int]
    total_uploaded_bytes: int
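
# Sketch of the client-side flow these schemas imply (endpoint paths are
# assumptions, not confirmed by this file):
#
#     1. POST an init request with ResumableUploadInitRequest; if the
#        response has already_exists=True, stop -- artifact_id is stored.
#     2. Otherwise split the file into chunk_size parts and upload each,
#        collecting ResumableUploadPartResponse (part_number, etag) pairs.
#     3. POST a complete request with an optional tag.
#     4. On interruption, fetch the status: uploaded_parts lists the part
#        numbers that can be skipped on resume.
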
class UploadProgressResponse(BaseModel):
    """Progress information for an in-flight upload"""

    upload_id: str
    status: str  # 'in_progress', 'completed', 'failed', 'not_found'
    bytes_uploaded: int = 0
    bytes_total: Optional[int] = None
    percent_complete: Optional[float] = None
    parts_uploaded: int = 0
    parts_total: Optional[int] = None
    started_at: Optional[datetime] = None
    elapsed_seconds: Optional[float] = None
    throughput_mbps: Optional[float] = None
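
# Served by GET /upload/{upload_id}/progress (per the commit notes). An
# illustrative derivation (assumption): the optional fields follow from the
# byte counters once bytes_total is known:
#
#     percent_complete = 100.0 * bytes_uploaded / bytes_total
#     throughput_mbps = (bytes_uploaded / 1_000_000) / elapsed_seconds
#
# Both presumably stay None while the totals are unknown.
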
# Consumer schemas
class ConsumerResponse(BaseModel):
    id: UUID
    package_id: UUID
    project_url: str
    last_access: datetime
    created_at: datetime

    class Config:
        from_attributes = True

# Package version schemas
class PackageVersionResponse(BaseModel):
    """Immutable version record for an artifact in a package"""

    id: UUID
    package_id: UUID
    artifact_id: str
    version: str
    version_source: Optional[str] = None  # 'explicit', 'filename', 'metadata', 'migrated_from_tag'
    created_at: datetime
    created_by: str
    # Enriched fields from joins
    size: Optional[int] = None
    content_type: Optional[str] = None
    original_name: Optional[str] = None
    tags: List[str] = []  # Tag names pointing to this artifact

    class Config:
        from_attributes = True

class PackageVersionDetailResponse(PackageVersionResponse):
    """Version with full artifact metadata"""

    format_metadata: Optional[Dict[str, Any]] = None
    checksum_md5: Optional[str] = None
    checksum_sha1: Optional[str] = None

# Global search schemas
class SearchResultProject(BaseModel):
    """Project result for global search"""

    id: UUID
    name: str
    description: Optional[str]
    is_public: bool

    class Config:
        from_attributes = True

class SearchResultPackage(BaseModel):
    """Package result for global search"""

    id: UUID
    project_id: UUID
    project_name: str
    name: str
    description: Optional[str]
    format: str

    class Config:
        from_attributes = True

class SearchResultArtifact(BaseModel):
    """Artifact/tag result for global search"""

    tag_id: UUID
    tag_name: str
    artifact_id: str
    package_id: UUID
    package_name: str
    project_name: str
    original_name: Optional[str]

class GlobalSearchResponse(BaseModel):
    """Combined search results across all entity types"""

    query: str
    projects: List[SearchResultProject]
    packages: List[SearchResultPackage]
    artifacts: List[SearchResultArtifact]
    counts: Dict[str, int]  # Total counts for each type

# Presigned URL response
class PresignedUrlResponse(BaseModel):
    """Response containing a presigned URL for direct S3 download"""

    url: str
    expires_at: datetime
    method: str = "GET"
    artifact_id: str
    size: int
    content_type: Optional[str] = None
    original_name: Optional[str] = None
    checksum_sha256: Optional[str] = None
    checksum_md5: Optional[str] = None
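
# Illustrative client-side usage (an assumption about the caller, not this
# module): `presigned` is a hypothetical PresignedUrlResponse; the URL is
# fetched directly from S3 until expires_at passes.
#
#     import requests
#     resp = requests.get(presigned.url, timeout=60)
#     resp.raise_for_status()
#     with open(presigned.original_name or presigned.artifact_id, "wb") as f:
#         f.write(resp.content)
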
# Health check
class HealthResponse(BaseModel):
    status: str
    version: str = "1.0.0"
    storage_healthy: Optional[bool] = None
    database_healthy: Optional[bool] = None

# Garbage collection schemas
class GarbageCollectionResponse(BaseModel):
    """Response from garbage collection operation"""

    artifacts_deleted: int
    bytes_freed: int
    artifact_ids: List[str]
    dry_run: bool

class OrphanedArtifactResponse(BaseModel):
    """Information about an orphaned artifact"""

    id: str
    size: int
    created_at: datetime
    created_by: str
    original_name: Optional[str]

# Storage statistics schemas
class StorageStatsResponse(BaseModel):
    """Global storage statistics"""

    total_artifacts: int
    total_size_bytes: int
    unique_artifacts: int  # Artifacts with ref_count > 0
    orphaned_artifacts: int  # Artifacts with ref_count = 0
    orphaned_size_bytes: int
    total_uploads: int
    deduplicated_uploads: int
    deduplication_ratio: float  # total_uploads / unique_artifacts (if > 1, deduplication is working)
    storage_saved_bytes: int  # Bytes saved through deduplication
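
# Worked example (illustrative), using the formula in the field comment:
# with total_uploads=500 and unique_artifacts=400, deduplication_ratio is
# 500 / 400 == 1.25 -- every 1.25 uploads map to one stored artifact, and
# storage_saved_bytes counts the bytes of the 100 uploads never written twice.
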
class ConsistencyCheckResponse(BaseModel):
    """Result of S3/Database consistency check"""

    total_artifacts_checked: int
    orphaned_s3_objects: int  # Objects in S3 but not in DB
    missing_s3_objects: int  # Records in DB but not in S3
    size_mismatches: int  # Records where DB size != S3 size
    healthy: bool
    orphaned_s3_keys: List[str] = []  # Limited list of orphaned S3 keys
    missing_s3_keys: List[str] = []  # Limited list of missing S3 keys
    size_mismatch_artifacts: List[Dict[str, Any]] = []  # Limited list of mismatches

class DeduplicationStatsResponse(BaseModel):
    """Deduplication effectiveness statistics"""

    total_logical_bytes: int  # Sum of all upload sizes (what would be stored without dedup)
    total_physical_bytes: int  # Actual storage used
    bytes_saved: int
    savings_percentage: float
    total_uploads: int
    unique_artifacts: int
    duplicate_uploads: int
    average_ref_count: float
    max_ref_count: int
    most_referenced_artifacts: List[Dict[str, Any]]  # Top N most referenced
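
# Worked example (illustrative): if clients uploaded 10 GB in total
# (total_logical_bytes) but only 7 GB is stored (total_physical_bytes), then
# bytes_saved is 3 GB and savings_percentage is 100 * 3 / 10 == 30.0.
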
class ProjectStatsResponse(BaseModel):
    """Per-project statistics"""

    project_id: str
    project_name: str
    package_count: int
    tag_count: int
    artifact_count: int
    total_size_bytes: int
    upload_count: int
    deduplicated_uploads: int
    storage_saved_bytes: int = 0  # Bytes saved through deduplication
    deduplication_ratio: float = 1.0  # upload_count / artifact_count

class PackageStatsResponse(BaseModel):
    """Per-package statistics"""

    package_id: str
    package_name: str
    project_name: str
    tag_count: int
    artifact_count: int
    total_size_bytes: int
    upload_count: int
    deduplicated_uploads: int
    storage_saved_bytes: int = 0
    deduplication_ratio: float = 1.0

class ArtifactStatsResponse(BaseModel):
    """Per-artifact reference statistics"""

    artifact_id: str
    sha256: str
    size: int
    ref_count: int
    storage_savings: int  # (ref_count - 1) * size
    tags: List[Dict[str, Any]]  # Tags referencing this artifact
    projects: List[str]  # Projects using this artifact
    packages: List[str]  # Packages using this artifact
    first_uploaded: Optional[datetime] = None
    last_referenced: Optional[datetime] = None
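
# Worked example (illustrative), from the storage_savings formula above: an
# artifact of size=1_048_576 bytes with ref_count=4 yields
# storage_savings == (4 - 1) * 1_048_576 == 3_145_728 -- the three duplicate
# uploads cost no additional storage.
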
class CrossProjectDeduplicationResponse(BaseModel):
    """Cross-project deduplication statistics"""

    shared_artifacts_count: int  # Artifacts used in multiple projects
    total_cross_project_savings: int  # Bytes saved by cross-project sharing
    shared_artifacts: List[Dict[str, Any]]  # Details of shared artifacts

class TimeBasedStatsResponse(BaseModel):
    """Time-based deduplication statistics"""

    period: str  # "daily", "weekly", "monthly"
    start_date: datetime
    end_date: datetime
    data_points: List[Dict[str, Any]]  # List of {date, uploads, unique, duplicated, bytes_saved}

class StatsReportResponse(BaseModel):
    """Summary report in various formats"""

    format: str  # "json", "csv", "markdown"
    generated_at: datetime
    content: str  # The report content

# Authentication schemas
class LoginRequest(BaseModel):
    """Login request with username and password"""

    username: str
    password: str

class LoginResponse(BaseModel):
    """Login response with user info"""

    id: UUID
    username: str
    email: Optional[str]
    is_admin: bool
    must_change_password: bool

class ChangePasswordRequest(BaseModel):
    """Change password request"""

    current_password: str
    new_password: str

class UserResponse(BaseModel):
    """User information response"""

    id: UUID
    username: str
    email: Optional[str]
    is_admin: bool
    is_active: bool
    must_change_password: bool
    created_at: datetime
    last_login: Optional[datetime]

    class Config:
        from_attributes = True

class UserCreate(BaseModel):
    """Create user request (admin only)"""

    username: str
    password: str
    email: Optional[str] = None
    is_admin: bool = False

class UserUpdate(BaseModel):
    """Update user request (admin only)"""

    email: Optional[str] = None
    is_admin: Optional[bool] = None
    is_active: Optional[bool] = None

class ResetPasswordRequest(BaseModel):
    """Reset password request (admin only)"""

    new_password: str

class APIKeyCreate(BaseModel):
    """Create API key request"""

    name: str
    description: Optional[str] = None
    scopes: Optional[List[str]] = None

class APIKeyResponse(BaseModel):
    """API key response (without the secret key)"""

    id: UUID
    name: str
    description: Optional[str]
    scopes: Optional[List[str]]
    created_at: datetime
    expires_at: Optional[datetime]
    last_used: Optional[datetime]

    class Config:
        from_attributes = True

class APIKeyCreateResponse(BaseModel):
    """API key creation response (includes the secret key - only shown once)"""

    id: UUID
    name: str
    description: Optional[str]
    scopes: Optional[List[str]]
    key: str  # The actual API key - only returned on creation
    created_at: datetime
    expires_at: Optional[datetime]

# OIDC Configuration schemas
class OIDCConfigResponse(BaseModel):
    """OIDC configuration response (hides client secret)"""

    enabled: bool
    issuer_url: str
    client_id: str
    has_client_secret: bool  # True if secret is configured, but don't expose it
    scopes: List[str]
    auto_create_users: bool
    admin_group: str

class OIDCConfigUpdate(BaseModel):
    """Update OIDC configuration"""

    enabled: Optional[bool] = None
    issuer_url: Optional[str] = None
    client_id: Optional[str] = None
    client_secret: Optional[str] = None  # Only set if changing
    scopes: Optional[List[str]] = None
    auto_create_users: Optional[bool] = None
    admin_group: Optional[str] = None

class OIDCStatusResponse(BaseModel):
    """Public OIDC status response"""

    enabled: bool
    issuer_url: Optional[str] = None  # Only included if enabled

class OIDCLoginResponse(BaseModel):
    """OIDC login initiation response"""

    authorization_url: str

# Access Permission schemas
class AccessPermissionCreate(BaseModel):
    """Grant access to a user for a project"""

    username: str
    level: str  # 'read', 'write', or 'admin'
    expires_at: Optional[datetime] = None

    @field_validator("level")
    @classmethod
    def validate_level(cls, v: str) -> str:
        if v not in ("read", "write", "admin"):
            raise ValueError("level must be 'read', 'write', or 'admin'")
        return v

class AccessPermissionUpdate(BaseModel):
    """Update access permission"""

    level: Optional[str] = None
    expires_at: Optional[datetime] = None

    @field_validator("level")
    @classmethod
    def validate_level(cls, v: Optional[str]) -> Optional[str]:
        if v is not None and v not in ("read", "write", "admin"):
            raise ValueError("level must be 'read', 'write', or 'admin'")
        return v

class AccessPermissionResponse(BaseModel):
    """Access permission response"""

    id: UUID
    project_id: UUID
    user_id: str
    level: str
    created_at: datetime
    expires_at: Optional[datetime]

    class Config:
        from_attributes = True