Complete metadata query EPIC: add upload endpoints, enhance response fields, standardize audit actions (#18, #19, #20, #22)

- Add GET /api/v1/uploads global endpoint with project/package/user/date filters
- Add GET /api/v1/project/{project}/uploads project-level uploads endpoint
- Add has_more field to PaginationMeta for pagination UI
- Add upload_id, content_type, original_name, created_at to UploadResponse
- Standardize audit action names: project.delete, package.delete, tag.delete, artifact.upload
- Add 13 new integration tests for upload query endpoints and response fields
- 130 tests passing
This commit is contained in:
Mondo Diaz
2026-01-06 14:23:52 -06:00
parent 3d0f502867
commit a293432d2e
4 changed files with 431 additions and 7 deletions

View File

@@ -482,6 +482,7 @@ def list_projects(
limit=limit,
total=total,
total_pages=total_pages,
has_more=page < total_pages,
),
)
@@ -570,7 +571,7 @@ def delete_project(
# Audit log (after commit)
_log_audit(
db,
action="delete_project",
action="project.delete",
resource=f"project/{project_name}",
user_id=user_id,
source_ip=request.client.host if request.client else None,
@@ -755,6 +756,7 @@ def list_packages(
limit=limit,
total=total,
total_pages=total_pages,
has_more=page < total_pages,
),
)
@@ -956,7 +958,7 @@ def delete_package(
# Audit log (after commit)
_log_audit(
db,
action="delete_package",
action="package.delete",
resource=f"project/{project_name}/{package_name}",
user_id=user_id,
source_ip=request.client.host if request.client else None,
@@ -1131,6 +1133,7 @@ def upload_artifact(
deduplicated=deduplicated,
)
db.add(upload)
db.flush() # Flush to get upload ID
# Create or update tag if provided (with ref_count management and history)
if tag:
@@ -1146,7 +1149,7 @@ def upload_artifact(
# Audit log
_log_audit(
db,
action="upload",
action="artifact.upload",
resource=f"project/{project_name}/{package_name}/artifact/{storage_result.sha256[:12]}",
user_id=user_id,
source_ip=request.client.host if request.client else None,
@@ -1174,6 +1177,10 @@ def upload_artifact(
format_metadata=artifact.artifact_metadata,
deduplicated=deduplicated,
ref_count=artifact.ref_count,
upload_id=upload.id,
content_type=artifact.content_type,
original_name=artifact.original_name,
created_at=artifact.created_at,
)
@@ -1260,7 +1267,7 @@ def init_resumable_upload(
# Audit log
_log_audit(
db,
action="upload",
action="artifact.upload",
resource=f"project/{project_name}/{package_name}/artifact/{init_request.expected_hash[:12]}",
user_id=user_id,
source_ip=request.client.host if request.client else None,
@@ -1841,6 +1848,7 @@ def list_tags(
limit=limit,
total=total,
total_pages=total_pages,
has_more=page < total_pages,
),
)
@@ -2071,7 +2079,7 @@ def delete_tag(
artifact = db.query(Artifact).filter(Artifact.id == artifact_id).first()
_log_audit(
db,
action="delete_tag",
action="tag.delete",
resource=f"project/{project_name}/{package_name}/tag/{tag_name}",
user_id=user_id,
source_ip=request.client.host if request.client else None,
@@ -2208,6 +2216,7 @@ def list_package_artifacts(
limit=limit,
total=total,
total_pages=total_pages,
has_more=page < total_pages,
),
)
@@ -3181,6 +3190,7 @@ def list_audit_logs(
limit=limit,
total=total,
total_pages=total_pages,
has_more=page < total_pages,
),
)
@@ -3231,6 +3241,7 @@ def list_project_audit_logs(
limit=limit,
total=total,
total_pages=total_pages,
has_more=page < total_pages,
),
)
@@ -3290,6 +3301,7 @@ def list_package_audit_logs(
limit=limit,
total=total,
total_pages=total_pages,
has_more=page < total_pages,
),
)
@@ -3299,6 +3311,191 @@ def list_package_audit_logs(
# =============================================================================
@router.get(
    "/api/v1/uploads",
    response_model=PaginatedResponse[UploadHistoryResponse],
)
def list_all_uploads(
    request: Request,
    project: Optional[str] = Query(None, description="Filter by project name"),
    package: Optional[str] = Query(None, description="Filter by package name"),
    uploaded_by: Optional[str] = Query(None, description="Filter by uploader"),
    from_date: Optional[datetime] = Query(None, alias="from", description="Start date"),
    to_date: Optional[datetime] = Query(None, alias="to", description="End date"),
    deduplicated: Optional[bool] = Query(
        None, description="Filter by deduplication status"
    ),
    page: int = Query(1, ge=1),
    limit: int = Query(20, ge=1, le=100),
    db: Session = Depends(get_db),
):
    """
    List all upload events globally (admin endpoint).

    Supports filtering by:
    - project: Filter by project name
    - package: Filter by package name (requires project)
    - uploaded_by: Filter by user ID
    - from/to: Filter by timestamp range
    - deduplicated: Filter by deduplication status

    Raises:
        HTTPException: 400 if ``package`` is supplied without ``project``.
    """
    # Enforce the documented contract: a bare package filter is ambiguous
    # because the same package name can exist in many projects.
    if package and not project:
        raise HTTPException(
            status_code=400,
            detail="The 'package' filter requires the 'project' filter",
        )

    # Join through Package -> Project and to Artifact so each row carries
    # everything UploadHistoryResponse needs in a single query.
    query = (
        db.query(Upload, Package, Project, Artifact)
        .join(Package, Upload.package_id == Package.id)
        .join(Project, Package.project_id == Project.id)
        .join(Artifact, Upload.artifact_id == Artifact.id)
    )

    # Apply filters
    if project:
        query = query.filter(Project.name == project)
    if package:
        query = query.filter(Package.name == package)
    if uploaded_by:
        query = query.filter(Upload.uploaded_by == uploaded_by)
    if from_date:
        query = query.filter(Upload.uploaded_at >= from_date)
    if to_date:
        query = query.filter(Upload.uploaded_at <= to_date)
    if deduplicated is not None:
        query = query.filter(Upload.deduplicated == deduplicated)

    total = query.count()
    # An empty result still reports one (empty) page for pagination UIs.
    total_pages = math.ceil(total / limit) if total > 0 else 1

    results = (
        query.order_by(Upload.uploaded_at.desc())
        .offset((page - 1) * limit)
        .limit(limit)
        .all()
    )

    items = [
        UploadHistoryResponse(
            id=upload.id,
            artifact_id=upload.artifact_id,
            package_id=upload.package_id,
            package_name=pkg.name,
            project_name=proj.name,
            original_name=upload.original_name,
            tag_name=upload.tag_name,
            uploaded_at=upload.uploaded_at,
            uploaded_by=upload.uploaded_by,
            source_ip=upload.source_ip,
            # Legacy rows may have NULL here; normalize to False.
            deduplicated=upload.deduplicated or False,
            artifact_size=artifact.size,
            artifact_content_type=artifact.content_type,
        )
        for upload, pkg, proj, artifact in results
    ]

    return PaginatedResponse(
        items=items,
        pagination=PaginationMeta(
            page=page,
            limit=limit,
            total=total,
            total_pages=total_pages,
            has_more=page < total_pages,
        ),
    )
@router.get(
    "/api/v1/project/{project_name}/uploads",
    response_model=PaginatedResponse[UploadHistoryResponse],
)
def list_project_uploads(
    project_name: str,
    package: Optional[str] = Query(None, description="Filter by package name"),
    uploaded_by: Optional[str] = Query(None, description="Filter by uploader"),
    from_date: Optional[datetime] = Query(None, alias="from", description="Start date"),
    to_date: Optional[datetime] = Query(None, alias="to", description="End date"),
    deduplicated: Optional[bool] = Query(
        None, description="Filter by deduplication status"
    ),
    page: int = Query(1, ge=1),
    limit: int = Query(20, ge=1, le=100),
    db: Session = Depends(get_db),
):
    """
    List upload events for a specific project.

    Supports filtering by:
    - package: Filter by package name within the project
    - uploaded_by: Filter by user ID
    - from/to: Filter by timestamp range
    - deduplicated: Filter by deduplication status

    Raises:
        HTTPException: 404 if the project does not exist.
    """
    project = db.query(Project).filter(Project.name == project_name).first()
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")

    # Package is already joined, so scope to the project (and optional
    # package name) directly — no need for a separate package-id subquery,
    # whose implicit Query->subquery coercion is deprecated in SQLAlchemy 2.0.
    query = (
        db.query(Upload, Package, Artifact)
        .join(Package, Upload.package_id == Package.id)
        .join(Artifact, Upload.artifact_id == Artifact.id)
        .filter(Package.project_id == project.id)
    )
    if package:
        query = query.filter(Package.name == package)

    if uploaded_by:
        query = query.filter(Upload.uploaded_by == uploaded_by)
    if from_date:
        query = query.filter(Upload.uploaded_at >= from_date)
    if to_date:
        query = query.filter(Upload.uploaded_at <= to_date)
    if deduplicated is not None:
        query = query.filter(Upload.deduplicated == deduplicated)

    total = query.count()
    # An empty result still reports one (empty) page for pagination UIs.
    total_pages = math.ceil(total / limit) if total > 0 else 1

    results = (
        query.order_by(Upload.uploaded_at.desc())
        .offset((page - 1) * limit)
        .limit(limit)
        .all()
    )

    items = [
        UploadHistoryResponse(
            id=upload.id,
            artifact_id=upload.artifact_id,
            package_id=upload.package_id,
            package_name=pkg.name,
            project_name=project_name,
            original_name=upload.original_name,
            tag_name=upload.tag_name,
            uploaded_at=upload.uploaded_at,
            uploaded_by=upload.uploaded_by,
            source_ip=upload.source_ip,
            # Legacy rows may have NULL here; normalize to False.
            deduplicated=upload.deduplicated or False,
            artifact_size=artifact.size,
            artifact_content_type=artifact.content_type,
        )
        for upload, pkg, artifact in results
    ]

    return PaginatedResponse(
        items=items,
        pagination=PaginationMeta(
            page=page,
            limit=limit,
            total=total,
            total_pages=total_pages,
            has_more=page < total_pages,
        ),
    )
@router.get(
"/api/v1/project/{project_name}/{package_name}/uploads",
response_model=PaginatedResponse[UploadHistoryResponse],
@@ -3371,6 +3568,7 @@ def list_package_uploads(
limit=limit,
total=total,
total_pages=total_pages,
has_more=page < total_pages,
),
)
@@ -3436,6 +3634,7 @@ def list_artifact_uploads(
limit=limit,
total=total,
total_pages=total_pages,
has_more=page < total_pages,
),
)