Complete metadata query EPIC: add upload endpoints, enhance response fields, standardize audit actions (#18, #19, #20, #22)
- Add GET /api/v1/uploads global endpoint with project/package/user/date filters
- Add GET /api/v1/project/{project}/uploads project-level uploads endpoint
- Add has_more field to PaginationMeta for pagination UI
- Add upload_id, content_type, original_name, created_at to UploadResponse
- Standardize audit action names: project.delete, package.delete, tag.delete, artifact.upload
- Add 13 new integration tests for upload query endpoints and response fields
- 130 tests passing
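For a sense of the new query surface, a minimal client sketch (hypothetical base URL; the `requests` library is used purely for illustration and is not part of this change):

import requests

BASE = "http://localhost:8000"  # hypothetical deployment URL

# Global upload history, filtered by project and uploader (filters per #18)
resp = requests.get(
    f"{BASE}/api/v1/uploads",
    params={"project": "demo", "uploaded_by": "alice", "page": 1, "limit": 20},
)
resp.raise_for_status()
body = resp.json()

for item in body["items"]:
    print(item["project_name"], item["package_name"], item["original_name"])

# has_more (#18) turns "is there a next page?" into a single boolean check
if body["pagination"]["has_more"]:
    print("more pages available")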
CHANGELOG.md
@@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## [Unreleased]
 
 ### Added
+- Added global uploads query endpoint `GET /api/v1/uploads` with project/package/user/date filters (#18)
+- Added project-level uploads endpoint `GET /api/v1/project/{project}/uploads` (#18)
+- Added `has_more` field to pagination metadata for easier pagination UI (#18)
+- Added `upload_id`, `content_type`, `original_name`, `created_at` fields to upload response (#19)
 - Added audit log API endpoints with filtering and pagination (#20)
   - `GET /api/v1/audit-logs` - list all audit logs with action/resource/user/date filters
   - `GET /api/v1/projects/{project}/audit-logs` - project-scoped audit logs
@@ -16,10 +20,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
   - `GET /api/v1/artifact/{id}/uploads` - list all uploads of a specific artifact
 - Added artifact provenance endpoint `GET /api/v1/artifact/{id}/history` (#20)
   - Returns full artifact history including packages, tags, and upload events
-- Added audit logging for project.create, package.create, tag.create, tag.update actions (#20)
+- Added audit logging for project.create, package.create, tag.create, tag.update, artifact.upload actions (#20)
 - Added `AuditLogResponse`, `UploadHistoryResponse`, `ArtifactProvenanceResponse` schemas (#20)
 - Added `TagHistoryDetailResponse` schema with artifact metadata (#20)
-- Added 18 integration tests for audit log and history endpoints (#20)
+- Added 31 integration tests for audit log, history, and upload query endpoints (#22)
 
 ### Changed
 
 - Standardized audit action naming to `{entity}.{action}` pattern (project.delete, package.delete, tag.delete) (#20)
 - Added `StorageBackend` protocol/interface for backend-agnostic storage (#33)
 - Added `health_check()` method to storage backend with `/health` endpoint integration (#33)
 - Added `verify_integrity()` method for post-upload hash validation (#33)
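As a sketch of how the standardized `{entity}.{action}` names combine with the audit-log filters listed above (hypothetical host; `requests` is illustrative only):

import requests

# Fetch audit entries for the renamed upload action via the action filter
resp = requests.get(
    "http://localhost:8000/api/v1/audit-logs",
    params={"action": "artifact.upload", "page": 1, "limit": 20},
)
resp.raise_for_status()
print(resp.json()["pagination"]["total"])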
@@ -482,6 +482,7 @@ def list_projects(
             limit=limit,
             total=total,
             total_pages=total_pages,
+            has_more=page < total_pages,
         ),
     )
 
@@ -570,7 +571,7 @@ def delete_project(
     # Audit log (after commit)
     _log_audit(
         db,
-        action="delete_project",
+        action="project.delete",
         resource=f"project/{project_name}",
         user_id=user_id,
        source_ip=request.client.host if request.client else None,
@@ -755,6 +756,7 @@ def list_packages(
             limit=limit,
             total=total,
             total_pages=total_pages,
+            has_more=page < total_pages,
         ),
     )
 
@@ -956,7 +958,7 @@ def delete_package(
     # Audit log (after commit)
     _log_audit(
         db,
-        action="delete_package",
+        action="package.delete",
         resource=f"project/{project_name}/{package_name}",
         user_id=user_id,
         source_ip=request.client.host if request.client else None,
@@ -1131,6 +1133,7 @@ def upload_artifact(
         deduplicated=deduplicated,
     )
     db.add(upload)
+    db.flush()  # Flush to get upload ID
 
     # Create or update tag if provided (with ref_count management and history)
     if tag:
@@ -1146,7 +1149,7 @@ def upload_artifact(
     # Audit log
     _log_audit(
         db,
-        action="upload",
+        action="artifact.upload",
         resource=f"project/{project_name}/{package_name}/artifact/{storage_result.sha256[:12]}",
         user_id=user_id,
         source_ip=request.client.host if request.client else None,
@@ -1174,6 +1177,10 @@ def upload_artifact(
         format_metadata=artifact.artifact_metadata,
         deduplicated=deduplicated,
         ref_count=artifact.ref_count,
+        upload_id=upload.id,
+        content_type=artifact.content_type,
+        original_name=artifact.original_name,
+        created_at=artifact.created_at,
     )
 
 
@@ -1260,7 +1267,7 @@ def init_resumable_upload(
     # Audit log
     _log_audit(
         db,
-        action="upload",
+        action="artifact.upload",
        resource=f"project/{project_name}/{package_name}/artifact/{init_request.expected_hash[:12]}",
         user_id=user_id,
         source_ip=request.client.host if request.client else None,
@@ -1841,6 +1848,7 @@ def list_tags(
             limit=limit,
             total=total,
             total_pages=total_pages,
+            has_more=page < total_pages,
         ),
     )
 
@@ -2071,7 +2079,7 @@ def delete_tag(
     artifact = db.query(Artifact).filter(Artifact.id == artifact_id).first()
     _log_audit(
         db,
-        action="delete_tag",
+        action="tag.delete",
         resource=f"project/{project_name}/{package_name}/tag/{tag_name}",
         user_id=user_id,
         source_ip=request.client.host if request.client else None,
@@ -2208,6 +2216,7 @@ def list_package_artifacts(
             limit=limit,
             total=total,
             total_pages=total_pages,
+            has_more=page < total_pages,
         ),
     )
 
@@ -3181,6 +3190,7 @@ def list_audit_logs(
             limit=limit,
             total=total,
             total_pages=total_pages,
+            has_more=page < total_pages,
         ),
     )
 
@@ -3231,6 +3241,7 @@ def list_project_audit_logs(
             limit=limit,
             total=total,
             total_pages=total_pages,
+            has_more=page < total_pages,
         ),
     )
 
@@ -3290,6 +3301,7 @@ def list_package_audit_logs(
             limit=limit,
             total=total,
             total_pages=total_pages,
+            has_more=page < total_pages,
         ),
     )
 
@@ -3299,6 +3311,191 @@ def list_package_audit_logs(
 # =============================================================================
 
 
+@router.get(
+    "/api/v1/uploads",
+    response_model=PaginatedResponse[UploadHistoryResponse],
+)
+def list_all_uploads(
+    request: Request,
+    project: Optional[str] = Query(None, description="Filter by project name"),
+    package: Optional[str] = Query(None, description="Filter by package name"),
+    uploaded_by: Optional[str] = Query(None, description="Filter by uploader"),
+    from_date: Optional[datetime] = Query(None, alias="from", description="Start date"),
+    to_date: Optional[datetime] = Query(None, alias="to", description="End date"),
+    deduplicated: Optional[bool] = Query(
+        None, description="Filter by deduplication status"
+    ),
+    page: int = Query(1, ge=1),
+    limit: int = Query(20, ge=1, le=100),
+    db: Session = Depends(get_db),
+):
+    """
+    List all upload events globally (admin endpoint).
+
+    Supports filtering by:
+    - project: Filter by project name
+    - package: Filter by package name (requires project)
+    - uploaded_by: Filter by user ID
+    - from/to: Filter by timestamp range
+    - deduplicated: Filter by deduplication status
+    """
+    query = (
+        db.query(Upload, Package, Project, Artifact)
+        .join(Package, Upload.package_id == Package.id)
+        .join(Project, Package.project_id == Project.id)
+        .join(Artifact, Upload.artifact_id == Artifact.id)
+    )
+
+    # Apply filters
+    if project:
+        query = query.filter(Project.name == project)
+    if package:
+        query = query.filter(Package.name == package)
+    if uploaded_by:
+        query = query.filter(Upload.uploaded_by == uploaded_by)
+    if from_date:
+        query = query.filter(Upload.uploaded_at >= from_date)
+    if to_date:
+        query = query.filter(Upload.uploaded_at <= to_date)
+    if deduplicated is not None:
+        query = query.filter(Upload.deduplicated == deduplicated)
+
+    total = query.count()
+    total_pages = math.ceil(total / limit) if total > 0 else 1
+
+    results = (
+        query.order_by(Upload.uploaded_at.desc())
+        .offset((page - 1) * limit)
+        .limit(limit)
+        .all()
+    )
+
+    items = [
+        UploadHistoryResponse(
+            id=upload.id,
+            artifact_id=upload.artifact_id,
+            package_id=upload.package_id,
+            package_name=pkg.name,
+            project_name=proj.name,
+            original_name=upload.original_name,
+            tag_name=upload.tag_name,
+            uploaded_at=upload.uploaded_at,
+            uploaded_by=upload.uploaded_by,
+            source_ip=upload.source_ip,
+            deduplicated=upload.deduplicated or False,
+            artifact_size=artifact.size,
+            artifact_content_type=artifact.content_type,
+        )
+        for upload, pkg, proj, artifact in results
+    ]
+
+    return PaginatedResponse(
+        items=items,
+        pagination=PaginationMeta(
+            page=page,
+            limit=limit,
+            total=total,
+            total_pages=total_pages,
+            has_more=page < total_pages,
+        ),
+    )
+
+
+@router.get(
+    "/api/v1/project/{project_name}/uploads",
+    response_model=PaginatedResponse[UploadHistoryResponse],
+)
+def list_project_uploads(
+    project_name: str,
+    package: Optional[str] = Query(None, description="Filter by package name"),
+    uploaded_by: Optional[str] = Query(None, description="Filter by uploader"),
+    from_date: Optional[datetime] = Query(None, alias="from", description="Start date"),
+    to_date: Optional[datetime] = Query(None, alias="to", description="End date"),
+    deduplicated: Optional[bool] = Query(
+        None, description="Filter by deduplication status"
+    ),
+    page: int = Query(1, ge=1),
+    limit: int = Query(20, ge=1, le=100),
+    db: Session = Depends(get_db),
+):
+    """
+    List upload events for a specific project.
+
+    Supports filtering by:
+    - package: Filter by package name within the project
+    - uploaded_by: Filter by user ID
+    - from/to: Filter by timestamp range
+    - deduplicated: Filter by deduplication status
+    """
+    project = db.query(Project).filter(Project.name == project_name).first()
+    if not project:
+        raise HTTPException(status_code=404, detail="Project not found")
+
+    # Get all package IDs for this project
+    package_ids_query = db.query(Package.id).filter(Package.project_id == project.id)
+
+    if package:
+        package_ids_query = package_ids_query.filter(Package.name == package)
+
+    package_ids = package_ids_query.subquery()
+
+    query = (
+        db.query(Upload, Package, Artifact)
+        .join(Package, Upload.package_id == Package.id)
+        .join(Artifact, Upload.artifact_id == Artifact.id)
+        .filter(Upload.package_id.in_(package_ids))
+    )
+
+    if uploaded_by:
+        query = query.filter(Upload.uploaded_by == uploaded_by)
+    if from_date:
+        query = query.filter(Upload.uploaded_at >= from_date)
+    if to_date:
+        query = query.filter(Upload.uploaded_at <= to_date)
+    if deduplicated is not None:
+        query = query.filter(Upload.deduplicated == deduplicated)
+
+    total = query.count()
+    total_pages = math.ceil(total / limit) if total > 0 else 1
+
+    results = (
+        query.order_by(Upload.uploaded_at.desc())
+        .offset((page - 1) * limit)
+        .limit(limit)
+        .all()
+    )
+
+    items = [
+        UploadHistoryResponse(
+            id=upload.id,
+            artifact_id=upload.artifact_id,
+            package_id=upload.package_id,
+            package_name=pkg.name,
+            project_name=project_name,
+            original_name=upload.original_name,
+            tag_name=upload.tag_name,
+            uploaded_at=upload.uploaded_at,
+            uploaded_by=upload.uploaded_by,
+            source_ip=upload.source_ip,
+            deduplicated=upload.deduplicated or False,
+            artifact_size=artifact.size,
+            artifact_content_type=artifact.content_type,
+        )
+        for upload, pkg, artifact in results
+    ]
+
+    return PaginatedResponse(
+        items=items,
+        pagination=PaginationMeta(
+            page=page,
+            limit=limit,
+            total=total,
+            total_pages=total_pages,
+            has_more=page < total_pages,
+        ),
+    )
+
+
 @router.get(
     "/api/v1/project/{project_name}/{package_name}/uploads",
     response_model=PaginatedResponse[UploadHistoryResponse],
@@ -3371,6 +3568,7 @@ def list_package_uploads(
             limit=limit,
             total=total,
             total_pages=total_pages,
+            has_more=page < total_pages,
         ),
     )
 
@@ -3436,6 +3634,7 @@ def list_artifact_uploads(
             limit=limit,
             total=total,
             total_pages=total_pages,
+            has_more=page < total_pages,
         ),
     )
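Taken together, the `has_more` flag added to every paginated listing above supports a simple exhaustive-fetch loop. A minimal client-side sketch (hypothetical helper name and base URL; `requests` for illustration):

import requests

def iter_uploads(base_url, **filters):
    """Yield upload records page by page until has_more goes False (sketch)."""
    page = 1
    while True:
        resp = requests.get(
            f"{base_url}/api/v1/uploads",
            params={**filters, "page": page, "limit": 100},  # le=100 per the endpoint
        )
        resp.raise_for_status()
        body = resp.json()
        yield from body["items"]
        if not body["pagination"]["has_more"]:
            break
        page += 1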
@@ -12,6 +12,7 @@ class PaginationMeta(BaseModel):
     limit: int
     total: int
     total_pages: int
+    has_more: bool = False  # True if there are more pages after current page
 
 
 class PaginatedResponse(BaseModel, Generic[T]):
@@ -341,6 +342,11 @@ class UploadResponse(BaseModel):
     format_metadata: Optional[Dict[str, Any]] = None
     deduplicated: bool = False
     ref_count: int = 1  # Current reference count after this upload
+    # Enhanced metadata (Issue #19)
+    upload_id: Optional[UUID] = None  # UUID of the upload record
+    content_type: Optional[str] = None
+    original_name: Optional[str] = None
+    created_at: Optional[datetime] = None
 
 
 # Resumable upload schemas
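For context, an upload response carrying the new #19 fields might look like this (illustrative values only; the remaining UploadResponse fields are omitted):

# Illustrative payload; every value here is made up
example_upload_response = {
    "deduplicated": False,
    "ref_count": 1,
    "upload_id": "1b4e28ba-2fa1-11d2-883f-0016d3cca427",  # made-up UUID
    "content_type": "text/plain",
    "original_name": "example.txt",
    "created_at": "2024-01-01T12:00:00Z",
}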
@@ -307,3 +307,216 @@ class TestArtifactProvenance:
         assert "project_name" in tag
         assert "package_name" in tag
         assert "tag_name" in tag
+
+
+class TestGlobalUploadsEndpoint:
+    """Tests for /api/v1/uploads endpoint (global admin)."""
+
+    @pytest.mark.integration
+    def test_global_uploads_returns_200(self, integration_client):
+        """Test that global uploads endpoint returns 200."""
+        response = integration_client.get("/api/v1/uploads")
+        assert response.status_code == 200
+
+        data = response.json()
+        assert "items" in data
+        assert "pagination" in data
+
+    @pytest.mark.integration
+    def test_global_uploads_pagination(self, integration_client):
+        """Test that global uploads endpoint respects pagination."""
+        response = integration_client.get("/api/v1/uploads?limit=5&page=1")
+        assert response.status_code == 200
+
+        data = response.json()
+        assert len(data["items"]) <= 5
+        assert data["pagination"]["limit"] == 5
+        assert data["pagination"]["page"] == 1
+
+    @pytest.mark.integration
+    def test_global_uploads_filter_by_project(self, integration_client, test_package):
+        """Test filtering global uploads by project name."""
+        project_name, package_name = test_package
+
+        # Upload a file
+        upload_test_file(
+            integration_client,
+            project_name,
+            package_name,
+            b"global filter test",
+            "global.txt",
+        )
+
+        response = integration_client.get(f"/api/v1/uploads?project={project_name}")
+        assert response.status_code == 200
+
+        data = response.json()
+        for item in data["items"]:
+            assert item["project_name"] == project_name
+
+    @pytest.mark.integration
+    def test_global_uploads_filter_by_uploader(self, integration_client, test_package):
+        """Test filtering global uploads by uploaded_by."""
+        project_name, package_name = test_package
+
+        # Upload a file
+        upload_test_file(
+            integration_client,
+            project_name,
+            package_name,
+            b"uploader filter test",
+            "uploader.txt",
+        )
+
+        # Filter by anonymous (default user)
+        response = integration_client.get("/api/v1/uploads?uploaded_by=anonymous")
+        assert response.status_code == 200
+
+        data = response.json()
+        for item in data["items"]:
+            assert item["uploaded_by"] == "anonymous"
+
+    @pytest.mark.integration
+    def test_global_uploads_has_more_field(self, integration_client):
+        """Test that pagination includes has_more field."""
+        response = integration_client.get("/api/v1/uploads?limit=1")
+        assert response.status_code == 200
+
+        data = response.json()
+        assert "has_more" in data["pagination"]
+        assert isinstance(data["pagination"]["has_more"], bool)
+
+
+class TestProjectUploadsEndpoint:
+    """Tests for /api/v1/project/{project}/uploads endpoint."""
+
+    @pytest.mark.integration
+    def test_project_uploads_returns_200(self, integration_client, test_project):
+        """Test that project uploads endpoint returns 200."""
+        response = integration_client.get(f"/api/v1/project/{test_project}/uploads")
+        assert response.status_code == 200
+
+        data = response.json()
+        assert "items" in data
+        assert "pagination" in data
+
+    @pytest.mark.integration
+    def test_project_uploads_after_upload(self, integration_client, test_package):
+        """Test that uploads are recorded in project uploads."""
+        project_name, package_name = test_package
+
+        # Upload a file
+        upload_test_file(
+            integration_client,
+            project_name,
+            package_name,
+            b"project uploads test",
+            "project.txt",
+        )
+
+        response = integration_client.get(f"/api/v1/project/{project_name}/uploads")
+        assert response.status_code == 200
+
+        data = response.json()
+        assert len(data["items"]) >= 1
+
+        # Verify project name matches
+        for item in data["items"]:
+            assert item["project_name"] == project_name
+
+    @pytest.mark.integration
+    def test_project_uploads_filter_by_package(self, integration_client, test_package):
+        """Test filtering project uploads by package name."""
+        project_name, package_name = test_package
+
+        # Upload a file
+        upload_test_file(
+            integration_client,
+            project_name,
+            package_name,
+            b"package filter test",
+            "pkgfilter.txt",
+        )
+
+        response = integration_client.get(
+            f"/api/v1/project/{project_name}/uploads?package={package_name}"
+        )
+        assert response.status_code == 200
+
+        data = response.json()
+        for item in data["items"]:
+            assert item["package_name"] == package_name
+
+    @pytest.mark.integration
+    def test_project_uploads_not_found(self, integration_client):
+        """Test that non-existent project returns 404."""
+        response = integration_client.get("/api/v1/project/nonexistent/uploads")
+        assert response.status_code == 404
+
+
+class TestUploadResponseFields:
+    """Tests for enhanced UploadResponse fields (Issue #19)."""
+
+    @pytest.mark.integration
+    def test_upload_response_has_upload_id(self, integration_client, test_package):
+        """Test that upload response includes upload_id."""
+        project_name, package_name = test_package
+
+        upload_result = upload_test_file(
+            integration_client,
+            project_name,
+            package_name,
+            b"upload id test",
+            "uploadid.txt",
+        )
+
+        # upload_id should be present
+        assert "upload_id" in upload_result
+        assert upload_result["upload_id"] is not None
+
+    @pytest.mark.integration
+    def test_upload_response_has_content_type(self, integration_client, test_package):
+        """Test that upload response includes content_type."""
+        project_name, package_name = test_package
+
+        upload_result = upload_test_file(
+            integration_client,
+            project_name,
+            package_name,
+            b"content type test",
+            "content.txt",
+        )
+
+        assert "content_type" in upload_result
+
+    @pytest.mark.integration
+    def test_upload_response_has_original_name(self, integration_client, test_package):
+        """Test that upload response includes original_name."""
+        project_name, package_name = test_package
+
+        upload_result = upload_test_file(
+            integration_client,
+            project_name,
+            package_name,
+            b"original name test",
+            "originalname.txt",
+        )
+
+        assert "original_name" in upload_result
+        assert upload_result["original_name"] == "originalname.txt"
+
+    @pytest.mark.integration
+    def test_upload_response_has_created_at(self, integration_client, test_package):
+        """Test that upload response includes created_at."""
+        project_name, package_name = test_package
+
+        upload_result = upload_test_file(
+            integration_client,
+            project_name,
+            package_name,
+            b"created at test",
+            "createdat.txt",
+        )
+
+        assert "created_at" in upload_result
+        assert upload_result["created_at"] is not None