init
This commit is contained in:
0
app/api/__init__.py
Normal file
0
app/api/__init__.py
Normal file
242
app/api/artifacts.py
Normal file
242
app/api/artifacts.py
Normal file
@@ -0,0 +1,242 @@
|
||||
from fastapi import APIRouter, UploadFile, File, Form, Depends, HTTPException, Query
|
||||
from fastapi.responses import StreamingResponse
|
||||
from sqlalchemy.orm import Session
|
||||
from typing import List, Optional
|
||||
import uuid
|
||||
import json
|
||||
import io
|
||||
from datetime import datetime
|
||||
|
||||
from app.database import get_db
|
||||
from app.models.artifact import Artifact
|
||||
from app.schemas.artifact import ArtifactCreate, ArtifactResponse, ArtifactQuery
|
||||
from app.storage import get_storage_backend
|
||||
|
||||
# All artifact endpoints below are mounted under /api/v1/artifacts.
router = APIRouter(prefix="/api/v1/artifacts", tags=["artifacts"])
|
||||
|
||||
|
||||
def get_file_type(filename: str) -> str:
    """Classify a file by its extension.

    Known extensions map to 'csv', 'json', or 'pcap'; anything else
    (including filenames without a dot) falls back to 'binary'.
    """
    known_types = {
        'csv': 'csv',
        'json': 'json',
        'pcap': 'pcap',
        'pcapng': 'pcap',
        'bin': 'binary',
        'dat': 'binary',
    }
    # Last dot-separated segment, case-insensitive; a dotless name yields
    # the whole name, which is not in the table and maps to 'binary'.
    ext = filename.lower().rsplit('.', 1)[-1]
    return known_types.get(ext, 'binary')
|
||||
|
||||
|
||||
@router.post("/upload", response_model=ArtifactResponse, status_code=201)
async def upload_artifact(
    file: UploadFile = File(...),
    test_name: Optional[str] = Form(None),
    test_suite: Optional[str] = Form(None),
    test_config: Optional[str] = Form(None),
    test_result: Optional[str] = Form(None),
    metadata: Optional[str] = Form(None),
    description: Optional[str] = Form(None),
    tags: Optional[str] = Form(None),
    version: Optional[str] = Form(None),
    parent_id: Optional[int] = Form(None),
    db: Session = Depends(get_db)
):
    """
    Upload a new artifact file with metadata

    - **file**: The file to upload (CSV, JSON, binary, PCAP)
    - **test_name**: Name of the test
    - **test_suite**: Test suite identifier
    - **test_config**: JSON string of test configuration
    - **test_result**: Test result (pass, fail, skip, error)
    - **metadata**: JSON string of additional metadata
    - **description**: Text description of the artifact
    - **tags**: JSON array of tags (as string)
    - **version**: Version identifier
    - **parent_id**: ID of parent artifact (for versioning)

    Raises 400 on malformed JSON in test_config/metadata/tags, and 500 on
    storage or database failure. On failure after the file was written to
    storage, the uploaded object is removed (best effort) so the storage
    backend does not accumulate orphaned files.
    """
    # Validate the JSON fields first so malformed input fails fast,
    # before any bytes are written to the storage backend.
    try:
        test_config_dict = json.loads(test_config) if test_config else None
        metadata_dict = json.loads(metadata) if metadata else None
        tags_list = json.loads(tags) if tags else None
    except json.JSONDecodeError as e:
        raise HTTPException(status_code=400, detail=f"Invalid JSON in metadata fields: {str(e)}")

    # Unique storage key; keep the original extension so the stored object
    # remains recognizable in the backend.
    file_extension = file.filename.split('.')[-1] if '.' in file.filename else ''
    object_name = f"{uuid.uuid4()}.{file_extension}" if file_extension else str(uuid.uuid4())

    storage = get_storage_backend()
    storage_path = None  # set only once the object has actually been written

    try:
        file_content = await file.read()
        file_size = len(file_content)

        storage_path = await storage.upload_file(
            io.BytesIO(file_content),
            object_name
        )

        # Create the database record describing the stored file.
        artifact = Artifact(
            filename=file.filename,
            file_type=get_file_type(file.filename),
            file_size=file_size,
            storage_path=storage_path,
            content_type=file.content_type,
            test_name=test_name,
            test_suite=test_suite,
            test_config=test_config_dict,
            test_result=test_result,
            metadata=metadata_dict,
            description=description,
            tags=tags_list,
            version=version,
            parent_id=parent_id
        )

        db.add(artifact)
        db.commit()
        db.refresh(artifact)

        return artifact

    except Exception as e:
        db.rollback()
        # Fix: the original left the uploaded object behind when the DB
        # write failed. Remove it (best effort) so storage and database
        # stay consistent; cleanup errors must not mask the real failure.
        if storage_path is not None:
            try:
                await storage.delete_file(object_name)
            except Exception:
                pass
        raise HTTPException(status_code=500, detail=f"Upload failed: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/{artifact_id}", response_model=ArtifactResponse)
async def get_artifact(artifact_id: int, db: Session = Depends(get_db)):
    """Return the stored metadata for one artifact; 404 if the ID is unknown."""
    record = db.query(Artifact).filter(Artifact.id == artifact_id).first()
    if record is None:
        raise HTTPException(status_code=404, detail="Artifact not found")
    return record
|
||||
|
||||
|
||||
@router.get("/{artifact_id}/download")
async def download_artifact(artifact_id: int, db: Session = Depends(get_db)):
    """Stream the artifact's file contents back to the client.

    404 if the artifact record does not exist; 500 if the storage backend
    cannot produce the file.
    """
    record = db.query(Artifact).filter(Artifact.id == artifact_id).first()
    if record is None:
        raise HTTPException(status_code=404, detail="Artifact not found")

    try:
        backend = get_storage_backend()
        # The storage key is the last path segment of the recorded path.
        key = record.storage_path.split('/')[-1]
        payload = await backend.download_file(key)

        disposition = f'attachment; filename="{record.filename}"'
        return StreamingResponse(
            io.BytesIO(payload),
            media_type=record.content_type or "application/octet-stream",
            headers={"Content-Disposition": disposition}
        )
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Download failed: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/{artifact_id}/url")
async def get_artifact_url(
    artifact_id: int,
    expiration: int = Query(default=3600, ge=60, le=86400),
    db: Session = Depends(get_db)
):
    """Return a presigned download URL for the artifact.

    `expiration` is the URL lifetime in seconds (1 minute to 24 hours).
    404 if the artifact is unknown; 500 if the backend cannot sign a URL.
    """
    record = db.query(Artifact).filter(Artifact.id == artifact_id).first()
    if record is None:
        raise HTTPException(status_code=404, detail="Artifact not found")

    try:
        backend = get_storage_backend()
        key = record.storage_path.split('/')[-1]
        signed_url = await backend.get_file_url(key, expiration)
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to generate URL: {str(e)}")

    return {"url": signed_url, "expires_in": expiration}
|
||||
|
||||
|
||||
@router.delete("/{artifact_id}")
async def delete_artifact(artifact_id: int, db: Session = Depends(get_db)):
    """Remove the artifact's stored file, then its database record.

    404 if the artifact is unknown; 500 (with DB rollback) if either the
    storage deletion or the database deletion fails.
    """
    record = db.query(Artifact).filter(Artifact.id == artifact_id).first()
    if record is None:
        raise HTTPException(status_code=404, detail="Artifact not found")

    try:
        # Storage object goes first, then the metadata row.
        backend = get_storage_backend()
        key = record.storage_path.split('/')[-1]
        await backend.delete_file(key)

        db.delete(record)
        db.commit()
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Delete failed: {str(e)}")

    return {"message": "Artifact deleted successfully"}
|
||||
|
||||
|
||||
@router.post("/query", response_model=List[ArtifactResponse])
async def query_artifacts(query: ArtifactQuery, db: Session = Depends(get_db)):
    """
    Query artifacts with filters

    - **filename**: Filter by filename (partial match)
    - **file_type**: Filter by file type
    - **test_name**: Filter by test name
    - **test_suite**: Filter by test suite
    - **test_result**: Filter by test result
    - **tags**: Filter by tags (must contain all specified tags)
    - **start_date**: Filter by creation date (from)
    - **end_date**: Filter by creation date (to)
    - **limit**: Maximum number of results
    - **offset**: Number of results to skip

    Results are ordered newest-first and paginated by limit/offset.
    """
    # Collect the active criteria, then AND them all onto the query.
    conditions = []
    if query.filename:
        conditions.append(Artifact.filename.ilike(f"%{query.filename}%"))
    if query.file_type:
        conditions.append(Artifact.file_type == query.file_type)
    if query.test_name:
        conditions.append(Artifact.test_name.ilike(f"%{query.test_name}%"))
    if query.test_suite:
        conditions.append(Artifact.test_suite == query.test_suite)
    if query.test_result:
        conditions.append(Artifact.test_result == query.test_result)
    if query.tags:
        # One containment check per tag: the artifact must carry them all.
        conditions.extend(Artifact.tags.contains([tag]) for tag in query.tags)
    if query.start_date:
        conditions.append(Artifact.created_at >= query.start_date)
    if query.end_date:
        conditions.append(Artifact.created_at <= query.end_date)

    stmt = db.query(Artifact)
    for condition in conditions:
        stmt = stmt.filter(condition)

    return (
        stmt.order_by(Artifact.created_at.desc())
        .offset(query.offset)
        .limit(query.limit)
        .all()
    )
|
||||
|
||||
|
||||
@router.get("/", response_model=List[ArtifactResponse])
async def list_artifacts(
    limit: int = Query(default=100, le=1000),
    offset: int = Query(default=0, ge=0),
    db: Session = Depends(get_db)
):
    """List artifacts newest-first, paginated by limit/offset."""
    stmt = db.query(Artifact).order_by(Artifact.created_at.desc())
    page = stmt.offset(offset).limit(limit).all()
    return page
|
||||
Reference in New Issue
Block a user