Add tags prominence and SIM source grouping features
Database changes:
- Add sim_source_id column to artifacts table for grouping multiple artifacts
- Create Alembic migration (001_add_sim_source_id) for schema update
- Add Alembic env.py for migration support with environment-based DB URLs

API enhancements:
- Add sim_source_id parameter to upload endpoint
- Add sim_source_id filter to query endpoint
- Add new /grouped-by-sim-source endpoint for getting artifacts by group
- Update all API documentation to include sim_source_id

UI improvements:
- Make tags a required field and more prominent in the upload form
- Add tags display directly in the artifacts table (below filename)
- Add SIM Source ID field in upload form with helper text for grouping
- Update table to show sim_source_id (falls back to test_suite if null)
- Tags now displayed as inline badges in main table view

Seed data updates:
- Generate sim_source_id for 70% of artifacts to demonstrate grouping
- Multiple artifacts can share the same sim_source_id
- Improved seed data variety with tag combinations

Features:
- Tags are now prominently displayed in both table and detail views
- Multiple artifacts can be grouped by SIM source ID
- Users can filter/query by sim_source_id
- Backward compatible: existing artifacts without sim_source_id still work

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
84
alembic/env.py
Normal file
84
alembic/env.py
Normal file
@@ -0,0 +1,84 @@
|
||||
from logging.config import fileConfig

import os

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# Bring in the application's declarative Base so Alembic's
# autogenerate can compare the models against the live schema.
from app.models.artifact import Base

# Alembic Config object — exposes values from the .ini file in use.
config = context.config

# Allow the database URL to be supplied via the environment
# (e.g. in containers) instead of being hard-coded in alembic.ini.
database_url = os.getenv("DATABASE_URL")
if database_url:
    config.set_main_option("sqlalchemy.url", database_url)

# Configure Python logging from the ini file, when one is present.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Metadata target for 'autogenerate' support.
target_metadata = Base.metadata

# Other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
||||
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the Alembic context with just a URL rather than an
    Engine, so no DBAPI needs to be installed; context.execute()
    calls emit the generated SQL to the script output instead of
    executing against a live database.
    """
    db_url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=db_url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the ini-file configuration (using a
    NullPool, since migrations need only one short-lived connection)
    and binds that connection to the Alembic context.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()
|
||||
|
||||
|
||||
# Entry point: Alembic selects offline mode for `--sql` invocations.
if not context.is_offline_mode():
    run_migrations_online()
else:
    run_migrations_offline()
|
||||
@@ -1,7 +1,7 @@
|
||||
from fastapi import APIRouter, UploadFile, File, Form, Depends, HTTPException, Query
|
||||
from fastapi.responses import StreamingResponse
|
||||
from sqlalchemy.orm import Session
|
||||
from typing import List, Optional
|
||||
from typing import List, Optional, Dict
|
||||
import uuid
|
||||
import json
|
||||
import io
|
||||
@@ -36,6 +36,7 @@ async def upload_artifact(
|
||||
test_suite: Optional[str] = Form(None),
|
||||
test_config: Optional[str] = Form(None),
|
||||
test_result: Optional[str] = Form(None),
|
||||
sim_source_id: Optional[str] = Form(None),
|
||||
custom_metadata: Optional[str] = Form(None),
|
||||
description: Optional[str] = Form(None),
|
||||
tags: Optional[str] = Form(None),
|
||||
@@ -51,6 +52,7 @@ async def upload_artifact(
|
||||
- **test_suite**: Test suite identifier
|
||||
- **test_config**: JSON string of test configuration
|
||||
- **test_result**: Test result (pass, fail, skip, error)
|
||||
- **sim_source_id**: SIM source ID to group multiple artifacts
|
||||
- **custom_metadata**: JSON string of additional metadata
|
||||
- **description**: Text description of the artifact
|
||||
- **tags**: JSON array of tags (as string)
|
||||
@@ -88,6 +90,7 @@ async def upload_artifact(
|
||||
test_suite=test_suite,
|
||||
test_config=test_config_dict,
|
||||
test_result=test_result,
|
||||
sim_source_id=sim_source_id,
|
||||
custom_metadata=metadata_dict,
|
||||
description=description,
|
||||
tags=tags_list,
|
||||
@@ -194,6 +197,7 @@ async def query_artifacts(query: ArtifactQuery, db: Session = Depends(get_db)):
|
||||
- **test_name**: Filter by test name
|
||||
- **test_suite**: Filter by test suite
|
||||
- **test_result**: Filter by test result
|
||||
- **sim_source_id**: Filter by SIM source ID
|
||||
- **tags**: Filter by tags (must contain all specified tags)
|
||||
- **start_date**: Filter by creation date (from)
|
||||
- **end_date**: Filter by creation date (to)
|
||||
@@ -212,6 +216,8 @@ async def query_artifacts(query: ArtifactQuery, db: Session = Depends(get_db)):
|
||||
q = q.filter(Artifact.test_suite == query.test_suite)
|
||||
if query.test_result:
|
||||
q = q.filter(Artifact.test_result == query.test_result)
|
||||
if query.sim_source_id:
|
||||
q = q.filter(Artifact.sim_source_id == query.sim_source_id)
|
||||
if query.tags:
|
||||
for tag in query.tags:
|
||||
q = q.filter(Artifact.tags.contains([tag]))
|
||||
@@ -240,3 +246,20 @@ async def list_artifacts(
|
||||
Artifact.created_at.desc()
|
||||
).offset(offset).limit(limit).all()
|
||||
return artifacts
|
||||
|
||||
|
||||
@router.get("/grouped-by-sim-source", response_model=Dict[str, List[ArtifactResponse]])
async def get_artifacts_grouped_by_sim_source(
    db: Session = Depends(get_db)
):
    """Get all artifacts grouped by SIM source ID"""
    # Newest artifacts first; that ordering carries through into
    # each group because we append in iteration order.
    artifacts = db.query(Artifact).order_by(Artifact.created_at.desc()).all()

    grouped: Dict[str, list] = {}
    for artifact in artifacts:
        # Artifacts with no sim_source_id are collected under "ungrouped".
        bucket_key = artifact.sim_source_id or "ungrouped"
        grouped.setdefault(bucket_key, []).append(artifact)

    return grouped
|
||||
|
||||
@@ -21,6 +21,9 @@ class Artifact(Base):
|
||||
test_config = Column(JSON)
|
||||
test_result = Column(String(50), index=True) # pass, fail, skip, error
|
||||
|
||||
# SIM source grouping - allows multiple artifacts per source
|
||||
sim_source_id = Column(String(100), index=True) # Groups artifacts from same SIM source
|
||||
|
||||
# Additional metadata
|
||||
custom_metadata = Column(JSON)
|
||||
description = Column(Text)
|
||||
|
||||
@@ -8,6 +8,7 @@ class ArtifactCreate(BaseModel):
|
||||
test_suite: Optional[str] = None
|
||||
test_config: Optional[Dict[str, Any]] = None
|
||||
test_result: Optional[str] = None
|
||||
sim_source_id: Optional[str] = None # Groups artifacts from same SIM source
|
||||
custom_metadata: Optional[Dict[str, Any]] = None
|
||||
description: Optional[str] = None
|
||||
tags: Optional[List[str]] = None
|
||||
@@ -26,6 +27,7 @@ class ArtifactResponse(BaseModel):
|
||||
test_suite: Optional[str] = None
|
||||
test_config: Optional[Dict[str, Any]] = None
|
||||
test_result: Optional[str] = None
|
||||
sim_source_id: Optional[str] = None
|
||||
custom_metadata: Optional[Dict[str, Any]] = None
|
||||
description: Optional[str] = None
|
||||
tags: Optional[List[str]] = None
|
||||
@@ -44,6 +46,7 @@ class ArtifactQuery(BaseModel):
|
||||
test_name: Optional[str] = None
|
||||
test_suite: Optional[str] = None
|
||||
test_result: Optional[str] = None
|
||||
sim_source_id: Optional[str] = None
|
||||
tags: Optional[List[str]] = None
|
||||
start_date: Optional[datetime] = None
|
||||
end_date: Optional[datetime] = None
|
||||
|
||||
@@ -109,6 +109,18 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="form-row">
|
||||
<div class="form-group">
|
||||
<label for="sim-source-id">SIM Source ID (for grouping)</label>
|
||||
<input type="text" id="sim-source-id" name="sim_source_id" placeholder="e.g., sim_run_20251015_001">
|
||||
<small>Use same ID for multiple artifacts from same source</small>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="tags">Tags (comma-separated) *</label>
|
||||
<input type="text" id="tags" name="tags" placeholder="e.g., regression, smoke, critical" required>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="form-row">
|
||||
<div class="form-group">
|
||||
<label for="test-result">Test Result</label>
|
||||
@@ -126,11 +138,6 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label for="tags">Tags (comma-separated)</label>
|
||||
<input type="text" id="tags" name="tags" placeholder="e.g., regression, smoke, critical">
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label for="description">Description</label>
|
||||
<textarea id="description" name="description" rows="3" placeholder="Describe this artifact..."></textarea>
|
||||
|
||||
@@ -73,11 +73,12 @@ function displayArtifacts(artifacts) {
|
||||
|
||||
tbody.innerHTML = displayedArtifacts.map(artifact => `
|
||||
<tr>
|
||||
<td>${artifact.test_suite || '-'}</td>
|
||||
<td>${artifact.sim_source_id || artifact.test_suite || '-'}</td>
|
||||
<td>
|
||||
<a href="#" onclick="showDetail(${artifact.id}); return false;" style="color: #60a5fa; text-decoration: none;">
|
||||
${escapeHtml(artifact.filename)}
|
||||
</a>
|
||||
${artifact.tags && artifact.tags.length > 0 ? `<br><div style="margin-top: 5px;">${formatTags(artifact.tags)}</div>` : ''}
|
||||
</td>
|
||||
<td>${formatDate(artifact.created_at)}</td>
|
||||
<td>${artifact.test_name || '-'}</td>
|
||||
@@ -289,9 +290,9 @@ async function uploadArtifact(event) {
|
||||
formData.append('file', fileInput.files[0]);
|
||||
|
||||
// Add optional fields
|
||||
const fields = ['test_name', 'test_suite', 'test_result', 'version', 'description'];
|
||||
const fields = ['test_name', 'test_suite', 'test_result', 'version', 'description', 'sim_source_id'];
|
||||
fields.forEach(field => {
|
||||
const value = form.elements[field].value;
|
||||
const value = form.elements[field]?.value;
|
||||
if (value) formData.append(field, value);
|
||||
});
|
||||
|
||||
|
||||
@@ -112,7 +112,7 @@ def generate_pcap_content() -> bytes:
|
||||
return bytes(pcap_header)
|
||||
|
||||
|
||||
def create_artifact_data(index: int) -> Dict[str, Any]:
|
||||
def create_artifact_data(index: int, sim_source_id: str = None) -> Dict[str, Any]:
|
||||
"""Generate metadata for an artifact"""
|
||||
test_name = random.choice(TEST_NAMES)
|
||||
test_suite = random.choice(TEST_SUITES)
|
||||
@@ -147,6 +147,7 @@ def create_artifact_data(index: int) -> Dict[str, Any]:
|
||||
"test_name": test_name,
|
||||
"test_suite": test_suite,
|
||||
"test_result": test_result,
|
||||
"sim_source_id": sim_source_id,
|
||||
"tags": artifact_tags,
|
||||
"test_config": test_config,
|
||||
"custom_metadata": custom_metadata,
|
||||
@@ -201,6 +202,9 @@ async def generate_seed_data(num_artifacts: int = 50) -> List[int]:
|
||||
print(f"Deployment mode: {settings.deployment_mode}")
|
||||
print(f"Storage backend: {settings.storage_backend}")
|
||||
|
||||
# Generate some SIM source IDs that will be reused (simulating multiple artifacts per source)
|
||||
sim_sources = [f"sim_run_{uuid.uuid4().hex[:8]}" for _ in range(max(num_artifacts // 3, 1))]
|
||||
|
||||
for i in range(num_artifacts):
|
||||
# Randomly choose file type
|
||||
file_type_choice = random.choice(['csv', 'json', 'binary', 'pcap'])
|
||||
@@ -225,8 +229,11 @@ async def generate_seed_data(num_artifacts: int = 50) -> List[int]:
|
||||
# Upload to storage
|
||||
storage_path = await upload_artifact_to_storage(content, filename)
|
||||
|
||||
# Randomly assign a SIM source ID (70% chance of having one, enabling grouping)
|
||||
sim_source_id = random.choice(sim_sources) if random.random() < 0.7 else None
|
||||
|
||||
# Generate metadata
|
||||
artifact_data = create_artifact_data(i)
|
||||
artifact_data = create_artifact_data(i, sim_source_id)
|
||||
|
||||
# Create database record
|
||||
artifact = Artifact(
|
||||
@@ -239,6 +246,7 @@ async def generate_seed_data(num_artifacts: int = 50) -> List[int]:
|
||||
test_suite=artifact_data["test_suite"],
|
||||
test_config=artifact_data["test_config"],
|
||||
test_result=artifact_data["test_result"],
|
||||
sim_source_id=artifact_data["sim_source_id"],
|
||||
custom_metadata=artifact_data["custom_metadata"],
|
||||
description=artifact_data["description"],
|
||||
tags=artifact_data["tags"],
|
||||
|
||||
Reference in New Issue
Block a user