16 Commits

Author SHA1 Message Date
Mondo Diaz
f6b79a7af0 Add feature branch deployment pipeline
- Add deploy_feature job for ephemeral dev environments
- Use unique identifier (feat-{short_sha}) for K8s resource isolation
- Dynamic hostnames for ingress (orchard-{sha}.common.global.bsf.tools)
- Add cleanup_feature job with on_stop for automatic cleanup on merge
- Add values-dev.yaml with lighter resources for ephemeral deployments
- Refactor deploy_stage to use dynamic image tag from CI
2026-01-13 16:45:48 +00:00
Dane Moss
deda96795b comment out rule block for now 2026-01-13 16:43:49 +00:00
Dane Moss
f555dd6bde try another rule 2026-01-13 16:43:49 +00:00
Dane Moss
36b79485ba update job name 2026-01-13 16:43:49 +00:00
Dane Moss
b58deb4a60 Update .gitlab-ci.yml file 2026-01-13 16:43:49 +00:00
Dane Moss
d3bacfe6b6 Update 2 files
- /helm/orchard/values-stage.yaml
- /.gitlab-ci.yml
2026-01-13 16:43:49 +00:00
Dane Moss
0cc4f25362 Merge branch 'update_changelog' into 'main'
add changelog entry

See merge request esv/bsf/bsf-integration/orchard/orchard-mvp!25
2026-01-12 10:11:50 -07:00
Dane Moss
5c9da9003b add changelog entry 2026-01-12 10:11:50 -07:00
Dane Moss
90bb2a3a39 Merge branch 'feature/auth-system' into 'main'
Implement authentication system with access control UI

Closes #50 and #18

See merge request esv/bsf/bsf-integration/orchard/orchard-mvp!24
2026-01-12 09:52:35 -07:00
Mondo Diaz
617bcbe89c Implement authentication system with access control UI 2026-01-12 09:52:35 -07:00
Mondo Diaz
1cbd335443 Merge branch 'feature/drag-drop-upload' into 'main'
Add drag-and-drop upload component with chunked uploads and offline support

See merge request esv/bsf/bsf-integration/orchard/orchard-mvp!23
2026-01-08 11:59:33 -06:00
Mondo Diaz
10d3694794 Add drag-and-drop upload component with chunked uploads and offline support 2026-01-08 11:59:32 -06:00
Mondo Diaz
bccbc71c13 Merge branch 'feature/download-verification' into 'main'
Add download verification with SHA256 checksum support (#26, #27, #28, #29)

See merge request esv/bsf/bsf-integration/orchard/orchard-mvp!22
2026-01-07 13:36:46 -06:00
Mondo Diaz
35fda65d38 Add download verification with SHA256 checksum support (#26, #27, #28, #29) 2026-01-07 13:36:46 -06:00
Mondo Diaz
08dce6cbb8 Merge branch 'feature/audit-history-api' into 'main'
Metadata database tracks all uploads with project, package, tag, and timestamp queryable via API

See merge request esv/bsf/bsf-integration/orchard/orchard-mvp!21
2026-01-07 12:31:46 -06:00
Mondo Diaz
2f1891cf01 Metadata database tracks all uploads with project, package, tag, and timestamp queryable via API 2026-01-07 12:31:44 -06:00
70 changed files with 22813 additions and 2263 deletions

.gitlab-ci.yml

@@ -49,3 +49,95 @@ python_tests:
# - |
# helm upgrade --install orchard-dev ./helm/orchard --namespace $NAMESPACE -f $VALUES_FILE
.deploy_template: &deploy_template
image: deps.global.bsf.tools/registry-1.docker.io/alpine/k8s:1.29.12
before_script:
- helm version
- helm repo add stable https://charts.helm.sh/stable
- helm repo add bitnami https://charts.bitnami.com/bitnami
- helm repo update
# Stay at the repo root so the chart path and $VALUES_FILE below resolve
- helm dependency update helm/orchard
script:
- echo "Deploying to $ENV environment in namespace $NAMESPACE using chart $VALUES_FILE with agent $AGENT"
- helm upgrade --install orchard-$ENV ./helm/orchard --namespace $NAMESPACE -f $VALUES_FILE --set image.tag=$IMAGE
environment:
name: $ENV
kubernetes:
agent: $AGENT
# Deploy to stage (main branch)
deploy_stage:
stage: deploy
variables:
ENV: stage
NAMESPACE: orch-stage-namespace
VALUES_FILE: "helm/orchard/values-stage.yaml"
AGENT: orchard-stage
IMAGE: $CI_COMMIT_SHA
rules:
- if: '$CI_COMMIT_BRANCH == "main"'
when: always
<<: *deploy_template
# Deploy feature branch to dev namespace
deploy_feature:
<<: *deploy_template # supplies the k8s image and the shared before_script
stage: deploy
variables:
FEATURE_ID: feat-$CI_COMMIT_SHORT_SHA
ENV: feat-$CI_COMMIT_SHORT_SHA
NAMESPACE: orch-dev-namespace
VALUES_FILE: "helm/orchard/values-dev.yaml"
AGENT: orchard-dev
IMAGE: $CI_COMMIT_SHA
FEATURE_HOST: orchard-$CI_COMMIT_SHORT_SHA.common.global.bsf.tools
MINIO_HOST: minio-$CI_COMMIT_SHORT_SHA.common.global.bsf.tools
script:
- echo "Deploying feature branch to $ENV environment"
- |
helm upgrade --install orchard-$FEATURE_ID ./helm/orchard \
--namespace $NAMESPACE \
-f $VALUES_FILE \
--set image.tag=$IMAGE \
--set ingress.hosts[0].host=$FEATURE_HOST \
--set ingress.tls[0].hosts[0]=$FEATURE_HOST \
--set ingress.tls[0].secretName=orchard-$FEATURE_ID-tls \
--set minioIngress.host=$MINIO_HOST \
--set minioIngress.tls.secretName=minio-$FEATURE_ID-tls
environment:
name: review/$CI_COMMIT_REF_SLUG
url: https://orchard-$CI_COMMIT_SHORT_SHA.common.global.bsf.tools
on_stop: cleanup_feature
kubernetes:
agent: $AGENT
rules:
- if: '$CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != "main"'
when: always
# Cleanup feature branch deployment
cleanup_feature:
stage: deploy
variables:
FEATURE_ID: feat-$CI_COMMIT_SHORT_SHA
NAMESPACE: orch-dev-namespace
AGENT: orchard-dev
GIT_STRATEGY: none # the branch may be deleted by the time this stop job runs; skip checkout
image: deps.global.bsf.tools/registry-1.docker.io/alpine/k8s:1.29.12
script:
- echo "Cleaning up feature deployment orchard-$FEATURE_ID"
- helm uninstall orchard-$FEATURE_ID --namespace $NAMESPACE || true
environment:
name: review/$CI_COMMIT_REF_SLUG
action: stop
kubernetes:
agent: $AGENT
rules:
- if: '$CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != "main"'
when: manual
allow_failure: true

CHANGELOG.md

@@ -7,6 +7,147 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
### Added
- Added GitLab CI pipeline for feature branch deployments to dev namespace (#51)
- Added `deploy_feature` job with dynamic hostnames and unique release names (#51)
- Added `cleanup_feature` job with `on_stop` for automatic cleanup on merge (#51)
- Added `values-dev.yaml` Helm values for lightweight ephemeral environments (#51)
## [0.4.0] - 2026-01-12
### Added
- Added user authentication system with session-based login (#50)
- `users` table with password hashing (bcrypt), admin flag, active status
- `sessions` table for web login sessions (24-hour expiry)
- `auth_settings` table for future OIDC configuration
- Default admin user created on first boot (username: admin, password: changeme123)
- Added auth API endpoints (#50)
- `POST /api/v1/auth/login` - Login with username/password
- `POST /api/v1/auth/logout` - Logout and clear session
- `GET /api/v1/auth/me` - Get current user info
- `POST /api/v1/auth/change-password` - Change own password
- Added API key management with user ownership (#50)
- `POST /api/v1/auth/keys` - Create API key (format: `orch_<random>`)
- `GET /api/v1/auth/keys` - List user's API keys
- `DELETE /api/v1/auth/keys/{id}` - Revoke API key
- Added `owner_id`, `scopes`, `description` columns to `api_keys` table
- Added admin user management endpoints (#50)
- `GET /api/v1/admin/users` - List all users
- `POST /api/v1/admin/users` - Create user
- `GET /api/v1/admin/users/{username}` - Get user details
- `PUT /api/v1/admin/users/{username}` - Update user (admin/active status)
- `POST /api/v1/admin/users/{username}/reset-password` - Reset password
- Added `auth.py` module with AuthService class and FastAPI dependencies (#50)
- Added auth schemas: LoginRequest, LoginResponse, UserResponse, APIKeyResponse (#50)
- Added migration `006_auth_tables.sql` for auth database tables (#50)
- Added frontend Login page with session management (#50)
- Added frontend API Keys management page (#50)
- Added frontend Admin Users page (admin-only) (#50)
- Added AuthContext for frontend session state (#50)
- Added user menu to Layout header with login/logout (#50)
- Added 15 integration tests for auth system (#50)
- Added reusable `DragDropUpload` component for artifact uploads (#8)
- Drag-and-drop file selection with visual feedback
- Click-to-browse fallback
- Multiple file upload support with queue management
- Real-time progress indicators with speed and ETA
- File type and size validation (configurable)
- Concurrent upload handling (configurable max concurrent)
- Automatic retry with exponential backoff for network errors
- Individual file status (pending, uploading, complete, failed)
- Retry and remove actions per file
- Auto-dismiss success messages after 5 seconds
- Integrated DragDropUpload into PackagePage replacing basic file input (#8)
- Added frontend testing infrastructure with Vitest and React Testing Library (#14)
- Configured Vitest for React/TypeScript with jsdom
- Added 24 unit tests for DragDropUpload component
- Tests cover: rendering, drag-drop events, file validation, upload queue, progress, errors
- Added chunked upload support for large files (#9)
- Files >100MB automatically use chunked upload API (10MB chunks)
- Client-side SHA256 hash computation via Web Crypto API
- localStorage persistence for resume after browser close
- Deduplication check at upload init phase
- Added offline detection and network resilience (#12)
- Automatic pause when browser goes offline
- Auto-resume when connection restored
- Offline banner UI with status message
- XHR abort on network loss to prevent hung requests
- Added download by artifact ID feature (#10)
- Direct artifact ID input field on package page
- Hex-only input validation with character count
- File size and filename displayed in tag list
- Added backend security tests (#15)
- Path traversal prevention tests for upload/download
- Malformed request handling tests
- Checksum validation tests
- 10 new security-focused integration tests
- Added download verification with `verify` and `verify_mode` query parameters (#26)
- `?verify=true&verify_mode=pre` - Pre-verification: verify before streaming (guaranteed no corrupt data)
- `?verify=true&verify_mode=stream` - Streaming verification: verify while streaming (logs error if mismatch)
- Added checksum response headers to all download endpoints (#27)
- `X-Checksum-SHA256` - SHA256 hash of the artifact
- `X-Content-Length` - File size in bytes
- `X-Checksum-MD5` - MD5 hash (if available)
- `ETag` - Artifact ID (SHA256)
- `Digest` - RFC 3230 format sha-256 hash (base64)
- `X-Verified` - Verification status (true/false/pending)
- Added `checksum.py` module with SHA256 utilities (#26)
- `compute_sha256()` and `compute_sha256_stream()` functions
- `HashingStreamWrapper` for incremental hash computation
- `VerifyingStreamWrapper` for stream verification
- `verify_checksum()` and `verify_checksum_strict()` functions
- `ChecksumMismatchError` exception with context
- Added `get_verified()` and `get_stream_verified()` methods to storage layer (#26)
- Added `logging_config.py` module with structured logging (#28)
- JSON logging format for production
- Request ID tracking via context variables
- Verification failure logging with full context
- Added `log_level` and `log_format` settings to configuration (#28)
- Added 62 unit tests for checksum utilities and verification (#29)
- Added 17 integration tests for download verification API (#29)
- Added global artifacts endpoint `GET /api/v1/artifacts` with project/package/tag/size/date filters (#18)
- Added global tags endpoint `GET /api/v1/tags` with project/package/search/date filters (#18)
- Added wildcard pattern matching (`*`) for tag filters across all endpoints (#18)
- Added comma-separated multi-value support for tag filters (#18)
- Added `search` parameter to `/api/v1/uploads` for filename search (#18)
- Added `tag` filter to `/api/v1/uploads` endpoint (#18)
- Added `sort` and `order` parameters to `/api/v1/uploads` endpoint (#18)
- Added `min_size` and `max_size` filters to package artifacts endpoint (#18)
- Added `sort` and `order` parameters to package artifacts endpoint (#18)
- Added `from` and `to` date filters to package tags endpoint (#18)
- Added `GlobalArtifactResponse` and `GlobalTagResponse` schemas (#18)
- Added S3 object verification before database commit during upload (#19)
- Added S3 object cleanup on database commit failure (#19)
- Added upload duration tracking (`duration_ms` field) (#19)
- Added `User-Agent` header capture during uploads (#19)
- Added `X-Checksum-SHA256` header support for client-side checksum verification (#19)
- Added `status`, `error_message`, `client_checksum` columns to uploads table (#19)
- Added `upload_locks` table for future concurrent upload conflict detection (#19)
- Added consistency check endpoint `GET /api/v1/admin/consistency-check` (#19)
- Added `PUT /api/v1/projects/{project}` endpoint for project updates with audit logging (#20)
- Added `PUT /api/v1/project/{project}/packages/{package}` endpoint for package updates with audit logging (#20)
- Added `artifact.download` audit logging to download endpoint (#20)
- Added `ProjectHistory` and `PackageHistory` models with database triggers (#20)
- Added migration `004_history_tables.sql` for project/package history (#20)
- Added migration `005_upload_enhancements.sql` for upload status tracking (#19)
- Added 9 integration tests for global artifacts/tags endpoints (#18)
- Added global uploads query endpoint `GET /api/v1/uploads` with project/package/user/date filters (#18)
- Added project-level uploads endpoint `GET /api/v1/project/{project}/uploads` (#18)
- Added `has_more` field to pagination metadata for easier pagination UI (#18)
- Added `upload_id`, `content_type`, `original_name`, `created_at` fields to upload response (#19)
- Added audit log API endpoints with filtering and pagination (#20)
- `GET /api/v1/audit-logs` - list all audit logs with action/resource/user/date filters
- `GET /api/v1/projects/{project}/audit-logs` - project-scoped audit logs
- `GET /api/v1/project/{project}/{package}/audit-logs` - package-scoped audit logs
- Added upload history API endpoints (#20)
- `GET /api/v1/project/{project}/{package}/uploads` - list upload events for a package
- `GET /api/v1/artifact/{id}/uploads` - list all uploads of a specific artifact
- Added artifact provenance endpoint `GET /api/v1/artifact/{id}/history` (#20)
- Returns full artifact history including packages, tags, and upload events
- Added audit logging for project.create, package.create, tag.create, tag.update, artifact.upload actions (#20)
- Added `AuditLogResponse`, `UploadHistoryResponse`, `ArtifactProvenanceResponse` schemas (#20)
- Added `TagHistoryDetailResponse` schema with artifact metadata (#20)
- Added 31 integration tests for audit log, history, and upload query endpoints (#22)
### Changed
- Standardized audit action naming to `{entity}.{action}` pattern (project.delete, package.delete, tag.delete) (#20)
- Added `StorageBackend` protocol/interface for backend-agnostic storage (#33)
- Added `health_check()` method to storage backend with `/health` endpoint integration (#33)
- Added `verify_integrity()` method for post-upload hash validation (#33)
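
Two client sketches to illustrate the APIs catalogued above (editorial additions, not part of the MR: the base URL is hypothetical; the endpoint paths, payload fields, and headers follow the changelog entries). First, session login and API key creation:

import requests

BASE = "https://orchard.example.test"  # hypothetical host

s = requests.Session()
s.post(
    f"{BASE}/api/v1/auth/login",
    json={"username": "admin", "password": "changeme123"},
).raise_for_status()

# The orch_<random> secret is returned only once, at creation time
key = s.post(
    f"{BASE}/api/v1/auth/keys",
    json={"name": "ci-uploads", "scopes": ["read", "write"]},
).json()["key"]

Second, a pre-verified download cross-checked against the new checksum headers (the artifact download path is not shown in this excerpt, so a placeholder is used):

import hashlib

r = s.get(
    f"{BASE}/api/v1/<download-path>",  # placeholder; real path not in this excerpt
    params={"verify": "true", "verify_mode": "pre"},
)
r.raise_for_status()
assert r.headers.get("X-Verified") == "true"  # pre mode verifies before streaming
assert hashlib.sha256(r.content).hexdigest() == r.headers["X-Checksum-SHA256"]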

backend/app/auth.py (new file, 1208 lines)

File diff suppressed because it is too large.

backend/app/checksum.py (new file, 477 lines)

@@ -0,0 +1,477 @@
"""
Checksum utilities for download verification.
This module provides functions and classes for computing and verifying
SHA256 checksums during artifact downloads.
Key components:
- compute_sha256(): Compute SHA256 of bytes content
- compute_sha256_stream(): Compute SHA256 from an iterable stream
- HashingStreamWrapper: Wrapper that computes hash while streaming
- VerifyingStreamWrapper: Wrapper that verifies hash after streaming
- verify_checksum(): Verify content against expected hash
- ChecksumMismatchError: Exception for verification failures
"""
import hashlib
import logging
import re
import base64
from typing import (
Generator,
Optional,
Any,
Callable,
)
logger = logging.getLogger(__name__)
# Default chunk size for streaming operations (8KB)
DEFAULT_CHUNK_SIZE = 8 * 1024
# Regex pattern for valid SHA256 hash (64 hex characters)
SHA256_PATTERN = re.compile(r"^[a-fA-F0-9]{64}$")
class ChecksumError(Exception):
"""Base exception for checksum operations."""
pass
class ChecksumMismatchError(ChecksumError):
"""
Raised when computed checksum does not match expected checksum.
Attributes:
expected: The expected SHA256 hash
actual: The actual computed SHA256 hash
artifact_id: Optional artifact ID for context
s3_key: Optional S3 key for debugging
size: Optional file size
"""
def __init__(
self,
expected: str,
actual: str,
artifact_id: Optional[str] = None,
s3_key: Optional[str] = None,
size: Optional[int] = None,
message: Optional[str] = None,
):
self.expected = expected
self.actual = actual
self.artifact_id = artifact_id
self.s3_key = s3_key
self.size = size
if message:
self.message = message
else:
self.message = (
f"Checksum verification failed: "
f"expected {expected[:16]}..., got {actual[:16]}..."
)
super().__init__(self.message)
def to_dict(self) -> dict:
"""Convert to dictionary for logging/API responses."""
return {
"error": "checksum_mismatch",
"expected": self.expected,
"actual": self.actual,
"artifact_id": self.artifact_id,
"s3_key": self.s3_key,
"size": self.size,
"message": self.message,
}
class InvalidHashFormatError(ChecksumError):
"""Raised when a hash string is not valid SHA256 format."""
def __init__(self, hash_value: str):
self.hash_value = hash_value
message = f"Invalid SHA256 hash format: '{hash_value[:32]}...'"
super().__init__(message)
def is_valid_sha256(hash_value: str) -> bool:
"""
Check if a string is a valid SHA256 hash (64 hex characters).
Args:
hash_value: String to validate
Returns:
True if valid SHA256 format, False otherwise
"""
if not hash_value:
return False
return bool(SHA256_PATTERN.match(hash_value))
def compute_sha256(content: bytes) -> str:
"""
Compute SHA256 hash of bytes content.
Args:
content: Bytes content to hash
Returns:
Lowercase hexadecimal SHA256 hash (64 characters)
Raises:
ChecksumError: If hash computation fails
"""
if content is None:
raise ChecksumError("Cannot compute hash of None content")
try:
return hashlib.sha256(content).hexdigest().lower()
except Exception as e:
raise ChecksumError(f"Hash computation failed: {e}") from e
def compute_sha256_stream(
stream: Any,
chunk_size: int = DEFAULT_CHUNK_SIZE,
) -> str:
"""
Compute SHA256 hash from a stream or file-like object.
Reads the stream in chunks to minimize memory usage for large files.
Args:
stream: Iterator yielding bytes or file-like object with read()
chunk_size: Size of chunks to read (default 8KB)
Returns:
Lowercase hexadecimal SHA256 hash (64 characters)
Raises:
ChecksumError: If hash computation fails
"""
try:
hasher = hashlib.sha256()
# Handle file-like objects with read()
if hasattr(stream, "read"):
while True:
chunk = stream.read(chunk_size)
if not chunk:
break
hasher.update(chunk)
else:
# Handle iterators
for chunk in stream:
if chunk:
hasher.update(chunk)
return hasher.hexdigest().lower()
except Exception as e:
raise ChecksumError(f"Stream hash computation failed: {e}") from e
def verify_checksum(content: bytes, expected: str) -> bool:
"""
Verify that content matches expected SHA256 hash.
Args:
content: Bytes content to verify
expected: Expected SHA256 hash (case-insensitive)
Returns:
True if hash matches, False otherwise
Raises:
InvalidHashFormatError: If expected hash is not valid format
ChecksumError: If hash computation fails
"""
if not is_valid_sha256(expected):
raise InvalidHashFormatError(expected)
actual = compute_sha256(content)
return actual == expected.lower()
def verify_checksum_strict(
content: bytes,
expected: str,
artifact_id: Optional[str] = None,
s3_key: Optional[str] = None,
) -> None:
"""
Verify content matches expected hash, raising exception on mismatch.
Args:
content: Bytes content to verify
expected: Expected SHA256 hash (case-insensitive)
artifact_id: Optional artifact ID for error context
s3_key: Optional S3 key for error context
Raises:
InvalidHashFormatError: If expected hash is not valid format
ChecksumMismatchError: If verification fails
ChecksumError: If hash computation fails
"""
if not is_valid_sha256(expected):
raise InvalidHashFormatError(expected)
actual = compute_sha256(content)
if actual != expected.lower():
raise ChecksumMismatchError(
expected=expected.lower(),
actual=actual,
artifact_id=artifact_id,
s3_key=s3_key,
size=len(content),
)
def sha256_to_base64(hex_hash: str) -> str:
"""
Convert SHA256 hex string to base64 encoding (for RFC 3230 Digest header).
Args:
hex_hash: SHA256 hash as 64-character hex string
Returns:
Base64-encoded hash string
"""
if not is_valid_sha256(hex_hash):
raise InvalidHashFormatError(hex_hash)
hash_bytes = bytes.fromhex(hex_hash)
return base64.b64encode(hash_bytes).decode("ascii")
class HashingStreamWrapper:
"""
Wrapper that computes SHA256 hash incrementally as chunks are read.
This allows computing the hash while streaming content to a client,
without buffering the entire content in memory.
Usage:
wrapper = HashingStreamWrapper(stream)
for chunk in wrapper:
send_to_client(chunk)
final_hash = wrapper.get_hash()
Attributes:
chunk_size: Size of chunks to yield
bytes_read: Total bytes processed so far
"""
def __init__(
self,
stream: Any,
chunk_size: int = DEFAULT_CHUNK_SIZE,
):
"""
Initialize the hashing stream wrapper.
Args:
stream: Source stream (iterator, file-like, or S3 StreamingBody)
chunk_size: Size of chunks to yield (default 8KB)
"""
self._stream = stream
self._hasher = hashlib.sha256()
self._chunk_size = chunk_size
self._bytes_read = 0
self._finalized = False
self._final_hash: Optional[str] = None
@property
def bytes_read(self) -> int:
"""Total bytes read so far."""
return self._bytes_read
@property
def chunk_size(self) -> int:
"""Chunk size for reading."""
return self._chunk_size
def __iter__(self) -> Generator[bytes, None, None]:
"""Iterate over chunks, computing hash as we go."""
# Handle S3 StreamingBody (has iter_chunks)
if hasattr(self._stream, "iter_chunks"):
for chunk in self._stream.iter_chunks(chunk_size=self._chunk_size):
if chunk:
self._hasher.update(chunk)
self._bytes_read += len(chunk)
yield chunk
# Handle file-like objects with read()
elif hasattr(self._stream, "read"):
while True:
chunk = self._stream.read(self._chunk_size)
if not chunk:
break
self._hasher.update(chunk)
self._bytes_read += len(chunk)
yield chunk
# Handle iterators
else:
for chunk in self._stream:
if chunk:
self._hasher.update(chunk)
self._bytes_read += len(chunk)
yield chunk
self._finalized = True
self._final_hash = self._hasher.hexdigest().lower()
def get_hash(self) -> str:
"""
Get the computed SHA256 hash.
If stream hasn't been fully consumed, consumes remaining chunks.
Returns:
Lowercase hexadecimal SHA256 hash
"""
if not self._finalized:
# Consume remaining stream
for _ in self:
pass
return self._final_hash or self._hasher.hexdigest().lower()
def get_hash_if_complete(self) -> Optional[str]:
"""
Get hash only if stream has been fully consumed.
Returns:
Hash if complete, None otherwise
"""
if self._finalized:
return self._final_hash
return None
class VerifyingStreamWrapper:
"""
Wrapper that yields chunks and verifies hash after streaming completes.
IMPORTANT: Because HTTP streams cannot be "un-sent", if verification
fails after streaming, the client has already received potentially
corrupt data. This wrapper logs an error but cannot prevent delivery.
For guaranteed verification before delivery, use pre-verification mode
which buffers the entire content first.
Usage:
wrapper = VerifyingStreamWrapper(stream, expected_hash)
for chunk in wrapper:
send_to_client(chunk)
wrapper.verify() # Raises ChecksumMismatchError if failed
"""
def __init__(
self,
stream: Any,
expected_hash: str,
artifact_id: Optional[str] = None,
s3_key: Optional[str] = None,
chunk_size: int = DEFAULT_CHUNK_SIZE,
on_failure: Optional[Callable[[Any], None]] = None,
):
"""
Initialize the verifying stream wrapper.
Args:
stream: Source stream
expected_hash: Expected SHA256 hash to verify against
artifact_id: Optional artifact ID for error context
s3_key: Optional S3 key for error context
chunk_size: Size of chunks to yield
on_failure: Optional callback called on verification failure
"""
if not is_valid_sha256(expected_hash):
raise InvalidHashFormatError(expected_hash)
self._hashing_wrapper = HashingStreamWrapper(stream, chunk_size)
self._expected_hash = expected_hash.lower()
self._artifact_id = artifact_id
self._s3_key = s3_key
self._on_failure = on_failure
self._verified: Optional[bool] = None
@property
def bytes_read(self) -> int:
"""Total bytes read so far."""
return self._hashing_wrapper.bytes_read
@property
def is_verified(self) -> Optional[bool]:
"""
Verification status.
Returns:
True if verified successfully, False if failed, None if not yet complete
"""
return self._verified
def __iter__(self) -> Generator[bytes, None, None]:
"""Iterate over chunks."""
yield from self._hashing_wrapper
def verify(self) -> bool:
"""
Verify the hash after stream is complete.
Must be called after fully consuming the iterator.
Returns:
True if verification passed
Raises:
ChecksumMismatchError: If verification failed
"""
actual_hash = self._hashing_wrapper.get_hash()
if actual_hash == self._expected_hash:
self._verified = True
logger.debug(
f"Verification passed for {self._artifact_id or 'unknown'}: {actual_hash[:16]}..."
)
return True
self._verified = False
error = ChecksumMismatchError(
expected=self._expected_hash,
actual=actual_hash,
artifact_id=self._artifact_id,
s3_key=self._s3_key,
size=self._hashing_wrapper.bytes_read,
)
# Log the failure
logger.error(f"Checksum verification FAILED after streaming: {error.to_dict()}")
# Call failure callback if provided
if self._on_failure:
try:
self._on_failure(error)
except Exception as e:
logger.warning(f"Verification failure callback raised exception: {e}")
raise error
def verify_silent(self) -> bool:
"""
Verify the hash without raising exception.
Returns:
True if verification passed, False otherwise
"""
try:
return self.verify()
except ChecksumMismatchError:
return False
def get_actual_hash(self) -> Optional[str]:
"""Get the actual computed hash (only available after iteration)."""
return self._hashing_wrapper.get_hash_if_complete()
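
A sketch of how these wrappers are meant to compose with a streaming HTTP response (the actual route code is in the suppressed routes diff; FastAPI and the header names from the changelog are assumed):

from fastapi.responses import StreamingResponse

def stream_verified(body, expected_sha256: str, artifact_id: str) -> StreamingResponse:
    # Stream-verification mode: the hash accumulates while bytes flow to the
    # client, so a mismatch can only be logged after the fact (see the
    # VerifyingStreamWrapper docstring above).
    wrapper = VerifyingStreamWrapper(body, expected_sha256, artifact_id=artifact_id)

    def generate():
        yield from wrapper
        wrapper.verify_silent()  # logs full ChecksumMismatchError context on failure

    return StreamingResponse(
        generate(),
        media_type="application/octet-stream",
        headers={
            "X-Checksum-SHA256": expected_sha256,
            "Digest": f"sha-256={sha256_to_base64(expected_sha256)}",
        },
    )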

backend/app/config.py

@@ -25,6 +25,7 @@ class Settings(BaseSettings):
database_pool_recycle: int = (
1800 # Recycle connections after this many seconds (30 min)
)
database_query_timeout: int = 30 # Query timeout in seconds (0 = no timeout)
# S3
s3_endpoint: str = ""
@@ -48,6 +49,21 @@ class Settings(BaseSettings):
3600 # Presigned URL expiry in seconds (default: 1 hour)
)
# Logging settings
log_level: str = "INFO" # DEBUG, INFO, WARNING, ERROR, CRITICAL
log_format: str = "auto" # "json", "standard", or "auto" (json in production)
# JWT Authentication settings (optional, for external identity providers)
jwt_enabled: bool = False # Enable JWT token validation
jwt_secret: str = "" # Secret key for HS256, or leave empty for RS256 with JWKS
jwt_algorithm: str = "HS256" # HS256 or RS256
jwt_issuer: str = "" # Expected issuer (iss claim), leave empty to skip validation
jwt_audience: str = "" # Expected audience (aud claim), leave empty to skip validation
jwt_jwks_url: str = "" # JWKS URL for RS256 (e.g., https://auth.example.com/.well-known/jwks.json)
jwt_username_claim: str = (
"sub" # JWT claim to use as username (sub, email, preferred_username, etc.)
)
@property
def database_url(self) -> str:
sslmode = f"?sslmode={self.database_sslmode}" if self.database_sslmode else ""
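
The code that consumes these JWT settings is not part of this excerpt; a minimal sketch of how they might be applied, assuming PyJWT as the validation library:

import jwt  # PyJWT, an assumed dependency not confirmed by this diff

def username_from_token(token: str, settings) -> str:
    if settings.jwt_algorithm == "RS256" and settings.jwt_jwks_url:
        # RS256: resolve the signing key from the configured JWKS endpoint
        key = jwt.PyJWKClient(settings.jwt_jwks_url).get_signing_key_from_jwt(token).key
    else:
        key = settings.jwt_secret  # HS256 shared secret
    claims = jwt.decode(
        token,
        key,
        algorithms=[settings.jwt_algorithm],
        issuer=settings.jwt_issuer or None,
        audience=settings.jwt_audience or None,
        options={
            "verify_iss": bool(settings.jwt_issuer),   # empty setting skips the check
            "verify_aud": bool(settings.jwt_audience),
        },
    )
    return claims[settings.jwt_username_claim]  # "sub" by default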

backend/app/database.py

@@ -12,6 +12,12 @@ from .models import Base
settings = get_settings()
logger = logging.getLogger(__name__)
# Build connect_args with query timeout if configured
connect_args = {}
if settings.database_query_timeout > 0:
# PostgreSQL statement_timeout is in milliseconds
connect_args["options"] = f"-c statement_timeout={settings.database_query_timeout * 1000}"
# Create engine with connection pool configuration
engine = create_engine(
settings.database_url,
@@ -21,6 +27,7 @@ engine = create_engine(
max_overflow=settings.database_max_overflow,
pool_timeout=settings.database_pool_timeout,
pool_recycle=settings.database_pool_recycle,
connect_args=connect_args,
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
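
With the default database_query_timeout of 30, every connection is opened with options=-c statement_timeout=30000, so PostgreSQL cancels runaway queries server-side. A quick illustration (assumes the PostgreSQL backend and the module path app.database):

from sqlalchemy import text
from sqlalchemy.exc import OperationalError

from app.database import SessionLocal  # module path assumed

with SessionLocal() as session:
    try:
        session.execute(text("SELECT pg_sleep(60)"))  # exceeds the 30 s budget
    except OperationalError:
        session.rollback()  # psycopg reports "canceling statement due to statement timeout"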

backend/app/logging_config.py (new file, 254 lines)

@@ -0,0 +1,254 @@
"""
Structured logging configuration for Orchard.
This module provides:
- Structured JSON logging for production environments
- Request tracing via X-Request-ID header
- Verification failure logging with context
- Configurable log levels via environment
Usage:
from app.logging_config import setup_logging, get_request_id
setup_logging() # Call once at app startup
request_id = get_request_id() # Get current request's ID
"""
import logging
import json
import sys
import uuid
from datetime import datetime, timezone
from typing import Optional, Any, Dict
from contextvars import ContextVar
from .config import get_settings
# Context variable for request ID (thread-safe)
_request_id_var: ContextVar[Optional[str]] = ContextVar("request_id", default=None)
def get_request_id() -> Optional[str]:
"""Get the current request's ID from context."""
return _request_id_var.get()
def set_request_id(request_id: Optional[str] = None) -> str:
"""
Set the request ID for the current context.
If no ID provided, generates a new UUID.
Returns the request ID that was set.
"""
if request_id is None:
request_id = str(uuid.uuid4())
_request_id_var.set(request_id)
return request_id
def clear_request_id():
"""Clear the request ID from context."""
_request_id_var.set(None)
class JSONFormatter(logging.Formatter):
"""
JSON log formatter for structured logging.
Output format:
{
"timestamp": "2025-01-01T00:00:00.000Z",
"level": "INFO",
"logger": "app.routes",
"message": "Request completed",
"request_id": "abc-123",
"extra": {...}
}
"""
def format(self, record: logging.LogRecord) -> str:
log_entry: Dict[str, Any] = {
"timestamp": datetime.now(timezone.utc).isoformat(),
"level": record.levelname,
"logger": record.name,
"message": record.getMessage(),
}
# Add request ID if available
request_id = get_request_id()
if request_id:
log_entry["request_id"] = request_id
# Add exception info if present
if record.exc_info:
log_entry["exception"] = self.formatException(record.exc_info)
# Add extra fields from record
extra_fields: Dict[str, Any] = {}
for key, value in record.__dict__.items():
if key not in (
"name",
"msg",
"args",
"created",
"filename",
"funcName",
"levelname",
"levelno",
"lineno",
"module",
"msecs",
"pathname",
"process",
"processName",
"relativeCreated",
"stack_info",
"exc_info",
"exc_text",
"thread",
"threadName",
"message",
"asctime",
):
try:
json.dumps(value) # Ensure serializable
extra_fields[key] = value
except (TypeError, ValueError):
extra_fields[key] = str(value)
if extra_fields:
log_entry["extra"] = extra_fields
return json.dumps(log_entry)
class StandardFormatter(logging.Formatter):
"""
Standard log formatter for development.
Output format:
[2025-01-01 00:00:00] INFO [app.routes] [req-abc123] Request completed
"""
def format(self, record: logging.LogRecord) -> str:
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
request_id = get_request_id()
req_str = f" [req-{request_id[:8]}]" if request_id else ""
base_msg = f"[{timestamp}] {record.levelname:5} [{record.name}]{req_str} {record.getMessage()}"
if record.exc_info:
base_msg += "\n" + self.formatException(record.exc_info)
return base_msg
def setup_logging(log_level: Optional[str] = None, json_format: Optional[bool] = None):
"""
Configure logging for the application.
Args:
log_level: Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL).
Defaults to ORCHARD_LOG_LEVEL env var or INFO.
json_format: Use JSON format. Defaults to True in production.
"""
settings = get_settings()
# Determine log level
if log_level is None:
log_level = getattr(settings, "log_level", "INFO")
effective_level = log_level if log_level else "INFO"
level = getattr(logging, effective_level.upper(), logging.INFO)
# Determine format
if json_format is None:
json_format = settings.is_production
# Create handler
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(level)
# Set formatter
if json_format:
handler.setFormatter(JSONFormatter())
else:
handler.setFormatter(StandardFormatter())
# Configure root logger
root_logger = logging.getLogger()
root_logger.setLevel(level)
# Remove existing handlers
root_logger.handlers.clear()
root_logger.addHandler(handler)
# Configure specific loggers
for logger_name in ["app", "uvicorn", "uvicorn.access", "uvicorn.error"]:
logger = logging.getLogger(logger_name)
logger.setLevel(level)
logger.handlers.clear()
logger.addHandler(handler)
logger.propagate = False
# Quiet down noisy loggers
logging.getLogger("botocore").setLevel(logging.WARNING)
logging.getLogger("boto3").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
def log_verification_failure(
logger: logging.Logger,
expected_hash: str,
actual_hash: str,
artifact_id: Optional[str] = None,
s3_key: Optional[str] = None,
project: Optional[str] = None,
package: Optional[str] = None,
size: Optional[int] = None,
user_id: Optional[str] = None,
source_ip: Optional[str] = None,
verification_mode: Optional[str] = None,
):
"""
Log a verification failure with full context.
This creates a structured log entry with all relevant details
for debugging and alerting.
"""
logger.error(
"Checksum verification failed",
extra={
"event": "verification_failure",
"expected_hash": expected_hash,
"actual_hash": actual_hash,
"artifact_id": artifact_id,
"s3_key": s3_key,
"project": project,
"package": package,
"size": size,
"user_id": user_id,
"source_ip": source_ip,
"verification_mode": verification_mode,
"hash_match": expected_hash == actual_hash,
},
)
def log_verification_success(
logger: logging.Logger,
artifact_id: str,
size: Optional[int] = None,
verification_mode: Optional[str] = None,
duration_ms: Optional[float] = None,
):
"""Log a successful verification."""
logger.info(
f"Verification passed for artifact {artifact_id[:16]}...",
extra={
"event": "verification_success",
"artifact_id": artifact_id,
"size": size,
"verification_mode": verification_mode,
"duration_ms": duration_ms,
},
)
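
The middleware that feeds the request-ID context is not included in this diff; a minimal sketch of the pattern the module docstring describes (X-Request-ID tracing):

from fastapi import FastAPI, Request

from app.logging_config import (
    setup_logging,
    set_request_id,
    get_request_id,
    clear_request_id,
)

app = FastAPI()
setup_logging()  # once, at startup

@app.middleware("http")
async def request_id_middleware(request: Request, call_next):
    # Reuse the caller's X-Request-ID if present, otherwise generate a UUID
    set_request_id(request.headers.get("X-Request-ID"))
    try:
        response = await call_next(request)
        response.headers["X-Request-ID"] = get_request_id() or ""
        return response
    finally:
        clear_request_id()  # keep the ID from leaking into unrelated log lines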

backend/app/main.py

@@ -1,14 +1,19 @@
from fastapi import FastAPI
from fastapi import FastAPI, Request
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
from contextlib import asynccontextmanager
import logging
import os
from slowapi import _rate_limit_exceeded_handler
from slowapi.errors import RateLimitExceeded
from .config import get_settings
from .database import init_db, SessionLocal
from .routes import router
from .seed import seed_database
from .auth import create_default_admin
from .rate_limit import limiter
settings = get_settings()
logging.basicConfig(level=logging.INFO)
@@ -20,6 +25,18 @@ async def lifespan(app: FastAPI):
# Startup: initialize database
init_db()
# Create default admin user if no users exist
db = SessionLocal()
try:
admin = create_default_admin(db)
if admin:
logger.warning(
"Default admin user created with username 'admin' and password 'changeme123'. "
"CHANGE THIS PASSWORD IMMEDIATELY!"
)
finally:
db.close()
# Seed test data in development mode
if settings.is_development:
logger.info(f"Running in {settings.env} mode - checking for seed data")
@@ -42,13 +59,21 @@ app = FastAPI(
lifespan=lifespan,
)
# Set up rate limiting
app.state.limiter = limiter
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
# Include API routes
app.include_router(router)
# Serve static files (React build) if the directory exists
static_dir = os.path.join(os.path.dirname(__file__), "..", "..", "frontend", "dist")
if os.path.exists(static_dir):
app.mount("/assets", StaticFiles(directory=os.path.join(static_dir, "assets")), name="assets")
app.mount(
"/assets",
StaticFiles(directory=os.path.join(static_dir, "assets")),
name="assets",
)
@app.get("/")
async def serve_spa():
@@ -60,6 +85,7 @@ if os.path.exists(static_dir):
# Don't catch API routes or health endpoint
if full_path.startswith("api/") or full_path.startswith("health"):
from fastapi import HTTPException
raise HTTPException(status_code=404, detail="Not found")
# Serve SPA for all other routes (including /project/*)
@@ -68,4 +94,5 @@ if os.path.exists(static_dir):
return FileResponse(index_path)
from fastapi import HTTPException
raise HTTPException(status_code=404, detail="Not found")

backend/app/models.py

@@ -1,8 +1,17 @@
from datetime import datetime
from typing import Optional
from sqlalchemy import (
Column, String, Text, Boolean, Integer, BigInteger,
DateTime, ForeignKey, CheckConstraint, Index, JSON
Column,
String,
Text,
Boolean,
Integer,
BigInteger,
DateTime,
ForeignKey,
CheckConstraint,
Index,
JSON,
ARRAY,
)
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import relationship, declarative_base
@@ -19,11 +28,17 @@ class Project(Base):
description = Column(Text)
is_public = Column(Boolean, default=True)
created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
updated_at = Column(DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow)
updated_at = Column(
DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
)
created_by = Column(String(255), nullable=False)
packages = relationship("Package", back_populates="project", cascade="all, delete-orphan")
permissions = relationship("AccessPermission", back_populates="project", cascade="all, delete-orphan")
packages = relationship(
"Package", back_populates="project", cascade="all, delete-orphan"
)
permissions = relationship(
"AccessPermission", back_populates="project", cascade="all, delete-orphan"
)
__table_args__ = (
Index("idx_projects_name", "name"),
@@ -35,32 +50,44 @@ class Package(Base):
__tablename__ = "packages"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
project_id = Column(UUID(as_uuid=True), ForeignKey("projects.id", ondelete="CASCADE"), nullable=False)
project_id = Column(
UUID(as_uuid=True),
ForeignKey("projects.id", ondelete="CASCADE"),
nullable=False,
)
name = Column(String(255), nullable=False)
description = Column(Text)
format = Column(String(50), default="generic", nullable=False)
platform = Column(String(50), default="any", nullable=False)
created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
updated_at = Column(DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow)
updated_at = Column(
DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
)
project = relationship("Project", back_populates="packages")
tags = relationship("Tag", back_populates="package", cascade="all, delete-orphan")
uploads = relationship("Upload", back_populates="package", cascade="all, delete-orphan")
consumers = relationship("Consumer", back_populates="package", cascade="all, delete-orphan")
uploads = relationship(
"Upload", back_populates="package", cascade="all, delete-orphan"
)
consumers = relationship(
"Consumer", back_populates="package", cascade="all, delete-orphan"
)
__table_args__ = (
Index("idx_packages_project_id", "project_id"),
Index("idx_packages_name", "name"),
Index("idx_packages_format", "format"),
Index("idx_packages_platform", "platform"),
Index("idx_packages_project_name", "project_id", "name", unique=True), # Composite unique index
Index(
"idx_packages_project_name", "project_id", "name", unique=True
), # Composite unique index
CheckConstraint(
"format IN ('generic', 'npm', 'pypi', 'docker', 'deb', 'rpm', 'maven', 'nuget', 'helm')",
name="check_package_format"
name="check_package_format",
),
CheckConstraint(
"platform IN ('any', 'linux', 'darwin', 'windows', 'linux-amd64', 'linux-arm64', 'darwin-amd64', 'darwin-arm64', 'windows-amd64')",
name="check_package_platform"
name="check_package_platform",
),
{"extend_existing": True},
)
@@ -76,7 +103,9 @@ class Artifact(Base):
checksum_md5 = Column(String(32)) # MD5 hash for additional verification
checksum_sha1 = Column(String(40)) # SHA1 hash for compatibility
s3_etag = Column(String(64)) # S3 ETag for verification
artifact_metadata = Column("metadata", JSON, default=dict) # Format-specific metadata (column name is 'metadata')
artifact_metadata = Column(
"metadata", JSON, default=dict
) # Format-specific metadata (column name is 'metadata')
created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
created_by = Column(String(255), nullable=False)
ref_count = Column(Integer, default=1)
@@ -113,22 +142,34 @@ class Tag(Base):
__tablename__ = "tags"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
package_id = Column(UUID(as_uuid=True), ForeignKey("packages.id", ondelete="CASCADE"), nullable=False)
package_id = Column(
UUID(as_uuid=True),
ForeignKey("packages.id", ondelete="CASCADE"),
nullable=False,
)
name = Column(String(255), nullable=False)
artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False)
created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
updated_at = Column(DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow)
updated_at = Column(
DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
)
created_by = Column(String(255), nullable=False)
package = relationship("Package", back_populates="tags")
artifact = relationship("Artifact", back_populates="tags")
history = relationship("TagHistory", back_populates="tag", cascade="all, delete-orphan")
history = relationship(
"TagHistory", back_populates="tag", cascade="all, delete-orphan"
)
__table_args__ = (
Index("idx_tags_package_id", "package_id"),
Index("idx_tags_artifact_id", "artifact_id"),
Index("idx_tags_package_name", "package_id", "name", unique=True), # Composite unique index
Index("idx_tags_package_created_at", "package_id", "created_at"), # For recent tags queries
Index(
"idx_tags_package_name", "package_id", "name", unique=True
), # Composite unique index
Index(
"idx_tags_package_created_at", "package_id", "created_at"
), # For recent tags queries
)
@@ -136,7 +177,9 @@ class TagHistory(Base):
__tablename__ = "tag_history"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
tag_id = Column(UUID(as_uuid=True), ForeignKey("tags.id", ondelete="CASCADE"), nullable=False)
tag_id = Column(
UUID(as_uuid=True), ForeignKey("tags.id", ondelete="CASCADE"), nullable=False
)
old_artifact_id = Column(String(64), ForeignKey("artifacts.id"))
new_artifact_id = Column(String(64), ForeignKey("artifacts.id"), nullable=False)
change_type = Column(String(20), nullable=False, default="update")
@@ -148,7 +191,9 @@ class TagHistory(Base):
__table_args__ = (
Index("idx_tag_history_tag_id", "tag_id"),
Index("idx_tag_history_changed_at", "changed_at"),
CheckConstraint("change_type IN ('create', 'update', 'delete')", name="check_change_type"),
CheckConstraint(
"change_type IN ('create', 'update', 'delete')", name="check_change_type"
),
)
@@ -164,6 +209,11 @@ class Upload(Base):
duration_ms = Column(Integer) # Upload timing in milliseconds
deduplicated = Column(Boolean, default=False) # Whether artifact was deduplicated
checksum_verified = Column(Boolean, default=True) # Whether checksum was verified
status = Column(
String(20), default="completed", nullable=False
) # pending, completed, failed
error_message = Column(Text) # Error details for failed uploads
client_checksum = Column(String(64)) # Client-provided SHA256 for verification
uploaded_at = Column(DateTime(timezone=True), default=datetime.utcnow)
uploaded_by = Column(String(255), nullable=False)
source_ip = Column(String(45))
@@ -177,6 +227,35 @@ class Upload(Base):
Index("idx_uploads_uploaded_at", "uploaded_at"),
Index("idx_uploads_package_uploaded_at", "package_id", "uploaded_at"),
Index("idx_uploads_uploaded_by_at", "uploaded_by", "uploaded_at"),
Index("idx_uploads_status", "status"),
Index("idx_uploads_status_uploaded_at", "status", "uploaded_at"),
CheckConstraint(
"status IN ('pending', 'completed', 'failed')", name="check_upload_status"
),
)
class UploadLock(Base):
"""Track in-progress uploads for conflict detection (409 responses)."""
__tablename__ = "upload_locks"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
sha256_hash = Column(String(64), nullable=False)
package_id = Column(
UUID(as_uuid=True),
ForeignKey("packages.id", ondelete="CASCADE"),
nullable=False,
)
locked_at = Column(DateTime(timezone=True), default=datetime.utcnow)
locked_by = Column(String(255), nullable=False)
expires_at = Column(DateTime(timezone=True), nullable=False)
__table_args__ = (
Index("idx_upload_locks_expires_at", "expires_at"),
Index(
"idx_upload_locks_hash_package", "sha256_hash", "package_id", unique=True
),
)
@@ -184,7 +263,11 @@ class Consumer(Base):
__tablename__ = "consumers"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
package_id = Column(UUID(as_uuid=True), ForeignKey("packages.id", ondelete="CASCADE"), nullable=False)
package_id = Column(
UUID(as_uuid=True),
ForeignKey("packages.id", ondelete="CASCADE"),
nullable=False,
)
project_url = Column(String(2048), nullable=False)
last_access = Column(DateTime(timezone=True), default=datetime.utcnow)
created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
@@ -201,7 +284,11 @@ class AccessPermission(Base):
__tablename__ = "access_permissions"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
project_id = Column(UUID(as_uuid=True), ForeignKey("projects.id", ondelete="CASCADE"), nullable=False)
project_id = Column(
UUID(as_uuid=True),
ForeignKey("projects.id", ondelete="CASCADE"),
nullable=False,
)
user_id = Column(String(255), nullable=False)
level = Column(String(20), nullable=False)
created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
@@ -216,20 +303,104 @@ class AccessPermission(Base):
)
class User(Base):
"""User account for authentication."""
__tablename__ = "users"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
username = Column(String(255), unique=True, nullable=False)
password_hash = Column(String(255)) # NULL if OIDC-only user
email = Column(String(255))
is_admin = Column(Boolean, default=False)
is_active = Column(Boolean, default=True)
must_change_password = Column(Boolean, default=False)
oidc_subject = Column(String(255)) # OIDC subject claim
oidc_issuer = Column(String(512)) # OIDC issuer URL
created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
updated_at = Column(
DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow
)
last_login = Column(DateTime(timezone=True))
# Relationships
api_keys = relationship(
"APIKey", back_populates="owner", cascade="all, delete-orphan"
)
sessions = relationship(
"Session", back_populates="user", cascade="all, delete-orphan"
)
__table_args__ = (
Index("idx_users_username", "username"),
Index("idx_users_email", "email"),
Index("idx_users_oidc_subject", "oidc_subject"),
)
class Session(Base):
"""User session for web login."""
__tablename__ = "sessions"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
user_id = Column(
UUID(as_uuid=True),
ForeignKey("users.id", ondelete="CASCADE"),
nullable=False,
)
token_hash = Column(String(64), unique=True, nullable=False)
created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
expires_at = Column(DateTime(timezone=True), nullable=False)
last_accessed = Column(DateTime(timezone=True), default=datetime.utcnow)
user_agent = Column(String(512))
ip_address = Column(String(45))
user = relationship("User", back_populates="sessions")
__table_args__ = (
Index("idx_sessions_user_id", "user_id"),
Index("idx_sessions_token_hash", "token_hash"),
Index("idx_sessions_expires_at", "expires_at"),
)
class AuthSettings(Base):
"""Authentication settings for OIDC configuration."""
__tablename__ = "auth_settings"
key = Column(String(255), primary_key=True)
value = Column(Text, nullable=False)
updated_at = Column(DateTime(timezone=True), default=datetime.utcnow)
class APIKey(Base):
__tablename__ = "api_keys"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
key_hash = Column(String(64), unique=True, nullable=False)
name = Column(String(255), nullable=False)
user_id = Column(String(255), nullable=False)
user_id = Column(
String(255), nullable=False
) # Legacy field, kept for compatibility
owner_id = Column(
UUID(as_uuid=True),
ForeignKey("users.id", ondelete="CASCADE"),
nullable=True, # Nullable for migration compatibility
)
description = Column(Text)
scopes = Column(ARRAY(String), default=["read", "write"])
created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
expires_at = Column(DateTime(timezone=True))
last_used = Column(DateTime(timezone=True))
owner = relationship("User", back_populates="api_keys")
__table_args__ = (
Index("idx_api_keys_user_id", "user_id"),
Index("idx_api_keys_key_hash", "key_hash"),
Index("idx_api_keys_owner_id", "owner_id"),
)
@@ -252,3 +423,51 @@ class AuditLog(Base):
Index("idx_audit_logs_resource_timestamp", "resource", "timestamp"),
Index("idx_audit_logs_user_timestamp", "user_id", "timestamp"),
)
class ProjectHistory(Base):
"""Track changes to project metadata over time."""
__tablename__ = "project_history"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
project_id = Column(
UUID(as_uuid=True),
ForeignKey("projects.id", ondelete="CASCADE"),
nullable=False,
)
field_name = Column(String(100), nullable=False)
old_value = Column(Text)
new_value = Column(Text)
changed_at = Column(DateTime(timezone=True), default=datetime.utcnow)
changed_by = Column(String(255), nullable=False)
__table_args__ = (
Index("idx_project_history_project_id", "project_id"),
Index("idx_project_history_changed_at", "changed_at"),
Index("idx_project_history_project_changed_at", "project_id", "changed_at"),
)
class PackageHistory(Base):
"""Track changes to package metadata over time."""
__tablename__ = "package_history"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
package_id = Column(
UUID(as_uuid=True),
ForeignKey("packages.id", ondelete="CASCADE"),
nullable=False,
)
field_name = Column(String(100), nullable=False)
old_value = Column(Text)
new_value = Column(Text)
changed_at = Column(DateTime(timezone=True), default=datetime.utcnow)
changed_by = Column(String(255), nullable=False)
__table_args__ = (
Index("idx_package_history_package_id", "package_id"),
Index("idx_package_history_changed_at", "changed_at"),
Index("idx_package_history_package_changed_at", "package_id", "changed_at"),
)
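
The changelog marks upload_locks as "for future concurrent upload conflict detection"; a sketch of the intended pattern, where the unique (sha256_hash, package_id) index turns a plain INSERT into the conflict check (the 15-minute TTL is hypothetical):

from datetime import datetime, timedelta

from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session

from app.models import UploadLock  # the model defined above; module path assumed

def try_lock_upload(db: Session, sha256_hash: str, package_id, user: str) -> bool:
    db.add(UploadLock(
        sha256_hash=sha256_hash,
        package_id=package_id,
        locked_by=user,
        expires_at=datetime.utcnow() + timedelta(minutes=15),  # hypothetical TTL
    ))
    try:
        db.commit()
        return True   # lock acquired; proceed with the upload
    except IntegrityError:
        db.rollback()
        return False  # same artifact already uploading to this package: respond 409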

backend/app/rate_limit.py (new file, 16 lines)

@@ -0,0 +1,16 @@
"""Rate limiting configuration for Orchard API.
Uses slowapi for rate limiting with IP-based keys.
"""
import os
from slowapi import Limiter
from slowapi.util import get_remote_address
# Rate limiter - uses IP address as key
limiter = Limiter(key_func=get_remote_address)
# Rate limit strings - configurable via environment for testing
# Default: 5 login attempts per minute per IP
# In tests: set ORCHARD_LOGIN_RATE_LIMIT to a high value like "1000/minute"
LOGIN_RATE_LIMIT = os.environ.get("ORCHARD_LOGIN_RATE_LIMIT", "5/minute")
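
The login route that applies this limit lives in the suppressed routes diff; a sketch of the standard slowapi pattern it implies (module paths assumed; slowapi requires the Request parameter in the endpoint signature, and main.py above already registers the limiter and the 429 handler):

from fastapi import APIRouter, Request

from app.rate_limit import limiter, LOGIN_RATE_LIMIT
from app.schemas import LoginRequest

router = APIRouter()

@router.post("/api/v1/auth/login")
@limiter.limit(LOGIN_RATE_LIMIT)  # 429 once an IP exceeds 5 attempts/minute
async def login(request: Request, credentials: LoginRequest):
    ...  # verify credentials and create a session (see auth.py)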

File diff suppressed because it is too large.

backend/app/schemas.py

@@ -12,6 +12,7 @@ class PaginationMeta(BaseModel):
limit: int
total: int
total_pages: int
has_more: bool = False # True if there are more pages after current page
class PaginatedResponse(BaseModel, Generic[T]):
@@ -39,6 +40,20 @@ class ProjectResponse(BaseModel):
from_attributes = True
class ProjectUpdate(BaseModel):
"""Schema for updating a project"""
description: Optional[str] = None
is_public: Optional[bool] = None
class ProjectWithAccessResponse(ProjectResponse):
"""Project response with user's access level included"""
access_level: Optional[str] = None # 'read', 'write', 'admin', or None
is_owner: bool = False
# Package format and platform enums
PACKAGE_FORMATS = [
"generic",
@@ -86,6 +101,14 @@ class PackageResponse(BaseModel):
from_attributes = True
class PackageUpdate(BaseModel):
"""Schema for updating a package"""
description: Optional[str] = None
format: Optional[str] = None
platform: Optional[str] = None
class TagSummary(BaseModel):
"""Lightweight tag info for embedding in package responses"""
@@ -189,6 +212,93 @@ class TagHistoryResponse(BaseModel):
from_attributes = True
class TagHistoryDetailResponse(BaseModel):
"""Tag history with artifact metadata for each version"""
id: UUID
tag_id: UUID
tag_name: str
old_artifact_id: Optional[str]
new_artifact_id: str
changed_at: datetime
changed_by: str
# Artifact metadata for new artifact
artifact_size: int
artifact_original_name: Optional[str]
artifact_content_type: Optional[str]
class Config:
from_attributes = True
# Audit log schemas
class AuditLogResponse(BaseModel):
"""Audit log entry response"""
id: UUID
action: str
resource: str
user_id: str
details: Optional[Dict[str, Any]]
timestamp: datetime
source_ip: Optional[str]
class Config:
from_attributes = True
# Upload history schemas
class UploadHistoryResponse(BaseModel):
"""Upload event with artifact details"""
id: UUID
artifact_id: str
package_id: UUID
package_name: str
project_name: str
original_name: Optional[str]
tag_name: Optional[str]
uploaded_at: datetime
uploaded_by: str
source_ip: Optional[str]
deduplicated: bool
# Artifact metadata
artifact_size: int
artifact_content_type: Optional[str]
class Config:
from_attributes = True
# Artifact provenance schemas
class ArtifactProvenanceResponse(BaseModel):
"""Full provenance/history of an artifact"""
artifact_id: str
sha256: str
size: int
content_type: Optional[str]
original_name: Optional[str]
created_at: datetime
created_by: str
ref_count: int
# First upload info
first_uploaded_at: datetime
first_uploaded_by: str
# Usage statistics
upload_count: int
# References
packages: List[Dict[str, Any]] # List of {project_name, package_name, tag_names}
tags: List[
Dict[str, Any]
] # List of {project_name, package_name, tag_name, created_at}
# Upload history
uploads: List[Dict[str, Any]] # List of upload events
class Config:
from_attributes = True
class ArtifactTagInfo(BaseModel):
"""Tag info for embedding in artifact responses"""
@@ -240,6 +350,44 @@ class PackageArtifactResponse(BaseModel):
from_attributes = True
class GlobalArtifactResponse(BaseModel):
"""Artifact with project/package context for global listing"""
id: str
sha256: str
size: int
content_type: Optional[str]
original_name: Optional[str]
created_at: datetime
created_by: str
format_metadata: Optional[Dict[str, Any]] = None
ref_count: int = 0
# Context from tags/packages
projects: List[str] = [] # List of project names containing this artifact
packages: List[str] = [] # List of "project/package" paths
tags: List[str] = [] # List of "project/package:tag" references
class Config:
from_attributes = True
class GlobalTagResponse(BaseModel):
"""Tag with project/package context for global listing"""
id: UUID
name: str
artifact_id: str
created_at: datetime
created_by: str
project_name: str
package_name: str
artifact_size: Optional[int] = None
artifact_content_type: Optional[str] = None
class Config:
from_attributes = True
# Upload response
class UploadResponse(BaseModel):
artifact_id: str
@@ -254,6 +402,11 @@ class UploadResponse(BaseModel):
format_metadata: Optional[Dict[str, Any]] = None
deduplicated: bool = False
ref_count: int = 1 # Current reference count after this upload
# Enhanced metadata (Issue #19)
upload_id: Optional[UUID] = None # UUID of the upload record
content_type: Optional[str] = None
original_name: Optional[str] = None
created_at: Optional[datetime] = None
# Resumable upload schemas
@@ -440,6 +593,19 @@ class StorageStatsResponse(BaseModel):
storage_saved_bytes: int # Bytes saved through deduplication
class ConsistencyCheckResponse(BaseModel):
"""Result of S3/Database consistency check"""
total_artifacts_checked: int
orphaned_s3_objects: int # Objects in S3 but not in DB
missing_s3_objects: int # Records in DB but not in S3
size_mismatches: int # Records where DB size != S3 size
healthy: bool
orphaned_s3_keys: List[str] = [] # Limited list of orphaned S3 keys
missing_s3_keys: List[str] = [] # Limited list of missing S3 keys
size_mismatch_artifacts: List[Dict[str, Any]] = [] # Limited list of mismatches
class DeduplicationStatsResponse(BaseModel):
"""Deduplication effectiveness statistics"""
@@ -527,3 +693,173 @@ class StatsReportResponse(BaseModel):
format: str # "json", "csv", "markdown"
generated_at: datetime
content: str # The report content
# Authentication schemas
class LoginRequest(BaseModel):
"""Login request with username and password"""
username: str
password: str
class LoginResponse(BaseModel):
"""Login response with user info"""
id: UUID
username: str
email: Optional[str]
is_admin: bool
must_change_password: bool
class ChangePasswordRequest(BaseModel):
"""Change password request"""
current_password: str
new_password: str
class UserResponse(BaseModel):
"""User information response"""
id: UUID
username: str
email: Optional[str]
is_admin: bool
is_active: bool
must_change_password: bool
created_at: datetime
last_login: Optional[datetime]
class Config:
from_attributes = True
class UserCreate(BaseModel):
"""Create user request (admin only)"""
username: str
password: str
email: Optional[str] = None
is_admin: bool = False
class UserUpdate(BaseModel):
"""Update user request (admin only)"""
email: Optional[str] = None
is_admin: Optional[bool] = None
is_active: Optional[bool] = None
class ResetPasswordRequest(BaseModel):
"""Reset password request (admin only)"""
new_password: str
class APIKeyCreate(BaseModel):
"""Create API key request"""
name: str
description: Optional[str] = None
scopes: Optional[List[str]] = None
class APIKeyResponse(BaseModel):
"""API key response (without the secret key)"""
id: UUID
name: str
description: Optional[str]
scopes: Optional[List[str]]
created_at: datetime
expires_at: Optional[datetime]
last_used: Optional[datetime]
class Config:
from_attributes = True
class APIKeyCreateResponse(BaseModel):
"""API key creation response (includes the secret key - only shown once)"""
id: UUID
name: str
description: Optional[str]
scopes: Optional[List[str]]
key: str # The actual API key - only returned on creation
created_at: datetime
expires_at: Optional[datetime]
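# Illustrative sketch, continuing the authenticated client from the login
# example above: the plaintext key is returned only at creation time and can
# then be sent as a bearer token (both behaviors are exercised by the
# integration tests below).
created = client.post(
    "/api/v1/auth/keys",
    json={"name": "ci-key", "description": "example key"},
).json()
api_key = created["key"]  # starts with "orch_"; never returned again

me = client.get(
    "/api/v1/auth/me",
    headers={"Authorization": f"Bearer {api_key}"},
)
assert me.status_code == 200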
# OIDC Configuration schemas
class OIDCConfigResponse(BaseModel):
"""OIDC configuration response (hides client secret)"""
enabled: bool
issuer_url: str
client_id: str
has_client_secret: bool # True if secret is configured, but don't expose it
scopes: List[str]
auto_create_users: bool
admin_group: str
class OIDCConfigUpdate(BaseModel):
"""Update OIDC configuration"""
enabled: Optional[bool] = None
issuer_url: Optional[str] = None
client_id: Optional[str] = None
client_secret: Optional[str] = None # Only set if changing
scopes: Optional[List[str]] = None
auto_create_users: Optional[bool] = None
admin_group: Optional[str] = None
class OIDCStatusResponse(BaseModel):
"""Public OIDC status response"""
enabled: bool
issuer_url: Optional[str] = None # Only included if enabled
class OIDCLoginResponse(BaseModel):
"""OIDC login initiation response"""
authorization_url: str
# Access Permission schemas
class AccessPermissionCreate(BaseModel):
"""Grant access to a user for a project"""
username: str
level: str # 'read', 'write', or 'admin'
expires_at: Optional[datetime] = None
@field_validator('level')
@classmethod
def validate_level(cls, v):
if v not in ('read', 'write', 'admin'):
raise ValueError("level must be 'read', 'write', or 'admin'")
return v
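# Illustrative sketch: Pydantic runs the validator at construction time, so a
# bad level fails fast.
from pydantic import ValidationError

AccessPermissionCreate(username="alice", level="write")  # accepted
try:
    AccessPermissionCreate(username="alice", level="owner")
except ValidationError as exc:
    print(exc)  # "level must be 'read', 'write', or 'admin'"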
class AccessPermissionUpdate(BaseModel):
"""Update access permission"""
level: Optional[str] = None
expires_at: Optional[datetime] = None
@field_validator('level')
@classmethod
def validate_level(cls, v):
if v is not None and v not in ('read', 'write', 'admin'):
raise ValueError("level must be 'read', 'write', or 'admin'")
return v
class AccessPermissionResponse(BaseModel):
"""Access permission response"""
id: UUID
project_id: UUID
user_id: str
level: str
created_at: datetime
expires_at: Optional[datetime]
class Config:
from_attributes = True
class ProjectWithAccessResponse(ProjectResponse):
"""Project response with user's access level"""
user_access_level: Optional[str] = None

View File

@@ -6,7 +6,7 @@ from typing import List, Optional, Tuple
from sqlalchemy.orm import Session
import logging
from ..models import Artifact, Tag, Upload, Package
from ..models import Artifact, Tag
from ..repositories.artifact import ArtifactRepository
from ..repositories.tag import TagRepository
from ..storage import S3Storage
@@ -40,10 +40,14 @@ class ArtifactCleanupService:
artifact = self.artifact_repo.get_by_sha256(artifact_id)
if artifact:
artifact = self.artifact_repo.decrement_ref_count(artifact)
logger.info(f"Decremented ref_count for artifact {artifact_id}: now {artifact.ref_count}")
logger.info(
f"Decremented ref_count for artifact {artifact_id}: now {artifact.ref_count}"
)
return artifact
def on_tag_updated(self, old_artifact_id: str, new_artifact_id: str) -> Tuple[Optional[Artifact], Optional[Artifact]]:
def on_tag_updated(
self, old_artifact_id: str, new_artifact_id: str
) -> Tuple[Optional[Artifact], Optional[Artifact]]:
"""
Called when a tag is updated to point to a different artifact.
Decrements ref_count for old artifact, increments for new (if different).
@@ -58,13 +62,17 @@ class ArtifactCleanupService:
old_artifact = self.artifact_repo.get_by_sha256(old_artifact_id)
if old_artifact:
old_artifact = self.artifact_repo.decrement_ref_count(old_artifact)
logger.info(f"Decremented ref_count for old artifact {old_artifact_id}: now {old_artifact.ref_count}")
logger.info(
f"Decremented ref_count for old artifact {old_artifact_id}: now {old_artifact.ref_count}"
)
# Increment new artifact ref_count
new_artifact = self.artifact_repo.get_by_sha256(new_artifact_id)
if new_artifact:
new_artifact = self.artifact_repo.increment_ref_count(new_artifact)
logger.info(f"Incremented ref_count for new artifact {new_artifact_id}: now {new_artifact.ref_count}")
logger.info(
f"Incremented ref_count for new artifact {new_artifact_id}: now {new_artifact.ref_count}"
)
return old_artifact, new_artifact
@@ -84,11 +92,15 @@ class ArtifactCleanupService:
if artifact:
self.artifact_repo.decrement_ref_count(artifact)
affected_artifacts.append(tag.artifact_id)
logger.info(f"Decremented ref_count for artifact {tag.artifact_id} (package delete)")
logger.info(
f"Decremented ref_count for artifact {tag.artifact_id} (package delete)"
)
return affected_artifacts
def cleanup_orphaned_artifacts(self, batch_size: int = 100, dry_run: bool = False) -> List[str]:
def cleanup_orphaned_artifacts(
self, batch_size: int = 100, dry_run: bool = False
) -> List[str]:
"""
Find and delete artifacts with ref_count = 0.
@@ -116,7 +128,9 @@ class ArtifactCleanupService:
# Then delete from database
self.artifact_repo.delete(artifact)
deleted_ids.append(artifact.id)
logger.info(f"Deleted orphaned artifact from database: {artifact.id}")
logger.info(
f"Deleted orphaned artifact from database: {artifact.id}"
)
except Exception as e:
logger.error(f"Failed to delete artifact {artifact.id}: {e}")
@@ -128,10 +142,12 @@ class ArtifactCleanupService:
def get_orphaned_count(self) -> int:
"""Get count of artifacts with ref_count = 0."""
from sqlalchemy import func
return (
self.db.query(func.count(Artifact.id))
.filter(Artifact.ref_count == 0)
.scalar() or 0
.scalar()
or 0
)
def verify_ref_counts(self, fix: bool = False) -> List[dict]:
@@ -173,7 +189,9 @@ class ArtifactCleanupService:
if fix:
artifact.ref_count = max(actual_count, 1)
logger.warning(f"Fixed ref_count for artifact {artifact.id}: {mismatch['stored_ref_count']} -> {artifact.ref_count}")
logger.warning(
f"Fixed ref_count for artifact {artifact.id}: {mismatch['stored_ref_count']} -> {artifact.ref_count}"
)
if fix and mismatches:
self.db.commit()
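# Illustrative maintenance pass (a sketch: the constructor signature and the
# availability of a SQLAlchemy session `db` and an S3Storage `storage` are
# assumptions, not shown in this hunk).
service = ArtifactCleanupService(db, storage)

service.verify_ref_counts(fix=True)  # reconcile counters first
if service.get_orphaned_count():
    deleted = service.cleanup_orphaned_artifacts(batch_size=100, dry_run=False)
    print(f"deleted {len(deleted)} orphaned artifacts")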

View File

@@ -22,6 +22,13 @@ from botocore.exceptions import (
)
from .config import get_settings
from .checksum import (
ChecksumMismatchError,
HashingStreamWrapper,
VerifyingStreamWrapper,
compute_sha256,
is_valid_sha256,
)
settings = get_settings()
logger = logging.getLogger(__name__)
@@ -202,6 +209,9 @@ class StorageResult(NamedTuple):
md5: Optional[str] = None
sha1: Optional[str] = None
s3_etag: Optional[str] = None
already_existed: bool = (
False # True if artifact was deduplicated (S3 object already existed)
)
class S3StorageUnavailableError(StorageError):
@@ -354,6 +364,7 @@ class S3Storage:
md5=md5_hash,
sha1=sha1_hash,
s3_etag=s3_etag,
already_existed=exists,
)
def _store_multipart(self, file: BinaryIO, content_length: int) -> StorageResult:
@@ -433,6 +444,7 @@ class S3Storage:
md5=md5_hash,
sha1=sha1_hash,
s3_etag=s3_etag,
already_existed=True,
)
# Seek back to start for upload
@@ -486,6 +498,7 @@ class S3Storage:
md5=md5_hash,
sha1=sha1_hash,
s3_etag=s3_etag,
already_existed=False,
)
except Exception as e:
@@ -535,6 +548,7 @@ class S3Storage:
md5=md5_hash,
sha1=sha1_hash,
s3_etag=s3_etag,
already_existed=True,
)
# Upload based on size
@@ -615,6 +629,7 @@ class S3Storage:
md5=md5_hash,
sha1=sha1_hash,
s3_etag=s3_etag,
already_existed=False,
)
def initiate_resumable_upload(self, expected_hash: str) -> Dict[str, Any]:
@@ -868,6 +883,95 @@ class S3Storage:
logger.error(f"Unexpected error during storage health check: {e}")
return False
def get_verified(self, s3_key: str, expected_hash: str) -> bytes:
"""
Download and verify content matches expected SHA256 hash.
This method downloads the entire content, computes its hash, and
verifies it matches the expected hash before returning.
Args:
s3_key: The S3 storage key of the file
expected_hash: Expected SHA256 hash (64 hex characters)
Returns:
File content as bytes (only if verification passes)
Raises:
ChecksumMismatchError: If computed hash doesn't match expected
ClientError: If S3 operation fails
"""
if not is_valid_sha256(expected_hash):
raise ValueError(f"Invalid SHA256 hash format: {expected_hash}")
content = self.get(s3_key)
actual_hash = compute_sha256(content)
if actual_hash != expected_hash.lower():
raise ChecksumMismatchError(
expected=expected_hash.lower(),
actual=actual_hash,
s3_key=s3_key,
size=len(content),
)
logger.debug(f"Verification passed for {s3_key}: {actual_hash[:16]}...")
return content
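    # Illustrative usage sketch (assumes `storage` is an S3Storage instance;
    # the fruits/{hash[:2]}/{hash[2:4]}/{hash} key layout comes from the test
    # helpers later in this diff).
    #
    #   sha = "dffd6021bb2bd5b0af676290809ec3a53191dd81c7f70a4b28688a362182986f"
    #   s3_key = f"fruits/{sha[:2]}/{sha[2:4]}/{sha}"
    #   try:
    #       content = storage.get_verified(s3_key, sha)
    #   except ChecksumMismatchError:
    #       # stored bytes no longer hash to the recorded SHA256: corruption
    #       raise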
def get_stream_verified(
self,
s3_key: str,
expected_hash: str,
range_header: Optional[str] = None,
) -> Tuple[VerifyingStreamWrapper, int, Optional[str]]:
"""
Get a verifying stream wrapper for an object.
Returns a wrapper that computes the hash as chunks are read and
can verify after streaming completes. Note that verification happens
AFTER content has been streamed to the client.
IMPORTANT: For range requests, verification is not supported because
we cannot verify a partial download against the full file hash.
Args:
s3_key: The S3 storage key of the file
expected_hash: Expected SHA256 hash (64 hex characters)
range_header: Optional HTTP Range header (verification disabled if set)
Returns:
Tuple of (VerifyingStreamWrapper, content_length, content_range)
The wrapper has a verify() method to call after streaming.
Raises:
ValueError: If expected_hash is invalid format
ClientError: If S3 operation fails
"""
if not is_valid_sha256(expected_hash):
raise ValueError(f"Invalid SHA256 hash format: {expected_hash}")
# Get the S3 stream
stream, content_length, content_range = self.get_stream(s3_key, range_header)
# For range requests, we cannot verify (partial content)
# Return a HashingStreamWrapper that just tracks bytes without verification
if range_header or content_range:
logger.debug(
f"Range request for {s3_key} - verification disabled (partial content)"
)
# Return a basic hashing wrapper (caller should not verify)
hashing_wrapper = HashingStreamWrapper(stream)
return hashing_wrapper, content_length, content_range
# Create verifying wrapper
verifying_wrapper = VerifyingStreamWrapper(
stream=stream,
expected_hash=expected_hash,
s3_key=s3_key,
)
return verifying_wrapper, content_length, content_range
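    # Illustrative usage sketch: verification can only complete once the
    # stream is drained, so callers read everything before calling verify().
    # A file-like read() on the wrapper is an assumption of this sketch.
    #
    #   wrapper, length, content_range = storage.get_stream_verified(s3_key, sha)
    #   while chunk := wrapper.read(64 * 1024):
    #       ...  # e.g. write the chunk to the HTTP response
    #   wrapper.verify()  # raises ChecksumMismatchError on hash mismatch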
def verify_integrity(self, s3_key: str, expected_sha256: str) -> bool:
"""
Verify the integrity of a stored object by downloading and re-hashing.

View File

@@ -9,6 +9,8 @@ pydantic==2.5.3
pydantic-settings==2.1.0
python-jose[cryptography]==3.3.0
passlib[bcrypt]==1.7.4
bcrypt==4.0.1
slowapi==0.1.9
# Test dependencies
pytest>=7.4.0

View File

@@ -4,15 +4,14 @@ Test configuration and fixtures for Orchard backend tests.
This module provides:
- Database fixtures with test isolation
- Mock S3 storage using moto
- Test data factories for common scenarios
- Shared pytest fixtures
"""
import os
import pytest
import hashlib
from typing import Generator, BinaryIO
from unittest.mock import MagicMock, patch
import io
from typing import Generator
from unittest.mock import MagicMock
# Set test environment defaults before importing app modules
# Use setdefault to NOT override existing env vars (from docker-compose)
@@ -26,54 +25,27 @@ os.environ.setdefault("ORCHARD_S3_BUCKET", "test-bucket")
os.environ.setdefault("ORCHARD_S3_ACCESS_KEY_ID", "test")
os.environ.setdefault("ORCHARD_S3_SECRET_ACCESS_KEY", "test")
# =============================================================================
# Test Data Factories
# =============================================================================
def create_test_file(content: bytes = None, size: int = 1024) -> io.BytesIO:
"""
Create a test file with known content.
Args:
content: Specific content to use, or None to generate random-ish content
size: Size of generated content if content is None
Returns:
BytesIO object with the content
"""
if content is None:
content = os.urandom(size)
return io.BytesIO(content)
def compute_sha256(content: bytes) -> str:
"""Compute SHA256 hash of content as lowercase hex string."""
return hashlib.sha256(content).hexdigest()
def compute_md5(content: bytes) -> str:
"""Compute MD5 hash of content as lowercase hex string."""
return hashlib.md5(content).hexdigest()
def compute_sha1(content: bytes) -> str:
"""Compute SHA1 hash of content as lowercase hex string."""
return hashlib.sha1(content).hexdigest()
# Known test data with pre-computed hashes
TEST_CONTENT_HELLO = b"Hello, World!"
TEST_HASH_HELLO = "dffd6021bb2bd5b0af676290809ec3a53191dd81c7f70a4b28688a362182986f"
TEST_MD5_HELLO = "65a8e27d8879283831b664bd8b7f0ad4"
TEST_SHA1_HELLO = "0a0a9f2a6772942557ab5355d76af442f8f65e01"
TEST_CONTENT_EMPTY = b""
# Note: Empty content should be rejected by the storage layer
TEST_CONTENT_BINARY = bytes(range(256))
TEST_HASH_BINARY = compute_sha256(TEST_CONTENT_BINARY)
# Re-export factory functions for backward compatibility
from tests.factories import (
create_test_file,
compute_sha256,
compute_md5,
compute_sha1,
upload_test_file,
TEST_CONTENT_HELLO,
TEST_HASH_HELLO,
TEST_MD5_HELLO,
TEST_SHA1_HELLO,
TEST_CONTENT_EMPTY,
TEST_CONTENT_BINARY,
TEST_HASH_BINARY,
get_s3_client,
get_s3_bucket,
list_s3_objects_by_hash,
count_s3_objects_by_prefix,
s3_object_exists,
delete_s3_object_by_hash,
)
# =============================================================================
@@ -210,9 +182,10 @@ def test_app():
@pytest.fixture
def integration_client():
"""
Create a test client for integration tests.
Create an authenticated test client for integration tests.
Uses the real database and MinIO from docker-compose.local.yml.
Authenticates as admin for write operations.
"""
from httpx import Client
@@ -220,6 +193,15 @@ def integration_client():
base_url = os.environ.get("ORCHARD_TEST_URL", "http://localhost:8080")
with Client(base_url=base_url, timeout=30.0) as client:
# Login as admin to enable write operations
login_response = client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
# Login is best-effort: if it fails, continue without auth for backward
# compatibility; tests that require authenticated write access will then fail.
if login_response.status_code != 200:
    pass
yield client
@@ -289,126 +271,3 @@ def test_content():
content = f"test-content-{uuid.uuid4().hex}".encode()
sha256 = compute_sha256(content)
return (content, sha256)
def upload_test_file(
client,
project: str,
package: str,
content: bytes,
filename: str = "test.bin",
tag: str = None,
) -> dict:
"""
Helper function to upload a test file.
Returns the upload response as a dict.
"""
files = {"file": (filename, io.BytesIO(content), "application/octet-stream")}
data = {}
if tag:
data["tag"] = tag
response = client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files,
data=data if data else None,
)
assert response.status_code == 200, f"Upload failed: {response.text}"
return response.json()
# =============================================================================
# S3 Direct Access Helpers (for integration tests)
# =============================================================================
def get_s3_client():
"""
Create a boto3 S3 client for direct S3 access in integration tests.
Uses environment variables for configuration (same as the app).
Note: When running in container, S3 endpoint should be 'minio:9000' not 'localhost:9000'.
"""
import boto3
from botocore.config import Config
config = Config(s3={"addressing_style": "path"})
# Use the same endpoint as the app (minio:9000 in container, localhost:9000 locally)
endpoint = os.environ.get("ORCHARD_S3_ENDPOINT", "http://minio:9000")
return boto3.client(
"s3",
endpoint_url=endpoint,
region_name=os.environ.get("ORCHARD_S3_REGION", "us-east-1"),
aws_access_key_id=os.environ.get("ORCHARD_S3_ACCESS_KEY_ID", "minioadmin"),
aws_secret_access_key=os.environ.get(
"ORCHARD_S3_SECRET_ACCESS_KEY", "minioadmin"
),
config=config,
)
def get_s3_bucket():
"""Get the S3 bucket name from environment."""
return os.environ.get("ORCHARD_S3_BUCKET", "orchard-artifacts")
def list_s3_objects_by_hash(sha256_hash: str) -> list:
"""
List S3 objects that match a specific SHA256 hash.
Uses the fruits/{hash[:2]}/{hash[2:4]}/{hash} key pattern.
Returns list of matching object keys.
"""
client = get_s3_client()
bucket = get_s3_bucket()
prefix = f"fruits/{sha256_hash[:2]}/{sha256_hash[2:4]}/{sha256_hash}"
response = client.list_objects_v2(Bucket=bucket, Prefix=prefix)
if "Contents" not in response:
return []
return [obj["Key"] for obj in response["Contents"]]
def count_s3_objects_by_prefix(prefix: str) -> int:
"""
Count S3 objects with a given prefix.
Useful for checking if duplicate uploads created multiple objects.
"""
client = get_s3_client()
bucket = get_s3_bucket()
response = client.list_objects_v2(Bucket=bucket, Prefix=prefix)
if "Contents" not in response:
return 0
return len(response["Contents"])
def s3_object_exists(sha256_hash: str) -> bool:
"""
Check if an S3 object exists for a given SHA256 hash.
"""
objects = list_s3_objects_by_hash(sha256_hash)
return len(objects) > 0
def delete_s3_object_by_hash(sha256_hash: str) -> bool:
"""
Delete an S3 object by its SHA256 hash (for test cleanup).
"""
client = get_s3_client()
bucket = get_s3_bucket()
s3_key = f"fruits/{sha256_hash[:2]}/{sha256_hash[2:4]}/{sha256_hash}"
try:
client.delete_object(Bucket=bucket, Key=s3_key)
return True
except Exception:
return False

backend/tests/factories.py (new file, 288 lines)
View File

@@ -0,0 +1,288 @@
"""
Test data factories for Orchard backend tests.
This module provides factory functions for creating test data,
including test files, pre-computed hashes, and helper utilities.
"""
import hashlib
import io
import os
import uuid
from typing import Optional
# =============================================================================
# Hash Computation Utilities
# =============================================================================
def compute_sha256(content: bytes) -> str:
"""Compute SHA256 hash of content as lowercase hex string."""
return hashlib.sha256(content).hexdigest()
def compute_md5(content: bytes) -> str:
"""Compute MD5 hash of content as lowercase hex string."""
return hashlib.md5(content).hexdigest()
def compute_sha1(content: bytes) -> str:
"""Compute SHA1 hash of content as lowercase hex string."""
return hashlib.sha1(content).hexdigest()
# =============================================================================
# Test File Factories
# =============================================================================
def create_test_file(content: Optional[bytes] = None, size: int = 1024) -> io.BytesIO:
"""
Create a test file with known content.
Args:
content: Specific content to use, or None to generate random-ish content
size: Size of generated content if content is None
Returns:
BytesIO object with the content
"""
if content is None:
content = os.urandom(size)
return io.BytesIO(content)
def create_unique_content(prefix: str = "test-content") -> tuple[bytes, str]:
"""
Create unique test content with its SHA256 hash.
Args:
prefix: Prefix for the content string
Returns:
Tuple of (content_bytes, sha256_hash)
"""
content = f"{prefix}-{uuid.uuid4().hex}".encode()
sha256 = compute_sha256(content)
return content, sha256
# =============================================================================
# Known Test Data (Pre-computed hashes for deterministic tests)
# =============================================================================
TEST_CONTENT_HELLO = b"Hello, World!"
TEST_HASH_HELLO = "dffd6021bb2bd5b0af676290809ec3a53191dd81c7f70a4b28688a362182986f"
TEST_MD5_HELLO = "65a8e27d8879283831b664bd8b7f0ad4"
TEST_SHA1_HELLO = "0a0a9f2a6772942557ab5355d76af442f8f65e01"
TEST_CONTENT_EMPTY = b""
# Note: Empty content should be rejected by the storage layer
TEST_CONTENT_BINARY = bytes(range(256))
TEST_HASH_BINARY = compute_sha256(TEST_CONTENT_BINARY)
# =============================================================================
# API Test Helpers
# =============================================================================
def upload_test_file(
client,
project: str,
package: str,
content: bytes,
filename: str = "test.bin",
tag: Optional[str] = None,
) -> dict:
"""
Helper function to upload a test file via the API.
Args:
client: HTTP client (httpx or TestClient)
project: Project name
package: Package name
content: File content as bytes
filename: Original filename
tag: Optional tag to assign
Returns:
The upload response as a dict
"""
files = {"file": (filename, io.BytesIO(content), "application/octet-stream")}
data = {}
if tag:
data["tag"] = tag
response = client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files,
data=data if data else None,
)
assert response.status_code == 200, f"Upload failed: {response.text}"
return response.json()
# =============================================================================
# Project/Package Factories
# =============================================================================
def create_test_project(client, unique_id: Optional[str] = None) -> str:
"""
Create a test project via the API.
Args:
client: HTTP client
unique_id: Unique identifier for the project name
Returns:
Project name
"""
if unique_id is None:
unique_id = uuid.uuid4().hex[:8]
project_name = f"test-project-{unique_id}"
response = client.post(
"/api/v1/projects",
json={"name": project_name, "description": "Test project", "is_public": True},
)
assert response.status_code == 200, f"Failed to create project: {response.text}"
return project_name
def create_test_package(client, project: str, unique_id: Optional[str] = None) -> str:
"""
Create a test package via the API.
Args:
client: HTTP client
project: Project name
unique_id: Unique identifier for the package name
Returns:
Package name
"""
if unique_id is None:
unique_id = uuid.uuid4().hex[:8]
package_name = f"test-package-{unique_id}"
response = client.post(
f"/api/v1/project/{project}/packages",
json={"name": package_name, "description": "Test package"},
)
assert response.status_code == 200, f"Failed to create package: {response.text}"
return package_name
def delete_test_project(client, project: str) -> None:
"""
Delete a test project (cleanup helper).
Args:
client: HTTP client
project: Project name to delete
"""
try:
client.delete(f"/api/v1/projects/{project}")
except Exception:
pass # Ignore cleanup errors
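# Illustrative sketch: the factories compose in integration tests (here
# `client` stands in for the authenticated integration_client fixture).
project = create_test_project(client)
package = create_test_package(client, project)
try:
    content, sha256 = create_unique_content()
    result = upload_test_file(client, project, package, content, tag="v1")
    assert result["artifact_id"] == sha256  # artifact ID is the content hash
finally:
    delete_test_project(client, project)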
# =============================================================================
# S3 Test Helpers
# =============================================================================
def get_s3_client():
"""
Create a boto3 S3 client for direct S3 access in integration tests.
Uses environment variables for configuration (same as the app).
Note: When running in container, S3 endpoint should be 'minio:9000' not 'localhost:9000'.
"""
import boto3
from botocore.config import Config
config = Config(s3={"addressing_style": "path"})
# Use the same endpoint as the app (minio:9000 in container, localhost:9000 locally)
endpoint = os.environ.get("ORCHARD_S3_ENDPOINT", "http://minio:9000")
return boto3.client(
"s3",
endpoint_url=endpoint,
region_name=os.environ.get("ORCHARD_S3_REGION", "us-east-1"),
aws_access_key_id=os.environ.get("ORCHARD_S3_ACCESS_KEY_ID", "minioadmin"),
aws_secret_access_key=os.environ.get(
"ORCHARD_S3_SECRET_ACCESS_KEY", "minioadmin"
),
config=config,
)
def get_s3_bucket() -> str:
"""Get the S3 bucket name from environment."""
return os.environ.get("ORCHARD_S3_BUCKET", "orchard-artifacts")
def list_s3_objects_by_hash(sha256_hash: str) -> list:
"""
List S3 objects that match a specific SHA256 hash.
Uses the fruits/{hash[:2]}/{hash[2:4]}/{hash} key pattern.
Returns list of matching object keys.
"""
client = get_s3_client()
bucket = get_s3_bucket()
prefix = f"fruits/{sha256_hash[:2]}/{sha256_hash[2:4]}/{sha256_hash}"
response = client.list_objects_v2(Bucket=bucket, Prefix=prefix)
if "Contents" not in response:
return []
return [obj["Key"] for obj in response["Contents"]]
def count_s3_objects_by_prefix(prefix: str) -> int:
"""
Count S3 objects with a given prefix.
Useful for checking if duplicate uploads created multiple objects.
"""
client = get_s3_client()
bucket = get_s3_bucket()
response = client.list_objects_v2(Bucket=bucket, Prefix=prefix)
if "Contents" not in response:
return 0
return len(response["Contents"])
def s3_object_exists(sha256_hash: str) -> bool:
"""
Check if an S3 object exists for a given SHA256 hash.
"""
objects = list_s3_objects_by_hash(sha256_hash)
return len(objects) > 0
def delete_s3_object_by_hash(sha256_hash: str) -> bool:
"""
Delete an S3 object by its SHA256 hash (for test cleanup).
"""
client = get_s3_client()
bucket = get_s3_bucket()
s3_key = f"fruits/{sha256_hash[:2]}/{sha256_hash[2:4]}/{sha256_hash}"
try:
client.delete_object(Bucket=bucket, Key=s3_key)
return True
except Exception:
return False
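# Illustrative sketch: asserting that a duplicate upload did not create a
# second S3 object (`client`, `project`, `package` as in the sketch above).
content, sha256 = create_unique_content("dedup-check")
upload_test_file(client, project, package, content, tag="a")
upload_test_file(client, project, package, content, tag="b")

prefix = f"fruits/{sha256[:2]}/{sha256[2:4]}/{sha256}"
assert count_s3_objects_by_prefix(prefix) == 1  # one object, two tags
assert s3_object_exists(sha256)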

View File

@@ -0,0 +1,638 @@
"""
Integration tests for artifact API endpoints.
Tests cover:
- Artifact retrieval by ID
- Artifact stats endpoint
- Artifact provenance/history
- Artifact uploads listing
- Garbage collection endpoints
- Orphaned artifacts management
"""
import pytest
from tests.factories import compute_sha256, upload_test_file
class TestArtifactRetrieval:
"""Tests for artifact retrieval endpoints."""
@pytest.mark.integration
def test_get_artifact_by_id(self, integration_client, test_package):
"""Test retrieving an artifact by its SHA256 ID."""
project_name, package_name = test_package
content = b"artifact retrieval test"
expected_hash = compute_sha256(content)
upload_test_file(
integration_client, project_name, package_name, content, tag="v1"
)
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200
data = response.json()
assert data["id"] == expected_hash
assert data["sha256"] == expected_hash
assert data["size"] == len(content)
assert "ref_count" in data
assert "created_at" in data
@pytest.mark.integration
def test_get_nonexistent_artifact(self, integration_client):
"""Test getting a non-existent artifact returns 404."""
fake_hash = "a" * 64
response = integration_client.get(f"/api/v1/artifact/{fake_hash}")
assert response.status_code == 404
@pytest.mark.integration
def test_artifact_includes_tags(self, integration_client, test_package):
"""Test artifact response includes tags pointing to it."""
project_name, package_name = test_package
content = b"artifact with tags test"
expected_hash = compute_sha256(content)
upload_test_file(
integration_client, project_name, package_name, content, tag="tagged-v1"
)
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200
data = response.json()
assert "tags" in data
assert len(data["tags"]) >= 1
tag = data["tags"][0]
assert "name" in tag
assert "package_name" in tag
assert "project_name" in tag
class TestArtifactStats:
"""Tests for artifact statistics endpoint."""
@pytest.mark.integration
def test_artifact_stats_returns_valid_response(
self, integration_client, test_package, unique_test_id
):
"""Test artifact stats returns expected fields."""
project, package = test_package
content = f"artifact stats test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
upload_test_file(
integration_client, project, package, content, tag=f"art-{unique_test_id}"
)
response = integration_client.get(f"/api/v1/artifact/{expected_hash}/stats")
assert response.status_code == 200
data = response.json()
assert "artifact_id" in data
assert "sha256" in data
assert "size" in data
assert "ref_count" in data
assert "storage_savings" in data
assert "tags" in data
assert "projects" in data
assert "packages" in data
@pytest.mark.integration
def test_artifact_stats_not_found(self, integration_client):
"""Test artifact stats returns 404 for non-existent artifact."""
fake_hash = "0" * 64
response = integration_client.get(f"/api/v1/artifact/{fake_hash}/stats")
assert response.status_code == 404
@pytest.mark.integration
def test_artifact_stats_shows_correct_projects(
self, integration_client, unique_test_id
):
"""Test artifact stats shows all projects using the artifact."""
content = f"multi-project artifact {unique_test_id}".encode()
expected_hash = compute_sha256(content)
proj1 = f"art-stats-a-{unique_test_id}"
proj2 = f"art-stats-b-{unique_test_id}"
try:
# Create projects and packages
integration_client.post(
"/api/v1/projects",
json={"name": proj1, "description": "Test", "is_public": True},
)
integration_client.post(
"/api/v1/projects",
json={"name": proj2, "description": "Test", "is_public": True},
)
integration_client.post(
f"/api/v1/project/{proj1}/packages",
json={"name": "pkg", "description": "Test"},
)
integration_client.post(
f"/api/v1/project/{proj2}/packages",
json={"name": "pkg", "description": "Test"},
)
# Upload same content to both projects
upload_test_file(integration_client, proj1, "pkg", content, tag="v1")
upload_test_file(integration_client, proj2, "pkg", content, tag="v1")
# Check artifact stats
response = integration_client.get(f"/api/v1/artifact/{expected_hash}/stats")
assert response.status_code == 200
data = response.json()
assert len(data["projects"]) == 2
assert proj1 in data["projects"]
assert proj2 in data["projects"]
finally:
integration_client.delete(f"/api/v1/projects/{proj1}")
integration_client.delete(f"/api/v1/projects/{proj2}")
class TestArtifactProvenance:
"""Tests for artifact provenance/history endpoint."""
@pytest.mark.integration
def test_artifact_history_returns_200(self, integration_client, test_package):
"""Test artifact history endpoint returns 200."""
project_name, package_name = test_package
upload_result = upload_test_file(
integration_client,
project_name,
package_name,
b"provenance test content",
"prov.txt",
)
artifact_id = upload_result["artifact_id"]
response = integration_client.get(f"/api/v1/artifact/{artifact_id}/history")
assert response.status_code == 200
@pytest.mark.integration
def test_artifact_history_has_required_fields(
self, integration_client, test_package
):
"""Test artifact history has all required fields."""
project_name, package_name = test_package
upload_result = upload_test_file(
integration_client,
project_name,
package_name,
b"provenance fields test",
"fields.txt",
)
artifact_id = upload_result["artifact_id"]
response = integration_client.get(f"/api/v1/artifact/{artifact_id}/history")
assert response.status_code == 200
data = response.json()
assert "artifact_id" in data
assert "sha256" in data
assert "size" in data
assert "created_at" in data
assert "created_by" in data
assert "ref_count" in data
assert "first_uploaded_at" in data
assert "first_uploaded_by" in data
assert "upload_count" in data
assert "packages" in data
assert "tags" in data
assert "uploads" in data
@pytest.mark.integration
def test_artifact_history_not_found(self, integration_client):
"""Test non-existent artifact returns 404."""
fake_hash = "b" * 64
response = integration_client.get(f"/api/v1/artifact/{fake_hash}/history")
assert response.status_code == 404
@pytest.mark.integration
def test_artifact_history_with_tag(self, integration_client, test_package):
"""Test artifact history includes tag information when tagged."""
project_name, package_name = test_package
upload_result = upload_test_file(
integration_client,
project_name,
package_name,
b"tagged provenance test",
"tagged.txt",
tag="v1.0.0",
)
artifact_id = upload_result["artifact_id"]
response = integration_client.get(f"/api/v1/artifact/{artifact_id}/history")
assert response.status_code == 200
data = response.json()
assert len(data["tags"]) >= 1
tag = data["tags"][0]
assert "project_name" in tag
assert "package_name" in tag
assert "tag_name" in tag
class TestArtifactUploads:
"""Tests for artifact uploads listing endpoint."""
@pytest.mark.integration
def test_artifact_uploads_returns_200(self, integration_client, test_package):
"""Test artifact uploads endpoint returns 200."""
project_name, package_name = test_package
upload_result = upload_test_file(
integration_client,
project_name,
package_name,
b"artifact upload test",
"artifact.txt",
)
artifact_id = upload_result["artifact_id"]
response = integration_client.get(f"/api/v1/artifact/{artifact_id}/uploads")
assert response.status_code == 200
data = response.json()
assert "items" in data
assert "pagination" in data
assert len(data["items"]) >= 1
@pytest.mark.integration
def test_artifact_uploads_not_found(self, integration_client):
"""Test non-existent artifact returns 404."""
fake_hash = "a" * 64
response = integration_client.get(f"/api/v1/artifact/{fake_hash}/uploads")
assert response.status_code == 404
class TestOrphanedArtifacts:
"""Tests for orphaned artifacts management."""
@pytest.mark.integration
def test_list_orphaned_artifacts_returns_list(self, integration_client):
"""Test orphaned artifacts endpoint returns a list."""
response = integration_client.get("/api/v1/admin/orphaned-artifacts")
assert response.status_code == 200
assert isinstance(response.json(), list)
@pytest.mark.integration
def test_orphaned_artifact_has_required_fields(self, integration_client):
"""Test orphaned artifact response has required fields."""
response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1")
assert response.status_code == 200
data = response.json()
if len(data) > 0:
artifact = data[0]
assert "id" in artifact
assert "size" in artifact
assert "created_at" in artifact
assert "created_by" in artifact
assert "original_name" in artifact
@pytest.mark.integration
def test_orphaned_artifacts_respects_limit(self, integration_client):
"""Test orphaned artifacts endpoint respects limit parameter."""
response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=5")
assert response.status_code == 200
assert len(response.json()) <= 5
@pytest.mark.integration
def test_artifact_becomes_orphaned_when_tag_deleted(
self, integration_client, test_package, unique_test_id
):
"""Test artifact appears in orphaned list after tag is deleted."""
project, package = test_package
content = f"orphan test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Upload with tag
upload_test_file(integration_client, project, package, content, tag="temp-tag")
# Verify not in orphaned list
response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1000")
orphaned_ids = [a["id"] for a in response.json()]
assert expected_hash not in orphaned_ids
# Delete the tag
integration_client.delete(f"/api/v1/project/{project}/{package}/tags/temp-tag")
# Verify now in orphaned list
response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1000")
orphaned_ids = [a["id"] for a in response.json()]
assert expected_hash in orphaned_ids
class TestGarbageCollection:
"""Tests for garbage collection endpoint."""
@pytest.mark.integration
def test_garbage_collect_dry_run_returns_response(self, integration_client):
"""Test garbage collection dry run returns valid response."""
response = integration_client.post("/api/v1/admin/garbage-collect?dry_run=true")
assert response.status_code == 200
data = response.json()
assert "artifacts_deleted" in data
assert "bytes_freed" in data
assert "artifact_ids" in data
assert "dry_run" in data
assert data["dry_run"] is True
@pytest.mark.integration
def test_garbage_collect_dry_run_doesnt_delete(
self, integration_client, test_package, unique_test_id
):
"""Test garbage collection dry run doesn't actually delete artifacts."""
project, package = test_package
content = f"dry run test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Upload and delete tag to create orphan
upload_test_file(integration_client, project, package, content, tag="dry-run")
integration_client.delete(f"/api/v1/project/{project}/{package}/tags/dry-run")
# Verify artifact exists
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200
# Run garbage collection in dry-run mode
gc_response = integration_client.post(
"/api/v1/admin/garbage-collect?dry_run=true&limit=1000"
)
assert gc_response.status_code == 200
assert expected_hash in gc_response.json()["artifact_ids"]
# Verify artifact STILL exists
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200
@pytest.mark.integration
def test_garbage_collect_preserves_referenced_artifacts(
self, integration_client, test_package, unique_test_id
):
"""Test garbage collection doesn't delete artifacts with ref_count > 0."""
project, package = test_package
content = f"preserve test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Upload with tag (ref_count=1)
upload_test_file(integration_client, project, package, content, tag="keep-this")
# Verify artifact exists with ref_count=1
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200
assert response.json()["ref_count"] == 1
# Run garbage collection (dry_run to not affect other tests)
gc_response = integration_client.post(
"/api/v1/admin/garbage-collect?dry_run=true&limit=1000"
)
assert gc_response.status_code == 200
# Verify artifact was NOT in delete list
assert expected_hash not in gc_response.json()["artifact_ids"]
# Verify artifact still exists
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200
assert response.json()["ref_count"] == 1
@pytest.mark.integration
def test_garbage_collect_respects_limit(self, integration_client):
"""Test garbage collection respects limit parameter."""
response = integration_client.post(
"/api/v1/admin/garbage-collect?dry_run=true&limit=5"
)
assert response.status_code == 200
assert response.json()["artifacts_deleted"] <= 5
@pytest.mark.integration
def test_garbage_collect_returns_bytes_freed(self, integration_client):
"""Test garbage collection returns accurate bytes_freed."""
response = integration_client.post("/api/v1/admin/garbage-collect?dry_run=true")
assert response.status_code == 200
data = response.json()
assert data["bytes_freed"] >= 0
assert isinstance(data["bytes_freed"], int)
class TestGlobalUploads:
"""Tests for global uploads endpoint."""
@pytest.mark.integration
def test_global_uploads_returns_200(self, integration_client):
"""Test global uploads endpoint returns 200."""
response = integration_client.get("/api/v1/uploads")
assert response.status_code == 200
data = response.json()
assert "items" in data
assert "pagination" in data
@pytest.mark.integration
def test_global_uploads_pagination(self, integration_client):
"""Test global uploads endpoint respects pagination."""
response = integration_client.get("/api/v1/uploads?limit=5&page=1")
assert response.status_code == 200
data = response.json()
assert len(data["items"]) <= 5
assert data["pagination"]["limit"] == 5
assert data["pagination"]["page"] == 1
@pytest.mark.integration
def test_global_uploads_filter_by_project(self, integration_client, test_package):
"""Test filtering global uploads by project name."""
project_name, package_name = test_package
# Upload a file
upload_test_file(
integration_client,
project_name,
package_name,
b"global filter test",
"global.txt",
)
response = integration_client.get(f"/api/v1/uploads?project={project_name}")
assert response.status_code == 200
data = response.json()
for item in data["items"]:
assert item["project_name"] == project_name
@pytest.mark.integration
def test_global_uploads_has_more_field(self, integration_client):
"""Test pagination includes has_more field."""
response = integration_client.get("/api/v1/uploads?limit=1")
assert response.status_code == 200
data = response.json()
assert "has_more" in data["pagination"]
assert isinstance(data["pagination"]["has_more"], bool)
class TestGlobalArtifacts:
"""Tests for global artifacts endpoint."""
@pytest.mark.integration
def test_global_artifacts_returns_200(self, integration_client):
"""Test global artifacts endpoint returns 200."""
response = integration_client.get("/api/v1/artifacts")
assert response.status_code == 200
data = response.json()
assert "items" in data
assert "pagination" in data
@pytest.mark.integration
def test_global_artifacts_pagination(self, integration_client):
"""Test global artifacts endpoint respects pagination."""
response = integration_client.get("/api/v1/artifacts?limit=5&page=1")
assert response.status_code == 200
data = response.json()
assert len(data["items"]) <= 5
assert data["pagination"]["limit"] == 5
@pytest.mark.integration
def test_global_artifacts_filter_by_size(self, integration_client):
"""Test filtering global artifacts by size range."""
response = integration_client.get(
"/api/v1/artifacts?min_size=1&max_size=1000000"
)
assert response.status_code == 200
data = response.json()
for item in data["items"]:
assert 1 <= item["size"] <= 1000000
@pytest.mark.integration
def test_global_artifacts_sort_by_size(self, integration_client):
"""Test sorting global artifacts by size."""
response = integration_client.get("/api/v1/artifacts?sort=size&order=desc")
assert response.status_code == 200
data = response.json()
if len(data["items"]) > 1:
sizes = [item["size"] for item in data["items"]]
assert sizes == sorted(sizes, reverse=True)
@pytest.mark.integration
def test_global_artifacts_invalid_sort_returns_400(self, integration_client):
"""Test invalid sort field returns 400."""
response = integration_client.get("/api/v1/artifacts?sort=invalid_field")
assert response.status_code == 400
class TestGlobalTags:
"""Tests for global tags endpoint."""
@pytest.mark.integration
def test_global_tags_returns_200(self, integration_client):
"""Test global tags endpoint returns 200."""
response = integration_client.get("/api/v1/tags")
assert response.status_code == 200
data = response.json()
assert "items" in data
assert "pagination" in data
@pytest.mark.integration
def test_global_tags_pagination(self, integration_client):
"""Test global tags endpoint respects pagination."""
response = integration_client.get("/api/v1/tags?limit=5&page=1")
assert response.status_code == 200
data = response.json()
assert len(data["items"]) <= 5
assert data["pagination"]["limit"] == 5
@pytest.mark.integration
def test_global_tags_has_project_context(self, integration_client):
"""Test global tags response includes project/package context."""
response = integration_client.get("/api/v1/tags?limit=1")
assert response.status_code == 200
data = response.json()
if len(data["items"]) > 0:
item = data["items"][0]
assert "project_name" in item
assert "package_name" in item
assert "artifact_id" in item
@pytest.mark.integration
def test_global_tags_search_with_wildcard(self, integration_client):
"""Test global tags search supports wildcards."""
response = integration_client.get("/api/v1/tags?search=v*")
assert response.status_code == 200
# Just verify it doesn't error; results may vary
class TestAuditLogs:
"""Tests for global audit logs endpoint."""
@pytest.mark.integration
def test_list_audit_logs_returns_valid_response(self, integration_client):
"""Test audit logs endpoint returns valid paginated response."""
response = integration_client.get("/api/v1/audit-logs")
assert response.status_code == 200
data = response.json()
assert "items" in data
assert "pagination" in data
assert isinstance(data["items"], list)
pagination = data["pagination"]
assert "page" in pagination
assert "limit" in pagination
assert "total" in pagination
assert "total_pages" in pagination
@pytest.mark.integration
def test_audit_logs_respects_pagination(self, integration_client):
"""Test audit logs endpoint respects limit parameter."""
response = integration_client.get("/api/v1/audit-logs?limit=5")
assert response.status_code == 200
data = response.json()
assert len(data["items"]) <= 5
assert data["pagination"]["limit"] == 5
@pytest.mark.integration
def test_audit_logs_filter_by_action(self, integration_client, test_package):
"""Test filtering audit logs by action type."""
project_name, package_name = test_package
response = integration_client.get("/api/v1/audit-logs?action=project.create")
assert response.status_code == 200
data = response.json()
for item in data["items"]:
assert item["action"] == "project.create"
@pytest.mark.integration
def test_audit_log_entry_has_required_fields(
self, integration_client, test_project
):
"""Test audit log entries have all required fields."""
response = integration_client.get("/api/v1/audit-logs?limit=10")
assert response.status_code == 200
data = response.json()
if data["items"]:
item = data["items"][0]
assert "id" in item
assert "action" in item
assert "resource" in item
assert "user_id" in item
assert "timestamp" in item

View File

@@ -0,0 +1,760 @@
"""Integration tests for authentication API endpoints."""
import pytest
from uuid import uuid4
class TestAuthLogin:
"""Tests for login endpoint."""
@pytest.mark.integration
def test_login_success(self, integration_client):
"""Test successful login with default admin credentials."""
response = integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
assert response.status_code == 200
data = response.json()
assert data["username"] == "admin"
assert data["is_admin"] is True
assert "orchard_session" in response.cookies
@pytest.mark.integration
def test_login_invalid_password(self, integration_client):
"""Test login with wrong password."""
response = integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "wrongpassword"},
)
assert response.status_code == 401
assert "Invalid username or password" in response.json()["detail"]
@pytest.mark.integration
def test_login_nonexistent_user(self, integration_client):
"""Test login with non-existent user."""
response = integration_client.post(
"/api/v1/auth/login",
json={"username": "nonexistent", "password": "password"},
)
assert response.status_code == 401
class TestAuthLogout:
"""Tests for logout endpoint."""
@pytest.mark.integration
def test_logout_success(self, integration_client):
"""Test successful logout."""
# First login
login_response = integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
assert login_response.status_code == 200
# Then logout
logout_response = integration_client.post("/api/v1/auth/logout")
assert logout_response.status_code == 200
assert "Logged out successfully" in logout_response.json()["message"]
@pytest.mark.integration
def test_logout_without_session(self, integration_client):
"""Test logout without being logged in."""
response = integration_client.post("/api/v1/auth/logout")
# Should succeed even without session
assert response.status_code == 200
class TestAuthMe:
"""Tests for get current user endpoint."""
@pytest.mark.integration
def test_get_me_authenticated(self, integration_client):
"""Test getting current user when authenticated."""
# Login first
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
response = integration_client.get("/api/v1/auth/me")
assert response.status_code == 200
data = response.json()
assert data["username"] == "admin"
assert data["is_admin"] is True
assert "id" in data
assert "created_at" in data
@pytest.mark.integration
def test_get_me_unauthenticated(self, integration_client):
"""Test getting current user without authentication."""
# Clear any existing cookies
integration_client.cookies.clear()
response = integration_client.get("/api/v1/auth/me")
assert response.status_code == 401
assert "Not authenticated" in response.json()["detail"]
class TestAuthChangePassword:
"""Tests for change password endpoint."""
@pytest.mark.integration
def test_change_password_success(self, integration_client):
"""Test successful password change."""
# Login first
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
# Change password
response = integration_client.post(
"/api/v1/auth/change-password",
json={"current_password": "changeme123", "new_password": "newpassword123"},
)
assert response.status_code == 200
# Verify old password no longer works
integration_client.cookies.clear()
response = integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
assert response.status_code == 401
# Verify new password works
response = integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "newpassword123"},
)
assert response.status_code == 200
# Reset password back to original for other tests
reset_response = integration_client.post(
"/api/v1/auth/change-password",
json={"current_password": "newpassword123", "new_password": "changeme123"},
)
assert reset_response.status_code == 200, "Failed to reset admin password back to default"
@pytest.mark.integration
def test_change_password_wrong_current(self, integration_client):
"""Test password change with wrong current password."""
# Login first
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
response = integration_client.post(
"/api/v1/auth/change-password",
json={"current_password": "wrongpassword", "new_password": "newpassword"},
)
assert response.status_code == 400
assert "Current password is incorrect" in response.json()["detail"]
class TestAPIKeys:
"""Tests for API key management endpoints."""
@pytest.mark.integration
def test_create_and_list_api_key(self, integration_client):
"""Test creating and listing API keys."""
# Login first
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
# Create API key
create_response = integration_client.post(
"/api/v1/auth/keys",
json={"name": "test-key", "description": "Test API key"},
)
assert create_response.status_code == 200
data = create_response.json()
assert data["name"] == "test-key"
assert data["description"] == "Test API key"
assert "key" in data
assert data["key"].startswith("orch_")
key_id = data["id"]
api_key = data["key"]
# List API keys
list_response = integration_client.get("/api/v1/auth/keys")
assert list_response.status_code == 200
keys = list_response.json()
assert any(k["id"] == key_id for k in keys)
# Clean up - delete the key
integration_client.delete(f"/api/v1/auth/keys/{key_id}")
@pytest.mark.integration
def test_use_api_key_for_auth(self, integration_client):
"""Test using API key for authentication."""
# Login and create API key
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
create_response = integration_client.post(
"/api/v1/auth/keys",
json={"name": "auth-test-key"},
)
api_key = create_response.json()["key"]
key_id = create_response.json()["id"]
# Clear cookies and use API key
integration_client.cookies.clear()
response = integration_client.get(
"/api/v1/auth/me",
headers={"Authorization": f"Bearer {api_key}"},
)
assert response.status_code == 200
assert response.json()["username"] == "admin"
# Clean up
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
integration_client.delete(f"/api/v1/auth/keys/{key_id}")
@pytest.mark.integration
def test_delete_api_key(self, integration_client):
"""Test revoking an API key."""
# Login and create API key
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
create_response = integration_client.post(
"/api/v1/auth/keys",
json={"name": "delete-test-key"},
)
key_id = create_response.json()["id"]
api_key = create_response.json()["key"]
# Delete the key
delete_response = integration_client.delete(f"/api/v1/auth/keys/{key_id}")
assert delete_response.status_code == 200
# Verify key no longer works
integration_client.cookies.clear()
response = integration_client.get(
"/api/v1/auth/me",
headers={"Authorization": f"Bearer {api_key}"},
)
assert response.status_code == 401
class TestAdminUserManagement:
"""Tests for admin user management endpoints."""
@pytest.mark.integration
def test_list_users(self, integration_client):
"""Test listing users as admin."""
# Login as admin
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
response = integration_client.get("/api/v1/admin/users")
assert response.status_code == 200
users = response.json()
assert len(users) >= 1
assert any(u["username"] == "admin" for u in users)
@pytest.mark.integration
def test_create_user(self, integration_client):
"""Test creating a new user as admin."""
# Login as admin
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
# Create new user
test_username = f"testuser_{uuid4().hex[:8]}"
response = integration_client.post(
"/api/v1/admin/users",
json={
"username": test_username,
"password": "testpassword",
"email": "test@example.com",
},
)
assert response.status_code == 200
data = response.json()
assert data["username"] == test_username
assert data["email"] == "test@example.com"
assert data["is_admin"] is False
# Verify new user can login
integration_client.cookies.clear()
login_response = integration_client.post(
"/api/v1/auth/login",
json={"username": test_username, "password": "testpassword"},
)
assert login_response.status_code == 200
@pytest.mark.integration
def test_update_user(self, integration_client):
"""Test updating a user as admin."""
# Login as admin
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
# Create a test user
test_username = f"updateuser_{uuid4().hex[:8]}"
integration_client.post(
"/api/v1/admin/users",
json={"username": test_username, "password": "password"},
)
# Update the user
response = integration_client.put(
f"/api/v1/admin/users/{test_username}",
json={"email": "updated@example.com", "is_admin": True},
)
assert response.status_code == 200
data = response.json()
assert data["email"] == "updated@example.com"
assert data["is_admin"] is True
@pytest.mark.integration
def test_reset_user_password(self, integration_client):
"""Test resetting a user's password as admin."""
# Login as admin
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
# Create a test user
test_username = f"resetuser_{uuid4().hex[:8]}"
integration_client.post(
"/api/v1/admin/users",
json={"username": test_username, "password": "oldpassword"},
)
# Reset password
response = integration_client.post(
f"/api/v1/admin/users/{test_username}/reset-password",
json={"new_password": "newpassword"},
)
assert response.status_code == 200
# Verify new password works
integration_client.cookies.clear()
login_response = integration_client.post(
"/api/v1/auth/login",
json={"username": test_username, "password": "newpassword"},
)
assert login_response.status_code == 200
@pytest.mark.integration
def test_non_admin_cannot_access_admin_endpoints(self, integration_client):
"""Test that non-admin users cannot access admin endpoints."""
# Login as admin and create non-admin user
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
test_username = f"nonadmin_{uuid4().hex[:8]}"
integration_client.post(
"/api/v1/admin/users",
json={"username": test_username, "password": "password", "is_admin": False},
)
# Login as non-admin
integration_client.cookies.clear()
integration_client.post(
"/api/v1/auth/login",
json={"username": test_username, "password": "password"},
)
# Try to access admin endpoints
response = integration_client.get("/api/v1/admin/users")
assert response.status_code == 403
assert "Admin privileges required" in response.json()["detail"]
class TestSecurityEdgeCases:
"""Tests for security edge cases and validation."""
@pytest.mark.integration
def test_login_inactive_user(self, integration_client):
"""Test that inactive users cannot login."""
# Login as admin and create a user
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
test_username = f"inactive_{uuid4().hex[:8]}"
integration_client.post(
"/api/v1/admin/users",
json={"username": test_username, "password": "password123"},
)
# Deactivate the user
integration_client.put(
f"/api/v1/admin/users/{test_username}",
json={"is_active": False},
)
# Try to login as inactive user
integration_client.cookies.clear()
response = integration_client.post(
"/api/v1/auth/login",
json={"username": test_username, "password": "password123"},
)
assert response.status_code == 401
assert "Invalid username or password" in response.json()["detail"]
@pytest.mark.integration
def test_password_too_short_on_create(self, integration_client):
"""Test that short passwords are rejected when creating users."""
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
response = integration_client.post(
"/api/v1/admin/users",
json={"username": f"shortpw_{uuid4().hex[:8]}", "password": "short"},
)
assert response.status_code == 400
assert "at least 8 characters" in response.json()["detail"]
@pytest.mark.integration
def test_password_too_short_on_change(self, integration_client):
"""Test that short passwords are rejected when changing password."""
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
response = integration_client.post(
"/api/v1/auth/change-password",
json={"current_password": "changeme123", "new_password": "short"},
)
assert response.status_code == 400
assert "at least 8 characters" in response.json()["detail"]
@pytest.mark.integration
def test_password_too_short_on_reset(self, integration_client):
"""Test that short passwords are rejected when resetting password."""
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
# Create a test user first
test_username = f"resetshort_{uuid4().hex[:8]}"
integration_client.post(
"/api/v1/admin/users",
json={"username": test_username, "password": "password123"},
)
response = integration_client.post(
f"/api/v1/admin/users/{test_username}/reset-password",
json={"new_password": "short"},
)
assert response.status_code == 400
assert "at least 8 characters" in response.json()["detail"]
@pytest.mark.integration
def test_duplicate_username_rejected(self, integration_client):
"""Test that duplicate usernames are rejected."""
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
test_username = f"duplicate_{uuid4().hex[:8]}"
# Create user first time
response1 = integration_client.post(
"/api/v1/admin/users",
json={"username": test_username, "password": "password123"},
)
assert response1.status_code == 200
# Try to create same username again
response2 = integration_client.post(
"/api/v1/admin/users",
json={"username": test_username, "password": "password456"},
)
assert response2.status_code == 409
assert "already exists" in response2.json()["detail"]
@pytest.mark.integration
def test_cannot_delete_other_users_api_key(self, integration_client):
"""Test that users cannot delete API keys owned by other users."""
# Login as admin and create an API key
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
create_response = integration_client.post(
"/api/v1/auth/keys",
json={"name": "admin-key"},
)
admin_key_id = create_response.json()["id"]
# Create a non-admin user
test_username = f"nonadmin_{uuid4().hex[:8]}"
integration_client.post(
"/api/v1/admin/users",
json={"username": test_username, "password": "password123"},
)
# Login as non-admin
integration_client.cookies.clear()
integration_client.post(
"/api/v1/auth/login",
json={"username": test_username, "password": "password123"},
)
# Try to delete admin's API key
response = integration_client.delete(f"/api/v1/auth/keys/{admin_key_id}")
assert response.status_code == 403
assert "Cannot delete another user's API key" in response.json()["detail"]
# Cleanup: login as admin and delete the key
integration_client.cookies.clear()
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
integration_client.delete(f"/api/v1/auth/keys/{admin_key_id}")
@pytest.mark.integration
def test_sessions_invalidated_on_password_change(self, integration_client):
"""Test that all sessions are invalidated when password is changed."""
# Create a test user
integration_client.post(
"/api/v1/auth/login",
json={"username": "admin", "password": "changeme123"},
)
test_username = f"sessiontest_{uuid4().hex[:8]}"
integration_client.post(
"/api/v1/admin/users",
json={"username": test_username, "password": "password123"},
)
# Login as test user
integration_client.cookies.clear()
login_response = integration_client.post(
"/api/v1/auth/login",
json={"username": test_username, "password": "password123"},
)
assert login_response.status_code == 200
# Verify session works
me_response = integration_client.get("/api/v1/auth/me")
assert me_response.status_code == 200
# Change password
integration_client.post(
"/api/v1/auth/change-password",
json={"current_password": "password123", "new_password": "newpassword123"},
)
# Old session should be invalidated - try to access /me
# (note: the change-password call itself may have cleared the session cookie)
me_response2 = integration_client.get("/api/v1/auth/me")
# This should fail because all sessions were invalidated
assert me_response2.status_code == 401

View File

@@ -0,0 +1,345 @@
"""
Integration tests for package API endpoints.
Tests cover:
- Package CRUD operations
- Package listing with pagination, search, filtering
- Package stats endpoint
- Package-level audit logs
- Cascade delete behavior
"""
import pytest
from tests.factories import compute_sha256, upload_test_file
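# The helpers imported above are assumed to behave roughly like this minimal
# sketch (the real definitions live in tests/factories.py):
#
# import hashlib, io
#
# def compute_sha256(data: bytes) -> str:
#     return hashlib.sha256(data).hexdigest()
#
# def upload_test_file(client, project, package, content, filename="test.bin", tag=None):
#     files = {"file": (filename, io.BytesIO(content), "application/octet-stream")}
#     data = {"tag": tag} if tag else {}
#     response = client.post(f"/api/v1/project/{project}/{package}/upload", files=files, data=data)
#     assert response.status_code == 200
#     return response.json()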
class TestPackageCRUD:
"""Tests for package create, read, update, delete operations."""
@pytest.mark.integration
def test_create_package(self, integration_client, test_project, unique_test_id):
"""Test creating a new package."""
package_name = f"test-create-pkg-{unique_test_id}"
response = integration_client.post(
f"/api/v1/project/{test_project}/packages",
json={
"name": package_name,
"description": "Test package",
"format": "npm",
"platform": "linux",
},
)
assert response.status_code == 200
data = response.json()
assert data["name"] == package_name
assert data["description"] == "Test package"
assert data["format"] == "npm"
assert data["platform"] == "linux"
@pytest.mark.integration
def test_get_package(self, integration_client, test_package):
"""Test getting a package by name."""
project_name, package_name = test_package
response = integration_client.get(
f"/api/v1/project/{project_name}/packages/{package_name}"
)
assert response.status_code == 200
data = response.json()
assert data["name"] == package_name
@pytest.mark.integration
def test_get_nonexistent_package(self, integration_client, test_project):
"""Test getting a non-existent package returns 404."""
response = integration_client.get(
f"/api/v1/project/{test_project}/packages/nonexistent-pkg"
)
assert response.status_code == 404
@pytest.mark.integration
def test_list_packages(self, integration_client, test_package):
"""Test listing packages includes created package."""
project_name, package_name = test_package
response = integration_client.get(f"/api/v1/project/{project_name}/packages")
assert response.status_code == 200
data = response.json()
assert "items" in data
assert "pagination" in data
package_names = [p["name"] for p in data["items"]]
assert package_name in package_names
@pytest.mark.integration
def test_delete_package(self, integration_client, test_project, unique_test_id):
"""Test deleting a package."""
package_name = f"test-delete-pkg-{unique_test_id}"
# Create package
integration_client.post(
f"/api/v1/project/{test_project}/packages",
json={"name": package_name, "description": "To be deleted"},
)
# Delete package
response = integration_client.delete(
f"/api/v1/project/{test_project}/packages/{package_name}"
)
assert response.status_code == 204
# Verify deleted
response = integration_client.get(
f"/api/v1/project/{test_project}/packages/{package_name}"
)
assert response.status_code == 404
class TestPackageListingFilters:
"""Tests for package listing with filters and pagination."""
@pytest.mark.integration
def test_packages_pagination(self, integration_client, test_project):
"""Test package listing respects pagination parameters."""
response = integration_client.get(
f"/api/v1/project/{test_project}/packages?page=1&limit=5"
)
assert response.status_code == 200
data = response.json()
assert len(data["items"]) <= 5
assert data["pagination"]["limit"] == 5
assert data["pagination"]["page"] == 1
@pytest.mark.integration
def test_packages_filter_by_format(
self, integration_client, test_project, unique_test_id
):
"""Test package filtering by format."""
# Create a package with specific format
package_name = f"npm-pkg-{unique_test_id}"
integration_client.post(
f"/api/v1/project/{test_project}/packages",
json={"name": package_name, "format": "npm"},
)
response = integration_client.get(
f"/api/v1/project/{test_project}/packages?format=npm"
)
assert response.status_code == 200
data = response.json()
for pkg in data["items"]:
assert pkg["format"] == "npm"
@pytest.mark.integration
def test_packages_filter_by_platform(
self, integration_client, test_project, unique_test_id
):
"""Test package filtering by platform."""
# Create a package with specific platform
package_name = f"linux-pkg-{unique_test_id}"
integration_client.post(
f"/api/v1/project/{test_project}/packages",
json={"name": package_name, "platform": "linux"},
)
response = integration_client.get(
f"/api/v1/project/{test_project}/packages?platform=linux"
)
assert response.status_code == 200
data = response.json()
for pkg in data["items"]:
assert pkg["platform"] == "linux"
class TestPackageStats:
"""Tests for package statistics endpoint."""
@pytest.mark.integration
def test_package_stats_returns_valid_response(
self, integration_client, test_package
):
"""Test package stats endpoint returns expected fields."""
project, package = test_package
response = integration_client.get(
f"/api/v1/project/{project}/packages/{package}/stats"
)
assert response.status_code == 200
data = response.json()
assert "package_id" in data
assert "package_name" in data
assert "project_name" in data
assert "tag_count" in data
assert "artifact_count" in data
assert "total_size_bytes" in data
assert "upload_count" in data
assert "deduplicated_uploads" in data
assert "storage_saved_bytes" in data
assert "deduplication_ratio" in data
@pytest.mark.integration
def test_package_stats_not_found(self, integration_client, test_project):
"""Test package stats returns 404 for non-existent package."""
response = integration_client.get(
f"/api/v1/project/{test_project}/packages/nonexistent-package/stats"
)
assert response.status_code == 404
class TestPackageAuditLogs:
"""Tests for package-level audit logs endpoint."""
@pytest.mark.integration
def test_package_audit_logs_returns_200(self, integration_client, test_package):
"""Test package audit logs endpoint returns 200."""
project_name, package_name = test_package
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/audit-logs"
)
assert response.status_code == 200
data = response.json()
assert "items" in data
assert "pagination" in data
@pytest.mark.integration
def test_package_audit_logs_project_not_found(self, integration_client):
"""Test non-existent project returns 404."""
response = integration_client.get(
"/api/v1/project/nonexistent/nonexistent/audit-logs"
)
assert response.status_code == 404
@pytest.mark.integration
def test_package_audit_logs_package_not_found(
self, integration_client, test_project
):
"""Test non-existent package returns 404."""
response = integration_client.get(
f"/api/v1/project/{test_project}/nonexistent-package/audit-logs"
)
assert response.status_code == 404
class TestPackageCascadeDelete:
"""Tests for cascade delete behavior when deleting packages."""
@pytest.mark.integration
def test_ref_count_decrements_on_package_delete(
self, integration_client, unique_test_id
):
"""Test ref_count decrements for all tags when package is deleted."""
project_name = f"cascade-pkg-{unique_test_id}"
package_name = f"test-pkg-{unique_test_id}"
# Create project
response = integration_client.post(
"/api/v1/projects",
json={
"name": project_name,
"description": "Test project",
"is_public": True,
},
)
assert response.status_code == 200
# Create package
response = integration_client.post(
f"/api/v1/project/{project_name}/packages",
json={"name": package_name, "description": "Test package"},
)
assert response.status_code == 200
# Upload content with multiple tags
content = f"cascade delete test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
upload_test_file(
integration_client, project_name, package_name, content, tag="v1"
)
upload_test_file(
integration_client, project_name, package_name, content, tag="v2"
)
upload_test_file(
integration_client, project_name, package_name, content, tag="v3"
)
# Verify ref_count is 3
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 3
# Delete the package
delete_response = integration_client.delete(
f"/api/v1/project/{project_name}/packages/{package_name}"
)
assert delete_response.status_code == 204
# Verify ref_count is 0
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 0
# Cleanup
integration_client.delete(f"/api/v1/projects/{project_name}")
class TestPackageUploads:
"""Tests for package-level uploads endpoint."""
@pytest.mark.integration
def test_package_uploads_returns_200(self, integration_client, test_package):
"""Test package uploads endpoint returns 200."""
project_name, package_name = test_package
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/uploads"
)
assert response.status_code == 200
data = response.json()
assert "items" in data
assert "pagination" in data
@pytest.mark.integration
def test_package_uploads_after_upload(self, integration_client, test_package):
"""Test uploads are recorded after file upload."""
project_name, package_name = test_package
# Upload a file
upload_result = upload_test_file(
integration_client,
project_name,
package_name,
b"test upload content",
"test.txt",
)
assert upload_result["artifact_id"]
# Check uploads endpoint
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/uploads"
)
assert response.status_code == 200
data = response.json()
assert len(data["items"]) >= 1
# Verify upload record fields
upload = data["items"][0]
assert "artifact_id" in upload
assert "package_name" in upload
assert "project_name" in upload
assert "uploaded_at" in upload
assert "uploaded_by" in upload
@pytest.mark.integration
def test_package_uploads_project_not_found(self, integration_client):
"""Test non-existent project returns 404."""
response = integration_client.get(
"/api/v1/project/nonexistent/nonexistent/uploads"
)
assert response.status_code == 404

View File

@@ -0,0 +1,325 @@
"""
Integration tests for project API endpoints.
Tests cover:
- Project CRUD operations
- Project listing with pagination, search, and sorting
- Project stats endpoint
- Project-level audit logs
- Cascade delete behavior
"""
import pytest
from tests.factories import compute_sha256, upload_test_file
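# The fixtures used below are assumed to follow the usual create/yield/teardown
# pattern; a sketch, not the actual conftest.py:
#
# from uuid import uuid4
#
# @pytest.fixture
# def unique_test_id():
#     return f"test-{uuid4().hex[:8]}"
#
# @pytest.fixture
# def test_project(integration_client, unique_test_id):
#     name = f"test-project-{unique_test_id}"
#     integration_client.post("/api/v1/projects", json={"name": name, "is_public": True})
#     yield name
#     integration_client.delete(f"/api/v1/projects/{name}")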
class TestProjectCRUD:
"""Tests for project create, read, update, delete operations."""
@pytest.mark.integration
def test_create_project(self, integration_client, unique_test_id):
"""Test creating a new project."""
project_name = f"test-create-{unique_test_id}"
try:
response = integration_client.post(
"/api/v1/projects",
json={
"name": project_name,
"description": "Test project",
"is_public": True,
},
)
assert response.status_code == 200
data = response.json()
assert data["name"] == project_name
assert data["description"] == "Test project"
assert data["is_public"] is True
assert "id" in data
assert "created_at" in data
finally:
integration_client.delete(f"/api/v1/projects/{project_name}")
@pytest.mark.integration
def test_get_project(self, integration_client, test_project):
"""Test getting a project by name."""
response = integration_client.get(f"/api/v1/projects/{test_project}")
assert response.status_code == 200
data = response.json()
assert data["name"] == test_project
@pytest.mark.integration
def test_get_nonexistent_project(self, integration_client):
"""Test getting a non-existent project returns 404."""
response = integration_client.get("/api/v1/projects/nonexistent-project-xyz")
assert response.status_code == 404
@pytest.mark.integration
def test_list_projects(self, integration_client, test_project):
"""Test listing projects includes created project."""
# Search specifically for our test project to avoid pagination issues
response = integration_client.get(f"/api/v1/projects?search={test_project}")
assert response.status_code == 200
data = response.json()
assert "items" in data
assert "pagination" in data
project_names = [p["name"] for p in data["items"]]
assert test_project in project_names
@pytest.mark.integration
def test_delete_project(self, integration_client, unique_test_id):
"""Test deleting a project."""
project_name = f"test-delete-{unique_test_id}"
# Create project
integration_client.post(
"/api/v1/projects",
json={"name": project_name, "description": "To be deleted"},
)
# Delete project
response = integration_client.delete(f"/api/v1/projects/{project_name}")
assert response.status_code == 204
# Verify deleted
response = integration_client.get(f"/api/v1/projects/{project_name}")
assert response.status_code == 404
class TestProjectListingFilters:
"""Tests for project listing with filters and pagination."""
@pytest.mark.integration
def test_projects_pagination(self, integration_client):
"""Test project listing respects pagination parameters."""
response = integration_client.get("/api/v1/projects?page=1&limit=5")
assert response.status_code == 200
data = response.json()
assert len(data["items"]) <= 5
assert data["pagination"]["limit"] == 5
assert data["pagination"]["page"] == 1
assert "has_more" in data["pagination"]
@pytest.mark.integration
def test_projects_search(self, integration_client, test_project):
"""Test project search by name."""
# Search using the unique portion of our test project name
# test_project format is "test-project-test-{uuid[:8]}"
unique_part = test_project.split("-")[-1] # Get the UUID portion
response = integration_client.get(
f"/api/v1/projects?search={unique_part}"
)
assert response.status_code == 200
data = response.json()
# Our project should be in results
project_names = [p["name"] for p in data["items"]]
assert test_project in project_names
@pytest.mark.integration
def test_projects_sort_by_name(self, integration_client):
"""Test project sorting by name."""
response = integration_client.get("/api/v1/projects?sort=name&order=asc")
assert response.status_code == 200
data = response.json()
names = [p["name"] for p in data["items"]]
assert names == sorted(names)
class TestProjectStats:
"""Tests for project statistics endpoint."""
@pytest.mark.integration
def test_project_stats_returns_valid_response(
self, integration_client, test_project
):
"""Test project stats endpoint returns expected fields."""
response = integration_client.get(f"/api/v1/projects/{test_project}/stats")
assert response.status_code == 200
data = response.json()
assert "project_id" in data
assert "project_name" in data
assert "package_count" in data
assert "tag_count" in data
assert "artifact_count" in data
assert "total_size_bytes" in data
assert "upload_count" in data
assert "deduplicated_uploads" in data
assert "storage_saved_bytes" in data
assert "deduplication_ratio" in data
@pytest.mark.integration
def test_project_stats_not_found(self, integration_client):
"""Test project stats returns 404 for non-existent project."""
response = integration_client.get("/api/v1/projects/nonexistent-project/stats")
assert response.status_code == 404
class TestProjectAuditLogs:
"""Tests for project-level audit logs endpoint."""
@pytest.mark.integration
def test_project_audit_logs_returns_200(self, integration_client, test_project):
"""Test project audit logs endpoint returns 200."""
response = integration_client.get(f"/api/v1/projects/{test_project}/audit-logs")
assert response.status_code == 200
data = response.json()
assert "items" in data
assert "pagination" in data
@pytest.mark.integration
def test_project_audit_logs_not_found(self, integration_client):
"""Test non-existent project returns 404."""
response = integration_client.get(
"/api/v1/projects/nonexistent-project/audit-logs"
)
assert response.status_code == 404
class TestProjectCascadeDelete:
"""Tests for cascade delete behavior when deleting projects."""
@pytest.mark.integration
def test_project_delete_cascades_to_packages(
self, integration_client, unique_test_id
):
"""Test deleting project cascades to packages."""
project_name = f"cascade-proj-{unique_test_id}"
package_name = f"cascade-pkg-{unique_test_id}"
try:
# Create project and package
integration_client.post(
"/api/v1/projects",
json={"name": project_name, "description": "Test", "is_public": True},
)
integration_client.post(
f"/api/v1/project/{project_name}/packages",
json={"name": package_name, "description": "Test package"},
)
# Verify package exists
response = integration_client.get(
f"/api/v1/project/{project_name}/packages/{package_name}"
)
assert response.status_code == 200
# Delete project
integration_client.delete(f"/api/v1/projects/{project_name}")
# Verify project is deleted (and package with it)
response = integration_client.get(f"/api/v1/projects/{project_name}")
assert response.status_code == 404
except Exception:
# Cleanup if test fails
integration_client.delete(f"/api/v1/projects/{project_name}")
raise
@pytest.mark.integration
def test_ref_count_decrements_on_project_delete(
self, integration_client, unique_test_id
):
"""Test ref_count decrements for all tags when project is deleted."""
project_name = f"cascade-proj-{unique_test_id}"
package1_name = f"pkg1-{unique_test_id}"
package2_name = f"pkg2-{unique_test_id}"
# Create project
response = integration_client.post(
"/api/v1/projects",
json={
"name": project_name,
"description": "Test project",
"is_public": True,
},
)
assert response.status_code == 200
# Create two packages
for pkg_name in [package1_name, package2_name]:
response = integration_client.post(
f"/api/v1/project/{project_name}/packages",
json={"name": pkg_name, "description": "Test package"},
)
assert response.status_code == 200
# Upload same content with tags in both packages
content = f"project cascade test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
upload_test_file(
integration_client, project_name, package1_name, content, tag="v1"
)
upload_test_file(
integration_client, project_name, package1_name, content, tag="v2"
)
upload_test_file(
integration_client, project_name, package2_name, content, tag="latest"
)
upload_test_file(
integration_client, project_name, package2_name, content, tag="stable"
)
# Verify ref_count is 4 (2 tags in each of 2 packages)
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 4
# Delete the project
delete_response = integration_client.delete(f"/api/v1/projects/{project_name}")
assert delete_response.status_code == 204
# Verify ref_count is 0
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 0
class TestProjectUploads:
"""Tests for project-level uploads endpoint."""
@pytest.mark.integration
def test_project_uploads_returns_200(self, integration_client, test_project):
"""Test project uploads endpoint returns 200."""
response = integration_client.get(f"/api/v1/project/{test_project}/uploads")
assert response.status_code == 200
data = response.json()
assert "items" in data
assert "pagination" in data
@pytest.mark.integration
def test_project_uploads_after_upload(self, integration_client, test_package):
"""Test uploads are recorded in project uploads."""
project_name, package_name = test_package
# Upload a file
upload_test_file(
integration_client,
project_name,
package_name,
b"project uploads test",
"project.txt",
)
response = integration_client.get(f"/api/v1/project/{project_name}/uploads")
assert response.status_code == 200
data = response.json()
assert len(data["items"]) >= 1
# Verify project name matches
for item in data["items"]:
assert item["project_name"] == project_name
@pytest.mark.integration
def test_project_uploads_not_found(self, integration_client):
"""Test non-existent project returns 404."""
response = integration_client.get("/api/v1/project/nonexistent/uploads")
assert response.status_code == 404

View File

@@ -0,0 +1,403 @@
"""
Integration tests for tag API endpoints.
Tests cover:
- Tag CRUD operations
- Tag listing with pagination and search
- Tag history tracking
- ref_count behavior with tag operations
"""
import pytest
from tests.factories import compute_sha256, upload_test_file
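# Reference-counting model assumed throughout this file: a tag is a named
# pointer to exactly one artifact, and an artifact's ref_count is the number of
# tags (across all projects and packages) currently pointing at it. Creating a
# tag increments the count; deleting or retargeting a tag decrements it.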
class TestTagCRUD:
"""Tests for tag create, read, delete operations."""
@pytest.mark.integration
def test_create_tag_via_upload(self, integration_client, test_package):
"""Test creating a tag via upload endpoint."""
project_name, package_name = test_package
result = upload_test_file(
integration_client,
project_name,
package_name,
b"tag create test",
tag="v1.0.0",
)
assert result["tag"] == "v1.0.0"
assert result["artifact_id"]
@pytest.mark.integration
def test_create_tag_via_post(
self, integration_client, test_package, unique_test_id
):
"""Test creating a tag via POST /tags endpoint."""
project_name, package_name = test_package
# First upload an artifact
result = upload_test_file(
integration_client,
project_name,
package_name,
b"artifact for tag",
)
artifact_id = result["artifact_id"]
# Create tag via POST
tag_name = f"post-tag-{unique_test_id}"
response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/tags",
json={"name": tag_name, "artifact_id": artifact_id},
)
assert response.status_code == 200
data = response.json()
assert data["name"] == tag_name
assert data["artifact_id"] == artifact_id
@pytest.mark.integration
def test_get_tag(self, integration_client, test_package):
"""Test getting a tag by name."""
project_name, package_name = test_package
upload_test_file(
integration_client,
project_name,
package_name,
b"get tag test",
tag="get-tag",
)
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/tags/get-tag"
)
assert response.status_code == 200
data = response.json()
assert data["name"] == "get-tag"
assert "artifact_id" in data
assert "artifact_size" in data
assert "artifact_content_type" in data
@pytest.mark.integration
def test_list_tags(self, integration_client, test_package):
"""Test listing tags for a package."""
project_name, package_name = test_package
# Create some tags
upload_test_file(
integration_client,
project_name,
package_name,
b"list tags test",
tag="list-v1",
)
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/tags"
)
assert response.status_code == 200
data = response.json()
assert "items" in data
assert "pagination" in data
tag_names = [t["name"] for t in data["items"]]
assert "list-v1" in tag_names
@pytest.mark.integration
def test_delete_tag(self, integration_client, test_package):
"""Test deleting a tag."""
project_name, package_name = test_package
upload_test_file(
integration_client,
project_name,
package_name,
b"delete tag test",
tag="to-delete",
)
# Delete tag
response = integration_client.delete(
f"/api/v1/project/{project_name}/{package_name}/tags/to-delete"
)
assert response.status_code == 204
# Verify deleted
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/tags/to-delete"
)
assert response.status_code == 404
class TestTagListingFilters:
"""Tests for tag listing with filters and search."""
@pytest.mark.integration
def test_tags_pagination(self, integration_client, test_package):
"""Test tag listing respects pagination."""
project_name, package_name = test_package
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/tags?limit=5"
)
assert response.status_code == 200
data = response.json()
assert len(data["items"]) <= 5
assert data["pagination"]["limit"] == 5
@pytest.mark.integration
def test_tags_search(self, integration_client, test_package, unique_test_id):
"""Test tag search by name."""
project_name, package_name = test_package
tag_name = f"searchable-{unique_test_id}"
upload_test_file(
integration_client,
project_name,
package_name,
b"search test",
tag=tag_name,
)
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/tags?search=searchable"
)
assert response.status_code == 200
data = response.json()
tag_names = [t["name"] for t in data["items"]]
assert tag_name in tag_names
class TestTagHistory:
"""Tests for tag history tracking."""
@pytest.mark.integration
def test_tag_history_on_create(self, integration_client, test_package):
"""Test tag history is created when tag is created."""
project_name, package_name = test_package
upload_test_file(
integration_client,
project_name,
package_name,
b"history create test",
tag="history-create",
)
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/tags/history-create/history"
)
assert response.status_code == 200
data = response.json()
assert len(data) >= 1
@pytest.mark.integration
def test_tag_history_on_update(
self, integration_client, test_package, unique_test_id
):
"""Test tag history is created when tag is updated."""
project_name, package_name = test_package
tag_name = f"history-update-{unique_test_id}"
# Create tag with first artifact
upload_test_file(
integration_client,
project_name,
package_name,
b"first content",
tag=tag_name,
)
# Update tag with second artifact
upload_test_file(
integration_client,
project_name,
package_name,
b"second content",
tag=tag_name,
)
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/tags/{tag_name}/history"
)
assert response.status_code == 200
data = response.json()
# Should have at least 2 history entries (create + update)
assert len(data) >= 2
class TestTagRefCount:
"""Tests for ref_count behavior with tag operations."""
@pytest.mark.integration
def test_ref_count_decrements_on_tag_delete(self, integration_client, test_package):
"""Test ref_count decrements when a tag is deleted."""
project_name, package_name = test_package
content = b"ref count delete test"
expected_hash = compute_sha256(content)
# Upload with two tags
upload_test_file(
integration_client, project_name, package_name, content, tag="rc-v1"
)
upload_test_file(
integration_client, project_name, package_name, content, tag="rc-v2"
)
# Verify ref_count is 2
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 2
# Delete one tag
delete_response = integration_client.delete(
f"/api/v1/project/{project_name}/{package_name}/tags/rc-v1"
)
assert delete_response.status_code == 204
# Verify ref_count is now 1
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 1
@pytest.mark.integration
def test_ref_count_zero_after_all_tags_deleted(
self, integration_client, test_package
):
"""Test ref_count goes to 0 when all tags are deleted."""
project_name, package_name = test_package
content = b"orphan test content"
expected_hash = compute_sha256(content)
# Upload with one tag
upload_test_file(
integration_client, project_name, package_name, content, tag="only-tag"
)
# Delete the tag
integration_client.delete(
f"/api/v1/project/{project_name}/{package_name}/tags/only-tag"
)
# Verify ref_count is 0
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 0
@pytest.mark.integration
def test_ref_count_adjusts_on_tag_update(
self, integration_client, test_package, unique_test_id
):
"""Test ref_count adjusts when a tag is updated to point to different artifact."""
project_name, package_name = test_package
# Upload two different artifacts
content1 = f"artifact one {unique_test_id}".encode()
content2 = f"artifact two {unique_test_id}".encode()
hash1 = compute_sha256(content1)
hash2 = compute_sha256(content2)
# Upload first artifact with tag "latest"
upload_test_file(
integration_client, project_name, package_name, content1, tag="latest"
)
# Verify first artifact has ref_count 1
response = integration_client.get(f"/api/v1/artifact/{hash1}")
assert response.json()["ref_count"] == 1
# Upload second artifact with different tag
upload_test_file(
integration_client, project_name, package_name, content2, tag="stable"
)
# Now update "latest" tag to point to second artifact
upload_test_file(
integration_client, project_name, package_name, content2, tag="latest"
)
# Verify first artifact ref_count decreased to 0
response = integration_client.get(f"/api/v1/artifact/{hash1}")
assert response.json()["ref_count"] == 0
# Verify second artifact ref_count increased to 2
response = integration_client.get(f"/api/v1/artifact/{hash2}")
assert response.json()["ref_count"] == 2
@pytest.mark.integration
def test_ref_count_unchanged_when_tag_same_artifact(
self, integration_client, test_package, unique_test_id
):
"""Test ref_count doesn't change when tag is 'updated' to same artifact."""
project_name, package_name = test_package
content = f"same artifact {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Upload with tag
upload_test_file(
integration_client, project_name, package_name, content, tag="same-v1"
)
# Verify ref_count is 1
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 1
# Upload same content with same tag (no-op)
upload_test_file(
integration_client, project_name, package_name, content, tag="same-v1"
)
# Verify ref_count is still 1
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 1
@pytest.mark.integration
def test_tag_via_post_endpoint_increments_ref_count(
self, integration_client, test_package, unique_test_id
):
"""Test creating tag via POST /tags endpoint increments ref_count."""
project_name, package_name = test_package
content = f"tag endpoint test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Upload artifact without tag
result = upload_test_file(
integration_client, project_name, package_name, content, filename="test.bin"
)
artifact_id = result["artifact_id"]
# Verify ref_count is 0 (no tags yet)
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 0
# Create tag via POST endpoint
tag_response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/tags",
json={"name": "post-v1", "artifact_id": artifact_id},
)
assert tag_response.status_code == 200
# Verify ref_count is now 1
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 1
# Create another tag via POST endpoint
tag_response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/tags",
json={"name": "post-latest", "artifact_id": artifact_id},
)
assert tag_response.status_code == 200
# Verify ref_count is now 2
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 2

View File

@@ -1,33 +1,109 @@
"""
Integration tests for duplicate uploads and storage verification.
These tests require the full stack to be running (docker-compose.local.yml).
Integration tests for upload and download API endpoints.
Tests cover:
- Duplicate upload scenarios across packages and projects
- Storage verification (single S3 object, single artifact row)
- Upload table tracking
- Content integrity verification
- Upload functionality and deduplication
- Download by tag and artifact ID
- Concurrent upload handling
- Failure cleanup
- File size validation
- Upload failure cleanup
- S3 storage verification
"""
import pytest
import io
import threading
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from tests.factories import (
compute_sha256,
upload_test_file,
list_s3_objects_by_hash,
s3_object_exists,
delete_s3_object_by_hash,
)
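# Artifacts are content-addressed: the artifact ID is the SHA256 hex digest of
# the uploaded bytes. That is what makes deduplication observable in these
# tests: uploading identical bytes anywhere in the system yields the same
# artifact_id, with "deduplicated" set to true on every upload after the first.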
class TestUploadBasics:
"""Tests for basic upload functionality."""
@pytest.mark.integration
def test_upload_returns_artifact_id(self, integration_client, test_package):
"""Test upload returns the artifact ID (SHA256 hash)."""
project_name, package_name = test_package
content = b"basic upload test"
expected_hash = compute_sha256(content)
result = upload_test_file(
integration_client, project_name, package_name, content, tag="v1"
)
assert result["artifact_id"] == expected_hash
@pytest.mark.integration
def test_upload_response_has_upload_id(self, integration_client, test_package):
"""Test upload response includes upload_id."""
project_name, package_name = test_package
result = upload_test_file(
integration_client,
project_name,
package_name,
b"upload id test",
"uploadid.txt",
)
assert "upload_id" in result
assert result["upload_id"] is not None
@pytest.mark.integration
def test_upload_response_has_content_type(self, integration_client, test_package):
"""Test upload response includes content_type."""
project_name, package_name = test_package
result = upload_test_file(
integration_client,
project_name,
package_name,
b"content type test",
"content.txt",
)
assert "content_type" in result
@pytest.mark.integration
def test_upload_response_has_original_name(self, integration_client, test_package):
"""Test upload response includes original_name."""
project_name, package_name = test_package
result = upload_test_file(
integration_client,
project_name,
package_name,
b"original name test",
"originalname.txt",
)
assert "original_name" in result
assert result["original_name"] == "originalname.txt"
@pytest.mark.integration
def test_upload_response_has_created_at(self, integration_client, test_package):
"""Test upload response includes created_at."""
project_name, package_name = test_package
result = upload_test_file(
integration_client,
project_name,
package_name,
b"created at test",
"createdat.txt",
)
assert "created_at" in result
assert result["created_at"] is not None
class TestDuplicateUploads:
"""Tests for duplicate upload deduplication behavior."""
@pytest.mark.integration
def test_same_file_twice_returns_same_artifact_id(
@@ -103,62 +179,11 @@ class TestDuplicateUploadScenarios:
assert result2["artifact_id"] == expected_hash
assert result2["deduplicated"] is True
@pytest.mark.integration
def test_same_file_different_projects_shares_artifact(
self, integration_client, unique_test_id
):
"""Test uploading same file to different projects shares artifact."""
content = f"content shared across projects {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Create two projects with packages
proj1 = f"project-x-{unique_test_id}"
proj2 = f"project-y-{unique_test_id}"
pkg_name = "shared-pkg"
try:
# Create projects and packages
integration_client.post(
"/api/v1/projects",
json={"name": proj1, "description": "Project X", "is_public": True},
)
integration_client.post(
"/api/v1/projects",
json={"name": proj2, "description": "Project Y", "is_public": True},
)
integration_client.post(
f"/api/v1/project/{proj1}/packages",
json={"name": pkg_name, "description": "Package"},
)
integration_client.post(
f"/api/v1/project/{proj2}/packages",
json={"name": pkg_name, "description": "Package"},
)
# Upload to first project
result1 = upload_test_file(
integration_client, proj1, pkg_name, content, tag="v1"
)
assert result1["artifact_id"] == expected_hash
assert result1["deduplicated"] is False
# Upload to second project
result2 = upload_test_file(
integration_client, proj2, pkg_name, content, tag="v1"
)
assert result2["artifact_id"] == expected_hash
assert result2["deduplicated"] is True
finally:
# Cleanup
integration_client.delete(f"/api/v1/projects/{proj1}")
integration_client.delete(f"/api/v1/projects/{proj2}")
@pytest.mark.integration
def test_same_file_different_filenames_shares_artifact(
self, integration_client, test_package
):
"""Test uploading same file with different original filenames shares artifact."""
"""Test uploading same file with different filenames shares artifact."""
project, package = test_package
content = b"content with different filenames"
expected_hash = compute_sha256(content)
@@ -186,110 +211,68 @@ class TestDuplicateUploadScenarios:
assert result2["artifact_id"] == expected_hash
assert result2["deduplicated"] is True
class TestDownload:
"""Tests for download functionality."""
@pytest.mark.integration
def test_download_by_tag(self, integration_client, test_package):
"""Test downloading artifact by tag name."""
project, package = test_package
original_content = b"download by tag test"
upload_test_file(
integration_client, project, package, original_content, tag="download-tag"
)
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/download-tag",
params={"mode": "proxy"},
)
assert response.status_code == 200
assert response.content == original_content
@pytest.mark.integration
def test_download_by_artifact_id(self, integration_client, test_package):
"""Test downloading artifact by artifact ID."""
project, package = test_package
original_content = b"download by id test"
expected_hash = compute_sha256(original_content)
upload_test_file(integration_client, project, package, original_content)
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/artifact:{expected_hash}",
params={"mode": "proxy"},
)
assert response.status_code == 200
assert response.content == original_content
@pytest.mark.integration
def test_download_nonexistent_tag(self, integration_client, test_package):
"""Test downloading nonexistent tag returns 404."""
project, package = test_package
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/nonexistent-tag"
)
assert response.status_code == 404
@pytest.mark.integration
def test_content_matches_original(self, integration_client, test_package):
"""Test downloaded content matches original exactly."""
project, package = test_package
original_content = b"exact content verification test data 12345"
upload_test_file(
integration_client, project, package, original_content, tag="verify"
)
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/verify", params={"mode": "proxy"}
)
assert response.status_code == 200
assert response.content == original_content
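# NOTE: mode=proxy is assumed to make the API stream the artifact bytes back
# through the server so response.content can be compared directly; without it
# the endpoint presumably redirects to a presigned storage URL that the test
# client would have to follow.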
class TestConcurrentUploads:
@@ -303,12 +286,19 @@ class TestConcurrentUploads:
expected_hash = compute_sha256(content)
num_concurrent = 5
# Create an API key for worker threads
api_key_response = integration_client.post(
"/api/v1/auth/keys",
json={"name": "concurrent-test-key"},
)
assert api_key_response.status_code == 200, f"Failed to create API key: {api_key_response.text}"
api_key = api_key_response.json()["key"]
results = []
errors = []
def upload_worker(tag_suffix):
try:
# Create a new client for this thread
from httpx import Client
base_url = "http://localhost:8080"
@@ -324,6 +314,7 @@ class TestConcurrentUploads:
f"/api/v1/project/{project}/{package}/upload",
files=files,
data={"tag": f"concurrent-{tag_suffix}"},
headers={"Authorization": f"Bearer {api_key}"},
)
if response.status_code == 200:
results.append(response.json())
@@ -332,13 +323,11 @@ class TestConcurrentUploads:
except Exception as e:
errors.append(str(e))
# Run concurrent uploads
with ThreadPoolExecutor(max_workers=num_concurrent) as executor:
futures = [executor.submit(upload_worker, i) for i in range(num_concurrent)]
for future in as_completed(futures):
pass  # Wait for all to complete
# Verify results
assert len(errors) == 0, f"Errors during concurrent uploads: {errors}"
assert len(results) == num_concurrent
@@ -353,227 +342,27 @@ class TestConcurrentUploads:
assert response.json()["ref_count"] == num_concurrent
class TestDeduplicationAcrossRestarts:
"""Tests for deduplication persistence."""
@pytest.mark.integration
def test_deduplication_persists(
self, integration_client, test_package, unique_test_id
):
"""
Test deduplication works with persisted data.
This test uploads content, then uploads the same content again.
Since the database persists, the second upload should detect
the existing artifact even without server restart.
"""
project, package = test_package
content = f"persisted content for dedup test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# First upload
result1 = upload_test_file(
integration_client, project, package, content, tag="persist1"
)
assert result1["artifact_id"] == expected_hash
assert result1["deduplicated"] is False
# Second upload (simulating after restart - data is persisted)
result2 = upload_test_file(
integration_client, project, package, content, tag="persist2"
)
assert result2["artifact_id"] == expected_hash
assert result2["deduplicated"] is True
# Verify artifact exists with correct ref_count
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200
assert response.json()["ref_count"] == 2
class TestS3ObjectVerification:
"""Tests to verify S3 storage behavior directly."""
@pytest.mark.integration
def test_s3_bucket_single_object_after_duplicates(
self, integration_client, test_package, unique_test_id
):
"""Test S3 bucket contains only one object after duplicate uploads."""
project, package = test_package
content = f"content for s3 object count test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Upload same content multiple times with different tags
for tag in ["s3test1", "s3test2", "s3test3"]:
upload_test_file(integration_client, project, package, content, tag=tag)
# Verify only one S3 object exists for this hash
s3_objects = list_s3_objects_by_hash(expected_hash)
assert len(s3_objects) == 1, (
f"Expected 1 S3 object, found {len(s3_objects)}: {s3_objects}"
)
# Verify the object key follows expected pattern
expected_key = (
f"fruits/{expected_hash[:2]}/{expected_hash[2:4]}/{expected_hash}"
)
assert s3_objects[0] == expected_key
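# The assumed key scheme fans objects out by the first two byte pairs of the
# hash so no single prefix accumulates everything; as a sketch:
#
# def s3_key(sha256_hex: str) -> str:
#     return f"fruits/{sha256_hex[:2]}/{sha256_hex[2:4]}/{sha256_hex}"
#
# so a hash starting "abcd..." lands under fruits/ab/cd/.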
class TestUploadFailureCleanup:
"""Tests for cleanup when uploads fail."""
@pytest.mark.integration
def test_upload_failure_invalid_project_no_orphaned_s3(
self, integration_client, unique_test_id
):
"""Test upload to non-existent project doesn't leave orphaned S3 objects."""
content = f"content for orphan s3 test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Attempt upload to non-existent project
files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
response = integration_client.post(
f"/api/v1/project/nonexistent-project-{unique_test_id}/nonexistent-pkg/upload",
files=files,
data={"tag": "test"},
)
# Upload should fail
assert response.status_code == 404
# Verify no S3 object was created
assert not s3_object_exists(expected_hash), (
"Orphaned S3 object found after failed upload"
)
@pytest.mark.integration
def test_upload_failure_invalid_package_no_orphaned_s3(
self, integration_client, test_project, unique_test_id
):
"""Test upload to non-existent package doesn't leave orphaned S3 objects."""
content = f"content for orphan s3 test pkg {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Attempt upload to non-existent package
files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
response = integration_client.post(
f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload",
files=files,
data={"tag": "test"},
)
# Upload should fail
assert response.status_code == 404
# Verify no S3 object was created
assert not s3_object_exists(expected_hash), (
"Orphaned S3 object found after failed upload"
)
@pytest.mark.integration
def test_upload_failure_empty_file_no_orphaned_s3(
self, integration_client, test_package, unique_test_id
):
"""Test upload of empty file doesn't leave orphaned S3 objects or DB records."""
project, package = test_package
content = b"" # Empty content
# Attempt upload of empty file
files = {"file": ("empty.bin", io.BytesIO(content), "application/octet-stream")}
response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files,
data={"tag": f"empty-{unique_test_id}"},
)
# Upload should fail (empty files are rejected)
assert response.status_code in (400, 422), (
f"Expected 400/422, got {response.status_code}"
)
@pytest.mark.integration
def test_upload_failure_no_orphaned_database_records(
self, integration_client, test_project, unique_test_id
):
"""Test failed upload doesn't leave orphaned database records."""
content = f"content for db orphan test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Attempt upload to non-existent package (should fail before DB insert)
files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
response = integration_client.post(
f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload",
files=files,
data={"tag": "test"},
)
# Upload should fail
assert response.status_code == 404
# Verify no artifact record was created
artifact_response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert artifact_response.status_code == 404, (
"Orphaned artifact record found after failed upload"
)
@pytest.mark.integration
def test_duplicate_tag_upload_handles_gracefully(
self, integration_client, test_package, unique_test_id
):
"""Test uploading with duplicate tag is handled without orphaned data."""
project, package = test_package
content1 = f"content version 1 {unique_test_id}".encode()
content2 = f"content version 2 {unique_test_id}".encode()
tag = f"duplicate-tag-{unique_test_id}"
# First upload with tag
result1 = upload_test_file(
integration_client, project, package, content1, tag=tag
)
hash1 = result1["artifact_id"]
# Second upload with same tag (should update the tag to point to new artifact)
result2 = upload_test_file(
integration_client, project, package, content2, tag=tag
)
hash2 = result2["artifact_id"]
# Both artifacts should exist
assert integration_client.get(f"/api/v1/artifact/{hash1}").status_code == 200
assert integration_client.get(f"/api/v1/artifact/{hash2}").status_code == 200
# Tag should point to the second artifact
tag_response = integration_client.get(
f"/api/v1/project/{project}/{package}/tags/{tag}"
)
assert tag_response.status_code == 200
assert tag_response.json()["artifact_id"] == hash2
class TestFileSizeValidation:
"""Tests for file size limits and empty file rejection."""
@pytest.mark.integration
def test_empty_file_rejected(self, integration_client, test_package):
"""Test that empty files are rejected with appropriate error."""
"""Test empty files are rejected with appropriate error."""
project, package = test_package
# Try to upload empty content
files = {"file": ("empty.txt", io.BytesIO(b""), "application/octet-stream")}
response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files,
)
# Should be rejected (422 from storage layer or validation)
assert response.status_code in [422, 400]
@pytest.mark.integration
def test_small_valid_file_accepted(self, integration_client, test_package):
"""Test that small (1 byte) files are accepted."""
"""Test small (1 byte) files are accepted."""
project, package = test_package
content = b"X" # Single byte
content = b"X"
result = upload_test_file(
integration_client, project, package, content, tag="tiny"
@@ -586,7 +375,7 @@ class TestFileSizeValidation:
def test_file_size_reported_correctly(
self, integration_client, test_package, unique_test_id
):
"""Test that file size is correctly reported in response."""
"""Test file size is correctly reported in response."""
project, package = test_package
content = f"Test content for size check {unique_test_id}".encode()
expected_size = len(content)
@@ -602,3 +391,328 @@ class TestFileSizeValidation:
f"/api/v1/artifact/{result['artifact_id']}"
)
assert artifact_response.json()["size"] == expected_size
class TestUploadFailureCleanup:
"""Tests for cleanup when uploads fail."""
@pytest.mark.integration
def test_upload_failure_invalid_project_no_orphaned_s3(
self, integration_client, unique_test_id
):
"""Test upload to non-existent project doesn't leave orphaned S3 objects."""
content = f"content for orphan s3 test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
response = integration_client.post(
f"/api/v1/project/nonexistent-project-{unique_test_id}/nonexistent-pkg/upload",
files=files,
data={"tag": "test"},
)
assert response.status_code == 404
# Verify no S3 object was created
assert not s3_object_exists(expected_hash), (
"Orphaned S3 object found after failed upload"
)
@pytest.mark.integration
def test_upload_failure_invalid_package_no_orphaned_s3(
self, integration_client, test_project, unique_test_id
):
"""Test upload to non-existent package doesn't leave orphaned S3 objects."""
content = f"content for orphan s3 test pkg {unique_test_id}".encode()
expected_hash = compute_sha256(content)
files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
response = integration_client.post(
f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload",
files=files,
data={"tag": "test"},
)
assert response.status_code == 404
assert not s3_object_exists(expected_hash), (
"Orphaned S3 object found after failed upload"
)
@pytest.mark.integration
def test_upload_failure_no_orphaned_database_records(
self, integration_client, test_project, unique_test_id
):
"""Test failed upload doesn't leave orphaned database records."""
content = f"content for db orphan test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
response = integration_client.post(
f"/api/v1/project/{test_project}/nonexistent-package-{unique_test_id}/upload",
files=files,
data={"tag": "test"},
)
assert response.status_code == 404
artifact_response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert artifact_response.status_code == 404, (
"Orphaned artifact record found after failed upload"
)
class TestS3StorageVerification:
"""Tests to verify S3 storage behavior."""
@pytest.mark.integration
def test_s3_single_object_after_duplicates(
self, integration_client, test_package, unique_test_id
):
"""Test S3 bucket contains only one object after duplicate uploads."""
project, package = test_package
content = f"content for s3 object count test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Upload same content multiple times
for tag in ["s3test1", "s3test2", "s3test3"]:
upload_test_file(integration_client, project, package, content, tag=tag)
# Verify only one S3 object exists
s3_objects = list_s3_objects_by_hash(expected_hash)
assert len(s3_objects) == 1, (
f"Expected 1 S3 object, found {len(s3_objects)}: {s3_objects}"
)
# Verify object key follows expected pattern
expected_key = (
f"fruits/{expected_hash[:2]}/{expected_hash[2:4]}/{expected_hash}"
)
assert s3_objects[0] == expected_key
@pytest.mark.integration
def test_artifact_table_single_row_after_duplicates(
self, integration_client, test_package
):
"""Test artifact table contains only one row after duplicate uploads."""
project, package = test_package
content = b"content for single row test"
expected_hash = compute_sha256(content)
# Upload same content multiple times
for tag in ["v1", "v2", "v3"]:
upload_test_file(integration_client, project, package, content, tag=tag)
# Query artifact
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200
artifact = response.json()
assert artifact["id"] == expected_hash
assert artifact["ref_count"] == 3
class TestSecurityPathTraversal:
"""Tests for path traversal attack prevention.
Note: Orchard uses content-addressable storage where files are stored by
SHA256 hash, not filename. Filenames are metadata only and never used in
file path construction, so path traversal in filenames is not a security
vulnerability. These tests verify the system handles unusual inputs safely.
"""
@pytest.mark.integration
def test_path_traversal_in_filename_stored_safely(
self, integration_client, test_package
):
"""Test filenames with path traversal are stored safely (as metadata only)."""
project, package = test_package
content = b"path traversal test content"
expected_hash = compute_sha256(content)
files = {
"file": (
"../../../etc/passwd",
io.BytesIO(content),
"application/octet-stream",
)
}
response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files,
data={"tag": "traversal-test"},
)
assert response.status_code == 200
result = response.json()
assert result["artifact_id"] == expected_hash
s3_objects = list_s3_objects_by_hash(expected_hash)
assert len(s3_objects) == 1
assert ".." not in s3_objects[0]
@pytest.mark.integration
def test_path_traversal_in_package_name(self, integration_client, test_project):
"""Test package names with path traversal sequences are rejected."""
response = integration_client.get(
f"/api/v1/project/{test_project}/packages/../../../etc/passwd"
)
assert response.status_code in [400, 404, 422]
@pytest.mark.integration
def test_path_traversal_in_tag_name(self, integration_client, test_package):
"""Test tag names with path traversal are handled safely."""
project, package = test_package
content = b"tag traversal test"
files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files,
data={"tag": "../../../etc/passwd"},
)
assert response.status_code in [200, 400, 422]
@pytest.mark.integration
def test_download_path_traversal_in_ref(self, integration_client, test_package):
"""Test download ref with path traversal is rejected."""
project, package = test_package
response = integration_client.get(
f"/api/v1/project/{project}/{package}/+/../../../etc/passwd"
)
assert response.status_code in [400, 404, 422]
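# Illustrative sketch of why a hostile filename is inert here (assumed flow,
# helper name hypothetical): the storage key is derived only from the content
# hash, so the client-supplied filename never reaches path construction.
def _sketch_store_untrusted_filename(content: bytes, filename: str):
    digest = compute_sha256(content)
    s3_key = f"fruits/{digest[:2]}/{digest[2:4]}/{digest}"  # filename not used
    return s3_key, filename  # filename is recorded as display metadata only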
class TestSecurityMalformedRequests:
"""Tests for malformed request handling."""
@pytest.mark.integration
def test_upload_missing_file_field(self, integration_client, test_package):
"""Test upload without file field returns appropriate error."""
project, package = test_package
response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload",
data={"tag": "no-file"},
)
assert response.status_code == 422
@pytest.mark.integration
def test_upload_null_bytes_in_filename(self, integration_client, test_package):
"""Test filename with null bytes is handled safely."""
project, package = test_package
content = b"null byte test"
files = {
"file": ("test\x00.bin", io.BytesIO(content), "application/octet-stream")
}
response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files,
)
assert response.status_code in [200, 400, 422]
@pytest.mark.integration
def test_upload_very_long_filename(self, integration_client, test_package):
"""Test very long filename is handled (truncated or rejected)."""
project, package = test_package
content = b"long filename test"
long_filename = "a" * 1000 + ".bin"
files = {
"file": (long_filename, io.BytesIO(content), "application/octet-stream")
}
response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files,
)
assert response.status_code in [200, 400, 413, 422]
@pytest.mark.integration
def test_upload_special_characters_in_filename(
self, integration_client, test_package
):
"""Test filenames with special characters are handled safely."""
project, package = test_package
content = b"special char test"
special_filenames = [
"test<script>.bin",
'test"quote.bin',
"test'apostrophe.bin",
"test;semicolon.bin",
"test|pipe.bin",
]
for filename in special_filenames:
files = {
"file": (filename, io.BytesIO(content), "application/octet-stream")
}
response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files,
)
assert response.status_code in [200, 400, 422], (
f"Unexpected status {response.status_code} for filename: {filename}"
)
@pytest.mark.integration
def test_invalid_checksum_header_format(self, integration_client, test_package):
"""Test invalid X-Checksum-SHA256 header format is rejected."""
project, package = test_package
content = b"checksum test"
files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files,
headers={"X-Checksum-SHA256": "not-a-valid-hash"},
)
assert response.status_code == 400
assert "Invalid" in response.json().get("detail", "")
@pytest.mark.integration
def test_checksum_mismatch_rejected(self, integration_client, test_package):
"""Test upload with wrong checksum is rejected."""
project, package = test_package
content = b"checksum mismatch test"
wrong_hash = "0" * 64
files = {"file": ("test.bin", io.BytesIO(content), "application/octet-stream")}
response = integration_client.post(
f"/api/v1/project/{project}/{package}/upload",
files=files,
headers={"X-Checksum-SHA256": wrong_hash},
)
assert response.status_code == 422
assert "verification failed" in response.json().get("detail", "").lower()

View File

@@ -0,0 +1,675 @@
"""
Tests for checksum calculation, verification, and download verification.
This module tests:
- SHA256 hash computation (bytes and streams)
- HashingStreamWrapper incremental hashing
- VerifyingStreamWrapper with verification
- ChecksumMismatchError exception handling
- Download verification API endpoints
"""
import pytest
import hashlib
import io
from typing import Generator
from app.checksum import (
compute_sha256,
compute_sha256_stream,
verify_checksum,
verify_checksum_strict,
is_valid_sha256,
sha256_to_base64,
HashingStreamWrapper,
VerifyingStreamWrapper,
ChecksumMismatchError,
ChecksumError,
InvalidHashFormatError,
DEFAULT_CHUNK_SIZE,
)
# =============================================================================
# Test Data
# =============================================================================
# Known test vectors
TEST_CONTENT_HELLO = b"Hello, World!"
TEST_HASH_HELLO = "dffd6021bb2bd5b0af676290809ec3a53191dd81c7f70a4b28688a362182986f"
TEST_CONTENT_EMPTY = b""
TEST_HASH_EMPTY = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
TEST_CONTENT_BINARY = bytes(range(256))
TEST_HASH_BINARY = hashlib.sha256(TEST_CONTENT_BINARY).hexdigest()
# Invalid hashes for testing
INVALID_HASH_TOO_SHORT = "abcd1234"
INVALID_HASH_TOO_LONG = "a" * 65
INVALID_HASH_NON_HEX = "zzzz" + "a" * 60
INVALID_HASH_EMPTY = ""
# =============================================================================
# Unit Tests - SHA256 Computation
# =============================================================================
class TestComputeSHA256:
"""Tests for compute_sha256 function."""
def test_known_content_matches_expected_hash(self):
"""Test SHA256 of known content matches pre-computed hash."""
result = compute_sha256(TEST_CONTENT_HELLO)
assert result == TEST_HASH_HELLO
def test_returns_64_character_hex_string(self):
"""Test result is exactly 64 hex characters."""
result = compute_sha256(TEST_CONTENT_HELLO)
assert len(result) == 64
assert all(c in "0123456789abcdef" for c in result)
def test_returns_lowercase_hex(self):
"""Test result is lowercase."""
result = compute_sha256(TEST_CONTENT_HELLO)
assert result == result.lower()
def test_empty_content_returns_empty_hash(self):
"""Test empty bytes returns SHA256 of empty content."""
result = compute_sha256(TEST_CONTENT_EMPTY)
assert result == TEST_HASH_EMPTY
def test_deterministic_same_input_same_output(self):
"""Test same input always produces same output."""
content = b"test content for determinism"
result1 = compute_sha256(content)
result2 = compute_sha256(content)
assert result1 == result2
def test_different_content_different_hash(self):
"""Test different content produces different hash."""
hash1 = compute_sha256(b"content A")
hash2 = compute_sha256(b"content B")
assert hash1 != hash2
def test_single_bit_change_different_hash(self):
"""Test single bit change produces completely different hash."""
content1 = b"\x00" * 100
content2 = b"\x00" * 99 + b"\x01"
hash1 = compute_sha256(content1)
hash2 = compute_sha256(content2)
assert hash1 != hash2
def test_binary_content(self):
"""Test hashing binary content with all byte values."""
result = compute_sha256(TEST_CONTENT_BINARY)
assert result == TEST_HASH_BINARY
assert len(result) == 64
def test_large_content(self):
"""Test hashing larger content (1MB)."""
large_content = b"x" * (1024 * 1024)
result = compute_sha256(large_content)
expected = hashlib.sha256(large_content).hexdigest()
assert result == expected
def test_none_content_raises_error(self):
"""Test None content raises ChecksumError."""
with pytest.raises(ChecksumError, match="Cannot compute hash of None"):
compute_sha256(None)
class TestComputeSHA256Stream:
"""Tests for compute_sha256_stream function."""
def test_file_like_object(self):
"""Test hashing from file-like object."""
file_obj = io.BytesIO(TEST_CONTENT_HELLO)
result = compute_sha256_stream(file_obj)
assert result == TEST_HASH_HELLO
def test_iterator(self):
"""Test hashing from iterator of chunks."""
def chunk_iterator():
yield b"Hello, "
yield b"World!"
result = compute_sha256_stream(chunk_iterator())
assert result == TEST_HASH_HELLO
def test_various_chunk_sizes_same_result(self):
"""Test different chunk sizes produce same hash."""
content = b"x" * 10000
expected = hashlib.sha256(content).hexdigest()
for chunk_size in [1, 10, 100, 1000, 8192]:
file_obj = io.BytesIO(content)
result = compute_sha256_stream(file_obj, chunk_size=chunk_size)
assert result == expected, f"Failed for chunk_size={chunk_size}"
def test_single_byte_chunks(self):
"""Test with 1-byte chunks (edge case)."""
content = b"ABC"
file_obj = io.BytesIO(content)
result = compute_sha256_stream(file_obj, chunk_size=1)
expected = hashlib.sha256(content).hexdigest()
assert result == expected
def test_empty_stream(self):
"""Test empty stream returns empty content hash."""
file_obj = io.BytesIO(b"")
result = compute_sha256_stream(file_obj)
assert result == TEST_HASH_EMPTY
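# Illustrative sketch of the streaming contract pinned down above. The real
# implementation is app.checksum.compute_sha256_stream; this mirror of it is
# an assumption, not the shipped code:
def _sketch_compute_sha256_stream(source, chunk_size=DEFAULT_CHUNK_SIZE):
    hasher = hashlib.sha256()
    if hasattr(source, "read"):  # file-like object
        for chunk in iter(lambda: source.read(chunk_size), b""):
            hasher.update(chunk)
    else:  # iterator/generator of byte chunks
        for chunk in source:
            hasher.update(chunk)
    return hasher.hexdigest()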
# =============================================================================
# Unit Tests - Hash Validation
# =============================================================================
class TestIsValidSHA256:
"""Tests for is_valid_sha256 function."""
def test_valid_hash_lowercase(self):
"""Test valid lowercase hash."""
assert is_valid_sha256(TEST_HASH_HELLO) is True
def test_valid_hash_uppercase(self):
"""Test valid uppercase hash."""
assert is_valid_sha256(TEST_HASH_HELLO.upper()) is True
def test_valid_hash_mixed_case(self):
"""Test valid mixed case hash."""
mixed = TEST_HASH_HELLO[:32].upper() + TEST_HASH_HELLO[32:].lower()
assert is_valid_sha256(mixed) is True
def test_invalid_too_short(self):
"""Test hash that's too short."""
assert is_valid_sha256(INVALID_HASH_TOO_SHORT) is False
def test_invalid_too_long(self):
"""Test hash that's too long."""
assert is_valid_sha256(INVALID_HASH_TOO_LONG) is False
def test_invalid_non_hex(self):
"""Test hash with non-hex characters."""
assert is_valid_sha256(INVALID_HASH_NON_HEX) is False
def test_invalid_empty(self):
"""Test empty string."""
assert is_valid_sha256(INVALID_HASH_EMPTY) is False
def test_invalid_none(self):
"""Test None value."""
assert is_valid_sha256(None) is False
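# Illustrative sketch of the validation rule these cases imply: exactly 64 hex
# characters, any case, non-strings rejected. (Assumed shape; the real check is
# app.checksum.is_valid_sha256.)
import re
_SHA256_RE = re.compile(r"[0-9a-fA-F]{64}")
def _sketch_is_valid_sha256(value) -> bool:
    return isinstance(value, str) and _SHA256_RE.fullmatch(value) is not None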
class TestSHA256ToBase64:
"""Tests for sha256_to_base64 function."""
def test_converts_to_base64(self):
"""Test conversion to base64."""
result = sha256_to_base64(TEST_HASH_HELLO)
# Verify it's valid base64
import base64
decoded = base64.b64decode(result)
assert len(decoded) == 32 # SHA256 is 32 bytes
def test_invalid_hash_raises_error(self):
"""Test invalid hash raises InvalidHashFormatError."""
with pytest.raises(InvalidHashFormatError):
sha256_to_base64(INVALID_HASH_TOO_SHORT)
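# Illustrative sketch of the conversion contract above: decode the 64-char hex
# digest to its 32 raw bytes, then base64-encode. (Assumed implementation; the
# real function raises InvalidHashFormatError rather than ValueError.)
import base64
def _sketch_sha256_to_base64(hex_digest: str) -> str:
    raw = bytes.fromhex(hex_digest)  # ValueError on malformed hex
    return base64.b64encode(raw).decode("ascii")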
# =============================================================================
# Unit Tests - Verification Functions
# =============================================================================
class TestVerifyChecksum:
"""Tests for verify_checksum function."""
def test_matching_checksum_returns_true(self):
"""Test matching checksum returns True."""
result = verify_checksum(TEST_CONTENT_HELLO, TEST_HASH_HELLO)
assert result is True
def test_mismatched_checksum_returns_false(self):
"""Test mismatched checksum returns False."""
wrong_hash = "a" * 64
result = verify_checksum(TEST_CONTENT_HELLO, wrong_hash)
assert result is False
def test_case_insensitive_comparison(self):
"""Test comparison is case-insensitive."""
result = verify_checksum(TEST_CONTENT_HELLO, TEST_HASH_HELLO.upper())
assert result is True
def test_invalid_hash_format_raises_error(self):
"""Test invalid hash format raises error."""
with pytest.raises(InvalidHashFormatError):
verify_checksum(TEST_CONTENT_HELLO, INVALID_HASH_TOO_SHORT)
class TestVerifyChecksumStrict:
"""Tests for verify_checksum_strict function."""
def test_matching_checksum_returns_none(self):
"""Test matching checksum doesn't raise."""
# Should not raise
verify_checksum_strict(TEST_CONTENT_HELLO, TEST_HASH_HELLO)
def test_mismatched_checksum_raises_error(self):
"""Test mismatched checksum raises ChecksumMismatchError."""
wrong_hash = "a" * 64
with pytest.raises(ChecksumMismatchError) as exc_info:
verify_checksum_strict(TEST_CONTENT_HELLO, wrong_hash)
error = exc_info.value
assert error.expected == wrong_hash.lower()
assert error.actual == TEST_HASH_HELLO
assert error.size == len(TEST_CONTENT_HELLO)
def test_error_includes_context(self):
"""Test error includes artifact_id and s3_key context."""
wrong_hash = "a" * 64
with pytest.raises(ChecksumMismatchError) as exc_info:
verify_checksum_strict(
TEST_CONTENT_HELLO,
wrong_hash,
artifact_id="test-artifact-123",
s3_key="fruits/ab/cd/abcd1234...",
)
error = exc_info.value
assert error.artifact_id == "test-artifact-123"
assert error.s3_key == "fruits/ab/cd/abcd1234..."
# =============================================================================
# Unit Tests - HashingStreamWrapper
# =============================================================================
class TestHashingStreamWrapper:
"""Tests for HashingStreamWrapper class."""
def test_computes_correct_hash(self):
"""Test wrapper computes correct hash."""
stream = io.BytesIO(TEST_CONTENT_HELLO)
wrapper = HashingStreamWrapper(stream)
# Consume the stream
chunks = list(wrapper)
# Verify hash
assert wrapper.get_hash() == TEST_HASH_HELLO
def test_yields_correct_chunks(self):
"""Test wrapper yields all content."""
stream = io.BytesIO(TEST_CONTENT_HELLO)
wrapper = HashingStreamWrapper(stream)
chunks = list(wrapper)
content = b"".join(chunks)
assert content == TEST_CONTENT_HELLO
def test_tracks_bytes_read(self):
"""Test bytes_read property tracks correctly."""
stream = io.BytesIO(TEST_CONTENT_HELLO)
wrapper = HashingStreamWrapper(stream)
assert wrapper.bytes_read == 0
list(wrapper) # Consume
assert wrapper.bytes_read == len(TEST_CONTENT_HELLO)
def test_get_hash_before_iteration_consumes_stream(self):
"""Test get_hash() consumes stream if not already done."""
stream = io.BytesIO(TEST_CONTENT_HELLO)
wrapper = HashingStreamWrapper(stream)
# Call get_hash without iterating
hash_result = wrapper.get_hash()
assert hash_result == TEST_HASH_HELLO
assert wrapper.bytes_read == len(TEST_CONTENT_HELLO)
def test_get_hash_if_complete_before_iteration_returns_none(self):
"""Test get_hash_if_complete returns None before iteration."""
stream = io.BytesIO(TEST_CONTENT_HELLO)
wrapper = HashingStreamWrapper(stream)
assert wrapper.get_hash_if_complete() is None
def test_get_hash_if_complete_after_iteration_returns_hash(self):
"""Test get_hash_if_complete returns hash after iteration."""
stream = io.BytesIO(TEST_CONTENT_HELLO)
wrapper = HashingStreamWrapper(stream)
list(wrapper) # Consume
assert wrapper.get_hash_if_complete() == TEST_HASH_HELLO
def test_custom_chunk_size(self):
"""Test custom chunk size is respected."""
content = b"x" * 1000
stream = io.BytesIO(content)
wrapper = HashingStreamWrapper(stream, chunk_size=100)
chunks = list(wrapper)
# Every chunk except possibly the last should be exactly 100 bytes
for chunk in chunks[:-1]: # All but last
assert len(chunk) == 100
# Total content should match
assert b"".join(chunks) == content
def test_iterator_interface(self):
"""Test wrapper supports iterator interface."""
stream = io.BytesIO(TEST_CONTENT_HELLO)
wrapper = HashingStreamWrapper(stream)
# Should be able to use for loop
result = b""
for chunk in wrapper:
result += chunk
assert result == TEST_CONTENT_HELLO
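# Illustrative sketch of the wrapper contract these tests pin down (assumed
# shape; the real class is app.checksum.HashingStreamWrapper):
class _SketchHashingWrapper:
    def __init__(self, stream, chunk_size=DEFAULT_CHUNK_SIZE):
        self._stream = stream
        self._chunk_size = chunk_size
        self._hasher = hashlib.sha256()
        self.bytes_read = 0
        self._complete = False
    def __iter__(self):
        for chunk in iter(lambda: self._stream.read(self._chunk_size), b""):
            self._hasher.update(chunk)
            self.bytes_read += len(chunk)
            yield chunk
        self._complete = True
    def get_hash(self):
        if not self._complete:
            for _ in self:  # drain whatever remains of the stream
                pass
        return self._hasher.hexdigest()
    def get_hash_if_complete(self):
        return self._hasher.hexdigest() if self._complete else None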
# =============================================================================
# Unit Tests - VerifyingStreamWrapper
# =============================================================================
class TestVerifyingStreamWrapper:
"""Tests for VerifyingStreamWrapper class."""
def test_verify_success(self):
"""Test verification succeeds for matching content."""
stream = io.BytesIO(TEST_CONTENT_HELLO)
wrapper = VerifyingStreamWrapper(stream, TEST_HASH_HELLO)
# Consume stream
list(wrapper)
# Verify should succeed
result = wrapper.verify()
assert result is True
assert wrapper.is_verified is True
def test_verify_failure_raises_error(self):
"""Test verification failure raises ChecksumMismatchError."""
stream = io.BytesIO(TEST_CONTENT_HELLO)
wrong_hash = "a" * 64
wrapper = VerifyingStreamWrapper(stream, wrong_hash)
# Consume stream
list(wrapper)
# Verify should fail
with pytest.raises(ChecksumMismatchError):
wrapper.verify()
assert wrapper.is_verified is False
def test_verify_silent_success(self):
"""Test verify_silent returns True on success."""
stream = io.BytesIO(TEST_CONTENT_HELLO)
wrapper = VerifyingStreamWrapper(stream, TEST_HASH_HELLO)
list(wrapper)
result = wrapper.verify_silent()
assert result is True
def test_verify_silent_failure(self):
"""Test verify_silent returns False on failure."""
stream = io.BytesIO(TEST_CONTENT_HELLO)
wrong_hash = "a" * 64
wrapper = VerifyingStreamWrapper(stream, wrong_hash)
list(wrapper)
result = wrapper.verify_silent()
assert result is False
def test_invalid_expected_hash_raises_error(self):
"""Test invalid expected hash raises error at construction."""
stream = io.BytesIO(TEST_CONTENT_HELLO)
with pytest.raises(InvalidHashFormatError):
VerifyingStreamWrapper(stream, INVALID_HASH_TOO_SHORT)
def test_on_failure_callback(self):
"""Test on_failure callback is called on verification failure."""
stream = io.BytesIO(TEST_CONTENT_HELLO)
wrong_hash = "a" * 64
callback_called = []
def on_failure(error):
callback_called.append(error)
wrapper = VerifyingStreamWrapper(stream, wrong_hash, on_failure=on_failure)
list(wrapper)
with pytest.raises(ChecksumMismatchError):
wrapper.verify()
assert len(callback_called) == 1
assert isinstance(callback_called[0], ChecksumMismatchError)
def test_get_actual_hash_after_iteration(self):
"""Test get_actual_hash returns hash after iteration."""
stream = io.BytesIO(TEST_CONTENT_HELLO)
wrapper = VerifyingStreamWrapper(stream, TEST_HASH_HELLO)
# Before iteration
assert wrapper.get_actual_hash() is None
list(wrapper)
# After iteration
assert wrapper.get_actual_hash() == TEST_HASH_HELLO
def test_includes_context_in_error(self):
"""Test error includes artifact_id and s3_key."""
stream = io.BytesIO(TEST_CONTENT_HELLO)
wrong_hash = "a" * 64
wrapper = VerifyingStreamWrapper(
stream,
wrong_hash,
artifact_id="test-artifact",
s3_key="test/key",
)
list(wrapper)
with pytest.raises(ChecksumMismatchError) as exc_info:
wrapper.verify()
error = exc_info.value
assert error.artifact_id == "test-artifact"
assert error.s3_key == "test/key"
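# Illustrative usage sketch, as the tests above drive it: wrap the source,
# stream the bytes onward, then verify() once the transfer has finished.
# The helper name is hypothetical.
def _sketch_relay_with_verification(stream, expected_hash: str) -> bytes:
    wrapper = VerifyingStreamWrapper(stream, expected_hash)
    body = b"".join(wrapper)  # production code would stream chunks to the client
    wrapper.verify()  # raises ChecksumMismatchError if the bytes were corrupted
    return body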
# =============================================================================
# Unit Tests - ChecksumMismatchError
# =============================================================================
class TestChecksumMismatchError:
"""Tests for ChecksumMismatchError class."""
def test_to_dict(self):
"""Test to_dict returns proper dictionary."""
error = ChecksumMismatchError(
expected="a" * 64,
actual="b" * 64,
artifact_id="test-123",
s3_key="test/key",
size=1024,
)
result = error.to_dict()
assert result["error"] == "checksum_mismatch"
assert result["expected"] == "a" * 64
assert result["actual"] == "b" * 64
assert result["artifact_id"] == "test-123"
assert result["s3_key"] == "test/key"
assert result["size"] == 1024
def test_message_format(self):
"""Test error message format."""
error = ChecksumMismatchError(
expected="a" * 64,
actual="b" * 64,
)
assert "verification failed" in str(error).lower()
assert "expected" in str(error).lower()
def test_custom_message(self):
"""Test custom message is used."""
error = ChecksumMismatchError(
expected="a" * 64,
actual="b" * 64,
message="Custom error message",
)
assert str(error) == "Custom error message"
# =============================================================================
# Corruption Simulation Tests
# =============================================================================
class TestCorruptionDetection:
"""Tests for detecting corrupted content."""
def test_detect_truncated_content(self):
"""Test detection of truncated content."""
original = TEST_CONTENT_HELLO
truncated = original[:-1] # Remove last byte
original_hash = compute_sha256(original)
truncated_hash = compute_sha256(truncated)
assert original_hash != truncated_hash
assert verify_checksum(truncated, original_hash) is False
def test_detect_extra_bytes(self):
"""Test detection of content with extra bytes."""
original = TEST_CONTENT_HELLO
extended = original + b"\x00" # Add null byte
original_hash = compute_sha256(original)
assert verify_checksum(extended, original_hash) is False
def test_detect_single_bit_flip(self):
"""Test detection of single bit flip."""
original = TEST_CONTENT_HELLO
# Flip first bit of first byte
corrupted = bytes([original[0] ^ 0x01]) + original[1:]
original_hash = compute_sha256(original)
assert verify_checksum(corrupted, original_hash) is False
def test_detect_wrong_content(self):
"""Test detection of completely different content."""
original = TEST_CONTENT_HELLO
different = b"Something completely different"
original_hash = compute_sha256(original)
assert verify_checksum(different, original_hash) is False
def test_detect_empty_vs_nonempty(self):
"""Test detection of empty content vs non-empty."""
original = TEST_CONTENT_HELLO
empty = b""
original_hash = compute_sha256(original)
assert verify_checksum(empty, original_hash) is False
def test_streaming_detection_of_corruption(self):
"""Test VerifyingStreamWrapper detects corruption."""
original = b"Original content that will be corrupted"
original_hash = compute_sha256(original)
# Corrupt the content
corrupted = b"Corrupted content that is different"
stream = io.BytesIO(corrupted)
wrapper = VerifyingStreamWrapper(stream, original_hash)
list(wrapper) # Consume
with pytest.raises(ChecksumMismatchError):
wrapper.verify()
# =============================================================================
# Edge Case Tests
# =============================================================================
class TestEdgeCases:
"""Tests for edge cases and boundary conditions."""
def test_null_bytes_in_content(self):
"""Test content with null bytes."""
content = b"\x00\x00\x00"
hash_result = compute_sha256(content)
assert verify_checksum(content, hash_result) is True
def test_whitespace_only_content(self):
"""Test content with only whitespace."""
content = b" \t\n\r "
hash_result = compute_sha256(content)
assert verify_checksum(content, hash_result) is True
def test_large_content_streaming(self):
"""Test streaming verification of large content."""
# 1MB of content
large_content = b"x" * (1024 * 1024)
expected_hash = compute_sha256(large_content)
stream = io.BytesIO(large_content)
wrapper = VerifyingStreamWrapper(stream, expected_hash)
# Consume and verify
chunks = list(wrapper)
assert wrapper.verify() is True
assert b"".join(chunks) == large_content
def test_unicode_bytes_content(self):
"""Test content with unicode bytes."""
content = "Hello, 世界! 🌍".encode("utf-8")
hash_result = compute_sha256(content)
assert verify_checksum(content, hash_result) is True
def test_maximum_chunk_size_larger_than_content(self):
"""Test chunk size larger than content."""
content = b"small"
stream = io.BytesIO(content)
wrapper = HashingStreamWrapper(stream, chunk_size=1024 * 1024)
chunks = list(wrapper)
assert len(chunks) == 1
assert chunks[0] == content
assert wrapper.get_hash() == compute_sha256(content)

View File

@@ -0,0 +1,460 @@
"""
Integration tests for download verification API endpoints.
These tests verify:
- Checksum headers in download responses
- Pre-verification mode
- Streaming verification mode
- HEAD request headers
- Verification failure handling
"""
import pytest
import hashlib
import base64
import io
# =============================================================================
# Test Fixtures
# =============================================================================
@pytest.fixture
def upload_test_file(integration_client):
"""
Factory fixture to upload a test file and return its artifact ID.
Usage:
artifact_id = upload_test_file(project, package, content, tag="v1.0")
"""
def _upload(project_name: str, package_name: str, content: bytes, tag: str = None):
files = {
"file": ("test-file.bin", io.BytesIO(content), "application/octet-stream")
}
data = {}
if tag:
data["tag"] = tag
response = integration_client.post(
f"/api/v1/project/{project_name}/{package_name}/upload",
files=files,
data=data,
)
assert response.status_code == 200, f"Upload failed: {response.text}"
return response.json()["artifact_id"]
return _upload
# =============================================================================
# Integration Tests - Download Headers
# =============================================================================
class TestDownloadChecksumHeaders:
"""Tests for checksum headers in download responses."""
@pytest.mark.integration
def test_download_includes_sha256_header(
self, integration_client, test_package, upload_test_file
):
"""Test download response includes X-Checksum-SHA256 header."""
project_name, package_name = test_package
content = b"Content for SHA256 header test"
# Upload file
artifact_id = upload_test_file(
project_name, package_name, content, tag="sha256-header-test"
)
# Download with proxy mode
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/sha256-header-test",
params={"mode": "proxy"},
)
assert response.status_code == 200
assert "X-Checksum-SHA256" in response.headers
assert response.headers["X-Checksum-SHA256"] == artifact_id
@pytest.mark.integration
def test_download_includes_etag_header(
self, integration_client, test_package, upload_test_file
):
"""Test download response includes ETag header."""
project_name, package_name = test_package
content = b"Content for ETag header test"
artifact_id = upload_test_file(
project_name, package_name, content, tag="etag-test"
)
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/etag-test",
params={"mode": "proxy"},
)
assert response.status_code == 200
assert "ETag" in response.headers
# ETag should be quoted artifact ID
assert response.headers["ETag"] == f'"{artifact_id}"'
@pytest.mark.integration
def test_download_includes_digest_header(
self, integration_client, test_package, upload_test_file
):
"""Test download response includes RFC 3230 Digest header."""
project_name, package_name = test_package
content = b"Content for Digest header test"
sha256 = hashlib.sha256(content).hexdigest()
upload_test_file(project_name, package_name, content, tag="digest-test")
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/digest-test",
params={"mode": "proxy"},
)
assert response.status_code == 200
assert "Digest" in response.headers
# Verify Digest format: sha-256=<base64>
digest = response.headers["Digest"]
assert digest.startswith("sha-256=")
# Verify base64 content matches
b64_hash = digest.split("=", 1)[1]
decoded = base64.b64decode(b64_hash)
assert decoded == bytes.fromhex(sha256)
@pytest.mark.integration
def test_download_includes_content_length_header(
self, integration_client, test_package, upload_test_file
):
"""Test download response includes X-Content-Length header."""
project_name, package_name = test_package
content = b"Content for X-Content-Length test"
upload_test_file(project_name, package_name, content, tag="content-length-test")
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/content-length-test",
params={"mode": "proxy"},
)
assert response.status_code == 200
assert "X-Content-Length" in response.headers
assert response.headers["X-Content-Length"] == str(len(content))
@pytest.mark.integration
def test_download_includes_verified_header_false(
self, integration_client, test_package, upload_test_file
):
"""Test download without verification has X-Verified: false."""
project_name, package_name = test_package
content = b"Content for X-Verified false test"
upload_test_file(project_name, package_name, content, tag="verified-false-test")
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/verified-false-test",
params={"mode": "proxy", "verify": "false"},
)
assert response.status_code == 200
assert "X-Verified" in response.headers
assert response.headers["X-Verified"] == "false"
# =============================================================================
# Integration Tests - Pre-Verification Mode
# =============================================================================
class TestPreVerificationMode:
"""Tests for pre-verification download mode."""
@pytest.mark.integration
def test_pre_verify_success(
self, integration_client, test_package, upload_test_file
):
"""Test pre-verification mode succeeds for valid content."""
project_name, package_name = test_package
content = b"Content for pre-verification success test"
upload_test_file(project_name, package_name, content, tag="pre-verify-success")
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/pre-verify-success",
params={"mode": "proxy", "verify": "true", "verify_mode": "pre"},
)
assert response.status_code == 200
assert response.content == content
assert "X-Verified" in response.headers
assert response.headers["X-Verified"] == "true"
@pytest.mark.integration
def test_pre_verify_returns_complete_content(
self, integration_client, test_package, upload_test_file
):
"""Test pre-verification returns complete content."""
project_name, package_name = test_package
# Use binary content to verify no corruption
content = bytes(range(256)) * 10 # 2560 bytes of all byte values
upload_test_file(project_name, package_name, content, tag="pre-verify-content")
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/pre-verify-content",
params={"mode": "proxy", "verify": "true", "verify_mode": "pre"},
)
assert response.status_code == 200
assert response.content == content
# =============================================================================
# Integration Tests - Streaming Verification Mode
# =============================================================================
class TestStreamingVerificationMode:
"""Tests for streaming verification download mode."""
@pytest.mark.integration
def test_stream_verify_success(
self, integration_client, test_package, upload_test_file
):
"""Test streaming verification mode succeeds for valid content."""
project_name, package_name = test_package
content = b"Content for streaming verification success test"
upload_test_file(
project_name, package_name, content, tag="stream-verify-success"
)
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/stream-verify-success",
params={"mode": "proxy", "verify": "true", "verify_mode": "stream"},
)
assert response.status_code == 200
assert response.content == content
# X-Verified is "pending" for streaming mode (verified after transfer)
assert "X-Verified" in response.headers
@pytest.mark.integration
def test_stream_verify_large_content(
self, integration_client, test_package, upload_test_file
):
"""Test streaming verification with larger content."""
project_name, package_name = test_package
# 100KB of content
content = b"x" * (100 * 1024)
upload_test_file(project_name, package_name, content, tag="stream-verify-large")
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/stream-verify-large",
params={"mode": "proxy", "verify": "true", "verify_mode": "stream"},
)
assert response.status_code == 200
assert response.content == content
# =============================================================================
# Integration Tests - HEAD Request Headers
# =============================================================================
class TestHeadRequestHeaders:
"""Tests for HEAD request checksum headers."""
@pytest.mark.integration
def test_head_includes_sha256_header(
self, integration_client, test_package, upload_test_file
):
"""Test HEAD request includes X-Checksum-SHA256 header."""
project_name, package_name = test_package
content = b"Content for HEAD SHA256 test"
artifact_id = upload_test_file(
project_name, package_name, content, tag="head-sha256-test"
)
response = integration_client.head(
f"/api/v1/project/{project_name}/{package_name}/+/head-sha256-test"
)
assert response.status_code == 200
assert "X-Checksum-SHA256" in response.headers
assert response.headers["X-Checksum-SHA256"] == artifact_id
@pytest.mark.integration
def test_head_includes_etag(
self, integration_client, test_package, upload_test_file
):
"""Test HEAD request includes ETag header."""
project_name, package_name = test_package
content = b"Content for HEAD ETag test"
artifact_id = upload_test_file(
project_name, package_name, content, tag="head-etag-test"
)
response = integration_client.head(
f"/api/v1/project/{project_name}/{package_name}/+/head-etag-test"
)
assert response.status_code == 200
assert "ETag" in response.headers
assert response.headers["ETag"] == f'"{artifact_id}"'
@pytest.mark.integration
def test_head_includes_digest(
self, integration_client, test_package, upload_test_file
):
"""Test HEAD request includes Digest header."""
project_name, package_name = test_package
content = b"Content for HEAD Digest test"
upload_test_file(project_name, package_name, content, tag="head-digest-test")
response = integration_client.head(
f"/api/v1/project/{project_name}/{package_name}/+/head-digest-test"
)
assert response.status_code == 200
assert "Digest" in response.headers
assert response.headers["Digest"].startswith("sha-256=")
@pytest.mark.integration
def test_head_includes_content_length(
self, integration_client, test_package, upload_test_file
):
"""Test HEAD request includes X-Content-Length header."""
project_name, package_name = test_package
content = b"Content for HEAD Content-Length test"
upload_test_file(project_name, package_name, content, tag="head-length-test")
response = integration_client.head(
f"/api/v1/project/{project_name}/{package_name}/+/head-length-test"
)
assert response.status_code == 200
assert "X-Content-Length" in response.headers
assert response.headers["X-Content-Length"] == str(len(content))
@pytest.mark.integration
def test_head_no_body(self, integration_client, test_package, upload_test_file):
"""Test HEAD request returns no body."""
project_name, package_name = test_package
content = b"Content for HEAD no-body test"
upload_test_file(project_name, package_name, content, tag="head-no-body-test")
response = integration_client.head(
f"/api/v1/project/{project_name}/{package_name}/+/head-no-body-test"
)
assert response.status_code == 200
assert response.content == b""
# =============================================================================
# Integration Tests - Range Requests
# =============================================================================
class TestRangeRequestHeaders:
"""Tests for range request handling with checksum headers."""
@pytest.mark.integration
def test_range_request_includes_checksum_headers(
self, integration_client, test_package, upload_test_file
):
"""Test range request includes checksum headers."""
project_name, package_name = test_package
content = b"Content for range request checksum header test"
upload_test_file(project_name, package_name, content, tag="range-checksum-test")
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/range-checksum-test",
headers={"Range": "bytes=0-9"},
params={"mode": "proxy"},
)
assert response.status_code == 206
assert "X-Checksum-SHA256" in response.headers
# Checksum is for the FULL file, not the range
assert len(response.headers["X-Checksum-SHA256"]) == 64
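# Illustrative sketch: because the advertised checksum always covers the whole
# artifact, ranged chunks can be reassembled and verified at the end. Helper
# name and chunking are hypothetical; headers are as exercised above.
def _sketch_ranged_download(client, url: str, size: int, chunk: int = 4096) -> bytes:
    parts, expected = [], None
    for start in range(0, size, chunk):
        end = min(start + chunk, size) - 1
        resp = client.get(
            url,
            headers={"Range": f"bytes={start}-{end}"},
            params={"mode": "proxy"},
        )
        expected = resp.headers["X-Checksum-SHA256"]  # full-file digest
        parts.append(resp.content)
    blob = b"".join(parts)
    assert hashlib.sha256(blob).hexdigest() == expected  # assumes size > 0
    return blob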
# =============================================================================
# Integration Tests - Client-Side Verification
# =============================================================================
class TestClientSideVerification:
"""Tests demonstrating client-side verification using headers."""
@pytest.mark.integration
def test_client_can_verify_downloaded_content(
self, integration_client, test_package, upload_test_file
):
"""Test client can verify downloaded content using header."""
project_name, package_name = test_package
content = b"Content for client-side verification test"
upload_test_file(project_name, package_name, content, tag="client-verify-test")
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/client-verify-test",
params={"mode": "proxy"},
)
assert response.status_code == 200
# Get expected hash from header
expected_hash = response.headers["X-Checksum-SHA256"]
# Compute actual hash of downloaded content
actual_hash = hashlib.sha256(response.content).hexdigest()
# Verify match
assert actual_hash == expected_hash
@pytest.mark.integration
def test_client_can_verify_using_digest_header(
self, integration_client, test_package, upload_test_file
):
"""Test client can verify using RFC 3230 Digest header."""
project_name, package_name = test_package
content = b"Content for Digest header verification"
upload_test_file(project_name, package_name, content, tag="digest-verify-test")
response = integration_client.get(
f"/api/v1/project/{project_name}/{package_name}/+/digest-verify-test",
params={"mode": "proxy"},
)
assert response.status_code == 200
# Parse Digest header
digest_header = response.headers["Digest"]
assert digest_header.startswith("sha-256=")
b64_hash = digest_header.split("=", 1)[1]
expected_hash_bytes = base64.b64decode(b64_hash)
# Compute actual hash of downloaded content
actual_hash_bytes = hashlib.sha256(response.content).digest()
# Verify match
assert actual_hash_bytes == expected_hash_bytes
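# Illustrative client sketch consolidating the verification styles shown above
# into one helper (name hypothetical; headers as exercised in these tests):
def _sketch_download_and_verify(client, url: str) -> bytes:
    resp = client.get(url, params={"mode": "proxy"})
    assert resp.status_code == 200
    expected = resp.headers["X-Checksum-SHA256"]
    if hashlib.sha256(resp.content).hexdigest() != expected:
        raise ValueError("downloaded content failed checksum verification")
    return resp.content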

View File

@@ -1,207 +0,0 @@
"""
Unit tests for duplicate detection and deduplication logic.
Tests cover:
- _exists() method correctly identifies existing S3 keys
- S3 key generation follows expected pattern
- Storage layer skips upload when artifact already exists
- Storage layer performs upload when artifact does not exist
"""
import pytest
import io
from unittest.mock import MagicMock, patch
from tests.conftest import (
compute_sha256,
TEST_CONTENT_HELLO,
TEST_HASH_HELLO,
)
class TestExistsMethod:
"""Tests for the _exists() method that checks S3 object existence."""
@pytest.mark.unit
def test_exists_returns_true_for_existing_key(self, mock_storage, mock_s3_client):
"""Test _exists() returns True when object exists."""
# Pre-populate the mock storage
test_key = "fruits/df/fd/test-hash"
mock_s3_client.objects[test_key] = b"content"
result = mock_storage._exists(test_key)
assert result is True
@pytest.mark.unit
def test_exists_returns_false_for_nonexistent_key(self, mock_storage):
"""Test _exists() returns False when object doesn't exist."""
result = mock_storage._exists("fruits/no/ne/nonexistent-key")
assert result is False
@pytest.mark.unit
def test_exists_handles_404_error(self, mock_storage):
"""Test _exists() handles 404 errors gracefully."""
# The mock client raises ClientError for nonexistent keys
result = mock_storage._exists("fruits/xx/yy/does-not-exist")
assert result is False
class TestS3KeyGeneration:
"""Tests for S3 key pattern generation."""
@pytest.mark.unit
def test_s3_key_pattern(self):
"""Test S3 key follows pattern: fruits/{hash[:2]}/{hash[2:4]}/{hash}"""
test_hash = "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"
expected_key = f"fruits/{test_hash[:2]}/{test_hash[2:4]}/{test_hash}"
# Expected: fruits/ab/cd/abcdef1234567890...
assert expected_key == f"fruits/ab/cd/{test_hash}"
@pytest.mark.unit
def test_s3_key_generation_in_storage(self, mock_storage):
"""Test storage layer generates correct S3 key."""
content = TEST_CONTENT_HELLO
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
expected_key = (
f"fruits/{TEST_HASH_HELLO[:2]}/{TEST_HASH_HELLO[2:4]}/{TEST_HASH_HELLO}"
)
assert result.s3_key == expected_key
@pytest.mark.unit
def test_s3_key_uses_sha256_hash(self, mock_storage):
"""Test S3 key is derived from SHA256 hash."""
content = b"unique test content for key test"
file_obj = io.BytesIO(content)
expected_hash = compute_sha256(content)
result = mock_storage._store_simple(file_obj)
# Key should contain the hash
assert expected_hash in result.s3_key
class TestDeduplicationBehavior:
"""Tests for deduplication (skip upload when exists)."""
@pytest.mark.unit
def test_skips_upload_when_exists(self, mock_storage, mock_s3_client):
"""Test storage skips S3 upload when artifact already exists."""
content = TEST_CONTENT_HELLO
s3_key = (
f"fruits/{TEST_HASH_HELLO[:2]}/{TEST_HASH_HELLO[2:4]}/{TEST_HASH_HELLO}"
)
# Pre-populate storage (simulate existing artifact)
mock_s3_client.objects[s3_key] = content
# Track put_object calls
original_put = mock_s3_client.put_object
put_called = []
def tracked_put(*args, **kwargs):
put_called.append(True)
return original_put(*args, **kwargs)
mock_s3_client.put_object = tracked_put
# Store the same content
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
# put_object should NOT have been called (deduplication)
assert len(put_called) == 0
assert result.sha256 == TEST_HASH_HELLO
@pytest.mark.unit
def test_uploads_when_not_exists(self, mock_storage, mock_s3_client):
"""Test storage uploads to S3 when artifact doesn't exist."""
content = b"brand new unique content"
content_hash = compute_sha256(content)
s3_key = f"fruits/{content_hash[:2]}/{content_hash[2:4]}/{content_hash}"
# Ensure object doesn't exist
assert s3_key not in mock_s3_client.objects
# Store the content
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
# Object should now exist in mock storage
assert s3_key in mock_s3_client.objects
assert mock_s3_client.objects[s3_key] == content
@pytest.mark.unit
def test_returns_same_hash_for_duplicate(self, mock_storage, mock_s3_client):
"""Test storing same content twice returns same hash."""
content = b"content to be stored twice"
# First store
file1 = io.BytesIO(content)
result1 = mock_storage._store_simple(file1)
# Second store (duplicate)
file2 = io.BytesIO(content)
result2 = mock_storage._store_simple(file2)
assert result1.sha256 == result2.sha256
assert result1.s3_key == result2.s3_key
@pytest.mark.unit
def test_different_content_different_keys(self, mock_storage):
"""Test different content produces different S3 keys."""
content1 = b"first content"
content2 = b"second content"
file1 = io.BytesIO(content1)
result1 = mock_storage._store_simple(file1)
file2 = io.BytesIO(content2)
result2 = mock_storage._store_simple(file2)
assert result1.sha256 != result2.sha256
assert result1.s3_key != result2.s3_key
class TestDeduplicationEdgeCases:
"""Edge case tests for deduplication."""
@pytest.mark.unit
def test_same_content_different_filenames(self, mock_storage):
"""Test same content with different metadata is deduplicated."""
content = b"identical content"
# Store with "filename1"
file1 = io.BytesIO(content)
result1 = mock_storage._store_simple(file1)
# Store with "filename2" (same content)
file2 = io.BytesIO(content)
result2 = mock_storage._store_simple(file2)
# Both should have same hash (content-addressable)
assert result1.sha256 == result2.sha256
@pytest.mark.unit
def test_whitespace_only_difference(self, mock_storage):
"""Test content differing only by whitespace produces different hashes."""
content1 = b"test content"
content2 = b"test content" # Extra space
content3 = b"test content " # Trailing space
file1 = io.BytesIO(content1)
file2 = io.BytesIO(content2)
file3 = io.BytesIO(content3)
result1 = mock_storage._store_simple(file1)
result2 = mock_storage._store_simple(file2)
result3 = mock_storage._store_simple(file3)
# All should be different (content-addressable)
assert len({result1.sha256, result2.sha256, result3.sha256}) == 3

View File

@@ -1,168 +0,0 @@
"""
Integration tests for garbage collection functionality.
Tests cover:
- Listing orphaned artifacts (ref_count=0)
- Garbage collection in dry-run mode
- Garbage collection actual deletion
- Verifying artifacts with refs are not deleted
"""
import pytest
from tests.conftest import (
compute_sha256,
upload_test_file,
)
class TestOrphanedArtifactsEndpoint:
"""Tests for GET /api/v1/admin/orphaned-artifacts endpoint."""
@pytest.mark.integration
def test_list_orphaned_artifacts_returns_list(self, integration_client):
"""Test orphaned artifacts endpoint returns a list."""
response = integration_client.get("/api/v1/admin/orphaned-artifacts")
assert response.status_code == 200
assert isinstance(response.json(), list)
@pytest.mark.integration
def test_orphaned_artifact_has_required_fields(self, integration_client):
"""Test orphaned artifact response has required fields."""
response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1")
assert response.status_code == 200
data = response.json()
if len(data) > 0:
artifact = data[0]
assert "id" in artifact
assert "size" in artifact
assert "created_at" in artifact
assert "created_by" in artifact
assert "original_name" in artifact
@pytest.mark.integration
def test_orphaned_artifacts_respects_limit(self, integration_client):
"""Test orphaned artifacts endpoint respects limit parameter."""
response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=5")
assert response.status_code == 200
assert len(response.json()) <= 5
@pytest.mark.integration
def test_artifact_becomes_orphaned_when_tag_deleted(
self, integration_client, test_package, unique_test_id
):
"""Test artifact appears in orphaned list after tag is deleted."""
project, package = test_package
content = f"orphan test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Upload with tag
upload_test_file(integration_client, project, package, content, tag="temp-tag")
# Verify not in orphaned list (has ref_count=1)
response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1000")
orphaned_ids = [a["id"] for a in response.json()]
assert expected_hash not in orphaned_ids
# Delete the tag
integration_client.delete(f"/api/v1/project/{project}/{package}/tags/temp-tag")
# Verify now in orphaned list (ref_count=0)
response = integration_client.get("/api/v1/admin/orphaned-artifacts?limit=1000")
orphaned_ids = [a["id"] for a in response.json()]
assert expected_hash in orphaned_ids
class TestGarbageCollectionEndpoint:
"""Tests for POST /api/v1/admin/garbage-collect endpoint."""
@pytest.mark.integration
def test_garbage_collect_dry_run_returns_response(self, integration_client):
"""Test garbage collection dry run returns valid response."""
response = integration_client.post("/api/v1/admin/garbage-collect?dry_run=true")
assert response.status_code == 200
data = response.json()
assert "artifacts_deleted" in data
assert "bytes_freed" in data
assert "artifact_ids" in data
assert "dry_run" in data
assert data["dry_run"] is True
@pytest.mark.integration
def test_garbage_collect_dry_run_doesnt_delete(
self, integration_client, test_package, unique_test_id
):
"""Test garbage collection dry run doesn't actually delete artifacts."""
project, package = test_package
content = f"dry run test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Upload and delete tag to create orphan
upload_test_file(integration_client, project, package, content, tag="dry-run")
integration_client.delete(f"/api/v1/project/{project}/{package}/tags/dry-run")
# Verify artifact exists
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200
# Run garbage collection in dry-run mode
gc_response = integration_client.post(
"/api/v1/admin/garbage-collect?dry_run=true&limit=1000"
)
assert gc_response.status_code == 200
assert expected_hash in gc_response.json()["artifact_ids"]
# Verify artifact STILL exists (dry run didn't delete)
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200
@pytest.mark.integration
def test_garbage_collect_preserves_referenced_artifacts(
self, integration_client, test_package, unique_test_id
):
"""Test garbage collection doesn't delete artifacts with ref_count > 0."""
project, package = test_package
content = f"preserve test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Upload with tag (ref_count=1)
upload_test_file(integration_client, project, package, content, tag="keep-this")
# Verify artifact exists with ref_count=1
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200
assert response.json()["ref_count"] == 1
# Run garbage collection (dry_run to not affect other tests)
gc_response = integration_client.post(
"/api/v1/admin/garbage-collect?dry_run=true&limit=1000"
)
assert gc_response.status_code == 200
# Verify artifact was NOT in delete list (has ref_count > 0)
assert expected_hash not in gc_response.json()["artifact_ids"]
# Verify artifact still exists
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200
assert response.json()["ref_count"] == 1
@pytest.mark.integration
def test_garbage_collect_respects_limit(self, integration_client):
"""Test garbage collection respects limit parameter."""
response = integration_client.post(
"/api/v1/admin/garbage-collect?dry_run=true&limit=5"
)
assert response.status_code == 200
assert response.json()["artifacts_deleted"] <= 5
@pytest.mark.integration
def test_garbage_collect_returns_bytes_freed(self, integration_client):
"""Test garbage collection returns accurate bytes_freed."""
response = integration_client.post("/api/v1/admin/garbage-collect?dry_run=true")
assert response.status_code == 200
data = response.json()
assert data["bytes_freed"] >= 0
assert isinstance(data["bytes_freed"], int)

View File

@@ -1,215 +0,0 @@
"""
Unit tests for SHA256 hash calculation and deduplication logic.
Tests cover:
- Hash computation produces consistent results
- Hash is always 64 character lowercase hexadecimal
- Different content produces different hashes
- Binary content handling
- Large file handling (streaming)
"""
import pytest
import hashlib
import io
from tests.conftest import (
create_test_file,
compute_sha256,
TEST_CONTENT_HELLO,
TEST_HASH_HELLO,
TEST_CONTENT_BINARY,
TEST_HASH_BINARY,
)
class TestHashComputation:
"""Unit tests for hash calculation functionality."""
@pytest.mark.unit
def test_sha256_consistent_results(self):
"""Test SHA256 hash produces consistent results for identical content."""
content = b"test content for hashing"
# Compute hash multiple times
hash1 = compute_sha256(content)
hash2 = compute_sha256(content)
hash3 = compute_sha256(content)
assert hash1 == hash2 == hash3
@pytest.mark.unit
def test_sha256_different_content_different_hash(self):
"""Test SHA256 produces different hashes for different content."""
content1 = b"content version 1"
content2 = b"content version 2"
hash1 = compute_sha256(content1)
hash2 = compute_sha256(content2)
assert hash1 != hash2
@pytest.mark.unit
def test_sha256_format_64_char_hex(self):
"""Test SHA256 hash is always 64 character lowercase hexadecimal."""
test_cases = [
b"", # Empty
b"a", # Single char
b"Hello, World!", # Normal string
bytes(range(256)), # All byte values
b"x" * 10000, # Larger content
]
for content in test_cases:
hash_value = compute_sha256(content)
# Check length
assert len(hash_value) == 64, (
f"Hash length should be 64, got {len(hash_value)}"
)
# Check lowercase
assert hash_value == hash_value.lower(), "Hash should be lowercase"
# Check hexadecimal
assert all(c in "0123456789abcdef" for c in hash_value), (
"Hash should be hex"
)
@pytest.mark.unit
def test_sha256_known_value(self):
"""Test SHA256 produces expected hash for known input."""
assert compute_sha256(TEST_CONTENT_HELLO) == TEST_HASH_HELLO
@pytest.mark.unit
def test_sha256_binary_content(self):
"""Test SHA256 handles binary content correctly."""
assert compute_sha256(TEST_CONTENT_BINARY) == TEST_HASH_BINARY
# Test with null bytes
content_with_nulls = b"\x00\x00test\x00\x00"
hash_value = compute_sha256(content_with_nulls)
assert len(hash_value) == 64
@pytest.mark.unit
def test_sha256_streaming_computation(self):
"""Test SHA256 can be computed in chunks (streaming)."""
# Large content
chunk_size = 8192
total_size = chunk_size * 10 # 80KB
content = b"x" * total_size
# Direct computation
direct_hash = compute_sha256(content)
# Streaming computation
hasher = hashlib.sha256()
for i in range(0, total_size, chunk_size):
hasher.update(content[i : i + chunk_size])
streaming_hash = hasher.hexdigest()
assert direct_hash == streaming_hash
@pytest.mark.unit
def test_sha256_order_matters(self):
"""Test that content order affects hash (not just content set)."""
content1 = b"AB"
content2 = b"BA"
assert compute_sha256(content1) != compute_sha256(content2)
class TestStorageHashComputation:
"""Tests for hash computation in the storage layer."""
@pytest.mark.unit
def test_storage_computes_sha256(self, mock_storage):
"""Test storage layer correctly computes SHA256 hash."""
content = TEST_CONTENT_HELLO
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
assert result.sha256 == TEST_HASH_HELLO
@pytest.mark.unit
def test_storage_computes_md5(self, mock_storage):
"""Test storage layer also computes MD5 hash."""
content = TEST_CONTENT_HELLO
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
expected_md5 = hashlib.md5(content).hexdigest()
assert result.md5 == expected_md5
@pytest.mark.unit
def test_storage_computes_sha1(self, mock_storage):
"""Test storage layer also computes SHA1 hash."""
content = TEST_CONTENT_HELLO
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
expected_sha1 = hashlib.sha1(content).hexdigest()
assert result.sha1 == expected_sha1
@pytest.mark.unit
def test_storage_returns_correct_size(self, mock_storage):
"""Test storage layer returns correct file size."""
content = b"test content with known size"
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
assert result.size == len(content)
@pytest.mark.unit
def test_storage_generates_correct_s3_key(self, mock_storage):
"""Test storage layer generates correct S3 key pattern."""
content = TEST_CONTENT_HELLO
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
# Key should be: fruits/{hash[:2]}/{hash[2:4]}/{hash}
expected_key = (
f"fruits/{TEST_HASH_HELLO[:2]}/{TEST_HASH_HELLO[2:4]}/{TEST_HASH_HELLO}"
)
assert result.s3_key == expected_key
class TestHashEdgeCases:
"""Edge case tests for hash computation."""
@pytest.mark.unit
def test_hash_empty_content_rejected(self, mock_storage):
"""Test that empty content is rejected."""
from app.storage import HashComputationError
file_obj = io.BytesIO(b"")
with pytest.raises(HashComputationError):
mock_storage._store_simple(file_obj)
@pytest.mark.unit
def test_hash_large_file_streaming(self, mock_storage):
"""Test hash computation for large files uses streaming."""
# Create a 10MB file
size = 10 * 1024 * 1024
content = b"x" * size
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
expected_hash = compute_sha256(content)
assert result.sha256 == expected_hash
@pytest.mark.unit
def test_hash_special_bytes(self):
"""Test hash handles all byte values correctly."""
# All possible byte values
content = bytes(range(256))
hash_value = compute_sha256(content)
assert len(hash_value) == 64
assert hash_value == TEST_HASH_BINARY
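
These tests import compute_sha256 from tests.conftest, which is not shown in this diff; a sketch consistent with how the tests call it (the real helper may differ, e.g. by accepting file-like objects):

import hashlib

def compute_sha256(content: bytes) -> str:
    """Sketch of the tests.conftest helper as these tests use it."""
    return hashlib.sha256(content).hexdigest()

# Matches the documented contract: 64-char lowercase hex, even for b"".
assert len(compute_sha256(b"")) == 64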

View File

@@ -1,458 +0,0 @@
"""
Unit and integration tests for reference counting behavior.
Tests cover:
- ref_count is set correctly for new artifacts
- ref_count increments on duplicate uploads
- ref_count query correctly identifies existing artifacts
- Artifact lookup by SHA256 hash works correctly
"""
import pytest
import io
from tests.conftest import (
compute_sha256,
upload_test_file,
TEST_CONTENT_HELLO,
TEST_HASH_HELLO,
)
class TestRefCountQuery:
"""Tests for ref_count querying and artifact lookup."""
@pytest.mark.integration
def test_artifact_lookup_by_sha256(self, integration_client, test_package):
"""Test artifact lookup by SHA256 hash (primary key) works correctly."""
project, package = test_package
content = b"unique content for lookup test"
expected_hash = compute_sha256(content)
# Upload a file
upload_result = upload_test_file(
integration_client, project, package, content, tag="v1"
)
assert upload_result["artifact_id"] == expected_hash
# Look up artifact by ID (SHA256)
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200
artifact = response.json()
assert artifact["id"] == expected_hash
assert artifact["sha256"] == expected_hash
assert artifact["size"] == len(content)
@pytest.mark.integration
def test_ref_count_query_identifies_existing_artifact(
self, integration_client, test_package
):
"""Test ref_count query correctly identifies existing artifacts by hash."""
project, package = test_package
content = b"content for ref count query test"
expected_hash = compute_sha256(content)
# Upload a file with a tag
upload_result = upload_test_file(
integration_client, project, package, content, tag="v1"
)
# Query artifact and check ref_count
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200
artifact = response.json()
assert artifact["ref_count"] >= 1 # At least 1 from the tag
@pytest.mark.integration
def test_ref_count_set_to_1_for_new_artifact_with_tag(
self, integration_client, test_package, unique_test_id
):
"""Test ref_count is set to 1 for new artifacts when created with a tag."""
project, package = test_package
content = f"brand new content for ref count test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Upload a new file with a tag
upload_result = upload_test_file(
integration_client, project, package, content, tag="initial"
)
assert upload_result["artifact_id"] == expected_hash
assert upload_result["ref_count"] == 1
assert upload_result["deduplicated"] is False
@pytest.mark.integration
def test_ref_count_increments_on_duplicate_upload_with_tag(
self, integration_client, test_package, unique_test_id
):
"""Test ref_count is incremented when duplicate content is uploaded with a new tag."""
project, package = test_package
content = f"content that will be uploaded twice {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# First upload with tag
result1 = upload_test_file(
integration_client, project, package, content, tag="v1"
)
assert result1["ref_count"] == 1
assert result1["deduplicated"] is False
# Second upload with different tag (same content)
result2 = upload_test_file(
integration_client, project, package, content, tag="v2"
)
assert result2["artifact_id"] == expected_hash
assert result2["ref_count"] == 2
assert result2["deduplicated"] is True
@pytest.mark.integration
def test_ref_count_after_multiple_tags(self, integration_client, test_package):
"""Test ref_count correctly reflects number of tags pointing to artifact."""
project, package = test_package
content = b"content for multiple tag test"
expected_hash = compute_sha256(content)
# Upload with multiple tags
tags = ["v1", "v2", "v3", "latest"]
for i, tag in enumerate(tags):
result = upload_test_file(
integration_client, project, package, content, tag=tag
)
assert result["artifact_id"] == expected_hash
assert result["ref_count"] == i + 1
# Verify final ref_count via artifact endpoint
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.status_code == 200
assert response.json()["ref_count"] == len(tags)
class TestRefCountWithDeletion:
"""Tests for ref_count behavior when tags are deleted."""
@pytest.mark.integration
def test_ref_count_decrements_on_tag_delete(self, integration_client, test_package):
"""Test ref_count decrements when a tag is deleted."""
project, package = test_package
content = b"content for delete test"
expected_hash = compute_sha256(content)
# Upload with two tags
upload_test_file(integration_client, project, package, content, tag="v1")
upload_test_file(integration_client, project, package, content, tag="v2")
# Verify ref_count is 2
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 2
# Delete one tag
delete_response = integration_client.delete(
f"/api/v1/project/{project}/{package}/tags/v1"
)
assert delete_response.status_code == 204
# Verify ref_count is now 1
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 1
@pytest.mark.integration
def test_ref_count_zero_after_all_tags_deleted(
self, integration_client, test_package
):
"""Test ref_count goes to 0 when all tags are deleted."""
project, package = test_package
content = b"content that will be orphaned"
expected_hash = compute_sha256(content)
# Upload with one tag
upload_test_file(integration_client, project, package, content, tag="only-tag")
# Delete the tag
integration_client.delete(f"/api/v1/project/{project}/{package}/tags/only-tag")
# Verify ref_count is 0
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 0
class TestRefCountCascadeDelete:
"""Tests for ref_count behavior during cascade deletions."""
@pytest.mark.integration
def test_ref_count_decrements_on_package_delete(
self, integration_client, unique_test_id
):
"""Test ref_count decrements for all tags when package is deleted."""
# Create a project and package manually (not using fixtures to control cleanup)
project_name = f"cascade-pkg-{unique_test_id}"
package_name = f"test-pkg-{unique_test_id}"
# Create project
response = integration_client.post(
"/api/v1/projects",
json={
"name": project_name,
"description": "Test project",
"is_public": True,
},
)
assert response.status_code == 200
# Create package
response = integration_client.post(
f"/api/v1/project/{project_name}/packages",
json={"name": package_name, "description": "Test package"},
)
assert response.status_code == 200
# Upload content with multiple tags
content = f"cascade delete test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
upload_test_file(
integration_client, project_name, package_name, content, tag="v1"
)
upload_test_file(
integration_client, project_name, package_name, content, tag="v2"
)
upload_test_file(
integration_client, project_name, package_name, content, tag="v3"
)
# Verify ref_count is 3
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 3
# Delete the package (should cascade delete all tags and decrement ref_count)
delete_response = integration_client.delete(
f"/api/v1/project/{project_name}/packages/{package_name}"
)
assert delete_response.status_code == 204
# Verify ref_count is 0 (all tags were deleted)
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 0
# Cleanup: delete the project
integration_client.delete(f"/api/v1/projects/{project_name}")
@pytest.mark.integration
def test_ref_count_decrements_on_project_delete(
self, integration_client, unique_test_id
):
"""Test ref_count decrements for all tags in all packages when project is deleted."""
# Create a project manually (not using fixtures to control cleanup)
project_name = f"cascade-proj-{unique_test_id}"
package1_name = f"pkg1-{unique_test_id}"
package2_name = f"pkg2-{unique_test_id}"
# Create project
response = integration_client.post(
"/api/v1/projects",
json={
"name": project_name,
"description": "Test project",
"is_public": True,
},
)
assert response.status_code == 200
# Create two packages
for pkg_name in [package1_name, package2_name]:
response = integration_client.post(
f"/api/v1/project/{project_name}/packages",
json={"name": pkg_name, "description": "Test package"},
)
assert response.status_code == 200
# Upload same content with tags in both packages
content = f"project cascade test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
upload_test_file(
integration_client, project_name, package1_name, content, tag="v1"
)
upload_test_file(
integration_client, project_name, package1_name, content, tag="v2"
)
upload_test_file(
integration_client, project_name, package2_name, content, tag="latest"
)
upload_test_file(
integration_client, project_name, package2_name, content, tag="stable"
)
# Verify ref_count is 4 (2 tags in each of 2 packages)
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 4
# Delete the project (should cascade delete all packages, tags, and decrement ref_count)
delete_response = integration_client.delete(f"/api/v1/projects/{project_name}")
assert delete_response.status_code == 204
# Verify ref_count is 0
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 0
@pytest.mark.integration
def test_shared_artifact_ref_count_partial_decrement(
self, integration_client, unique_test_id
):
"""Test ref_count correctly decrements when artifact is shared across packages."""
# Create project with two packages
project_name = f"shared-artifact-{unique_test_id}"
package1_name = f"pkg1-{unique_test_id}"
package2_name = f"pkg2-{unique_test_id}"
# Create project
response = integration_client.post(
"/api/v1/projects",
json={
"name": project_name,
"description": "Test project",
"is_public": True,
},
)
assert response.status_code == 200
# Create two packages
for pkg_name in [package1_name, package2_name]:
response = integration_client.post(
f"/api/v1/project/{project_name}/packages",
json={"name": pkg_name, "description": "Test package"},
)
assert response.status_code == 200
# Upload same content to both packages
content = f"shared artifact {unique_test_id}".encode()
expected_hash = compute_sha256(content)
upload_test_file(
integration_client, project_name, package1_name, content, tag="v1"
)
upload_test_file(
integration_client, project_name, package2_name, content, tag="v1"
)
# Verify ref_count is 2
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 2
# Delete only package1 (package2 still references the artifact)
delete_response = integration_client.delete(
f"/api/v1/project/{project_name}/packages/{package1_name}"
)
assert delete_response.status_code == 204
# Verify ref_count is 1 (only package2's tag remains)
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 1
# Cleanup
integration_client.delete(f"/api/v1/projects/{project_name}")
class TestRefCountTagUpdate:
"""Tests for ref_count behavior when tags are updated to point to different artifacts."""
@pytest.mark.integration
def test_ref_count_adjusts_on_tag_update(
self, integration_client, test_package, unique_test_id
):
"""Test ref_count adjusts when a tag is updated to point to a different artifact."""
project, package = test_package
# Upload two different artifacts
content1 = f"artifact one {unique_test_id}".encode()
content2 = f"artifact two {unique_test_id}".encode()
hash1 = compute_sha256(content1)
hash2 = compute_sha256(content2)
# Upload first artifact with tag "latest"
upload_test_file(integration_client, project, package, content1, tag="latest")
# Verify first artifact has ref_count 1
response = integration_client.get(f"/api/v1/artifact/{hash1}")
assert response.json()["ref_count"] == 1
# Upload second artifact with different tag
upload_test_file(integration_client, project, package, content2, tag="stable")
# Now update "latest" tag to point to second artifact
# This is done by uploading the same content with the same tag
upload_test_file(integration_client, project, package, content2, tag="latest")
# Verify first artifact ref_count decreased to 0 (tag moved away)
response = integration_client.get(f"/api/v1/artifact/{hash1}")
assert response.json()["ref_count"] == 0
# Verify second artifact ref_count increased to 2 (stable + latest)
response = integration_client.get(f"/api/v1/artifact/{hash2}")
assert response.json()["ref_count"] == 2
@pytest.mark.integration
def test_ref_count_unchanged_when_tag_same_artifact(
self, integration_client, test_package, unique_test_id
):
"""Test ref_count doesn't change when tag is 'updated' to same artifact."""
project, package = test_package
content = f"same artifact {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Upload with tag
upload_test_file(integration_client, project, package, content, tag="v1")
# Verify ref_count is 1
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 1
# Upload same content with same tag (no-op)
upload_test_file(integration_client, project, package, content, tag="v1")
# Verify ref_count is still 1 (no double-counting)
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 1
@pytest.mark.integration
def test_tag_via_post_endpoint_increments_ref_count(
self, integration_client, test_package, unique_test_id
):
"""Test creating tag via POST /tags endpoint increments ref_count."""
project, package = test_package
content = f"tag endpoint test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Upload artifact without tag
result = upload_test_file(
integration_client, project, package, content, filename="test.bin", tag=None
)
artifact_id = result["artifact_id"]
# Verify ref_count is 0 (no tags yet)
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 0
# Create tag via POST endpoint
tag_response = integration_client.post(
f"/api/v1/project/{project}/{package}/tags",
json={"name": "v1.0.0", "artifact_id": artifact_id},
)
assert tag_response.status_code == 200
# Verify ref_count is now 1
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 1
# Create another tag via POST endpoint
tag_response = integration_client.post(
f"/api/v1/project/{project}/{package}/tags",
json={"name": "latest", "artifact_id": artifact_id},
)
assert tag_response.status_code == 200
# Verify ref_count is now 2
response = integration_client.get(f"/api/v1/artifact/{expected_hash}")
assert response.json()["ref_count"] == 2

View File

@@ -1,488 +0,0 @@
"""
Integration tests for statistics endpoints.
Tests cover:
- Global stats endpoint
- Deduplication stats endpoint
- Cross-project deduplication
- Timeline stats
- Export and report endpoints
- Package and artifact stats
"""
import pytest
from tests.conftest import compute_sha256, upload_test_file
class TestGlobalStats:
"""Tests for GET /api/v1/stats endpoint."""
@pytest.mark.integration
def test_stats_returns_valid_response(self, integration_client):
"""Test stats endpoint returns expected fields."""
response = integration_client.get("/api/v1/stats")
assert response.status_code == 200
data = response.json()
# Check all required fields exist
assert "total_artifacts" in data
assert "total_size_bytes" in data
assert "unique_artifacts" in data
assert "orphaned_artifacts" in data
assert "orphaned_size_bytes" in data
assert "total_uploads" in data
assert "deduplicated_uploads" in data
assert "deduplication_ratio" in data
assert "storage_saved_bytes" in data
@pytest.mark.integration
def test_stats_values_are_non_negative(self, integration_client):
"""Test all stat values are non-negative."""
response = integration_client.get("/api/v1/stats")
assert response.status_code == 200
data = response.json()
assert data["total_artifacts"] >= 0
assert data["total_size_bytes"] >= 0
assert data["unique_artifacts"] >= 0
assert data["orphaned_artifacts"] >= 0
assert data["total_uploads"] >= 0
assert data["deduplicated_uploads"] >= 0
assert data["deduplication_ratio"] >= 0
assert data["storage_saved_bytes"] >= 0
@pytest.mark.integration
def test_stats_update_after_upload(
self, integration_client, test_package, unique_test_id
):
"""Test stats update after uploading an artifact."""
project, package = test_package
# Get initial stats
initial_response = integration_client.get("/api/v1/stats")
initial_stats = initial_response.json()
# Upload a new file
content = f"stats test content {unique_test_id}".encode()
upload_test_file(
integration_client, project, package, content, tag=f"stats-{unique_test_id}"
)
# Get updated stats
updated_response = integration_client.get("/api/v1/stats")
updated_stats = updated_response.json()
# Verify stats increased
assert updated_stats["total_uploads"] >= initial_stats["total_uploads"]
class TestDeduplicationStats:
"""Tests for GET /api/v1/stats/deduplication endpoint."""
@pytest.mark.integration
def test_dedup_stats_returns_valid_response(self, integration_client):
"""Test deduplication stats returns expected fields."""
response = integration_client.get("/api/v1/stats/deduplication")
assert response.status_code == 200
data = response.json()
assert "total_logical_bytes" in data
assert "total_physical_bytes" in data
assert "bytes_saved" in data
assert "savings_percentage" in data
assert "total_uploads" in data
assert "unique_artifacts" in data
assert "duplicate_uploads" in data
assert "average_ref_count" in data
assert "max_ref_count" in data
assert "most_referenced_artifacts" in data
@pytest.mark.integration
def test_most_referenced_artifacts_format(self, integration_client):
"""Test most_referenced_artifacts has correct structure."""
response = integration_client.get("/api/v1/stats/deduplication")
assert response.status_code == 200
data = response.json()
artifacts = data["most_referenced_artifacts"]
assert isinstance(artifacts, list)
if len(artifacts) > 0:
artifact = artifacts[0]
assert "artifact_id" in artifact
assert "ref_count" in artifact
assert "size" in artifact
assert "storage_saved" in artifact
@pytest.mark.integration
def test_dedup_stats_with_top_n_param(self, integration_client):
"""Test deduplication stats respects top_n parameter."""
response = integration_client.get("/api/v1/stats/deduplication?top_n=3")
assert response.status_code == 200
data = response.json()
assert len(data["most_referenced_artifacts"]) <= 3
@pytest.mark.integration
def test_savings_percentage_valid_range(self, integration_client):
"""Test savings percentage is between 0 and 100."""
response = integration_client.get("/api/v1/stats/deduplication")
assert response.status_code == 200
data = response.json()
assert 0 <= data["savings_percentage"] <= 100
class TestCrossProjectStats:
"""Tests for GET /api/v1/stats/cross-project endpoint."""
@pytest.mark.integration
def test_cross_project_returns_valid_response(self, integration_client):
"""Test cross-project stats returns expected fields."""
response = integration_client.get("/api/v1/stats/cross-project")
assert response.status_code == 200
data = response.json()
assert "shared_artifacts_count" in data
assert "total_cross_project_savings" in data
assert "shared_artifacts" in data
assert isinstance(data["shared_artifacts"], list)
@pytest.mark.integration
def test_cross_project_respects_limit(self, integration_client):
"""Test cross-project stats respects limit parameter."""
response = integration_client.get("/api/v1/stats/cross-project?limit=5")
assert response.status_code == 200
data = response.json()
assert len(data["shared_artifacts"]) <= 5
@pytest.mark.integration
def test_cross_project_detects_shared_artifacts(
self, integration_client, unique_test_id
):
"""Test cross-project deduplication is detected."""
content = f"shared across projects {unique_test_id}".encode()
# Create two projects
proj1 = f"cross-proj-a-{unique_test_id}"
proj2 = f"cross-proj-b-{unique_test_id}"
try:
# Create projects and packages
integration_client.post(
"/api/v1/projects",
json={"name": proj1, "description": "Test", "is_public": True},
)
integration_client.post(
"/api/v1/projects",
json={"name": proj2, "description": "Test", "is_public": True},
)
integration_client.post(
f"/api/v1/project/{proj1}/packages",
json={"name": "pkg", "description": "Test"},
)
integration_client.post(
f"/api/v1/project/{proj2}/packages",
json={"name": "pkg", "description": "Test"},
)
# Upload same content to both projects
upload_test_file(integration_client, proj1, "pkg", content, tag="v1")
upload_test_file(integration_client, proj2, "pkg", content, tag="v1")
# Check cross-project stats
response = integration_client.get("/api/v1/stats/cross-project")
assert response.status_code == 200
data = response.json()
assert data["shared_artifacts_count"] >= 1
finally:
# Cleanup
integration_client.delete(f"/api/v1/projects/{proj1}")
integration_client.delete(f"/api/v1/projects/{proj2}")
class TestTimelineStats:
"""Tests for GET /api/v1/stats/timeline endpoint."""
@pytest.mark.integration
def test_timeline_returns_valid_response(self, integration_client):
"""Test timeline stats returns expected fields."""
response = integration_client.get("/api/v1/stats/timeline")
assert response.status_code == 200
data = response.json()
assert "period" in data
assert "start_date" in data
assert "end_date" in data
assert "data_points" in data
assert isinstance(data["data_points"], list)
@pytest.mark.integration
def test_timeline_daily_period(self, integration_client):
"""Test timeline with daily period."""
response = integration_client.get("/api/v1/stats/timeline?period=daily")
assert response.status_code == 200
data = response.json()
assert data["period"] == "daily"
@pytest.mark.integration
def test_timeline_weekly_period(self, integration_client):
"""Test timeline with weekly period."""
response = integration_client.get("/api/v1/stats/timeline?period=weekly")
assert response.status_code == 200
data = response.json()
assert data["period"] == "weekly"
@pytest.mark.integration
def test_timeline_monthly_period(self, integration_client):
"""Test timeline with monthly period."""
response = integration_client.get("/api/v1/stats/timeline?period=monthly")
assert response.status_code == 200
data = response.json()
assert data["period"] == "monthly"
@pytest.mark.integration
def test_timeline_invalid_period_rejected(self, integration_client):
"""Test timeline rejects invalid period."""
response = integration_client.get("/api/v1/stats/timeline?period=invalid")
assert response.status_code == 422
@pytest.mark.integration
def test_timeline_data_point_structure(self, integration_client):
"""Test timeline data points have correct structure."""
response = integration_client.get("/api/v1/stats/timeline")
assert response.status_code == 200
data = response.json()
if len(data["data_points"]) > 0:
point = data["data_points"][0]
assert "date" in point
assert "total_uploads" in point
assert "unique_artifacts" in point
assert "duplicated_uploads" in point
assert "bytes_saved" in point
class TestExportEndpoint:
"""Tests for GET /api/v1/stats/export endpoint."""
@pytest.mark.integration
def test_export_json_format(self, integration_client):
"""Test export with JSON format."""
response = integration_client.get("/api/v1/stats/export?format=json")
assert response.status_code == 200
data = response.json()
assert "total_artifacts" in data
assert "generated_at" in data
@pytest.mark.integration
def test_export_csv_format(self, integration_client):
"""Test export with CSV format."""
response = integration_client.get("/api/v1/stats/export?format=csv")
assert response.status_code == 200
assert "text/csv" in response.headers.get("content-type", "")
content = response.text
assert "Metric,Value" in content
assert "total_artifacts" in content
@pytest.mark.integration
def test_export_invalid_format_rejected(self, integration_client):
"""Test export rejects invalid format."""
response = integration_client.get("/api/v1/stats/export?format=xml")
assert response.status_code == 422
class TestReportEndpoint:
"""Tests for GET /api/v1/stats/report endpoint."""
@pytest.mark.integration
def test_report_markdown_format(self, integration_client):
"""Test report with markdown format."""
response = integration_client.get("/api/v1/stats/report?format=markdown")
assert response.status_code == 200
data = response.json()
assert data["format"] == "markdown"
assert "generated_at" in data
assert "content" in data
assert "# Orchard Storage Report" in data["content"]
@pytest.mark.integration
def test_report_json_format(self, integration_client):
"""Test report with JSON format."""
response = integration_client.get("/api/v1/stats/report?format=json")
assert response.status_code == 200
data = response.json()
assert data["format"] == "json"
assert "content" in data
@pytest.mark.integration
def test_report_contains_sections(self, integration_client):
"""Test markdown report contains expected sections."""
response = integration_client.get("/api/v1/stats/report?format=markdown")
assert response.status_code == 200
content = response.json()["content"]
assert "## Overview" in content
assert "## Storage" in content
assert "## Uploads" in content
class TestProjectStats:
"""Tests for GET /api/v1/projects/:project/stats endpoint."""
@pytest.mark.integration
def test_project_stats_returns_valid_response(
self, integration_client, test_project
):
"""Test project stats returns expected fields."""
response = integration_client.get(f"/api/v1/projects/{test_project}/stats")
assert response.status_code == 200
data = response.json()
assert "project_id" in data
assert "project_name" in data
assert "package_count" in data
assert "tag_count" in data
assert "artifact_count" in data
assert "total_size_bytes" in data
assert "upload_count" in data
assert "deduplicated_uploads" in data
assert "storage_saved_bytes" in data
assert "deduplication_ratio" in data
@pytest.mark.integration
def test_project_stats_not_found(self, integration_client):
"""Test project stats returns 404 for non-existent project."""
response = integration_client.get("/api/v1/projects/nonexistent-project/stats")
assert response.status_code == 404
class TestPackageStats:
"""Tests for GET /api/v1/project/:project/packages/:package/stats endpoint."""
@pytest.mark.integration
def test_package_stats_returns_valid_response(
self, integration_client, test_package
):
"""Test package stats returns expected fields."""
project, package = test_package
response = integration_client.get(
f"/api/v1/project/{project}/packages/{package}/stats"
)
assert response.status_code == 200
data = response.json()
assert "package_id" in data
assert "package_name" in data
assert "project_name" in data
assert "tag_count" in data
assert "artifact_count" in data
assert "total_size_bytes" in data
assert "upload_count" in data
assert "deduplicated_uploads" in data
assert "storage_saved_bytes" in data
assert "deduplication_ratio" in data
@pytest.mark.integration
def test_package_stats_not_found(self, integration_client, test_project):
"""Test package stats returns 404 for non-existent package."""
response = integration_client.get(
f"/api/v1/project/{test_project}/packages/nonexistent-package/stats"
)
assert response.status_code == 404
class TestArtifactStats:
"""Tests for GET /api/v1/artifact/:id/stats endpoint."""
@pytest.mark.integration
def test_artifact_stats_returns_valid_response(
self, integration_client, test_package, unique_test_id
):
"""Test artifact stats returns expected fields."""
project, package = test_package
content = f"artifact stats test {unique_test_id}".encode()
expected_hash = compute_sha256(content)
# Upload artifact
upload_test_file(
integration_client, project, package, content, tag=f"art-{unique_test_id}"
)
# Get artifact stats
response = integration_client.get(f"/api/v1/artifact/{expected_hash}/stats")
assert response.status_code == 200
data = response.json()
assert "artifact_id" in data
assert "sha256" in data
assert "size" in data
assert "ref_count" in data
assert "storage_savings" in data
assert "tags" in data
assert "projects" in data
assert "packages" in data
@pytest.mark.integration
def test_artifact_stats_not_found(self, integration_client):
"""Test artifact stats returns 404 for non-existent artifact."""
fake_hash = "0" * 64
response = integration_client.get(f"/api/v1/artifact/{fake_hash}/stats")
assert response.status_code == 404
@pytest.mark.integration
def test_artifact_stats_shows_correct_projects(
self, integration_client, unique_test_id
):
"""Test artifact stats shows all projects using the artifact."""
content = f"multi-project artifact {unique_test_id}".encode()
expected_hash = compute_sha256(content)
proj1 = f"art-stats-a-{unique_test_id}"
proj2 = f"art-stats-b-{unique_test_id}"
try:
# Create projects and packages
integration_client.post(
"/api/v1/projects",
json={"name": proj1, "description": "Test", "is_public": True},
)
integration_client.post(
"/api/v1/projects",
json={"name": proj2, "description": "Test", "is_public": True},
)
integration_client.post(
f"/api/v1/project/{proj1}/packages",
json={"name": "pkg", "description": "Test"},
)
integration_client.post(
f"/api/v1/project/{proj2}/packages",
json={"name": "pkg", "description": "Test"},
)
# Upload same content to both projects
upload_test_file(integration_client, proj1, "pkg", content, tag="v1")
upload_test_file(integration_client, proj2, "pkg", content, tag="v1")
# Check artifact stats
response = integration_client.get(f"/api/v1/artifact/{expected_hash}/stats")
assert response.status_code == 200
data = response.json()
assert len(data["projects"]) == 2
assert proj1 in data["projects"]
assert proj2 in data["projects"]
finally:
integration_client.delete(f"/api/v1/projects/{proj1}")
integration_client.delete(f"/api/v1/projects/{proj2}")

View File

@@ -0,0 +1,271 @@
"""
Unit tests for SQLAlchemy models.
Tests cover:
- Model instantiation and defaults
- Property aliases (sha256, format_metadata)
- Relationship definitions
- Constraint definitions
"""
import pytest
import uuid
from datetime import datetime
class TestArtifactModel:
"""Tests for the Artifact model."""
@pytest.mark.unit
def test_artifact_sha256_property(self):
"""Test sha256 property is an alias for id."""
from app.models import Artifact
artifact = Artifact(
id="a" * 64,
size=1024,
created_by="test-user",
s3_key="fruits/aa/aa/test",
)
assert artifact.sha256 == artifact.id
assert artifact.sha256 == "a" * 64
@pytest.mark.unit
def test_artifact_format_metadata_alias(self):
"""Test format_metadata is an alias for artifact_metadata."""
from app.models import Artifact
test_metadata = {"format": "tarball", "version": "1.0.0"}
artifact = Artifact(
id="b" * 64,
size=2048,
created_by="test-user",
s3_key="fruits/bb/bb/test",
artifact_metadata=test_metadata,
)
assert artifact.format_metadata == test_metadata
assert artifact.format_metadata == artifact.artifact_metadata
@pytest.mark.unit
def test_artifact_format_metadata_setter(self):
"""Test format_metadata setter updates artifact_metadata."""
from app.models import Artifact
artifact = Artifact(
id="c" * 64,
size=512,
created_by="test-user",
s3_key="fruits/cc/cc/test",
)
new_metadata = {"type": "rpm", "arch": "x86_64"}
artifact.format_metadata = new_metadata
assert artifact.artifact_metadata == new_metadata
assert artifact.format_metadata == new_metadata
@pytest.mark.unit
def test_artifact_default_ref_count(self):
"""Test artifact ref_count column has default value of 1."""
from app.models import Artifact
# Check the column definition has the right default
ref_count_col = Artifact.__table__.columns["ref_count"]
assert ref_count_col.default is not None
assert ref_count_col.default.arg == 1
@pytest.mark.unit
def test_artifact_default_metadata_is_dict(self):
"""Test artifact default metadata is an empty dict."""
from app.models import Artifact
artifact = Artifact(
id="e" * 64,
size=100,
created_by="test-user",
s3_key="fruits/ee/ee/test",
)
# Default might be None until saved, but the column default is dict
assert artifact.artifact_metadata is None or isinstance(
artifact.artifact_metadata, dict
)
class TestProjectModel:
"""Tests for the Project model."""
@pytest.mark.unit
def test_project_default_is_public(self):
"""Test project is_public column has default value of True."""
from app.models import Project
# Check the column definition has the right default
is_public_col = Project.__table__.columns["is_public"]
assert is_public_col.default is not None
assert is_public_col.default.arg is True
@pytest.mark.unit
def test_project_uuid_generation(self):
"""Test project generates UUID by default."""
from app.models import Project
project = Project(
name="uuid-test-project",
created_by="test-user",
)
# UUID should be set by default function
assert project.id is not None or hasattr(Project.id, "default")
class TestPackageModel:
"""Tests for the Package model."""
@pytest.mark.unit
def test_package_default_format(self):
"""Test package format column has default value of 'generic'."""
from app.models import Package
# Check the column definition has the right default
format_col = Package.__table__.columns["format"]
assert format_col.default is not None
assert format_col.default.arg == "generic"
@pytest.mark.unit
def test_package_default_platform(self):
"""Test package platform column has default value of 'any'."""
from app.models import Package
# Check the column definition has the right default
platform_col = Package.__table__.columns["platform"]
assert platform_col.default is not None
assert platform_col.default.arg == "any"
class TestTagModel:
"""Tests for the Tag model."""
@pytest.mark.unit
def test_tag_requires_package_id(self):
"""Test tag requires package_id."""
from app.models import Tag
tag = Tag(
name="v1.0.0",
package_id=uuid.uuid4(),
artifact_id="f" * 64,
created_by="test-user",
)
assert tag.package_id is not None
assert tag.artifact_id == "f" * 64
class TestTagHistoryModel:
"""Tests for the TagHistory model."""
@pytest.mark.unit
def test_tag_history_default_change_type(self):
"""Test tag history change_type column has default value of 'update'."""
from app.models import TagHistory
# Check the column definition has the right default
change_type_col = TagHistory.__table__.columns["change_type"]
assert change_type_col.default is not None
assert change_type_col.default.arg == "update"
@pytest.mark.unit
def test_tag_history_allows_null_old_artifact(self):
"""Test tag history allows null old_artifact_id (for create events)."""
from app.models import TagHistory
history = TagHistory(
tag_id=uuid.uuid4(),
old_artifact_id=None,
new_artifact_id="h" * 64,
change_type="create",
changed_by="test-user",
)
assert history.old_artifact_id is None
class TestUploadModel:
"""Tests for the Upload model."""
@pytest.mark.unit
def test_upload_default_deduplicated_is_false(self):
"""Test upload deduplicated column has default value of False."""
from app.models import Upload
# Check the column definition has the right default
deduplicated_col = Upload.__table__.columns["deduplicated"]
assert deduplicated_col.default is not None
assert deduplicated_col.default.arg is False
@pytest.mark.unit
def test_upload_default_checksum_verified_is_true(self):
"""Test upload checksum_verified column has default value of True."""
from app.models import Upload
# Check the column definition has the right default
checksum_verified_col = Upload.__table__.columns["checksum_verified"]
assert checksum_verified_col.default is not None
assert checksum_verified_col.default.arg is True
class TestAccessPermissionModel:
"""Tests for the AccessPermission model."""
@pytest.mark.unit
def test_access_permission_levels(self):
"""Test valid access permission levels."""
from app.models import AccessPermission
# This tests the check constraint values
valid_levels = ["read", "write", "admin"]
for level in valid_levels:
permission = AccessPermission(
project_id=uuid.uuid4(),
user_id="test-user",
level=level,
)
assert permission.level == level
class TestAuditLogModel:
"""Tests for the AuditLog model."""
@pytest.mark.unit
def test_audit_log_required_fields(self):
"""Test audit log has all required fields."""
from app.models import AuditLog
log = AuditLog(
action="project.create",
resource="/projects/test-project",
user_id="test-user",
)
assert log.action == "project.create"
assert log.resource == "/projects/test-project"
assert log.user_id == "test-user"
@pytest.mark.unit
def test_audit_log_optional_details(self):
"""Test audit log can have optional details JSON."""
from app.models import AuditLog
details = {"old_value": "v1", "new_value": "v2"}
log = AuditLog(
action="tag.update",
resource="/projects/test/packages/pkg/tags/latest",
user_id="test-user",
details=details,
)
assert log.details == details
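
The sha256 and format_metadata aliases verified above are an ordinary Python property pattern; a minimal sketch using a plain class (the real SQLAlchemy model maps these onto columns, so this is illustrative only):

class ArtifactSketch:
    """Illustrates the alias pattern; not the real mapped model."""
    def __init__(self, id: str, artifact_metadata: dict | None = None):
        self.id = id
        self.artifact_metadata = artifact_metadata or {}

    @property
    def sha256(self) -> str:  # read-only alias for the primary key
        return self.id

    @property
    def format_metadata(self) -> dict:
        return self.artifact_metadata

    @format_metadata.setter
    def format_metadata(self, value: dict) -> None:
        self.artifact_metadata = value

a = ArtifactSketch("a" * 64)
a.format_metadata = {"format": "tarball"}
assert a.sha256 == a.id and a.artifact_metadata == {"format": "tarball"}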

View File

@@ -0,0 +1,439 @@
"""
Unit tests for S3 storage layer.
Tests cover:
- SHA256 hash calculation and consistency
- Hash format validation (64-char hex)
- S3 key generation pattern
- Deduplication behavior (_exists method)
- Storage result computation (MD5, SHA1, size)
- Edge cases (empty files, large files, binary content)
"""
import pytest
import hashlib
import io
from tests.factories import (
compute_sha256,
TEST_CONTENT_HELLO,
TEST_HASH_HELLO,
TEST_CONTENT_BINARY,
TEST_HASH_BINARY,
)
# =============================================================================
# Hash Computation Tests
# =============================================================================
class TestHashComputation:
"""Unit tests for hash calculation functionality."""
@pytest.mark.unit
def test_sha256_consistent_results(self):
"""Test SHA256 hash produces consistent results for identical content."""
content = b"test content for hashing"
# Compute hash multiple times
hash1 = compute_sha256(content)
hash2 = compute_sha256(content)
hash3 = compute_sha256(content)
assert hash1 == hash2 == hash3
@pytest.mark.unit
def test_sha256_different_content_different_hash(self):
"""Test SHA256 produces different hashes for different content."""
content1 = b"content version 1"
content2 = b"content version 2"
hash1 = compute_sha256(content1)
hash2 = compute_sha256(content2)
assert hash1 != hash2
@pytest.mark.unit
def test_sha256_format_64_char_hex(self):
"""Test SHA256 hash is always 64 character lowercase hexadecimal."""
test_cases = [
b"", # Empty
b"a", # Single char
b"Hello, World!", # Normal string
bytes(range(256)), # All byte values
b"x" * 10000, # Larger content
]
for content in test_cases:
hash_value = compute_sha256(content)
# Check length
assert len(hash_value) == 64, (
f"Hash length should be 64, got {len(hash_value)}"
)
# Check lowercase
assert hash_value == hash_value.lower(), "Hash should be lowercase"
# Check hexadecimal
assert all(c in "0123456789abcdef" for c in hash_value), (
"Hash should be hex"
)
@pytest.mark.unit
def test_sha256_known_value(self):
"""Test SHA256 produces expected hash for known input."""
assert compute_sha256(TEST_CONTENT_HELLO) == TEST_HASH_HELLO
@pytest.mark.unit
def test_sha256_binary_content(self):
"""Test SHA256 handles binary content correctly."""
assert compute_sha256(TEST_CONTENT_BINARY) == TEST_HASH_BINARY
# Test with null bytes
content_with_nulls = b"\x00\x00test\x00\x00"
hash_value = compute_sha256(content_with_nulls)
assert len(hash_value) == 64
@pytest.mark.unit
def test_sha256_streaming_computation(self):
"""Test SHA256 can be computed in chunks (streaming)."""
# Large content
chunk_size = 8192
total_size = chunk_size * 10 # 80KB
content = b"x" * total_size
# Direct computation
direct_hash = compute_sha256(content)
# Streaming computation
hasher = hashlib.sha256()
for i in range(0, total_size, chunk_size):
hasher.update(content[i : i + chunk_size])
streaming_hash = hasher.hexdigest()
assert direct_hash == streaming_hash
@pytest.mark.unit
def test_sha256_order_matters(self):
"""Test that content order affects hash (not just content set)."""
content1 = b"AB"
content2 = b"BA"
assert compute_sha256(content1) != compute_sha256(content2)
# =============================================================================
# Storage Hash Computation Tests
# =============================================================================
class TestStorageHashComputation:
"""Tests for hash computation in the storage layer."""
@pytest.mark.unit
def test_storage_computes_sha256(self, mock_storage):
"""Test storage layer correctly computes SHA256 hash."""
content = TEST_CONTENT_HELLO
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
assert result.sha256 == TEST_HASH_HELLO
@pytest.mark.unit
def test_storage_computes_md5(self, mock_storage):
"""Test storage layer also computes MD5 hash."""
content = TEST_CONTENT_HELLO
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
expected_md5 = hashlib.md5(content).hexdigest()
assert result.md5 == expected_md5
@pytest.mark.unit
def test_storage_computes_sha1(self, mock_storage):
"""Test storage layer also computes SHA1 hash."""
content = TEST_CONTENT_HELLO
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
expected_sha1 = hashlib.sha1(content).hexdigest()
assert result.sha1 == expected_sha1
@pytest.mark.unit
def test_storage_returns_correct_size(self, mock_storage):
"""Test storage layer returns correct file size."""
content = b"test content with known size"
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
assert result.size == len(content)
@pytest.mark.unit
def test_storage_generates_correct_s3_key(self, mock_storage):
"""Test storage layer generates correct S3 key pattern."""
content = TEST_CONTENT_HELLO
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
# Key should be: fruits/{hash[:2]}/{hash[2:4]}/{hash}
expected_key = (
f"fruits/{TEST_HASH_HELLO[:2]}/{TEST_HASH_HELLO[2:4]}/{TEST_HASH_HELLO}"
)
assert result.s3_key == expected_key
# =============================================================================
# Hash Edge Cases
# =============================================================================
class TestHashEdgeCases:
"""Edge case tests for hash computation."""
@pytest.mark.unit
def test_hash_empty_content_rejected(self, mock_storage):
"""Test that empty content is rejected."""
from app.storage import HashComputationError
file_obj = io.BytesIO(b"")
with pytest.raises(HashComputationError):
mock_storage._store_simple(file_obj)
@pytest.mark.unit
def test_hash_large_file_streaming(self, mock_storage):
"""Test hash computation for large files uses streaming."""
# Create a 10MB file
size = 10 * 1024 * 1024
content = b"x" * size
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
expected_hash = compute_sha256(content)
assert result.sha256 == expected_hash
@pytest.mark.unit
def test_hash_special_bytes(self):
"""Test hash handles all byte values correctly."""
# All possible byte values
content = bytes(range(256))
hash_value = compute_sha256(content)
assert len(hash_value) == 64
assert hash_value == TEST_HASH_BINARY
# =============================================================================
# S3 Existence Check Tests
# =============================================================================
class TestExistsMethod:
"""Tests for the _exists() method that checks S3 object existence."""
@pytest.mark.unit
def test_exists_returns_true_for_existing_key(self, mock_storage, mock_s3_client):
"""Test _exists() returns True when object exists."""
# Pre-populate the mock storage
test_key = "fruits/df/fd/test-hash"
mock_s3_client.objects[test_key] = b"content"
result = mock_storage._exists(test_key)
assert result is True
@pytest.mark.unit
def test_exists_returns_false_for_nonexistent_key(self, mock_storage):
"""Test _exists() returns False when object doesn't exist."""
result = mock_storage._exists("fruits/no/ne/nonexistent-key")
assert result is False
@pytest.mark.unit
def test_exists_handles_404_error(self, mock_storage):
"""Test _exists() handles 404 errors gracefully."""
# The mock client raises ClientError for nonexistent keys
result = mock_storage._exists("fruits/xx/yy/does-not-exist")
assert result is False
# =============================================================================
# S3 Key Generation Tests
# =============================================================================
class TestS3KeyGeneration:
"""Tests for S3 key pattern generation."""
@pytest.mark.unit
def test_s3_key_pattern(self):
"""Test S3 key follows pattern: fruits/{hash[:2]}/{hash[2:4]}/{hash}"""
test_hash = "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"
expected_key = f"fruits/{test_hash[:2]}/{test_hash[2:4]}/{test_hash}"
# Expected: fruits/ab/cd/abcdef1234567890...
assert expected_key == f"fruits/ab/cd/{test_hash}"
@pytest.mark.unit
def test_s3_key_generation_in_storage(self, mock_storage):
"""Test storage layer generates correct S3 key."""
content = TEST_CONTENT_HELLO
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
expected_key = (
f"fruits/{TEST_HASH_HELLO[:2]}/{TEST_HASH_HELLO[2:4]}/{TEST_HASH_HELLO}"
)
assert result.s3_key == expected_key
@pytest.mark.unit
def test_s3_key_uses_sha256_hash(self, mock_storage):
"""Test S3 key is derived from SHA256 hash."""
content = b"unique test content for key test"
file_obj = io.BytesIO(content)
expected_hash = compute_sha256(content)
result = mock_storage._store_simple(file_obj)
# Key should contain the hash
assert expected_hash in result.s3_key
# =============================================================================
# Deduplication Behavior Tests
# =============================================================================
class TestDeduplicationBehavior:
"""Tests for deduplication (skip upload when exists)."""
@pytest.mark.unit
def test_skips_upload_when_exists(self, mock_storage, mock_s3_client):
"""Test storage skips S3 upload when artifact already exists."""
content = TEST_CONTENT_HELLO
s3_key = (
f"fruits/{TEST_HASH_HELLO[:2]}/{TEST_HASH_HELLO[2:4]}/{TEST_HASH_HELLO}"
)
# Pre-populate storage (simulate existing artifact)
mock_s3_client.objects[s3_key] = content
# Track put_object calls
original_put = mock_s3_client.put_object
put_called = []
def tracked_put(*args, **kwargs):
put_called.append(True)
return original_put(*args, **kwargs)
mock_s3_client.put_object = tracked_put
# Store the same content
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
# put_object should NOT have been called (deduplication)
assert len(put_called) == 0
assert result.sha256 == TEST_HASH_HELLO
@pytest.mark.unit
def test_uploads_when_not_exists(self, mock_storage, mock_s3_client):
"""Test storage uploads to S3 when artifact doesn't exist."""
content = b"brand new unique content"
content_hash = compute_sha256(content)
s3_key = f"fruits/{content_hash[:2]}/{content_hash[2:4]}/{content_hash}"
# Ensure object doesn't exist
assert s3_key not in mock_s3_client.objects
# Store the content
file_obj = io.BytesIO(content)
result = mock_storage._store_simple(file_obj)
# Object should now exist in mock storage
assert s3_key in mock_s3_client.objects
assert mock_s3_client.objects[s3_key] == content
@pytest.mark.unit
def test_returns_same_hash_for_duplicate(self, mock_storage, mock_s3_client):
"""Test storing same content twice returns same hash."""
content = b"content to be stored twice"
# First store
file1 = io.BytesIO(content)
result1 = mock_storage._store_simple(file1)
# Second store (duplicate)
file2 = io.BytesIO(content)
result2 = mock_storage._store_simple(file2)
assert result1.sha256 == result2.sha256
assert result1.s3_key == result2.s3_key
@pytest.mark.unit
def test_different_content_different_keys(self, mock_storage):
"""Test different content produces different S3 keys."""
content1 = b"first content"
content2 = b"second content"
file1 = io.BytesIO(content1)
result1 = mock_storage._store_simple(file1)
file2 = io.BytesIO(content2)
result2 = mock_storage._store_simple(file2)
assert result1.sha256 != result2.sha256
assert result1.s3_key != result2.s3_key
# =============================================================================
# Deduplication Edge Cases
# =============================================================================
class TestDeduplicationEdgeCases:
"""Edge case tests for deduplication."""
@pytest.mark.unit
def test_same_content_different_filenames(self, mock_storage):
"""Test same content with different metadata is deduplicated."""
content = b"identical content"
# Store with "filename1"
file1 = io.BytesIO(content)
result1 = mock_storage._store_simple(file1)
# Store with "filename2" (same content)
file2 = io.BytesIO(content)
result2 = mock_storage._store_simple(file2)
# Both should have same hash (content-addressable)
assert result1.sha256 == result2.sha256
@pytest.mark.unit
def test_whitespace_only_difference(self, mock_storage):
"""Test content differing only by whitespace produces different hashes."""
content1 = b"test content"
content2 = b"test content" # Extra space
content3 = b"test content " # Trailing space
file1 = io.BytesIO(content1)
file2 = io.BytesIO(content2)
file3 = io.BytesIO(content3)
result1 = mock_storage._store_simple(file1)
result2 = mock_storage._store_simple(file2)
result3 = mock_storage._store_simple(file3)
# All should be different (content-addressable)
assert len({result1.sha256, result2.sha256, result3.sha256}) == 3
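The key layout these tests pin down is worth stating on its own. A minimal sketch of the same fan-out scheme follows, written in TypeScript for consistency with the frontend code later in this MR; it is illustrative only, since the real logic lives in the Python storage layer exercised above.
// Illustrative sketch: mirrors the fruits/<h[0:2]>/<h[2:4]>/<sha256> layout
// asserted by the tests above. Assumes Node.js (node:crypto); not part of this MR.
import { createHash } from 'node:crypto';
function s3KeyForContent(content: Buffer): string {
  const h = createHash('sha256').update(content).digest('hex');
  // Two-level fan-out keeps any single S3 prefix from growing unbounded.
  return `fruits/${h.slice(0, 2)}/${h.slice(2, 4)}/${h}`;
}
// Identical bytes always produce the identical key, which is why the
// deduplication tests above reduce "already stored?" to an existence check.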

View File

@@ -24,6 +24,8 @@ services:
- ORCHARD_S3_USE_PATH_STYLE=true
- ORCHARD_REDIS_HOST=redis
- ORCHARD_REDIS_PORT=6379
# Higher rate limit for local development/testing
- ORCHARD_LOGIN_RATE_LIMIT=1000/minute
depends_on:
postgres:
condition: service_healthy

frontend/package-lock.json generated Normal file (4451 lines)

File diff suppressed because it is too large

View File

@@ -6,18 +6,33 @@
"scripts": {
"dev": "vite",
"build": "tsc && vite build",
"preview": "vite preview"
"preview": "vite preview",
"test": "vitest",
"test:run": "vitest run",
"test:coverage": "vitest run --coverage"
},
"dependencies": {
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-router-dom": "^6.21.3"
"react-router-dom": "6.28.0"
},
"devDependencies": {
"@testing-library/jest-dom": "^6.4.2",
"@testing-library/react": "^14.2.1",
"@testing-library/user-event": "^14.5.2",
"@types/react": "^18.2.48",
"@types/react-dom": "^18.2.18",
"@vitejs/plugin-react": "^4.2.1",
"jsdom": "^24.0.0",
"typescript": "^5.3.3",
"vite": "^5.0.12"
"vite": "^5.0.12",
"vitest": "^1.3.1"
},
"overrides": {
"ws": "8.18.0",
"ufo": "1.5.4",
"rollup": "4.52.4",
"caniuse-lite": "1.0.30001692",
"baseline-browser-mapping": "2.9.5"
}
}

View File

@@ -1,20 +1,65 @@
import { Routes, Route } from 'react-router-dom';
import { Routes, Route, Navigate, useLocation } from 'react-router-dom';
import { AuthProvider, useAuth } from './contexts/AuthContext';
import Layout from './components/Layout';
import Home from './pages/Home';
import ProjectPage from './pages/ProjectPage';
import PackagePage from './pages/PackagePage';
import Dashboard from './pages/Dashboard';
import LoginPage from './pages/LoginPage';
import ChangePasswordPage from './pages/ChangePasswordPage';
import APIKeysPage from './pages/APIKeysPage';
import AdminUsersPage from './pages/AdminUsersPage';
import AdminOIDCPage from './pages/AdminOIDCPage';
function App() {
// Component that checks if user must change password
function RequirePasswordChange({ children }: { children: React.ReactNode }) {
const { user, loading } = useAuth();
const location = useLocation();
if (loading) {
return null;
}
// If user is logged in and must change password, redirect to change password page
if (user?.must_change_password && location.pathname !== '/change-password') {
return <Navigate to="/change-password" replace />;
}
return <>{children}</>;
}
function AppRoutes() {
return (
<Routes>
<Route path="/login" element={<LoginPage />} />
<Route path="/change-password" element={<ChangePasswordPage />} />
<Route
path="*"
element={
<RequirePasswordChange>
<Layout>
<Routes>
<Route path="/" element={<Home />} />
<Route path="/dashboard" element={<Dashboard />} />
<Route path="/settings/api-keys" element={<APIKeysPage />} />
<Route path="/admin/users" element={<AdminUsersPage />} />
<Route path="/admin/oidc" element={<AdminOIDCPage />} />
<Route path="/project/:projectName" element={<ProjectPage />} />
<Route path="/project/:projectName/:packageName" element={<PackagePage />} />
</Routes>
</Layout>
</RequirePasswordChange>
}
/>
</Routes>
);
}
function App() {
return (
<AuthProvider>
<AppRoutes />
</AuthProvider>
);
}
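RequirePasswordChange above only gates the forced-password-change case; it does not itself require a session. A hedged sketch of a login guard that could compose with it is below; RequireAuth is hypothetical (not part of this MR) and assumes the same useAuth contract.
// Hypothetical sketch, not part of this change: a login guard that could wrap
// the same subtree as RequirePasswordChange.
function RequireAuth({ children }: { children: React.ReactNode }) {
  const { user, loading } = useAuth();
  const location = useLocation();
  if (loading) {
    return null; // wait for the session check before deciding
  }
  if (!user) {
    // Send unauthenticated visitors to /login, remembering where they came from.
    return <Navigate to="/login" replace state={{ from: location }} />;
  }
  return <>{children}</>;
}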

View File

@@ -17,14 +17,62 @@ import {
DeduplicationStats,
TimelineStats,
CrossProjectStats,
User,
LoginCredentials,
APIKey,
APIKeyCreate,
APIKeyCreateResponse,
AdminUser,
UserCreate,
UserUpdate,
AccessPermission,
AccessPermissionCreate,
AccessPermissionUpdate,
AccessLevel,
OIDCConfig,
OIDCConfigUpdate,
OIDCStatus,
} from './types';
const API_BASE = '/api/v1';
// Custom error classes for better error handling
export class ApiError extends Error {
status: number;
constructor(message: string, status: number) {
super(message);
this.name = 'ApiError';
this.status = status;
}
}
export class UnauthorizedError extends ApiError {
constructor(message: string = 'Not authenticated') {
super(message, 401);
this.name = 'UnauthorizedError';
}
}
export class ForbiddenError extends ApiError {
constructor(message: string = 'Access denied') {
super(message, 403);
this.name = 'ForbiddenError';
}
}
async function handleResponse<T>(response: Response): Promise<T> {
if (!response.ok) {
const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
throw new Error(error.detail || `HTTP ${response.status}`);
const message = error.detail || `HTTP ${response.status}`;
if (response.status === 401) {
throw new UnauthorizedError(message);
}
if (response.status === 403) {
throw new ForbiddenError(message);
}
throw new ApiError(message, response.status);
}
return response.json();
}
@@ -40,6 +88,55 @@ function buildQueryString(params: Record<string, unknown>): string {
return query ? `?${query}` : '';
}
// Auth API
export async function login(credentials: LoginCredentials): Promise<User> {
const response = await fetch(`${API_BASE}/auth/login`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(credentials),
credentials: 'include',
});
return handleResponse<User>(response);
}
export async function logout(): Promise<void> {
const response = await fetch(`${API_BASE}/auth/logout`, {
method: 'POST',
credentials: 'include',
});
if (!response.ok) {
const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
throw new Error(error.detail || `HTTP ${response.status}`);
}
}
export async function changePassword(currentPassword: string, newPassword: string): Promise<void> {
const response = await fetch(`${API_BASE}/auth/change-password`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ current_password: currentPassword, new_password: newPassword }),
credentials: 'include',
});
if (!response.ok) {
const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
throw new Error(error.detail || `HTTP ${response.status}`);
}
}
export async function getCurrentUser(): Promise<User | null> {
try {
const response = await fetch(`${API_BASE}/auth/me`, {
credentials: 'include',
});
if (response.status === 401) {
return null;
}
return handleResponse<User>(response);
} catch {
return null;
}
}
// Global Search API
export async function globalSearch(query: string, limit: number = 5): Promise<GlobalSearchResponse> {
const params = buildQueryString({ q: query, limit });
@@ -186,3 +283,163 @@ export async function getCrossProjectStats(): Promise<CrossProjectStats> {
const response = await fetch(`${API_BASE}/stats/cross-project`);
return handleResponse<CrossProjectStats>(response);
}
export async function listAPIKeys(): Promise<APIKey[]> {
const response = await fetch(`${API_BASE}/auth/keys`, {
credentials: 'include',
});
return handleResponse<APIKey[]>(response);
}
export async function createAPIKey(data: APIKeyCreate): Promise<APIKeyCreateResponse> {
const response = await fetch(`${API_BASE}/auth/keys`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(data),
credentials: 'include',
});
return handleResponse<APIKeyCreateResponse>(response);
}
export async function deleteAPIKey(id: string): Promise<void> {
const response = await fetch(`${API_BASE}/auth/keys/${id}`, {
method: 'DELETE',
credentials: 'include',
});
if (!response.ok) {
const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
throw new Error(error.detail || `HTTP ${response.status}`);
}
}
// Admin User Management API
export async function listUsers(): Promise<AdminUser[]> {
const response = await fetch(`${API_BASE}/admin/users`, {
credentials: 'include',
});
return handleResponse<AdminUser[]>(response);
}
export async function createUser(data: UserCreate): Promise<AdminUser> {
const response = await fetch(`${API_BASE}/admin/users`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(data),
credentials: 'include',
});
return handleResponse<AdminUser>(response);
}
export async function updateUser(username: string, data: UserUpdate): Promise<AdminUser> {
const response = await fetch(`${API_BASE}/admin/users/${username}`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(data),
credentials: 'include',
});
return handleResponse<AdminUser>(response);
}
export async function resetUserPassword(username: string, newPassword: string): Promise<void> {
const response = await fetch(`${API_BASE}/admin/users/${username}/reset-password`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ new_password: newPassword }),
credentials: 'include',
});
if (!response.ok) {
const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
throw new Error(error.detail || `HTTP ${response.status}`);
}
}
// Access Permission API
export interface MyAccessResponse {
project: string;
access_level: AccessLevel | null;
is_owner: boolean;
}
export async function getMyProjectAccess(projectName: string): Promise<MyAccessResponse> {
const response = await fetch(`${API_BASE}/project/${projectName}/my-access`, {
credentials: 'include',
});
return handleResponse<MyAccessResponse>(response);
}
export async function listProjectPermissions(projectName: string): Promise<AccessPermission[]> {
const response = await fetch(`${API_BASE}/project/${projectName}/permissions`, {
credentials: 'include',
});
return handleResponse<AccessPermission[]>(response);
}
export async function grantProjectAccess(
projectName: string,
data: AccessPermissionCreate
): Promise<AccessPermission> {
const response = await fetch(`${API_BASE}/project/${projectName}/permissions`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(data),
credentials: 'include',
});
return handleResponse<AccessPermission>(response);
}
export async function updateProjectAccess(
projectName: string,
username: string,
data: AccessPermissionUpdate
): Promise<AccessPermission> {
const response = await fetch(`${API_BASE}/project/${projectName}/permissions/${username}`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(data),
credentials: 'include',
});
return handleResponse<AccessPermission>(response);
}
export async function revokeProjectAccess(projectName: string, username: string): Promise<void> {
const response = await fetch(`${API_BASE}/project/${projectName}/permissions/${username}`, {
method: 'DELETE',
credentials: 'include',
});
if (!response.ok) {
const error = await response.json().catch(() => ({ detail: 'Unknown error' }));
throw new Error(error.detail || `HTTP ${response.status}`);
}
}
// OIDC API
export async function getOIDCStatus(): Promise<OIDCStatus> {
const response = await fetch(`${API_BASE}/auth/oidc/status`);
return handleResponse<OIDCStatus>(response);
}
export async function getOIDCConfig(): Promise<OIDCConfig> {
const response = await fetch(`${API_BASE}/auth/oidc/config`, {
credentials: 'include',
});
return handleResponse<OIDCConfig>(response);
}
export async function updateOIDCConfig(data: OIDCConfigUpdate): Promise<OIDCConfig> {
const response = await fetch(`${API_BASE}/auth/oidc/config`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(data),
credentials: 'include',
});
return handleResponse<OIDCConfig>(response);
}
export function getOIDCLoginUrl(returnTo?: string): string {
const params = new URLSearchParams();
if (returnTo) {
params.set('return_to', returnTo);
}
const query = params.toString();
return `${API_BASE}/auth/oidc/login${query ? `?${query}` : ''}`;
}
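Because handleResponse now raises typed errors, callers can branch on auth failures instead of string-matching messages. A minimal consumer sketch, with loadKeysOrRedirect hypothetical:
// Sketch of consuming the typed errors defined above; the helper is hypothetical.
import { listAPIKeys, UnauthorizedError, ForbiddenError } from './api';
async function loadKeysOrRedirect(navigate: (path: string) => void) {
  try {
    return await listAPIKeys();
  } catch (err) {
    if (err instanceof UnauthorizedError) {
      navigate('/login'); // session missing or expired
      return [];
    }
    if (err instanceof ForbiddenError) {
      navigate('/'); // authenticated but not allowed here
      return [];
    }
    throw err; // anything else bubbles up to the caller
  }
}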

View File

@@ -0,0 +1,116 @@
.access-management {
margin-top: 1.5rem;
}
.access-management__header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 1rem;
}
.access-management__header h3 {
margin: 0;
}
.access-management__form {
background: var(--bg-tertiary);
padding: 1rem;
border-radius: 6px;
margin-bottom: 1rem;
}
.access-management__form .form-row {
display: flex;
gap: 1rem;
align-items: flex-end;
}
.access-management__form .form-group {
flex: 1;
}
.access-management__form .form-group:last-of-type {
flex: 0 0 auto;
}
.access-management__list {
margin-top: 1rem;
}
.access-table {
width: 100%;
border-collapse: collapse;
}
.access-table th,
.access-table td {
padding: 0.75rem;
text-align: left;
border-bottom: 1px solid var(--border-color);
}
.access-table th {
font-weight: 600;
color: var(--text-secondary);
font-size: 0.875rem;
}
.access-table td.actions {
display: flex;
gap: 0.5rem;
}
.access-badge {
display: inline-block;
padding: 0.25rem 0.5rem;
border-radius: 4px;
font-size: 0.75rem;
font-weight: 600;
text-transform: capitalize;
}
.access-badge--read {
background: var(--bg-tertiary);
color: var(--text-secondary);
}
.access-badge--write {
background: var(--color-info-bg);
color: var(--color-info);
}
.access-badge--admin {
background: var(--color-success-bg);
color: var(--color-success);
}
.btn-sm {
padding: 0.25rem 0.5rem;
font-size: 0.875rem;
}
.btn-danger {
background: var(--color-error);
color: white;
}
.btn-danger:hover {
background: #c0392b;
}
/* Expired permission styling */
.expired {
color: var(--color-error);
font-weight: 500;
}
/* Date input styling in table */
.access-table input[type="date"] {
padding: 0.25rem 0.5rem;
background: var(--bg-tertiary);
border: 1px solid var(--border-primary);
border-radius: 4px;
font-size: 0.875rem;
color: var(--text-primary);
}

View File

@@ -0,0 +1,296 @@
import { useState, useEffect, useCallback } from 'react';
import { AccessPermission, AccessLevel } from '../types';
import {
listProjectPermissions,
grantProjectAccess,
updateProjectAccess,
revokeProjectAccess,
} from '../api';
import './AccessManagement.css';
interface AccessManagementProps {
projectName: string;
}
export function AccessManagement({ projectName }: AccessManagementProps) {
const [permissions, setPermissions] = useState<AccessPermission[]>([]);
const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
const [success, setSuccess] = useState<string | null>(null);
// Form state
const [showAddForm, setShowAddForm] = useState(false);
const [newUsername, setNewUsername] = useState('');
const [newLevel, setNewLevel] = useState<AccessLevel>('read');
const [newExpiresAt, setNewExpiresAt] = useState('');
const [submitting, setSubmitting] = useState(false);
// Edit state
const [editingUser, setEditingUser] = useState<string | null>(null);
const [editLevel, setEditLevel] = useState<AccessLevel>('read');
const [editExpiresAt, setEditExpiresAt] = useState('');
const loadPermissions = useCallback(async () => {
try {
setLoading(true);
const data = await listProjectPermissions(projectName);
setPermissions(data);
setError(null);
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to load permissions');
} finally {
setLoading(false);
}
}, [projectName]);
useEffect(() => {
loadPermissions();
}, [loadPermissions]);
const handleGrant = async (e: React.FormEvent) => {
e.preventDefault();
if (!newUsername.trim()) return;
try {
setSubmitting(true);
setError(null);
await grantProjectAccess(projectName, {
username: newUsername.trim(),
level: newLevel,
expires_at: newExpiresAt || undefined,
});
setSuccess(`Access granted to ${newUsername}`);
setNewUsername('');
setNewLevel('read');
setNewExpiresAt('');
setShowAddForm(false);
await loadPermissions();
setTimeout(() => setSuccess(null), 3000);
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to grant access');
} finally {
setSubmitting(false);
}
};
const handleUpdate = async (username: string) => {
try {
setSubmitting(true);
setError(null);
await updateProjectAccess(projectName, username, {
level: editLevel,
expires_at: editExpiresAt || null,
});
setSuccess(`Updated access for ${username}`);
setEditingUser(null);
await loadPermissions();
setTimeout(() => setSuccess(null), 3000);
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to update access');
} finally {
setSubmitting(false);
}
};
const handleRevoke = async (username: string) => {
if (!confirm(`Revoke access for ${username}?`)) return;
try {
setSubmitting(true);
setError(null);
await revokeProjectAccess(projectName, username);
setSuccess(`Access revoked for ${username}`);
await loadPermissions();
setTimeout(() => setSuccess(null), 3000);
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to revoke access');
} finally {
setSubmitting(false);
}
};
const startEdit = (permission: AccessPermission) => {
setEditingUser(permission.user_id);
setEditLevel(permission.level as AccessLevel);
// Convert ISO date to local date format for date input
setEditExpiresAt(permission.expires_at ? permission.expires_at.split('T')[0] : '');
};
const cancelEdit = () => {
setEditingUser(null);
setEditExpiresAt('');
};
const formatExpiration = (expiresAt: string | null) => {
if (!expiresAt) return 'Never';
const date = new Date(expiresAt);
const now = new Date();
const isExpired = date < now;
return (
<span className={isExpired ? 'expired' : ''}>
{date.toLocaleDateString()}
{isExpired && ' (Expired)'}
</span>
);
};
if (loading) {
return <div className="access-management loading">Loading permissions...</div>;
}
return (
<div className="access-management card">
<div className="access-management__header">
<h3>Access Management</h3>
<button
className="btn btn-primary btn-sm"
onClick={() => setShowAddForm(!showAddForm)}
>
{showAddForm ? 'Cancel' : '+ Add User'}
</button>
</div>
{error && <div className="error-message">{error}</div>}
{success && <div className="success-message">{success}</div>}
{showAddForm && (
<form className="access-management__form" onSubmit={handleGrant}>
<div className="form-row">
<div className="form-group">
<label htmlFor="username">Username</label>
<input
id="username"
type="text"
value={newUsername}
onChange={(e) => setNewUsername(e.target.value)}
placeholder="Enter username"
required
disabled={submitting}
/>
</div>
<div className="form-group">
<label htmlFor="level">Access Level</label>
<select
id="level"
value={newLevel}
onChange={(e) => setNewLevel(e.target.value as AccessLevel)}
disabled={submitting}
>
<option value="read">Read</option>
<option value="write">Write</option>
<option value="admin">Admin</option>
</select>
</div>
<div className="form-group">
<label htmlFor="expires_at">Expires (optional)</label>
<input
id="expires_at"
type="date"
value={newExpiresAt}
onChange={(e) => setNewExpiresAt(e.target.value)}
disabled={submitting}
min={new Date().toISOString().split('T')[0]}
/>
</div>
<button type="submit" className="btn btn-primary" disabled={submitting}>
{submitting ? 'Granting...' : 'Grant Access'}
</button>
</div>
</form>
)}
<div className="access-management__list">
{permissions.length === 0 ? (
<p className="text-muted">No explicit permissions set. Only the project owner has access.</p>
) : (
<table className="access-table">
<thead>
<tr>
<th>User</th>
<th>Access Level</th>
<th>Granted</th>
<th>Expires</th>
<th>Actions</th>
</tr>
</thead>
<tbody>
{permissions.map((p) => (
<tr key={p.id}>
<td>{p.user_id}</td>
<td>
{editingUser === p.user_id ? (
<select
value={editLevel}
onChange={(e) => setEditLevel(e.target.value as AccessLevel)}
disabled={submitting}
>
<option value="read">Read</option>
<option value="write">Write</option>
<option value="admin">Admin</option>
</select>
) : (
<span className={`access-badge access-badge--${p.level}`}>
{p.level}
</span>
)}
</td>
<td>{new Date(p.created_at).toLocaleDateString()}</td>
<td>
{editingUser === p.user_id ? (
<input
type="date"
value={editExpiresAt}
onChange={(e) => setEditExpiresAt(e.target.value)}
disabled={submitting}
min={new Date().toISOString().split('T')[0]}
/>
) : (
formatExpiration(p.expires_at)
)}
</td>
<td className="actions">
{editingUser === p.user_id ? (
<>
<button
className="btn btn-sm btn-primary"
onClick={() => handleUpdate(p.user_id)}
disabled={submitting}
>
Save
</button>
<button
className="btn btn-sm"
onClick={cancelEdit}
disabled={submitting}
>
Cancel
</button>
</>
) : (
<>
<button
className="btn btn-sm"
onClick={() => startEdit(p)}
disabled={submitting}
>
Edit
</button>
<button
className="btn btn-sm btn-danger"
onClick={() => handleRevoke(p.user_id)}
disabled={submitting}
>
Revoke
</button>
</>
)}
</td>
</tr>
))}
</tbody>
</table>
)}
</div>
</div>
);
}
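Typical usage is to drop the panel into a project settings view and gate it on the caller's own access. A sketch of a hypothetical host page (ProjectSettings is not part of this MR):
// Hypothetical host page; in practice canManage would be derived from
// getMyProjectAccess(): is_owner || access_level === 'admin'.
import { AccessManagement } from '../components/AccessManagement';
function ProjectSettings({ projectName, canManage }: { projectName: string; canManage: boolean }) {
  if (!canManage) {
    return null; // non-admins never see the management UI
  }
  return <AccessManagement projectName={projectName} />;
}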

View File

@@ -0,0 +1,332 @@
.drag-drop-upload {
width: 100%;
}
/* Offline Banner */
.offline-banner {
display: flex;
align-items: center;
gap: 0.5rem;
padding: 0.75rem 1rem;
background: var(--warning-bg, #fff3cd);
border: 1px solid var(--warning-border, #ffc107);
border-radius: 8px;
margin-bottom: 1rem;
color: var(--warning-text, #856404);
font-size: 0.875rem;
}
.offline-banner svg {
flex-shrink: 0;
}
/* Drop Zone */
.drop-zone {
border: 2px dashed var(--border-color, #ddd);
border-radius: 8px;
padding: 2rem;
text-align: center;
cursor: pointer;
transition: all 0.2s ease;
background: var(--bg-secondary, #f9f9f9);
}
.drop-zone:hover {
border-color: var(--accent-color, #007bff);
background: var(--bg-hover, #f0f7ff);
}
.drop-zone--active {
border-color: var(--accent-color, #007bff);
background: var(--bg-active, #e6f0ff);
border-style: solid;
}
.drop-zone--disabled {
cursor: not-allowed;
opacity: 0.6;
background: var(--bg-disabled, #f5f5f5);
}
.drop-zone--disabled:hover {
border-color: var(--border-color, #ddd);
background: var(--bg-disabled, #f5f5f5);
}
.drop-zone__input {
display: none;
}
.drop-zone__content {
display: flex;
flex-direction: column;
align-items: center;
gap: 0.75rem;
color: var(--text-secondary, #666);
}
.drop-zone__content svg {
opacity: 0.5;
}
.drop-zone--active .drop-zone__content svg {
opacity: 1;
color: var(--accent-color, #007bff);
}
.drop-zone__text {
margin: 0;
font-size: 1rem;
}
.drop-zone__text strong {
color: var(--text-primary, #333);
}
.drop-zone__hint {
margin: 0;
font-size: 0.8rem;
opacity: 0.7;
}
/* Upload Queue */
.upload-queue {
margin-top: 1rem;
border: 1px solid var(--border-color, #ddd);
border-radius: 8px;
overflow: hidden;
}
.upload-queue__header {
display: flex;
justify-content: space-between;
align-items: center;
padding: 0.75rem 1rem;
background: var(--bg-secondary, #f9f9f9);
border-bottom: 1px solid var(--border-color, #ddd);
}
.upload-queue__title {
font-size: 0.875rem;
font-weight: 500;
color: var(--text-primary, #333);
}
.upload-queue__clear {
padding: 0.25rem 0.5rem;
font-size: 0.75rem;
border: none;
background: none;
color: var(--accent-color, #007bff);
cursor: pointer;
}
.upload-queue__clear:hover {
text-decoration: underline;
}
.upload-queue__overall {
display: flex;
align-items: center;
gap: 0.75rem;
padding: 0.5rem 1rem;
background: var(--bg-secondary, #f9f9f9);
border-bottom: 1px solid var(--border-color, #ddd);
}
.upload-queue__overall .progress-bar {
flex: 1;
}
.upload-queue__overall .progress-bar__text {
font-size: 0.75rem;
font-weight: 500;
color: var(--text-secondary, #666);
min-width: 3rem;
text-align: right;
}
.upload-queue__list {
list-style: none;
margin: 0;
padding: 0;
max-height: 300px;
overflow-y: auto;
}
/* Upload Item */
.upload-item {
display: flex;
align-items: center;
gap: 0.75rem;
padding: 0.75rem 1rem;
border-bottom: 1px solid var(--border-color-light, #eee);
}
.upload-item:last-child {
border-bottom: none;
}
.upload-item__icon {
flex-shrink: 0;
width: 24px;
height: 24px;
display: flex;
align-items: center;
justify-content: center;
color: var(--text-secondary, #666);
}
.upload-item--complete .upload-item__icon {
color: var(--success-color, #28a745);
}
.upload-item--failed .upload-item__icon {
color: var(--error-color, #dc3545);
}
.upload-item--uploading .upload-item__icon {
color: var(--accent-color, #007bff);
}
.upload-item--paused .upload-item__icon {
color: var(--warning-color, #ffc107);
}
.upload-item--validating .upload-item__icon {
color: var(--accent-color, #007bff);
}
.spinner-icon {
animation: spin 1s linear infinite;
}
@keyframes spin {
from { transform: rotate(0deg); }
to { transform: rotate(360deg); }
}
.upload-item__info {
flex: 1;
min-width: 0;
display: flex;
flex-direction: column;
gap: 0.25rem;
}
.upload-item__name {
font-size: 0.875rem;
font-weight: 500;
color: var(--text-primary, #333);
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.upload-item__meta {
display: flex;
flex-wrap: wrap;
gap: 0.5rem;
font-size: 0.75rem;
color: var(--text-secondary, #666);
}
.upload-item__size {
color: var(--text-secondary, #666);
}
.upload-item__speed,
.upload-item__eta {
color: var(--accent-color, #007bff);
}
.upload-item__artifact {
color: var(--success-color, #28a745);
font-family: monospace;
}
.upload-item__error {
color: var(--error-color, #dc3545);
}
.upload-item__retry-count {
color: var(--warning-color, #ffc107);
}
.upload-item__validating {
color: var(--accent-color, #007bff);
font-style: italic;
}
.upload-item__actions {
display: flex;
gap: 0.25rem;
flex-shrink: 0;
}
.upload-item__btn {
width: 28px;
height: 28px;
border: none;
background: none;
cursor: pointer;
border-radius: 4px;
display: flex;
align-items: center;
justify-content: center;
color: var(--text-secondary, #666);
transition: all 0.15s ease;
}
.upload-item__btn:hover {
background: var(--bg-hover, #f0f0f0);
}
.upload-item__btn--retry:hover {
color: var(--accent-color, #007bff);
}
.upload-item__btn--remove:hover {
color: var(--error-color, #dc3545);
}
/* Progress Bar */
.progress-bar {
height: 8px;
background: var(--border-color, #ddd);
border-radius: 4px;
overflow: hidden;
}
.progress-bar--small {
height: 4px;
margin-top: 0.25rem;
}
.progress-bar__fill {
height: 100%;
background: var(--accent-color, #007bff);
border-radius: 4px;
transition: width 0.2s ease;
}
.upload-item--complete .progress-bar__fill {
background: var(--success-color, #28a745);
}
/* Responsive */
@media (max-width: 480px) {
.drop-zone {
padding: 1.5rem 1rem;
}
.upload-item__meta {
flex-direction: column;
gap: 0.125rem;
}
.upload-item__speed,
.upload-item__eta {
display: none;
}
}

View File

@@ -0,0 +1,545 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { render, screen, fireEvent, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { DragDropUpload } from './DragDropUpload';
function createMockFile(name: string, size: number, type: string): File {
const content = new Array(size).fill('a').join('');
return new File([content], name, { type });
}
function createMockXHR(options: {
status?: number;
response?: object;
progressEvents?: { loaded: number; total: number }[];
shouldError?: boolean;
shouldTimeout?: boolean;
} = {}) {
const {
status = 200,
response = { artifact_id: 'abc123', size: 100 },
progressEvents = [],
shouldError = false,
shouldTimeout = false,
} = options;
return class MockXHR {
status = status;
responseText = JSON.stringify(response);
timeout = 0;
upload = {
addEventListener: vi.fn((event: string, handler: (e: ProgressEvent) => void) => {
if (event === 'progress') {
progressEvents.forEach((p, i) => {
setTimeout(() => {
handler({ lengthComputable: true, loaded: p.loaded, total: p.total } as ProgressEvent);
}, i * 10);
});
}
}),
};
addEventListener = vi.fn((event: string, handler: () => void) => {
if (event === 'load' && !shouldError && !shouldTimeout) {
setTimeout(handler, progressEvents.length * 10 + 10);
}
if (event === 'error' && shouldError) {
setTimeout(handler, 10);
}
if (event === 'timeout' && shouldTimeout) {
setTimeout(handler, 10);
}
});
open = vi.fn();
send = vi.fn();
};
}
describe('DragDropUpload', () => {
const defaultProps = {
projectName: 'test-project',
packageName: 'test-package',
};
beforeEach(() => {
vi.useFakeTimers({ shouldAdvanceTime: true });
});
afterEach(() => {
vi.useRealTimers();
vi.restoreAllMocks();
});
describe('Rendering', () => {
it('renders drop zone with instructional text', () => {
render(<DragDropUpload {...defaultProps} />);
expect(screen.getByText(/drag files here/i)).toBeInTheDocument();
expect(screen.getByText(/click to browse/i)).toBeInTheDocument();
});
it('renders hidden file input', () => {
render(<DragDropUpload {...defaultProps} />);
const input = document.querySelector('input[type="file"]');
expect(input).toBeInTheDocument();
expect(input).toHaveClass('drop-zone__input');
});
it('shows max file size hint when provided', () => {
render(<DragDropUpload {...defaultProps} maxFileSize={1024 * 1024} />);
expect(screen.getByText(/max file size: 1 mb/i)).toBeInTheDocument();
});
it('shows allowed types hint when provided', () => {
render(<DragDropUpload {...defaultProps} allowedTypes={['.zip', '.tar.gz']} allowAllTypes={false} />);
expect(screen.getByText(/\.zip, \.tar\.gz/i)).toBeInTheDocument();
});
});
describe('Click to Browse', () => {
it('opens file picker when drop zone is clicked', async () => {
render(<DragDropUpload {...defaultProps} />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const clickSpy = vi.spyOn(input, 'click');
const dropZone = screen.getByRole('button');
await userEvent.click(dropZone);
expect(clickSpy).toHaveBeenCalled();
});
it('opens file picker on Enter key', () => {
render(<DragDropUpload {...defaultProps} />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const clickSpy = vi.spyOn(input, 'click');
const dropZone = screen.getByRole('button');
fireEvent.keyDown(dropZone, { key: 'Enter' });
expect(clickSpy).toHaveBeenCalled();
});
});
describe('Drag and Drop Events', () => {
it('shows visual feedback on drag over', () => {
render(<DragDropUpload {...defaultProps} />);
const dropZone = screen.getByRole('button');
fireEvent.dragEnter(dropZone, {
dataTransfer: { items: [{}] },
});
expect(dropZone).toHaveClass('drop-zone--active');
});
it('removes visual feedback on drag leave', () => {
render(<DragDropUpload {...defaultProps} />);
const dropZone = screen.getByRole('button');
fireEvent.dragEnter(dropZone, { dataTransfer: { items: [{}] } });
expect(dropZone).toHaveClass('drop-zone--active');
fireEvent.dragLeave(dropZone);
expect(dropZone).not.toHaveClass('drop-zone--active');
});
it('accepts dropped files', async () => {
const MockXHR = createMockXHR();
vi.stubGlobal('XMLHttpRequest', MockXHR);
render(<DragDropUpload {...defaultProps} />);
const dropZone = screen.getByRole('button');
const file = createMockFile('test.txt', 100, 'text/plain');
const dataTransfer = new DataTransfer();
Object.defineProperty(dataTransfer, 'files', {
value: Object.assign([file], { item: (i: number) => [file][i] }),
});
fireEvent.drop(dropZone, { dataTransfer });
await waitFor(() => {
expect(screen.getByText('test.txt')).toBeInTheDocument();
});
});
});
describe('File Validation', () => {
it('rejects files exceeding max size', async () => {
render(<DragDropUpload {...defaultProps} maxFileSize={100} />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const file = createMockFile('large.txt', 200, 'text/plain');
Object.defineProperty(input, 'files', {
value: Object.assign([file], { item: (i: number) => [file][i] }),
});
fireEvent.change(input);
await waitFor(() => {
expect(screen.getByText(/exceeds.*limit/i)).toBeInTheDocument();
});
});
it('rejects files with invalid type when allowAllTypes is false', async () => {
render(<DragDropUpload {...defaultProps} allowedTypes={['.zip']} allowAllTypes={false} />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const file = createMockFile('test.txt', 100, 'text/plain');
Object.defineProperty(input, 'files', {
value: Object.assign([file], { item: (i: number) => [file][i] }),
});
fireEvent.change(input);
await waitFor(() => {
expect(screen.getByText(/not allowed/i)).toBeInTheDocument();
});
});
it('rejects empty files', async () => {
render(<DragDropUpload {...defaultProps} />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const file = createMockFile('empty.txt', 0, 'text/plain');
Object.defineProperty(input, 'files', {
value: Object.assign([file], { item: (i: number) => [file][i] }),
});
fireEvent.change(input);
await waitFor(() => {
expect(screen.getByText(/empty file/i)).toBeInTheDocument();
});
});
it('accepts valid files when allowAllTypes is true', async () => {
const MockXHR = createMockXHR();
vi.stubGlobal('XMLHttpRequest', MockXHR);
render(<DragDropUpload {...defaultProps} allowAllTypes={true} />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const file = createMockFile('test.txt', 100, 'text/plain');
Object.defineProperty(input, 'files', {
value: Object.assign([file], { item: (i: number) => [file][i] }),
});
fireEvent.change(input);
await waitFor(() => {
expect(screen.getByText('test.txt')).toBeInTheDocument();
expect(screen.queryByText(/not allowed/i)).not.toBeInTheDocument();
});
});
});
describe('Upload Queue', () => {
it('shows file in queue after selection', async () => {
const MockXHR = createMockXHR();
vi.stubGlobal('XMLHttpRequest', MockXHR);
render(<DragDropUpload {...defaultProps} />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const file = createMockFile('document.pdf', 1024, 'application/pdf');
Object.defineProperty(input, 'files', {
value: Object.assign([file], { item: (i: number) => [file][i] }),
});
fireEvent.change(input);
await waitFor(() => {
expect(screen.getByText('document.pdf')).toBeInTheDocument();
expect(screen.getByText('1 KB')).toBeInTheDocument();
});
});
it('handles multiple files', async () => {
const MockXHR = createMockXHR();
vi.stubGlobal('XMLHttpRequest', MockXHR);
render(<DragDropUpload {...defaultProps} />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const files = [
createMockFile('file1.txt', 100, 'text/plain'),
createMockFile('file2.txt', 200, 'text/plain'),
createMockFile('file3.txt', 300, 'text/plain'),
];
Object.defineProperty(input, 'files', {
value: Object.assign(files, { item: (i: number) => files[i] }),
});
fireEvent.change(input);
await waitFor(() => {
expect(screen.getByText('file1.txt')).toBeInTheDocument();
expect(screen.getByText('file2.txt')).toBeInTheDocument();
expect(screen.getByText('file3.txt')).toBeInTheDocument();
});
});
it('shows overall progress for multiple files', async () => {
const MockXHR = createMockXHR();
vi.stubGlobal('XMLHttpRequest', MockXHR);
render(<DragDropUpload {...defaultProps} />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const files = [
createMockFile('file1.txt', 100, 'text/plain'),
createMockFile('file2.txt', 100, 'text/plain'),
];
Object.defineProperty(input, 'files', {
value: Object.assign(files, { item: (i: number) => files[i] }),
});
fireEvent.change(input);
await waitFor(() => {
expect(screen.getByText(/uploading.*of.*files/i)).toBeInTheDocument();
});
});
});
describe('Upload Progress', () => {
it('shows progress bar during upload', async () => {
const MockXHR = createMockXHR({
progressEvents: [
{ loaded: 50, total: 100 },
],
});
vi.stubGlobal('XMLHttpRequest', MockXHR);
render(<DragDropUpload {...defaultProps} />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const file = createMockFile('test.txt', 100, 'text/plain');
Object.defineProperty(input, 'files', {
value: Object.assign([file], { item: (i: number) => [file][i] }),
});
fireEvent.change(input);
await waitFor(() => {
const progressBar = document.querySelector('.progress-bar__fill');
expect(progressBar).toBeInTheDocument();
});
});
});
describe('Upload Completion', () => {
it('shows success state when upload completes', async () => {
const MockXHR = createMockXHR({
response: { artifact_id: 'abc123def456', size: 100 },
});
vi.stubGlobal('XMLHttpRequest', MockXHR);
const onComplete = vi.fn();
render(<DragDropUpload {...defaultProps} onUploadComplete={onComplete} />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const file = createMockFile('test.txt', 100, 'text/plain');
Object.defineProperty(input, 'files', {
value: Object.assign([file], { item: (i: number) => [file][i] }),
});
fireEvent.change(input);
await vi.advanceTimersByTimeAsync(100);
await waitFor(() => {
expect(screen.getByText(/abc123def456/i)).toBeInTheDocument();
});
});
it('calls onUploadComplete callback with results', async () => {
const MockXHR = createMockXHR({
response: { artifact_id: 'test-artifact-id', size: 100 },
});
vi.stubGlobal('XMLHttpRequest', MockXHR);
const onComplete = vi.fn();
render(<DragDropUpload {...defaultProps} onUploadComplete={onComplete} />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const file = createMockFile('test.txt', 100, 'text/plain');
Object.defineProperty(input, 'files', {
value: Object.assign([file], { item: (i: number) => [file][i] }),
});
fireEvent.change(input);
await vi.advanceTimersByTimeAsync(100);
await waitFor(() => {
expect(onComplete).toHaveBeenCalledWith([
expect.objectContaining({ artifact_id: 'test-artifact-id' }),
]);
});
});
});
describe('Upload Errors', () => {
it('shows error state when upload fails after retries exhausted', async () => {
const MockXHR = createMockXHR({
status: 500,
response: { detail: 'Server error' },
shouldError: true,
});
vi.stubGlobal('XMLHttpRequest', MockXHR);
render(<DragDropUpload {...defaultProps} maxRetries={0} />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const file = createMockFile('test.txt', 100, 'text/plain');
Object.defineProperty(input, 'files', {
value: Object.assign([file], { item: (i: number) => [file][i] }),
});
fireEvent.change(input);
await vi.advanceTimersByTimeAsync(100);
await waitFor(() => {
expect(screen.getByText(/network error/i)).toBeInTheDocument();
});
});
it('calls onUploadError callback when retries exhausted', async () => {
const MockXHR = createMockXHR({ shouldError: true });
vi.stubGlobal('XMLHttpRequest', MockXHR);
const onError = vi.fn();
render(<DragDropUpload {...defaultProps} maxRetries={0} onUploadError={onError} />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const file = createMockFile('test.txt', 100, 'text/plain');
Object.defineProperty(input, 'files', {
value: Object.assign([file], { item: (i: number) => [file][i] }),
});
fireEvent.change(input);
await vi.advanceTimersByTimeAsync(100);
await waitFor(() => {
expect(onError).toHaveBeenCalled();
});
});
});
describe('Queue Actions', () => {
it('removes item from queue when remove button clicked', async () => {
const MockXHR = createMockXHR();
vi.stubGlobal('XMLHttpRequest', MockXHR);
render(<DragDropUpload {...defaultProps} />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const file = createMockFile('test.txt', 100, 'text/plain');
Object.defineProperty(input, 'files', {
value: Object.assign([file], { item: (i: number) => [file][i] }),
});
fireEvent.change(input);
await vi.advanceTimersByTimeAsync(100);
await waitFor(() => {
expect(screen.getByText('test.txt')).toBeInTheDocument();
});
const removeButton = screen.getByTitle('Remove');
fireEvent.click(removeButton);
await waitFor(() => {
expect(screen.queryByText('test.txt')).not.toBeInTheDocument();
});
});
it('clears completed items when clear button clicked', async () => {
const MockXHR = createMockXHR();
vi.stubGlobal('XMLHttpRequest', MockXHR);
render(<DragDropUpload {...defaultProps} />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const file = createMockFile('test.txt', 100, 'text/plain');
Object.defineProperty(input, 'files', {
value: Object.assign([file], { item: (i: number) => [file][i] }),
});
fireEvent.change(input);
await vi.advanceTimersByTimeAsync(100);
await waitFor(() => {
const clearButton = screen.queryByText(/clear finished/i);
if (clearButton) {
fireEvent.click(clearButton);
}
});
});
});
describe('Tag Support', () => {
it('includes tag in upload request', async () => {
let capturedFormData: FormData | null = null;
class MockXHR {
status = 200;
responseText = JSON.stringify({ artifact_id: 'abc123', size: 100 });
timeout = 0;
upload = { addEventListener: vi.fn() };
addEventListener = vi.fn((event: string, handler: () => void) => {
if (event === 'load') setTimeout(handler, 10);
});
open = vi.fn();
send = vi.fn((data: FormData) => {
capturedFormData = data;
});
}
vi.stubGlobal('XMLHttpRequest', MockXHR);
render(<DragDropUpload {...defaultProps} tag="v1.0.0" />);
const input = document.querySelector('input[type="file"]') as HTMLInputElement;
const file = createMockFile('test.txt', 100, 'text/plain');
Object.defineProperty(input, 'files', {
value: Object.assign([file], { item: (i: number) => [file][i] }),
});
fireEvent.change(input);
await vi.advanceTimersByTimeAsync(100);
await waitFor(() => {
expect(capturedFormData?.get('tag')).toBe('v1.0.0');
});
});
});
});
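The suite above exercises the XHR simple-upload path; files at or over the chunked threshold go through fetch instead, so covering that path would mean stubbing fetch the same way. A hedged sketch, with the endpoint shapes taken from the component and the helper name hypothetical (assumes a fetch-compatible Response in the test environment, e.g. Node 18+):
// Hypothetical route-aware fetch stub for the chunked upload endpoints.
function stubChunkedUploadFetch() {
  vi.stubGlobal('fetch', vi.fn(async (url: string, _init?: RequestInit) => {
    if (url.endsWith('/upload/init')) {
      return new Response(JSON.stringify({ upload_id: 'u1', already_exists: false }), { status: 200 });
    }
    if (/\/upload\/u1\/part\/\d+$/.test(url)) {
      return new Response('{}', { status: 200 });
    }
    if (url.endsWith('/upload/u1/complete')) {
      return new Response(JSON.stringify({ artifact_id: 'abc123', size: 100 }), { status: 200 });
    }
    throw new Error(`unexpected fetch: ${url}`);
  }));
}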

View File

@@ -0,0 +1,912 @@
import { useState, useRef, useCallback, useEffect } from 'react';
import './DragDropUpload.css';
const CHUNK_SIZE = 10 * 1024 * 1024;
const CHUNKED_UPLOAD_THRESHOLD = 100 * 1024 * 1024;
const UPLOAD_STATE_PREFIX = 'orchard_upload_';
interface StoredUploadState {
uploadId: string;
fileHash: string;
filename: string;
fileSize: number;
completedParts: number[];
project: string;
package: string;
tag?: string;
createdAt: number;
}
function getUploadStateKey(project: string, pkg: string, fileHash: string): string {
return `${UPLOAD_STATE_PREFIX}${project}_${pkg}_${fileHash}`;
}
function saveUploadState(state: StoredUploadState): void {
try {
const key = getUploadStateKey(state.project, state.package, state.fileHash);
localStorage.setItem(key, JSON.stringify(state));
} catch {
// localStorage might be full or unavailable
}
}
function loadUploadState(project: string, pkg: string, fileHash: string): StoredUploadState | null {
try {
const key = getUploadStateKey(project, pkg, fileHash);
const stored = localStorage.getItem(key);
if (!stored) return null;
const state = JSON.parse(stored) as StoredUploadState;
const oneDay = 24 * 60 * 60 * 1000;
if (Date.now() - state.createdAt > oneDay) {
localStorage.removeItem(key);
return null;
}
return state;
} catch {
return null;
}
}
function clearUploadState(project: string, pkg: string, fileHash: string): void {
try {
const key = getUploadStateKey(project, pkg, fileHash);
localStorage.removeItem(key);
} catch {
// ignore
}
}
// Types
export type UploadStatus = 'pending' | 'uploading' | 'complete' | 'failed' | 'validating' | 'paused';
export interface UploadItem {
id: string;
file: File;
status: UploadStatus;
progress: number;
speed: number; // bytes per second
error?: string;
artifactId?: string;
retryCount: number;
startTime?: number;
}
export interface UploadResult {
artifact_id: string;
size: number;
deduplicated?: boolean;
}
export interface DragDropUploadProps {
projectName: string;
packageName: string;
onUploadComplete?: (results: UploadResult[]) => void;
onUploadError?: (error: string) => void;
allowedTypes?: string[]; // e.g., ['.tar.gz', '.zip', '.deb']
allowAllTypes?: boolean;
maxFileSize?: number; // in bytes
maxConcurrentUploads?: number;
maxRetries?: number;
tag?: string;
className?: string;
disabled?: boolean;
disabledReason?: string;
}
// Utility functions
function generateId(): string {
return `${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
}
function formatBytes(bytes: number): string {
if (bytes === 0) return '0 B';
const k = 1024;
const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
const i = Math.floor(Math.log(bytes) / Math.log(k));
return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i];
}
function formatSpeed(bytesPerSecond: number): string {
return `${formatBytes(bytesPerSecond)}/s`;
}
function formatTimeRemaining(seconds: number): string {
if (!isFinite(seconds) || seconds < 0) return '--:--';
if (seconds < 60) return `${Math.round(seconds)}s`;
if (seconds < 3600) return `${Math.floor(seconds / 60)}m ${Math.round(seconds % 60)}s`;
return `${Math.floor(seconds / 3600)}h ${Math.floor((seconds % 3600) / 60)}m`;
}
function getFileExtension(filename: string): string {
const parts = filename.toLowerCase().split('.');
if (parts.length >= 3 && parts[parts.length - 2] === 'tar') {
return `.${parts.slice(-2).join('.')}`;
}
return parts.length > 1 ? `.${parts[parts.length - 1]}` : '';
}
async function computeSHA256(file: File): Promise<string> {
const buffer = await file.arrayBuffer();
const hashBuffer = await crypto.subtle.digest('SHA-256', buffer);
const hashArray = Array.from(new Uint8Array(hashBuffer));
return hashArray.map(b => b.toString(16).padStart(2, '0')).join('');
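// Note: file.arrayBuffer() buffers the entire file in memory before hashing,
// because SubtleCrypto.digest() has no streaming/incremental mode. For the
// >= 100 MB files that take the chunked path this is a real cost; a streaming
// hash would need a userland SHA-256 implementation, which is out of scope here.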
}
// Icons
function UploadIcon() {
return (
<svg width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="1.5">
<path d="M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4" />
<polyline points="17 8 12 3 7 8" />
<line x1="12" y1="3" x2="12" y2="15" />
</svg>
);
}
function CheckIcon() {
return (
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<polyline points="20 6 9 17 4 12" />
</svg>
);
}
function ErrorIcon() {
return (
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<circle cx="12" cy="12" r="10" />
<line x1="15" y1="9" x2="9" y2="15" />
<line x1="9" y1="9" x2="15" y2="15" />
</svg>
);
}
function RetryIcon() {
return (
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<polyline points="23 4 23 10 17 10" />
<path d="M20.49 15a9 9 0 1 1-2.12-9.36L23 10" />
</svg>
);
}
function RemoveIcon() {
return (
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<line x1="18" y1="6" x2="6" y2="18" />
<line x1="6" y1="6" x2="18" y2="18" />
</svg>
);
}
function FileIcon() {
return (
<svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="1.5">
<path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z" />
<polyline points="14 2 14 8 20 8" />
</svg>
);
}
function PauseIcon() {
return (
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<rect x="6" y="4" width="4" height="16" />
<rect x="14" y="4" width="4" height="16" />
</svg>
);
}
function WifiOffIcon() {
return (
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<line x1="1" y1="1" x2="23" y2="23" />
<path d="M16.72 11.06A10.94 10.94 0 0 1 19 12.55" />
<path d="M5 12.55a10.94 10.94 0 0 1 5.17-2.39" />
<path d="M10.71 5.05A16 16 0 0 1 22.58 9" />
<path d="M1.42 9a15.91 15.91 0 0 1 4.7-2.88" />
<path d="M8.53 16.11a6 6 0 0 1 6.95 0" />
<line x1="12" y1="20" x2="12.01" y2="20" />
</svg>
);
}
function SpinnerIcon() {
return (
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" className="spinner-icon">
<circle cx="12" cy="12" r="10" strokeOpacity="0.25" />
<path d="M12 2a10 10 0 0 1 10 10" />
</svg>
);
}
export function DragDropUpload({
projectName,
packageName,
onUploadComplete,
onUploadError,
allowedTypes,
allowAllTypes = true,
maxFileSize,
maxConcurrentUploads = 3,
maxRetries = 3,
tag,
className = '',
disabled = false,
disabledReason,
}: DragDropUploadProps) {
const [isDragOver, setIsDragOver] = useState(false);
const [uploadQueue, setUploadQueue] = useState<UploadItem[]>([]);
const [isOnline, setIsOnline] = useState(navigator.onLine);
const fileInputRef = useRef<HTMLInputElement>(null);
const dragCounterRef = useRef(0);
const activeUploadsRef = useRef(0);
const xhrMapRef = useRef<Map<string, XMLHttpRequest>>(new Map());
// Online/Offline detection
useEffect(() => {
const handleOnline = () => {
setIsOnline(true);
// Resume paused uploads
setUploadQueue(prev => prev.map(item =>
item.status === 'paused'
? { ...item, status: 'pending' as UploadStatus, error: undefined }
: item
));
};
const handleOffline = () => {
setIsOnline(false);
// Pause uploading items and cancel their XHR requests
setUploadQueue(prev => prev.map(item => {
if (item.status === 'uploading') {
// Abort the XHR request
const xhr = xhrMapRef.current.get(item.id);
if (xhr) {
xhr.abort();
xhrMapRef.current.delete(item.id);
}
return { ...item, status: 'paused' as UploadStatus, error: 'Network offline - will resume when connection is restored', progress: 0 };
}
if (item.status === 'pending') {
return { ...item, status: 'paused' as UploadStatus, error: 'Network offline - waiting for connection' };
}
return item;
}));
};
window.addEventListener('online', handleOnline);
window.addEventListener('offline', handleOffline);
return () => {
window.removeEventListener('online', handleOnline);
window.removeEventListener('offline', handleOffline);
};
}, []);
// Validate a single file
const validateFile = useCallback((file: File): string | null => {
// Check file size
if (maxFileSize && file.size > maxFileSize) {
return `File exceeds ${formatBytes(maxFileSize)} limit`;
}
// Check file type if not allowing all types
if (!allowAllTypes && allowedTypes && allowedTypes.length > 0) {
const ext = getFileExtension(file.name);
if (!allowedTypes.some(t => t.toLowerCase() === ext)) {
return `File type ${ext || 'unknown'} not allowed. Accepted: ${allowedTypes.join(', ')}`;
}
}
// Check for empty file
if (file.size === 0) {
return 'Cannot upload empty file';
}
return null;
}, [allowedTypes, allowAllTypes, maxFileSize]);
// Add files to queue
const addFiles = useCallback((files: FileList | File[]) => {
const newItems: UploadItem[] = Array.from(files).map(file => {
const validationError = validateFile(file);
return {
id: generateId(),
file,
status: validationError ? 'failed' : 'pending',
progress: 0,
speed: 0,
error: validationError || undefined,
retryCount: 0,
};
});
setUploadQueue(prev => [...prev, ...newItems]);
}, [validateFile]);
const uploadFileChunked = useCallback(async (item: UploadItem): Promise<UploadResult> => {
setUploadQueue(prev => prev.map(u =>
u.id === item.id
? { ...u, status: 'validating' as UploadStatus, startTime: Date.now() }
: u
));
const fileHash = await computeSHA256(item.file);
const storedState = loadUploadState(projectName, packageName, fileHash);
let uploadId: string;
let completedParts: number[] = [];
if (storedState && storedState.fileSize === item.file.size && storedState.filename === item.file.name) {
try {
const statusResponse = await fetch(
`/api/v1/project/${projectName}/${packageName}/upload/${storedState.uploadId}/status`
);
if (statusResponse.ok) {
const statusData = await statusResponse.json();
uploadId = storedState.uploadId;
completedParts = statusData.uploaded_parts || [];
} else {
throw new Error('Stored upload no longer valid');
}
} catch {
clearUploadState(projectName, packageName, fileHash);
uploadId = await initNewUpload();
}
} else {
uploadId = await initNewUpload();
}
async function initNewUpload(): Promise<string> {
const initResponse = await fetch(
`/api/v1/project/${projectName}/${packageName}/upload/init`,
{
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
expected_hash: fileHash,
filename: item.file.name,
size: item.file.size,
tag: tag || undefined,
}),
}
);
if (!initResponse.ok) {
const error = await initResponse.json().catch(() => ({}));
throw new Error(error.detail || `Init failed: ${initResponse.status}`);
}
const initData = await initResponse.json();
if (initData.already_exists) {
throw { deduplicated: true, artifact_id: initData.artifact_id };
}
saveUploadState({
uploadId: initData.upload_id,
fileHash,
filename: item.file.name,
fileSize: item.file.size,
completedParts: [],
project: projectName,
package: packageName,
tag: tag || undefined,
createdAt: Date.now(),
});
return initData.upload_id;
}
const totalChunks = Math.ceil(item.file.size / CHUNK_SIZE);
let uploadedBytes = completedParts.length * CHUNK_SIZE;
if (uploadedBytes > item.file.size) uploadedBytes = item.file.size - (item.file.size % CHUNK_SIZE);
const startTime = Date.now();
for (let partNumber = 1; partNumber <= totalChunks; partNumber++) {
if (completedParts.includes(partNumber)) {
continue;
}
if (!isOnline) {
throw new Error('Network offline');
}
const start = (partNumber - 1) * CHUNK_SIZE;
const end = Math.min(start + CHUNK_SIZE, item.file.size);
const chunk = item.file.slice(start, end);
const partResponse = await fetch(
`/api/v1/project/${projectName}/${packageName}/upload/${uploadId}/part/${partNumber}`,
{
method: 'PUT',
body: chunk,
}
);
if (!partResponse.ok) {
throw new Error(`Part ${partNumber} upload failed: ${partResponse.status}`);
}
completedParts.push(partNumber);
saveUploadState({
uploadId,
fileHash,
filename: item.file.name,
fileSize: item.file.size,
completedParts,
project: projectName,
package: packageName,
tag: tag || undefined,
createdAt: Date.now(),
});
uploadedBytes += chunk.size;
const elapsed = (Date.now() - startTime) / 1000;
const speed = elapsed > 0 ? uploadedBytes / elapsed : 0;
const progress = Math.round((uploadedBytes / item.file.size) * 100);
setUploadQueue(prev => prev.map(u =>
u.id === item.id
? { ...u, progress, speed, status: 'uploading' as UploadStatus }
: u
));
}
const completeResponse = await fetch(
`/api/v1/project/${projectName}/${packageName}/upload/${uploadId}/complete`,
{
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ tag: tag || undefined }),
}
);
if (!completeResponse.ok) {
throw new Error(`Complete failed: ${completeResponse.status}`);
}
clearUploadState(projectName, packageName, fileHash);
const completeData = await completeResponse.json();
return {
artifact_id: completeData.artifact_id,
size: completeData.size,
deduplicated: false,
};
}, [projectName, packageName, tag, isOnline]);
const uploadFileSimple = useCallback((item: UploadItem): Promise<UploadResult> => {
return new Promise((resolve, reject) => {
const xhr = new XMLHttpRequest();
xhrMapRef.current.set(item.id, xhr);
const formData = new FormData();
formData.append('file', item.file);
if (tag) {
formData.append('tag', tag);
}
let lastLoaded = 0;
let lastTime = Date.now();
xhr.upload.addEventListener('progress', (e) => {
if (e.lengthComputable) {
const now = Date.now();
const timeDiff = (now - lastTime) / 1000;
const loadedDiff = e.loaded - lastLoaded;
const speed = timeDiff > 0 ? loadedDiff / timeDiff : 0;
const progress = Math.round((e.loaded / e.total) * 100);
setUploadQueue(prev => prev.map(u =>
u.id === item.id
? { ...u, progress, speed, status: 'uploading' as UploadStatus }
: u
));
lastLoaded = e.loaded;
lastTime = now;
}
});
xhr.addEventListener('load', () => {
xhrMapRef.current.delete(item.id);
if (xhr.status >= 200 && xhr.status < 300) {
try {
const result = JSON.parse(xhr.responseText) as UploadResult;
resolve(result);
} catch {
reject(new Error('Invalid response from server'));
}
} else {
try {
const error = JSON.parse(xhr.responseText);
reject(new Error(error.detail || `Upload failed: ${xhr.status}`));
} catch {
reject(new Error(`Upload failed: ${xhr.status}`));
}
}
});
xhr.addEventListener('error', () => {
xhrMapRef.current.delete(item.id);
reject(new Error('Network error - check your connection'));
});
xhr.addEventListener('timeout', () => {
xhrMapRef.current.delete(item.id);
reject(new Error('Upload timed out'));
});
xhr.addEventListener('abort', () => {
xhrMapRef.current.delete(item.id);
reject(new Error('Upload cancelled'));
});
xhr.open('POST', `/api/v1/project/${projectName}/${packageName}/upload`);
xhr.timeout = 300000;
xhr.send(formData);
setUploadQueue(prev => prev.map(u =>
u.id === item.id
? { ...u, status: 'uploading' as UploadStatus, startTime: Date.now() }
: u
));
});
}, [projectName, packageName, tag]);
const uploadFile = useCallback((item: UploadItem): Promise<UploadResult> => {
if (item.file.size >= CHUNKED_UPLOAD_THRESHOLD) {
return uploadFileChunked(item);
}
return uploadFileSimple(item);
}, [uploadFileChunked, uploadFileSimple]);
const processQueue = useCallback(async () => {
if (!isOnline) return;
const pendingItems = uploadQueue.filter(item => item.status === 'pending');
for (const item of pendingItems) {
if (activeUploadsRef.current >= maxConcurrentUploads) {
break;
}
activeUploadsRef.current++;
// Start the upload without awaiting it, so that up to maxConcurrentUploads
// requests run in parallel. Awaiting inside this loop would serialize
// uploads and could double-start an item from a stale pending list once an
// effect-triggered queue pass overlaps with this one.
setUploadQueue(prev => prev.map(u =>
u.id === item.id ? { ...u, status: 'uploading' as UploadStatus } : u
));
void (async () => {
try {
const result = await uploadFile(item);
setUploadQueue(prev => prev.map(u =>
u.id === item.id
? { ...u, status: 'complete' as UploadStatus, progress: 100, artifactId: result.artifact_id }
: u
));
} catch (err: unknown) {
// The chunked path can reject with a dedup marker when the server already
// holds an identical file; treat that as a successful upload.
const dedupErr = err as { deduplicated?: boolean; artifact_id?: string };
if (dedupErr.deduplicated && dedupErr.artifact_id) {
setUploadQueue(prev => prev.map(u =>
u.id === item.id
? { ...u, status: 'complete' as UploadStatus, progress: 100, artifactId: dedupErr.artifact_id }
: u
));
} else {
const errorMessage = err instanceof Error ? err.message : 'Upload failed';
const shouldRetry = item.retryCount < maxRetries &&
(errorMessage.includes('Network') || errorMessage.includes('timeout'));
if (shouldRetry) {
// Exponential backoff: 1s, 2s, 4s, ... before re-queueing the item.
const delay = Math.pow(2, item.retryCount) * 1000;
setTimeout(() => {
setUploadQueue(prev => prev.map(u =>
u.id === item.id
? { ...u, status: 'pending' as UploadStatus, retryCount: u.retryCount + 1, progress: 0 }
: u
));
}, delay);
} else {
setUploadQueue(prev => prev.map(u =>
u.id === item.id
? { ...u, status: 'failed' as UploadStatus, error: errorMessage }
: u
));
onUploadError?.(errorMessage);
}
}
} finally {
activeUploadsRef.current--;
}
})();
}
}, [uploadQueue, maxConcurrentUploads, maxRetries, uploadFile, onUploadError, isOnline]);
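// Re-run the queue whenever it changes, and report completion once every item has settled as complete or failed.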
useEffect(() => {
const hasPending = uploadQueue.some(item => item.status === 'pending');
if (hasPending && activeUploadsRef.current < maxConcurrentUploads && isOnline) {
processQueue();
}
const allComplete = uploadQueue.length > 0 &&
uploadQueue.every(item => item.status === 'complete' || item.status === 'failed');
if (allComplete) {
const completedResults = uploadQueue
.filter(item => item.status === 'complete' && item.artifactId)
.map(item => ({
artifact_id: item.artifactId!,
size: item.file.size,
}));
if (completedResults.length > 0) {
onUploadComplete?.(completedResults);
}
}
}, [uploadQueue, maxConcurrentUploads, processQueue, onUploadComplete, isOnline]);
// Drag event handlers
const handleDragEnter = useCallback((e: React.DragEvent) => {
e.preventDefault();
e.stopPropagation();
if (disabled) return;
dragCounterRef.current++;
if (e.dataTransfer.items && e.dataTransfer.items.length > 0) {
setIsDragOver(true);
}
}, [disabled]);
const handleDragLeave = useCallback((e: React.DragEvent) => {
e.preventDefault();
e.stopPropagation();
if (disabled) return;
dragCounterRef.current--;
if (dragCounterRef.current === 0) {
setIsDragOver(false);
}
}, [disabled]);
const handleDragOver = useCallback((e: React.DragEvent) => {
e.preventDefault();
e.stopPropagation();
}, []);
const handleDrop = useCallback((e: React.DragEvent) => {
e.preventDefault();
e.stopPropagation();
setIsDragOver(false);
dragCounterRef.current = 0;
if (disabled) return;
const files = e.dataTransfer.files;
if (files && files.length > 0) {
addFiles(files);
}
}, [addFiles, disabled]);
// Click to browse
const handleClick = useCallback(() => {
if (disabled) return;
fileInputRef.current?.click();
}, [disabled]);
const handleFileChange = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
if (disabled) return;
const files = e.target.files;
if (files && files.length > 0) {
addFiles(files);
}
// Reset input so same file can be selected again
if (fileInputRef.current) {
fileInputRef.current.value = '';
}
}, [addFiles, disabled]);
// Remove item from queue
const removeItem = useCallback((id: string) => {
setUploadQueue(prev => prev.filter(item => item.id !== id));
}, []);
// Retry failed upload
const retryItem = useCallback((id: string) => {
setUploadQueue(prev => prev.map(item =>
item.id === id
? { ...item, status: 'pending' as UploadStatus, error: undefined, progress: 0, retryCount: 0 }
: item
));
}, []);
// Clear completed/failed items
const clearCompleted = useCallback(() => {
setUploadQueue(prev => prev.filter(item =>
item.status !== 'complete' && item.status !== 'failed'
));
}, []);
// Calculate overall progress
const overallProgress = uploadQueue.length > 0
? Math.round(uploadQueue.reduce((sum, item) => sum + item.progress, 0) / uploadQueue.length)
: 0;
const completedCount = uploadQueue.filter(item => item.status === 'complete').length;
const failedCount = uploadQueue.filter(item => item.status === 'failed').length;
const uploadingCount = uploadQueue.filter(item => item.status === 'uploading').length;
const pausedCount = uploadQueue.filter(item => item.status === 'paused').length;
return (
<div className={`drag-drop-upload ${className}`}>
{!isOnline && (
<div className="offline-banner">
<WifiOffIcon />
<span>You're offline. Uploads will resume when connection is restored.</span>
</div>
)}
<div
className={`drop-zone ${isDragOver ? 'drop-zone--active' : ''} ${disabled ? 'drop-zone--disabled' : ''}`}
onDragEnter={handleDragEnter}
onDragLeave={handleDragLeave}
onDragOver={handleDragOver}
onDrop={handleDrop}
onClick={handleClick}
role="button"
tabIndex={disabled ? -1 : 0}
onKeyDown={(e) => (e.key === 'Enter' || e.key === ' ') && handleClick()}
aria-disabled={disabled}
title={disabled ? disabledReason : undefined}
>
<input
ref={fileInputRef}
type="file"
multiple
onChange={handleFileChange}
className="drop-zone__input"
accept={!allowAllTypes && allowedTypes ? allowedTypes.join(',') : undefined}
disabled={disabled}
/>
<div className="drop-zone__content">
<UploadIcon />
<p className="drop-zone__text">
{disabled ? (
<span>{disabledReason || 'Upload disabled'}</span>
) : (
<><strong>Drag files here</strong> or click to browse</>
)}
</p>
{!disabled && (
<p className="drop-zone__hint">
{maxFileSize && `Max file size: ${formatBytes(maxFileSize)}`}
{!allowAllTypes && allowedTypes && ` • Accepted: ${allowedTypes.join(', ')}`}
</p>
)}
</div>
</div>
{/* Upload Queue */}
{uploadQueue.length > 0 && (
<div className="upload-queue">
<div className="upload-queue__header">
<span className="upload-queue__title">
{pausedCount > 0 && !isOnline
? `${pausedCount} uploads paused (offline)`
: uploadingCount > 0
? `Uploading ${uploadingCount} of ${uploadQueue.length} files`
: `${completedCount} of ${uploadQueue.length} files uploaded`
}
{failedCount > 0 && ` (${failedCount} failed)`}
</span>
{(completedCount > 0 || failedCount > 0) && (
<button
className="upload-queue__clear"
onClick={clearCompleted}
type="button"
>
Clear finished
</button>
)}
</div>
{/* Overall progress bar */}
{uploadingCount > 0 && (
<div className="upload-queue__overall">
<div className="progress-bar">
<div
className="progress-bar__fill"
style={{ width: `${overallProgress}%` }}
/>
</div>
<span className="progress-bar__text">{overallProgress}%</span>
</div>
)}
{/* Individual file items */}
<ul className="upload-queue__list">
{uploadQueue.map(item => (
<li key={item.id} className={`upload-item upload-item--${item.status}`}>
<div className="upload-item__icon">
{item.status === 'complete' ? <CheckIcon /> :
item.status === 'failed' ? <ErrorIcon /> :
item.status === 'paused' ? <PauseIcon /> :
item.status === 'validating' ? <SpinnerIcon /> :
<FileIcon />}
</div>
<div className="upload-item__info">
<div className="upload-item__name" title={item.file.name}>
{item.file.name}
</div>
<div className="upload-item__meta">
<span className="upload-item__size">{formatBytes(item.file.size)}</span>
{item.status === 'uploading' && item.speed > 0 && (
<>
<span className="upload-item__speed">{formatSpeed(item.speed)}</span>
{item.startTime && (
<span className="upload-item__eta">
{formatTimeRemaining(
(item.file.size - (item.file.size * item.progress / 100)) / item.speed
)} remaining
</span>
)}
</>
)}
{item.status === 'complete' && item.artifactId && (
<span className="upload-item__artifact">
ID: {item.artifactId.substring(0, 12)}...
</span>
)}
{item.error && (
<span className="upload-item__error">{item.error}</span>
)}
{item.retryCount > 0 && item.status === 'uploading' && (
<span className="upload-item__retry-count">Retry {item.retryCount}</span>
)}
{item.status === 'validating' && (
<span className="upload-item__validating">Computing hash...</span>
)}
</div>
{item.status === 'uploading' && (
<div className="progress-bar progress-bar--small">
<div
className="progress-bar__fill"
style={{ width: `${item.progress}%` }}
/>
</div>
)}
</div>
<div className="upload-item__actions">
{(item.status === 'failed' || (item.status === 'paused' && isOnline)) && (
<button
className="upload-item__btn upload-item__btn--retry"
onClick={() => retryItem(item.id)}
title="Retry upload"
type="button"
>
<RetryIcon />
</button>
)}
{(item.status === 'complete' || item.status === 'failed' || item.status === 'pending' || item.status === 'paused') && (
<button
className="upload-item__btn upload-item__btn--remove"
onClick={() => removeItem(item.id)}
title="Remove"
type="button"
>
<RemoveIcon />
</button>
)}
</div>
</li>
))}
</ul>
</div>
)}
</div>
);
}
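For reference, a minimal usage sketch of the component exported above. The prop names are inferred from the closures in its body (projectName, packageName, tag, onUploadComplete, onUploadError) rather than from a documented props interface, so they and the example values are assumptions:

// Hypothetical wiring of DragDropUpload into a page -- prop names inferred
// from the component body above, not from a documented interface.
import { DragDropUpload } from '../components';

function PackageUploadsPanel() {
  return (
    <DragDropUpload
      projectName="demo-project"
      packageName="demo-package"
      tag="latest"
      onUploadComplete={(results: Array<{ artifact_id: string; size: number }>) =>
        console.log(`Uploaded ${results.length} artifact(s)`)
      }
      onUploadError={(message: string) => console.error(message)}
    />
  );
}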

View File

@@ -98,6 +98,170 @@
opacity: 0.7;
}
/* Login link */
.nav-login {
display: flex;
align-items: center;
gap: 8px;
padding: 8px 16px;
color: var(--text-primary);
font-size: 0.875rem;
font-weight: 500;
border-radius: var(--radius-md);
transition: all var(--transition-fast);
margin-left: 8px;
border: 1px solid var(--border-primary);
}
.nav-login:hover {
color: var(--text-primary);
background: var(--bg-hover);
border-color: var(--border-secondary);
}
/* User Menu */
.user-menu {
position: relative;
margin-left: 8px;
}
.user-menu-trigger {
display: flex;
align-items: center;
gap: 8px;
padding: 6px 12px;
background: transparent;
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
color: var(--text-primary);
font-size: 0.875rem;
font-weight: 500;
cursor: pointer;
transition: all var(--transition-fast);
}
.user-menu-trigger:hover {
background: var(--bg-hover);
border-color: var(--border-secondary);
}
.user-avatar {
width: 28px;
height: 28px;
display: flex;
align-items: center;
justify-content: center;
background: var(--accent-gradient);
border-radius: var(--radius-sm);
color: white;
font-weight: 600;
font-size: 0.8125rem;
}
.user-name {
max-width: 120px;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
.user-menu-dropdown {
position: absolute;
top: 100%;
right: 0;
margin-top: 8px;
min-width: 200px;
background: var(--bg-secondary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
box-shadow: var(--shadow-lg);
z-index: 200;
overflow: hidden;
}
.user-menu-header {
display: flex;
align-items: center;
justify-content: space-between;
padding: 12px 16px;
}
.user-menu-username {
font-size: 0.875rem;
font-weight: 500;
color: var(--text-primary);
}
.user-menu-badge {
padding: 2px 8px;
background: var(--accent-gradient);
border-radius: 100px;
font-size: 0.6875rem;
font-weight: 600;
color: white;
text-transform: uppercase;
letter-spacing: 0.02em;
}
.user-menu-divider {
height: 1px;
background: var(--border-primary);
}
.user-menu-item {
display: flex;
align-items: center;
gap: 10px;
width: 100%;
padding: 12px 16px;
background: transparent;
border: none;
color: var(--text-secondary);
font-size: 0.875rem;
cursor: pointer;
transition: all var(--transition-fast);
text-align: left;
text-decoration: none;
}
.user-menu-item:hover {
background: var(--bg-hover);
color: var(--text-primary);
}
.user-menu-item svg {
opacity: 0.7;
}
.user-menu-item:hover svg {
opacity: 1;
}
/* User menu loading state */
.user-menu-loading {
display: flex;
align-items: center;
justify-content: center;
width: 40px;
height: 40px;
margin-left: 8px;
}
.user-menu-spinner {
width: 16px;
height: 16px;
border: 2px solid var(--border-secondary);
border-top-color: var(--accent-primary);
border-radius: 50%;
animation: user-menu-spin 0.6s linear infinite;
}
@keyframes user-menu-spin {
to {
transform: rotate(360deg);
}
}
/* Main content */
.main {
flex: 1;

View File

@@ -1,5 +1,6 @@
-import { ReactNode } from 'react';
-import { Link, useLocation } from 'react-router-dom';
+import { ReactNode, useState, useRef, useEffect } from 'react';
+import { Link, NavLink, useLocation, useNavigate } from 'react-router-dom';
+import { useAuth } from '../contexts/AuthContext';
import { GlobalSearch } from './GlobalSearch';
import './Layout.css';
@@ -9,6 +10,31 @@ interface LayoutProps {
function Layout({ children }: LayoutProps) {
const location = useLocation();
const navigate = useNavigate();
const { user, loading, logout } = useAuth();
const [showUserMenu, setShowUserMenu] = useState(false);
const menuRef = useRef<HTMLDivElement>(null);
// Close menu when clicking outside
useEffect(() => {
function handleClickOutside(event: MouseEvent) {
if (menuRef.current && !menuRef.current.contains(event.target as Node)) {
setShowUserMenu(false);
}
}
document.addEventListener('mousedown', handleClickOutside);
return () => document.removeEventListener('mousedown', handleClickOutside);
}, []);
async function handleLogout() {
try {
await logout();
setShowUserMenu(false);
navigate('/');
} catch {
// Error handled in context
}
}
return (
<div className="layout">
@@ -60,6 +86,97 @@ function Layout({ children }: LayoutProps) {
</svg>
Docs
</a>
{/* User Menu */}
{loading ? (
<div className="user-menu-loading">
<div className="user-menu-spinner"></div>
</div>
) : user ? (
<div className="user-menu" ref={menuRef}>
<button
className="user-menu-trigger"
onClick={() => setShowUserMenu(!showUserMenu)}
aria-expanded={showUserMenu}
aria-haspopup="true"
>
<div className="user-avatar">
{user.username.charAt(0).toUpperCase()}
</div>
<span className="user-name">{user.display_name || user.username}</span>
<svg width="12" height="12" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<polyline points="6 9 12 15 18 9"/>
</svg>
</button>
{showUserMenu && (
<div className="user-menu-dropdown">
<div className="user-menu-header">
<span className="user-menu-username">{user.username}</span>
{user.is_admin && (
<span className="user-menu-badge">Admin</span>
)}
</div>
<div className="user-menu-divider"></div>
<NavLink
to="/settings/api-keys"
className="user-menu-item"
onClick={() => setShowUserMenu(false)}
>
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M21 2l-2 2m-7.61 7.61a5.5 5.5 0 1 1-7.778 7.778 5.5 5.5 0 0 1 7.777-7.777zm0 0L15.5 7.5m0 0l3 3L22 7l-3-3m-3.5 3.5L19 4"/>
</svg>
API Keys
</NavLink>
{user.is_admin && (
<>
<NavLink
to="/admin/users"
className="user-menu-item"
onClick={() => setShowUserMenu(false)}
>
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
<circle cx="9" cy="7" r="4"/>
<path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
<path d="M16 3.13a4 4 0 0 1 0 7.75"/>
</svg>
User Management
</NavLink>
<NavLink
to="/admin/oidc"
className="user-menu-item"
onClick={() => setShowUserMenu(false)}
>
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M12 22s8-4 8-10V5l-8-3-8 3v7c0 6 8 10 8 10z"/>
</svg>
SSO Configuration
</NavLink>
</>
)}
<div className="user-menu-divider"></div>
<button className="user-menu-item" onClick={handleLogout}>
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M9 21H5a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h4"/>
<polyline points="16 17 21 12 16 7"/>
<line x1="21" y1="12" x2="9" y2="12"/>
</svg>
Sign out
</button>
</div>
)}
</div>
) : (
<Link to="/login" className="nav-login">
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M15 3h4a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2h-4"/>
<polyline points="10 17 15 12 10 7"/>
<line x1="15" y1="12" x2="3" y2="12"/>
</svg>
Login
</Link>
)}
</nav>
</div>
</header>

View File

@@ -10,3 +10,5 @@ export { FilterChip, FilterChipGroup } from './FilterChip';
export { DataTable } from './DataTable';
export { Pagination } from './Pagination';
export { GlobalSearch } from './GlobalSearch';
export { DragDropUpload } from './DragDropUpload';
export type { DragDropUploadProps, UploadItem, UploadResult, UploadStatus } from './DragDropUpload';

View File

@@ -0,0 +1,166 @@
import { createContext, useContext, useState, useEffect, useCallback, useRef, ReactNode } from 'react';
import { User, AccessLevel } from '../types';
import { getCurrentUser, login as apiLogin, logout as apiLogout, getMyProjectAccess } from '../api';
interface PermissionCacheEntry {
accessLevel: AccessLevel | null;
timestamp: number;
}
interface AuthContextType {
user: User | null;
loading: boolean;
error: string | null;
login: (username: string, password: string) => Promise<void>;
logout: () => Promise<void>;
refreshUser: () => Promise<void>;
clearError: () => void;
getProjectPermission: (projectName: string) => Promise<AccessLevel | null>;
invalidatePermissionCache: (projectName?: string) => void;
}
const AuthContext = createContext<AuthContextType | undefined>(undefined);
interface AuthProviderProps {
children: ReactNode;
}
// Cache TTL in milliseconds (5 minutes)
const PERMISSION_CACHE_TTL = 5 * 60 * 1000;
export function AuthProvider({ children }: AuthProviderProps) {
const [user, setUser] = useState<User | null>(null);
const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
const permissionCacheRef = useRef<Map<string, PermissionCacheEntry>>(new Map());
// Clear permission cache
const clearPermissionCache = useCallback(() => {
permissionCacheRef.current.clear();
}, []);
// Check session on initial load
useEffect(() => {
async function checkAuth() {
try {
const currentUser = await getCurrentUser();
setUser(currentUser);
} catch {
setUser(null);
} finally {
setLoading(false);
}
}
checkAuth();
}, []);
const login = useCallback(async (username: string, password: string) => {
setLoading(true);
setError(null);
try {
const loggedInUser = await apiLogin({ username, password });
setUser(loggedInUser);
// Clear permission cache on login - permissions may have changed
clearPermissionCache();
} catch (err) {
const message = err instanceof Error ? err.message : 'Login failed';
setError(message);
throw err;
} finally {
setLoading(false);
}
}, [clearPermissionCache]);
const logout = useCallback(async () => {
setLoading(true);
setError(null);
try {
await apiLogout();
setUser(null);
// Clear permission cache on logout
clearPermissionCache();
} catch (err) {
const message = err instanceof Error ? err.message : 'Logout failed';
setError(message);
throw err;
} finally {
setLoading(false);
}
}, [clearPermissionCache]);
const clearError = useCallback(() => {
setError(null);
}, []);
const refreshUser = useCallback(async () => {
try {
const currentUser = await getCurrentUser();
setUser(currentUser);
} catch {
setUser(null);
}
}, []);
// Get project permission with caching
const getProjectPermission = useCallback(async (projectName: string): Promise<AccessLevel | null> => {
const cached = permissionCacheRef.current.get(projectName);
const now = Date.now();
// Return cached value if still valid
if (cached && (now - cached.timestamp) < PERMISSION_CACHE_TTL) {
return cached.accessLevel;
}
// Fetch fresh permission
try {
const result = await getMyProjectAccess(projectName);
const entry: PermissionCacheEntry = {
accessLevel: result.access_level,
timestamp: now,
};
permissionCacheRef.current.set(projectName, entry);
return result.access_level;
} catch {
// On error, cache null to avoid repeated failed requests
const entry: PermissionCacheEntry = {
accessLevel: null,
timestamp: now,
};
permissionCacheRef.current.set(projectName, entry);
return null;
}
}, []);
// Invalidate permission cache for a specific project or all projects
const invalidatePermissionCache = useCallback((projectName?: string) => {
if (projectName) {
permissionCacheRef.current.delete(projectName);
} else {
clearPermissionCache();
}
}, [clearPermissionCache]);
return (
<AuthContext.Provider value={{
user,
loading,
error,
login,
logout,
refreshUser,
clearError,
getProjectPermission,
invalidatePermissionCache,
}}>
{children}
</AuthContext.Provider>
);
}
export function useAuth() {
const context = useContext(AuthContext);
if (context === undefined) {
throw new Error('useAuth must be used within an AuthProvider');
}
return context;
}
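A minimal sketch of a consumer of the permission cache above; the hook name useCanWrite and the 'write' level are hypothetical, since the AccessLevel values are defined elsewhere in the app:

// Hypothetical consumer of getProjectPermission. Only the hook API above is
// taken from the source; the 'write' comparison assumes AccessLevel is a
// string union that includes 'write'.
import { useEffect, useState } from 'react';
import { useAuth } from './AuthContext';

export function useCanWrite(projectName: string): boolean {
  const { getProjectPermission } = useAuth();
  const [canWrite, setCanWrite] = useState(false);
  useEffect(() => {
    let cancelled = false;
    // After the first call per project this resolves from the 5-minute cache.
    getProjectPermission(projectName).then(level => {
      if (!cancelled) setCanWrite(level === 'write');
    });
    return () => { cancelled = true; };
  }, [projectName, getProjectPermission]);
  return canWrite;
}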

View File

@@ -0,0 +1,580 @@
.api-keys-page {
max-width: 900px;
margin: 0 auto;
}
.api-keys-header {
display: flex;
justify-content: space-between;
align-items: flex-start;
margin-bottom: 32px;
gap: 24px;
}
.api-keys-header-content h1 {
font-size: 1.75rem;
font-weight: 600;
color: var(--text-primary);
margin-bottom: 8px;
letter-spacing: -0.02em;
}
.api-keys-subtitle {
color: var(--text-tertiary);
font-size: 0.9375rem;
}
.api-keys-create-button {
display: flex;
align-items: center;
gap: 8px;
padding: 12px 20px;
background: var(--accent-gradient);
border: none;
border-radius: var(--radius-md);
font-size: 0.875rem;
font-weight: 500;
color: white;
cursor: pointer;
transition: all var(--transition-fast);
box-shadow: var(--shadow-sm), 0 0 20px rgba(16, 185, 129, 0.2);
flex-shrink: 0;
}
.api-keys-create-button:hover:not(:disabled) {
transform: translateY(-1px);
box-shadow: var(--shadow-md), 0 0 30px rgba(16, 185, 129, 0.3);
}
.api-keys-create-button:disabled {
opacity: 0.5;
cursor: not-allowed;
transform: none;
}
.api-keys-error {
display: flex;
align-items: center;
gap: 10px;
background: var(--error-bg);
border: 1px solid rgba(239, 68, 68, 0.2);
color: var(--error);
padding: 12px 16px;
border-radius: var(--radius-md);
margin-bottom: 24px;
font-size: 0.875rem;
}
.api-keys-error svg {
flex-shrink: 0;
}
.api-keys-error span {
flex: 1;
}
.api-keys-error-dismiss {
background: transparent;
border: none;
padding: 4px;
color: var(--error);
cursor: pointer;
opacity: 0.7;
transition: opacity var(--transition-fast);
}
.api-keys-error-dismiss:hover {
opacity: 1;
}
.api-keys-new-key-banner {
background: linear-gradient(135deg, rgba(16, 185, 129, 0.12) 0%, rgba(5, 150, 105, 0.08) 100%);
border: 1px solid rgba(16, 185, 129, 0.3);
border-radius: var(--radius-lg);
padding: 24px;
margin-bottom: 24px;
}
.api-keys-new-key-header {
display: flex;
align-items: center;
gap: 10px;
margin-bottom: 12px;
color: var(--accent-primary);
}
.api-keys-new-key-title {
font-size: 1rem;
font-weight: 600;
}
.api-keys-new-key-warning {
background: var(--warning-bg);
border: 1px solid rgba(245, 158, 11, 0.3);
color: var(--warning);
padding: 10px 14px;
border-radius: var(--radius-md);
font-size: 0.8125rem;
font-weight: 500;
margin-bottom: 16px;
}
.api-keys-new-key-value-container {
display: flex;
align-items: center;
gap: 12px;
margin-bottom: 16px;
}
.api-keys-new-key-value {
flex: 1;
background: var(--bg-primary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
padding: 14px 16px;
font-family: 'JetBrains Mono', 'Fira Code', 'SF Mono', Monaco, monospace;
font-size: 0.8125rem;
color: var(--text-primary);
word-break: break-all;
line-height: 1.5;
}
.api-keys-copy-button {
display: flex;
align-items: center;
gap: 6px;
padding: 10px 16px;
background: var(--bg-tertiary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
color: var(--text-secondary);
font-size: 0.8125rem;
font-weight: 500;
cursor: pointer;
transition: all var(--transition-fast);
flex-shrink: 0;
}
.api-keys-copy-button:hover {
background: var(--bg-hover);
border-color: var(--border-secondary);
color: var(--text-primary);
}
.api-keys-done-button {
padding: 10px 20px;
background: var(--accent-gradient);
border: none;
border-radius: var(--radius-md);
font-size: 0.875rem;
font-weight: 500;
color: white;
cursor: pointer;
transition: all var(--transition-fast);
}
.api-keys-done-button:hover {
transform: translateY(-1px);
box-shadow: var(--shadow-sm);
}
.api-keys-create-form-card {
background: var(--bg-secondary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-lg);
padding: 24px;
margin-bottom: 24px;
}
.api-keys-create-form-header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 20px;
}
.api-keys-create-form-header h2 {
font-size: 1.125rem;
font-weight: 600;
color: var(--text-primary);
}
.api-keys-create-form-close {
background: transparent;
border: none;
padding: 4px;
color: var(--text-tertiary);
cursor: pointer;
border-radius: var(--radius-sm);
transition: all var(--transition-fast);
}
.api-keys-create-form-close:hover {
background: var(--bg-hover);
color: var(--text-primary);
}
.api-keys-create-error {
background: var(--error-bg);
border: 1px solid rgba(239, 68, 68, 0.2);
color: var(--error);
padding: 10px 14px;
border-radius: var(--radius-md);
font-size: 0.8125rem;
margin-bottom: 16px;
}
.api-keys-create-form {
display: flex;
flex-direction: column;
gap: 16px;
}
.api-keys-form-group {
display: flex;
flex-direction: column;
gap: 6px;
}
.api-keys-form-group label {
font-size: 0.8125rem;
font-weight: 500;
color: var(--text-secondary);
}
.api-keys-form-group input {
padding: 12px 14px;
background: var(--bg-tertiary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
font-size: 0.875rem;
color: var(--text-primary);
transition: all var(--transition-fast);
}
.api-keys-form-group input::placeholder {
color: var(--text-muted);
}
.api-keys-form-group input:hover:not(:disabled) {
border-color: var(--border-secondary);
background: var(--bg-elevated);
}
.api-keys-form-group input:focus {
outline: none;
border-color: var(--accent-primary);
box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
background: var(--bg-elevated);
}
.api-keys-form-group input:disabled {
opacity: 0.6;
cursor: not-allowed;
}
.api-keys-form-actions {
display: flex;
justify-content: flex-end;
gap: 12px;
margin-top: 8px;
}
.api-keys-cancel-button {
padding: 10px 18px;
background: transparent;
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
font-size: 0.875rem;
font-weight: 500;
color: var(--text-secondary);
cursor: pointer;
transition: all var(--transition-fast);
}
.api-keys-cancel-button:hover:not(:disabled) {
background: var(--bg-hover);
border-color: var(--border-secondary);
color: var(--text-primary);
}
.api-keys-cancel-button:disabled {
opacity: 0.5;
cursor: not-allowed;
}
.api-keys-submit-button {
display: flex;
align-items: center;
justify-content: center;
gap: 8px;
padding: 10px 18px;
background: var(--accent-gradient);
border: none;
border-radius: var(--radius-md);
font-size: 0.875rem;
font-weight: 500;
color: white;
cursor: pointer;
transition: all var(--transition-fast);
min-width: 110px;
}
.api-keys-submit-button:hover:not(:disabled) {
transform: translateY(-1px);
box-shadow: var(--shadow-sm), 0 0 20px rgba(16, 185, 129, 0.2);
}
.api-keys-submit-button:disabled {
opacity: 0.5;
cursor: not-allowed;
transform: none;
}
.api-keys-button-spinner {
width: 14px;
height: 14px;
border: 2px solid rgba(255, 255, 255, 0.3);
border-top-color: white;
border-radius: 50%;
animation: api-keys-spin 0.6s linear infinite;
}
@keyframes api-keys-spin {
to {
transform: rotate(360deg);
}
}
.api-keys-list-container {
background: var(--bg-secondary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-lg);
overflow: hidden;
}
.api-keys-list-loading,
.api-keys-loading {
display: flex;
align-items: center;
justify-content: center;
gap: 12px;
padding: 64px 24px;
color: var(--text-tertiary);
font-size: 0.9375rem;
}
.api-keys-spinner {
width: 20px;
height: 20px;
border: 2px solid var(--border-secondary);
border-top-color: var(--accent-primary);
border-radius: 50%;
animation: api-keys-spin 0.6s linear infinite;
}
.api-keys-empty {
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
padding: 64px 24px;
text-align: center;
}
.api-keys-empty-icon {
color: var(--text-muted);
margin-bottom: 16px;
opacity: 0.5;
}
.api-keys-empty h3 {
font-size: 1.125rem;
font-weight: 600;
color: var(--text-primary);
margin-bottom: 8px;
}
.api-keys-empty p {
color: var(--text-tertiary);
font-size: 0.875rem;
}
.api-keys-list {
display: flex;
flex-direction: column;
}
.api-keys-list-header {
display: grid;
grid-template-columns: 1fr 160px 160px 140px;
gap: 16px;
padding: 14px 20px;
background: var(--bg-tertiary);
border-bottom: 1px solid var(--border-primary);
font-size: 0.75rem;
font-weight: 600;
color: var(--text-tertiary);
text-transform: uppercase;
letter-spacing: 0.04em;
}
.api-keys-list-item {
display: grid;
grid-template-columns: 1fr 160px 160px 140px;
gap: 16px;
padding: 16px 20px;
align-items: center;
border-bottom: 1px solid var(--border-primary);
transition: background var(--transition-fast);
}
.api-keys-list-item:last-child {
border-bottom: none;
}
.api-keys-list-item:hover {
background: var(--bg-tertiary);
}
.api-keys-item-name {
font-weight: 500;
color: var(--text-primary);
font-size: 0.9375rem;
}
.api-keys-item-description {
color: var(--text-tertiary);
font-size: 0.8125rem;
margin-top: 4px;
}
.api-keys-col-created,
.api-keys-col-used {
color: var(--text-secondary);
font-size: 0.8125rem;
}
.api-keys-col-actions {
display: flex;
justify-content: flex-end;
}
.api-keys-revoke-button {
padding: 6px 14px;
background: transparent;
border: 1px solid rgba(239, 68, 68, 0.3);
border-radius: var(--radius-md);
font-size: 0.8125rem;
font-weight: 500;
color: var(--error);
cursor: pointer;
transition: all var(--transition-fast);
}
.api-keys-revoke-button:hover {
background: var(--error-bg);
border-color: rgba(239, 68, 68, 0.5);
}
.api-keys-delete-confirm {
display: flex;
align-items: center;
gap: 8px;
font-size: 0.8125rem;
color: var(--text-secondary);
}
.api-keys-confirm-yes {
padding: 4px 12px;
background: var(--error);
border: none;
border-radius: var(--radius-sm);
font-size: 0.75rem;
font-weight: 500;
color: white;
cursor: pointer;
transition: all var(--transition-fast);
}
.api-keys-confirm-yes:hover:not(:disabled) {
opacity: 0.9;
}
.api-keys-confirm-yes:disabled {
opacity: 0.5;
cursor: not-allowed;
}
.api-keys-confirm-no {
padding: 4px 12px;
background: var(--bg-tertiary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-sm);
font-size: 0.75rem;
font-weight: 500;
color: var(--text-secondary);
cursor: pointer;
transition: all var(--transition-fast);
}
.api-keys-confirm-no:hover:not(:disabled) {
background: var(--bg-hover);
}
.api-keys-confirm-no:disabled {
opacity: 0.5;
cursor: not-allowed;
}
@media (max-width: 768px) {
.api-keys-header {
flex-direction: column;
align-items: stretch;
}
.api-keys-create-button {
align-self: flex-start;
}
.api-keys-list-header {
display: none;
}
.api-keys-list-item {
grid-template-columns: 1fr;
gap: 8px;
}
.api-keys-col-name {
order: 1;
}
.api-keys-col-created,
.api-keys-col-used {
font-size: 0.75rem;
}
.api-keys-col-created::before {
content: 'Created: ';
color: var(--text-muted);
}
.api-keys-col-used::before {
content: 'Last used: ';
color: var(--text-muted);
}
.api-keys-col-actions {
justify-content: flex-start;
margin-top: 8px;
}
.api-keys-new-key-value-container {
flex-direction: column;
}
.api-keys-copy-button {
align-self: flex-start;
}
}

View File

@@ -0,0 +1,371 @@
import { useState, useEffect } from 'react';
import { useNavigate } from 'react-router-dom';
import { useAuth } from '../contexts/AuthContext';
import { listAPIKeys, createAPIKey, deleteAPIKey } from '../api';
import { APIKey, APIKeyCreateResponse } from '../types';
import './APIKeysPage.css';
function APIKeysPage() {
const { user, loading: authLoading } = useAuth();
const navigate = useNavigate();
const [keys, setKeys] = useState<APIKey[]>([]);
const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
const [showCreateForm, setShowCreateForm] = useState(false);
const [createName, setCreateName] = useState('');
const [createDescription, setCreateDescription] = useState('');
const [isCreating, setIsCreating] = useState(false);
const [createError, setCreateError] = useState<string | null>(null);
const [newlyCreatedKey, setNewlyCreatedKey] = useState<APIKeyCreateResponse | null>(null);
const [copied, setCopied] = useState(false);
const [deleteConfirmId, setDeleteConfirmId] = useState<string | null>(null);
const [isDeleting, setIsDeleting] = useState(false);
useEffect(() => {
if (!authLoading && !user) {
navigate('/login', { state: { from: '/settings/api-keys' } });
}
}, [user, authLoading, navigate]);
useEffect(() => {
if (user) {
loadKeys();
}
}, [user]);
async function loadKeys() {
setLoading(true);
setError(null);
try {
const data = await listAPIKeys();
setKeys(data);
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to load API keys');
} finally {
setLoading(false);
}
}
async function handleCreate(e: React.FormEvent) {
e.preventDefault();
if (!createName.trim()) {
setCreateError('Name is required');
return;
}
setIsCreating(true);
setCreateError(null);
try {
const response = await createAPIKey({
name: createName.trim(),
description: createDescription.trim() || undefined,
});
setNewlyCreatedKey(response);
setShowCreateForm(false);
setCreateName('');
setCreateDescription('');
await loadKeys();
} catch (err) {
setCreateError(err instanceof Error ? err.message : 'Failed to create API key');
} finally {
setIsCreating(false);
}
}
async function handleDelete(id: string) {
setIsDeleting(true);
try {
await deleteAPIKey(id);
setDeleteConfirmId(null);
await loadKeys();
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to revoke API key');
} finally {
setIsDeleting(false);
}
}
async function handleCopyKey() {
if (newlyCreatedKey) {
try {
await navigator.clipboard.writeText(newlyCreatedKey.key);
setCopied(true);
setTimeout(() => setCopied(false), 2000);
} catch {
setError('Failed to copy to clipboard');
}
}
}
function handleDismissNewKey() {
setNewlyCreatedKey(null);
setCopied(false);
}
function formatDate(dateString: string | null): string {
if (!dateString) return 'Never';
return new Date(dateString).toLocaleDateString('en-US', {
year: 'numeric',
month: 'short',
day: 'numeric',
hour: '2-digit',
minute: '2-digit',
});
}
if (authLoading) {
return (
<div className="api-keys-page">
<div className="api-keys-loading">
<div className="api-keys-spinner"></div>
<span>Loading...</span>
</div>
</div>
);
}
if (!user) {
return null;
}
return (
<div className="api-keys-page">
<div className="api-keys-header">
<div className="api-keys-header-content">
<h1>API Keys</h1>
<p className="api-keys-subtitle">
Manage API keys for programmatic access to Orchard
</p>
</div>
<button
className="api-keys-create-button"
onClick={() => setShowCreateForm(true)}
disabled={showCreateForm}
>
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<line x1="12" y1="5" x2="12" y2="19"/>
<line x1="5" y1="12" x2="19" y2="12"/>
</svg>
Create New Key
</button>
</div>
{error && (
<div className="api-keys-error">
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<circle cx="12" cy="12" r="10"/>
<line x1="12" y1="8" x2="12" y2="12"/>
<line x1="12" y1="16" x2="12.01" y2="16"/>
</svg>
<span>{error}</span>
<button onClick={() => setError(null)} className="api-keys-error-dismiss">
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<line x1="18" y1="6" x2="6" y2="18"/>
<line x1="6" y1="6" x2="18" y2="18"/>
</svg>
</button>
</div>
)}
{newlyCreatedKey && (
<div className="api-keys-new-key-banner">
<div className="api-keys-new-key-header">
<svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M12 22s8-4 8-10V5l-8-3-8 3v7c0 6 8 10 8 10z"/>
</svg>
<span className="api-keys-new-key-title">New API Key Created</span>
</div>
<div className="api-keys-new-key-warning">
Copy this key now! It won't be shown again.
</div>
<div className="api-keys-new-key-value-container">
<code className="api-keys-new-key-value">{newlyCreatedKey.key}</code>
<button
className="api-keys-copy-button"
onClick={handleCopyKey}
title="Copy to clipboard"
>
{copied ? (
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<polyline points="20 6 9 17 4 12"/>
</svg>
) : (
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<rect x="9" y="9" width="13" height="13" rx="2" ry="2"/>
<path d="M5 15H4a2 2 0 0 1-2-2V4a2 2 0 0 1 2-2h9a2 2 0 0 1 2 2v1"/>
</svg>
)}
{copied ? 'Copied!' : 'Copy'}
</button>
</div>
<button className="api-keys-done-button" onClick={handleDismissNewKey}>
Done
</button>
</div>
)}
{showCreateForm && (
<div className="api-keys-create-form-card">
<div className="api-keys-create-form-header">
<h2>Create New API Key</h2>
<button
className="api-keys-create-form-close"
onClick={() => {
setShowCreateForm(false);
setCreateName('');
setCreateDescription('');
setCreateError(null);
}}
>
<svg width="18" height="18" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<line x1="18" y1="6" x2="6" y2="18"/>
<line x1="6" y1="6" x2="18" y2="18"/>
</svg>
</button>
</div>
{createError && (
<div className="api-keys-create-error">
{createError}
</div>
)}
<form onSubmit={handleCreate} className="api-keys-create-form">
<div className="api-keys-form-group">
<label htmlFor="key-name">Name</label>
<input
id="key-name"
type="text"
value={createName}
onChange={(e) => setCreateName(e.target.value)}
placeholder="e.g., CI/CD Pipeline, Local Development"
autoFocus
disabled={isCreating}
/>
</div>
<div className="api-keys-form-group">
<label htmlFor="key-description">Description (optional)</label>
<input
id="key-description"
type="text"
value={createDescription}
onChange={(e) => setCreateDescription(e.target.value)}
placeholder="What will this key be used for?"
disabled={isCreating}
/>
</div>
<div className="api-keys-form-actions">
<button
type="button"
className="api-keys-cancel-button"
onClick={() => {
setShowCreateForm(false);
setCreateName('');
setCreateDescription('');
setCreateError(null);
}}
disabled={isCreating}
>
Cancel
</button>
<button
type="submit"
className="api-keys-submit-button"
disabled={isCreating || !createName.trim()}
>
{isCreating ? (
<>
<span className="api-keys-button-spinner"></span>
Creating...
</>
) : (
'Create Key'
)}
</button>
</div>
</form>
</div>
)}
<div className="api-keys-list-container">
{loading ? (
<div className="api-keys-list-loading">
<div className="api-keys-spinner"></div>
<span>Loading API keys...</span>
</div>
) : keys.length === 0 ? (
<div className="api-keys-empty">
<div className="api-keys-empty-icon">
<svg width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="1.5">
<path d="M21 2l-2 2m-7.61 7.61a5.5 5.5 0 1 1-7.778 7.778 5.5 5.5 0 0 1 7.777-7.777zm0 0L15.5 7.5m0 0l3 3L22 7l-3-3m-3.5 3.5L19 4"/>
</svg>
</div>
<h3>No API Keys</h3>
<p>Create an API key to access Orchard programmatically</p>
</div>
) : (
<div className="api-keys-list">
<div className="api-keys-list-header">
<span className="api-keys-col-name">Name</span>
<span className="api-keys-col-created">Created</span>
<span className="api-keys-col-used">Last Used</span>
<span className="api-keys-col-actions">Actions</span>
</div>
{keys.map((key) => (
<div key={key.id} className="api-keys-list-item">
<div className="api-keys-col-name">
<div className="api-keys-item-name">{key.name}</div>
{key.description && (
<div className="api-keys-item-description">{key.description}</div>
)}
</div>
<div className="api-keys-col-created">
{formatDate(key.created_at)}
</div>
<div className="api-keys-col-used">
{formatDate(key.last_used)}
</div>
<div className="api-keys-col-actions">
{deleteConfirmId === key.id ? (
<div className="api-keys-delete-confirm">
<span>Revoke?</span>
<button
className="api-keys-confirm-yes"
onClick={() => handleDelete(key.id)}
disabled={isDeleting}
>
{isDeleting ? 'Revoking...' : 'Yes'}
</button>
<button
className="api-keys-confirm-no"
onClick={() => setDeleteConfirmId(null)}
disabled={isDeleting}
>
No
</button>
</div>
) : (
<button
className="api-keys-revoke-button"
onClick={() => setDeleteConfirmId(key.id)}
>
Revoke
</button>
)}
</div>
</div>
))}
</div>
)}
</div>
</div>
);
}
export default APIKeysPage;
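Once the banner is dismissed, the raw key cannot be recovered, so it is typically stored in a secret manager and sent with each API request. A sketch under the assumption that the server accepts the key as a bearer token; the endpoint and header scheme are illustrative, not taken from the source:

// Assumptions: bearer-token auth and a /api/v1/project listing endpoint are
// illustrative; substitute whatever scheme the Orchard server implements.
async function listProjects(apiKey: string): Promise<unknown> {
  const res = await fetch('/api/v1/project', {
    headers: { Authorization: `Bearer ${apiKey}` },
  });
  if (!res.ok) {
    throw new Error(`Request failed: ${res.status}`);
  }
  return res.json();
}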

View File

@@ -0,0 +1,405 @@
.admin-oidc-page {
max-width: 800px;
margin: 0 auto;
}
.admin-oidc-header {
margin-bottom: 32px;
}
.admin-oidc-header-content h1 {
font-size: 1.75rem;
font-weight: 600;
color: var(--text-primary);
margin-bottom: 8px;
letter-spacing: -0.02em;
}
.admin-oidc-subtitle {
color: var(--text-tertiary);
font-size: 0.9375rem;
}
.admin-oidc-success {
display: flex;
align-items: center;
gap: 10px;
background: var(--success-bg);
border: 1px solid rgba(34, 197, 94, 0.2);
color: var(--success);
padding: 12px 16px;
border-radius: var(--radius-md);
margin-bottom: 24px;
font-size: 0.875rem;
animation: admin-oidc-fade-in 0.2s ease;
}
@keyframes admin-oidc-fade-in {
from {
opacity: 0;
transform: translateY(-8px);
}
to {
opacity: 1;
transform: translateY(0);
}
}
.admin-oidc-error {
display: flex;
align-items: center;
gap: 10px;
background: var(--error-bg);
border: 1px solid rgba(239, 68, 68, 0.2);
color: var(--error);
padding: 12px 16px;
border-radius: var(--radius-md);
margin-bottom: 24px;
font-size: 0.875rem;
}
.admin-oidc-error svg {
flex-shrink: 0;
}
.admin-oidc-error span {
flex: 1;
}
.admin-oidc-error-dismiss {
background: transparent;
border: none;
padding: 4px;
color: var(--error);
cursor: pointer;
opacity: 0.7;
transition: opacity var(--transition-fast);
}
.admin-oidc-error-dismiss:hover {
opacity: 1;
}
.admin-oidc-access-denied {
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
padding: 80px 24px;
text-align: center;
}
.admin-oidc-access-denied-icon {
color: var(--error);
margin-bottom: 24px;
opacity: 0.8;
}
.admin-oidc-access-denied h2 {
font-size: 1.5rem;
font-weight: 600;
color: var(--text-primary);
margin-bottom: 12px;
}
.admin-oidc-access-denied p {
color: var(--text-tertiary);
font-size: 0.9375rem;
max-width: 400px;
}
.admin-oidc-card {
background: var(--bg-secondary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-lg);
padding: 24px;
margin-bottom: 24px;
}
.admin-oidc-section {
margin-bottom: 32px;
padding-bottom: 24px;
border-bottom: 1px solid var(--border-primary);
}
.admin-oidc-section:last-of-type {
margin-bottom: 0;
padding-bottom: 0;
border-bottom: none;
}
.admin-oidc-section h2 {
font-size: 1rem;
font-weight: 600;
color: var(--text-primary);
margin-bottom: 16px;
}
.admin-oidc-form-group {
margin-bottom: 16px;
}
.admin-oidc-form-group:last-child {
margin-bottom: 0;
}
.admin-oidc-form-group label {
display: block;
font-size: 0.8125rem;
font-weight: 500;
color: var(--text-secondary);
margin-bottom: 6px;
}
.admin-oidc-form-group input[type="text"],
.admin-oidc-form-group input[type="password"],
.admin-oidc-form-group input[type="url"] {
width: 100%;
padding: 12px 14px;
background: var(--bg-tertiary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
font-size: 0.875rem;
color: var(--text-primary);
transition: all var(--transition-fast);
}
.admin-oidc-form-group input::placeholder {
color: var(--text-muted);
}
.admin-oidc-form-group input:hover:not(:disabled) {
border-color: var(--border-secondary);
background: var(--bg-elevated);
}
.admin-oidc-form-group input:focus {
outline: none;
border-color: var(--accent-primary);
box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
background: var(--bg-elevated);
}
.admin-oidc-form-group input:disabled {
opacity: 0.6;
cursor: not-allowed;
}
.admin-oidc-form-row {
display: grid;
grid-template-columns: 1fr 1fr;
gap: 16px;
}
.admin-oidc-field-help {
margin-top: 6px;
font-size: 0.75rem;
color: var(--text-muted);
line-height: 1.4;
}
.admin-oidc-field-help code {
background: var(--bg-tertiary);
padding: 1px 4px;
border-radius: 3px;
font-size: 0.6875rem;
}
.admin-oidc-secret-status {
color: var(--success);
font-weight: 400;
font-size: 0.75rem;
}
.admin-oidc-toggle-group {
margin-bottom: 16px;
}
.admin-oidc-toggle-label {
display: flex;
align-items: center;
gap: 12px;
cursor: pointer;
font-size: 0.875rem;
font-weight: 500;
color: var(--text-primary);
user-select: none;
}
.admin-oidc-toggle-label input[type="checkbox"] {
position: absolute;
opacity: 0;
width: 0;
height: 0;
}
.admin-oidc-toggle-custom {
width: 44px;
height: 24px;
background: var(--bg-tertiary);
border: 1px solid var(--border-secondary);
border-radius: 12px;
transition: all var(--transition-fast);
position: relative;
flex-shrink: 0;
}
.admin-oidc-toggle-custom::after {
content: '';
position: absolute;
left: 2px;
top: 2px;
width: 18px;
height: 18px;
background: var(--text-muted);
border-radius: 50%;
transition: all var(--transition-fast);
}
.admin-oidc-toggle-label input[type="checkbox"]:checked + .admin-oidc-toggle-custom {
background: var(--accent-primary);
border-color: var(--accent-primary);
}
.admin-oidc-toggle-label input[type="checkbox"]:checked + .admin-oidc-toggle-custom::after {
left: 22px;
background: white;
}
.admin-oidc-toggle-label input[type="checkbox"]:focus + .admin-oidc-toggle-custom {
box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
}
.admin-oidc-toggle-label:hover .admin-oidc-toggle-custom {
border-color: var(--accent-primary);
}
.admin-oidc-form-actions {
display: flex;
justify-content: flex-end;
gap: 12px;
margin-top: 24px;
padding-top: 24px;
border-top: 1px solid var(--border-primary);
}
.admin-oidc-cancel-button {
padding: 10px 18px;
background: transparent;
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
font-size: 0.875rem;
font-weight: 500;
color: var(--text-secondary);
cursor: pointer;
transition: all var(--transition-fast);
}
.admin-oidc-cancel-button:hover:not(:disabled) {
background: var(--bg-hover);
border-color: var(--border-secondary);
color: var(--text-primary);
}
.admin-oidc-cancel-button:disabled {
opacity: 0.5;
cursor: not-allowed;
}
.admin-oidc-submit-button {
display: flex;
align-items: center;
justify-content: center;
gap: 8px;
padding: 10px 18px;
background: var(--accent-gradient);
border: none;
border-radius: var(--radius-md);
font-size: 0.875rem;
font-weight: 500;
color: white;
cursor: pointer;
transition: all var(--transition-fast);
min-width: 160px;
}
.admin-oidc-submit-button:hover:not(:disabled) {
transform: translateY(-1px);
box-shadow: var(--shadow-sm), 0 0 20px rgba(16, 185, 129, 0.2);
}
.admin-oidc-submit-button:disabled {
opacity: 0.5;
cursor: not-allowed;
transform: none;
}
.admin-oidc-button-spinner {
width: 14px;
height: 14px;
border: 2px solid rgba(255, 255, 255, 0.3);
border-top-color: white;
border-radius: 50%;
animation: admin-oidc-spin 0.6s linear infinite;
}
@keyframes admin-oidc-spin {
to {
transform: rotate(360deg);
}
}
.admin-oidc-loading {
display: flex;
align-items: center;
justify-content: center;
gap: 12px;
padding: 64px 24px;
color: var(--text-tertiary);
font-size: 0.9375rem;
}
.admin-oidc-spinner {
width: 20px;
height: 20px;
border: 2px solid var(--border-secondary);
border-top-color: var(--accent-primary);
border-radius: 50%;
animation: admin-oidc-spin 0.6s linear infinite;
}
.admin-oidc-info-card {
background: var(--bg-secondary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-lg);
padding: 20px 24px;
}
.admin-oidc-info-card h3 {
font-size: 0.875rem;
font-weight: 600;
color: var(--text-primary);
margin-bottom: 8px;
}
.admin-oidc-info-card p {
font-size: 0.8125rem;
color: var(--text-tertiary);
margin-bottom: 12px;
}
.admin-oidc-callback-url {
display: block;
background: var(--bg-tertiary);
padding: 12px 16px;
border-radius: var(--radius-md);
font-size: 0.8125rem;
color: var(--text-primary);
word-break: break-all;
}
@media (max-width: 640px) {
.admin-oidc-form-row {
grid-template-columns: 1fr;
}
}

View File

@@ -0,0 +1,342 @@
import { useState, useEffect } from 'react';
import { useNavigate } from 'react-router-dom';
import { useAuth } from '../contexts/AuthContext';
import { getOIDCConfig, updateOIDCConfig } from '../api';
import { OIDCConfig } from '../types';
import './AdminOIDCPage.css';
function AdminOIDCPage() {
const { user, loading: authLoading } = useAuth();
const navigate = useNavigate();
const [config, setConfig] = useState<OIDCConfig | null>(null);
const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
const [successMessage, setSuccessMessage] = useState<string | null>(null);
// Form state
const [enabled, setEnabled] = useState(false);
const [issuerUrl, setIssuerUrl] = useState('');
const [clientId, setClientId] = useState('');
const [clientSecret, setClientSecret] = useState('');
const [scopes, setScopes] = useState('openid profile email');
const [autoCreateUsers, setAutoCreateUsers] = useState(true);
const [adminGroup, setAdminGroup] = useState('');
const [isSaving, setIsSaving] = useState(false);
useEffect(() => {
if (!authLoading && !user) {
navigate('/login', { state: { from: '/admin/oidc' } });
}
}, [user, authLoading, navigate]);
useEffect(() => {
if (user && user.is_admin) {
loadConfig();
}
}, [user]);
useEffect(() => {
if (successMessage) {
const timer = setTimeout(() => setSuccessMessage(null), 3000);
return () => clearTimeout(timer);
}
}, [successMessage]);
async function loadConfig() {
setLoading(true);
setError(null);
try {
const data = await getOIDCConfig();
setConfig(data);
setEnabled(data.enabled);
setIssuerUrl(data.issuer_url);
setClientId(data.client_id);
setScopes(data.scopes.join(' '));
setAutoCreateUsers(data.auto_create_users);
setAdminGroup(data.admin_group);
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to load OIDC configuration');
} finally {
setLoading(false);
}
}
async function handleSave(e: React.FormEvent) {
e.preventDefault();
if (enabled && !issuerUrl.trim()) {
setError('Issuer URL is required when OIDC is enabled');
return;
}
if (enabled && !clientId.trim()) {
setError('Client ID is required when OIDC is enabled');
return;
}
setIsSaving(true);
setError(null);
try {
const scopesList = scopes.split(/\s+/).filter(s => s.length > 0);
const updateData: Record<string, unknown> = {
enabled,
issuer_url: issuerUrl.trim(),
client_id: clientId.trim(),
scopes: scopesList,
auto_create_users: autoCreateUsers,
admin_group: adminGroup.trim(),
};
if (clientSecret) {
updateData.client_secret = clientSecret;
}
await updateOIDCConfig(updateData);
setSuccessMessage('OIDC configuration saved successfully');
setClientSecret('');
await loadConfig();
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to save OIDC configuration');
} finally {
setIsSaving(false);
}
}
if (authLoading) {
return (
<div className="admin-oidc-page">
<div className="admin-oidc-loading">
<div className="admin-oidc-spinner"></div>
<span>Loading...</span>
</div>
</div>
);
}
if (!user) {
return null;
}
if (!user.is_admin) {
return (
<div className="admin-oidc-page">
<div className="admin-oidc-access-denied">
<div className="admin-oidc-access-denied-icon">
<svg width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="1.5">
<circle cx="12" cy="12" r="10"/>
<line x1="4.93" y1="4.93" x2="19.07" y2="19.07"/>
</svg>
</div>
<h2>Access Denied</h2>
<p>You do not have permission to access this page. Admin privileges are required.</p>
</div>
</div>
);
}
return (
<div className="admin-oidc-page">
<div className="admin-oidc-header">
<div className="admin-oidc-header-content">
<h1>Single Sign-On (OIDC)</h1>
<p className="admin-oidc-subtitle">
Configure OpenID Connect for SSO authentication
</p>
</div>
</div>
{successMessage && (
<div className="admin-oidc-success">
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M22 11.08V12a10 10 0 1 1-5.93-9.14"/>
<polyline points="22 4 12 14.01 9 11.01"/>
</svg>
<span>{successMessage}</span>
</div>
)}
{error && (
<div className="admin-oidc-error">
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<circle cx="12" cy="12" r="10"/>
<line x1="12" y1="8" x2="12" y2="12"/>
<line x1="12" y1="16" x2="12.01" y2="16"/>
</svg>
<span>{error}</span>
<button onClick={() => setError(null)} className="admin-oidc-error-dismiss">
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<line x1="18" y1="6" x2="6" y2="18"/>
<line x1="6" y1="6" x2="18" y2="18"/>
</svg>
</button>
</div>
)}
{loading ? (
<div className="admin-oidc-card">
<div className="admin-oidc-loading">
<div className="admin-oidc-spinner"></div>
<span>Loading configuration...</span>
</div>
</div>
) : (
<form onSubmit={handleSave} className="admin-oidc-card">
<div className="admin-oidc-section">
<h2>Status</h2>
<div className="admin-oidc-toggle-group">
<label className="admin-oidc-toggle-label">
<input
type="checkbox"
checked={enabled}
onChange={(e) => setEnabled(e.target.checked)}
disabled={isSaving}
/>
<span className="admin-oidc-toggle-custom"></span>
Enable OIDC Authentication
</label>
<p className="admin-oidc-field-help">
When enabled, users can sign in using your organization's identity provider.
</p>
</div>
</div>
<div className="admin-oidc-section">
<h2>Provider Configuration</h2>
<div className="admin-oidc-form-group">
<label htmlFor="issuer-url">Issuer URL</label>
<input
id="issuer-url"
type="url"
value={issuerUrl}
onChange={(e) => setIssuerUrl(e.target.value)}
placeholder="https://your-provider.com"
disabled={isSaving}
/>
<p className="admin-oidc-field-help">
The base URL of your OIDC provider. Discovery document will be fetched from <code>/.well-known/openid-configuration</code>.
</p>
</div>
<div className="admin-oidc-form-row">
<div className="admin-oidc-form-group">
<label htmlFor="client-id">Client ID</label>
<input
id="client-id"
type="text"
value={clientId}
onChange={(e) => setClientId(e.target.value)}
placeholder="your-client-id"
disabled={isSaving}
/>
</div>
<div className="admin-oidc-form-group">
<label htmlFor="client-secret">
Client Secret
{config?.has_client_secret && (
<span className="admin-oidc-secret-status"> (configured)</span>
)}
</label>
<input
id="client-secret"
type="password"
value={clientSecret}
onChange={(e) => setClientSecret(e.target.value)}
placeholder={config?.has_client_secret ? 'Leave blank to keep current' : 'Enter client secret'}
disabled={isSaving}
/>
</div>
</div>
<div className="admin-oidc-form-group">
<label htmlFor="scopes">Scopes</label>
<input
id="scopes"
type="text"
value={scopes}
onChange={(e) => setScopes(e.target.value)}
placeholder="openid profile email"
disabled={isSaving}
/>
<p className="admin-oidc-field-help">
Space-separated list of OIDC scopes to request. Common scopes: openid, profile, email, groups.
</p>
</div>
</div>
<div className="admin-oidc-section">
<h2>User Provisioning</h2>
<div className="admin-oidc-toggle-group">
<label className="admin-oidc-toggle-label">
<input
type="checkbox"
checked={autoCreateUsers}
onChange={(e) => setAutoCreateUsers(e.target.checked)}
disabled={isSaving}
/>
<span className="admin-oidc-toggle-custom"></span>
Auto-create users on first login
</label>
<p className="admin-oidc-field-help">
When enabled, new users will be created automatically when they sign in via OIDC for the first time.
</p>
</div>
<div className="admin-oidc-form-group">
<label htmlFor="admin-group">Admin Group (optional)</label>
<input
id="admin-group"
type="text"
value={adminGroup}
onChange={(e) => setAdminGroup(e.target.value)}
placeholder="admin, orchard-admins"
disabled={isSaving}
/>
<p className="admin-oidc-field-help">
Users in this group (from the groups claim) will be granted admin privileges. Leave blank to disable automatic admin assignment.
</p>
</div>
</div>
<div className="admin-oidc-form-actions">
<button
type="button"
className="admin-oidc-cancel-button"
onClick={loadConfig}
disabled={isSaving}
>
Reset
</button>
<button
type="submit"
className="admin-oidc-submit-button"
disabled={isSaving}
>
{isSaving ? (
<>
<span className="admin-oidc-button-spinner"></span>
Saving...
</>
) : (
'Save Configuration'
)}
</button>
</div>
</form>
)}
<div className="admin-oidc-info-card">
<h3>Callback URL</h3>
<p>Configure your identity provider with the following callback URL:</p>
<code className="admin-oidc-callback-url">
{window.location.origin}/api/v1/auth/oidc/callback
</code>
</div>
</div>
);
}
export default AdminOIDCPage;
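As the field help above notes, the issuer URL is only a base; the provider's metadata is discovered from a well-known path. A small sketch of that derivation, assuming the server applies the same normalization:

// Derive the OIDC discovery URL from the issuer URL, per the field help
// above. Stripping trailing slashes avoids a '//' in the resulting path.
function discoveryUrl(issuerUrl: string): string {
  return `${issuerUrl.replace(/\/+$/, '')}/.well-known/openid-configuration`;
}

// discoveryUrl('https://your-provider.com/')
// -> 'https://your-provider.com/.well-known/openid-configuration'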

View File

@@ -0,0 +1,667 @@
.admin-users-page {
max-width: 1100px;
margin: 0 auto;
}
.admin-users-header {
display: flex;
justify-content: space-between;
align-items: flex-start;
margin-bottom: 32px;
gap: 24px;
}
.admin-users-header-content h1 {
font-size: 1.75rem;
font-weight: 600;
color: var(--text-primary);
margin-bottom: 8px;
letter-spacing: -0.02em;
}
.admin-users-subtitle {
color: var(--text-tertiary);
font-size: 0.9375rem;
}
.admin-users-create-button {
display: flex;
align-items: center;
gap: 8px;
padding: 12px 20px;
background: var(--accent-gradient);
border: none;
border-radius: var(--radius-md);
font-size: 0.875rem;
font-weight: 500;
color: white;
cursor: pointer;
transition: all var(--transition-fast);
box-shadow: var(--shadow-sm), 0 0 20px rgba(16, 185, 129, 0.2);
flex-shrink: 0;
}
.admin-users-create-button:hover:not(:disabled) {
transform: translateY(-1px);
box-shadow: var(--shadow-md), 0 0 30px rgba(16, 185, 129, 0.3);
}
.admin-users-create-button:disabled {
opacity: 0.5;
cursor: not-allowed;
transform: none;
}
.admin-users-success {
display: flex;
align-items: center;
gap: 10px;
background: var(--success-bg);
border: 1px solid rgba(34, 197, 94, 0.2);
color: var(--success);
padding: 12px 16px;
border-radius: var(--radius-md);
margin-bottom: 24px;
font-size: 0.875rem;
animation: admin-users-fade-in 0.2s ease;
}
@keyframes admin-users-fade-in {
from {
opacity: 0;
transform: translateY(-8px);
}
to {
opacity: 1;
transform: translateY(0);
}
}
.admin-users-error {
display: flex;
align-items: center;
gap: 10px;
background: var(--error-bg);
border: 1px solid rgba(239, 68, 68, 0.2);
color: var(--error);
padding: 12px 16px;
border-radius: var(--radius-md);
margin-bottom: 24px;
font-size: 0.875rem;
}
.admin-users-error svg {
flex-shrink: 0;
}
.admin-users-error span {
flex: 1;
}
.admin-users-error-dismiss {
background: transparent;
border: none;
padding: 4px;
color: var(--error);
cursor: pointer;
opacity: 0.7;
transition: opacity var(--transition-fast);
}
.admin-users-error-dismiss:hover {
opacity: 1;
}
.admin-users-access-denied {
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
padding: 80px 24px;
text-align: center;
}
.admin-users-access-denied-icon {
color: var(--error);
margin-bottom: 24px;
opacity: 0.8;
}
.admin-users-access-denied h2 {
font-size: 1.5rem;
font-weight: 600;
color: var(--text-primary);
margin-bottom: 12px;
}
.admin-users-access-denied p {
color: var(--text-tertiary);
font-size: 0.9375rem;
max-width: 400px;
}
.admin-users-create-form-card,
.admin-users-reset-password-card {
background: var(--bg-secondary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-lg);
padding: 24px;
margin-bottom: 24px;
}
.admin-users-create-form-header,
.admin-users-reset-password-header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 20px;
}
.admin-users-create-form-header h2,
.admin-users-reset-password-header h2 {
font-size: 1.125rem;
font-weight: 600;
color: var(--text-primary);
}
.admin-users-create-form-close {
background: transparent;
border: none;
padding: 4px;
color: var(--text-tertiary);
cursor: pointer;
border-radius: var(--radius-sm);
transition: all var(--transition-fast);
}
.admin-users-create-form-close:hover {
background: var(--bg-hover);
color: var(--text-primary);
}
.admin-users-reset-password-info {
color: var(--text-secondary);
font-size: 0.875rem;
margin-bottom: 16px;
}
.admin-users-reset-password-info strong {
color: var(--text-primary);
}
.admin-users-create-error {
background: var(--error-bg);
border: 1px solid rgba(239, 68, 68, 0.2);
color: var(--error);
padding: 10px 14px;
border-radius: var(--radius-md);
font-size: 0.8125rem;
margin-bottom: 16px;
}
.admin-users-create-form,
.admin-users-reset-password-form {
display: flex;
flex-direction: column;
gap: 16px;
}
.admin-users-form-group {
display: flex;
flex-direction: column;
gap: 6px;
}
.admin-users-form-group label {
font-size: 0.8125rem;
font-weight: 500;
color: var(--text-secondary);
}
.admin-users-form-group input[type="text"],
.admin-users-form-group input[type="password"],
.admin-users-form-group input[type="email"] {
padding: 12px 14px;
background: var(--bg-tertiary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
font-size: 0.875rem;
color: var(--text-primary);
transition: all var(--transition-fast);
}
.admin-users-form-group input::placeholder {
color: var(--text-muted);
}
.admin-users-form-group input:hover:not(:disabled) {
border-color: var(--border-secondary);
background: var(--bg-elevated);
}
.admin-users-form-group input:focus {
outline: none;
border-color: var(--accent-primary);
box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
background: var(--bg-elevated);
}
.admin-users-form-group input:disabled {
opacity: 0.6;
cursor: not-allowed;
}
.admin-users-checkbox-group {
flex-direction: row;
align-items: center;
}
.admin-users-checkbox-label {
display: flex;
align-items: center;
gap: 10px;
cursor: pointer;
font-size: 0.875rem;
font-weight: 400;
color: var(--text-secondary);
user-select: none;
}
.admin-users-checkbox-label input[type="checkbox"] {
position: absolute;
opacity: 0;
width: 0;
height: 0;
}
.admin-users-checkbox-custom {
width: 18px;
height: 18px;
background: var(--bg-tertiary);
border: 1px solid var(--border-secondary);
border-radius: var(--radius-sm);
transition: all var(--transition-fast);
position: relative;
}
.admin-users-checkbox-label input[type="checkbox"]:checked + .admin-users-checkbox-custom {
background: var(--accent-primary);
border-color: var(--accent-primary);
}
.admin-users-checkbox-label input[type="checkbox"]:checked + .admin-users-checkbox-custom::after {
content: '';
position: absolute;
left: 5px;
top: 2px;
width: 5px;
height: 9px;
border: solid white;
border-width: 0 2px 2px 0;
transform: rotate(45deg);
}
.admin-users-checkbox-label input[type="checkbox"]:focus + .admin-users-checkbox-custom {
box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
}
.admin-users-checkbox-label:hover .admin-users-checkbox-custom {
border-color: var(--accent-primary);
}
.admin-users-form-actions {
display: flex;
justify-content: flex-end;
gap: 12px;
margin-top: 8px;
}
.admin-users-cancel-button {
padding: 10px 18px;
background: transparent;
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
font-size: 0.875rem;
font-weight: 500;
color: var(--text-secondary);
cursor: pointer;
transition: all var(--transition-fast);
}
.admin-users-cancel-button:hover:not(:disabled) {
background: var(--bg-hover);
border-color: var(--border-secondary);
color: var(--text-primary);
}
.admin-users-cancel-button:disabled {
opacity: 0.5;
cursor: not-allowed;
}
.admin-users-submit-button {
display: flex;
align-items: center;
justify-content: center;
gap: 8px;
padding: 10px 18px;
background: var(--accent-gradient);
border: none;
border-radius: var(--radius-md);
font-size: 0.875rem;
font-weight: 500;
color: white;
cursor: pointer;
transition: all var(--transition-fast);
min-width: 120px;
}
.admin-users-submit-button:hover:not(:disabled) {
transform: translateY(-1px);
box-shadow: var(--shadow-sm), 0 0 20px rgba(16, 185, 129, 0.2);
}
.admin-users-submit-button:disabled {
opacity: 0.5;
cursor: not-allowed;
transform: none;
}
.admin-users-button-spinner {
width: 14px;
height: 14px;
border: 2px solid rgba(255, 255, 255, 0.3);
border-top-color: white;
border-radius: 50%;
animation: admin-users-spin 0.6s linear infinite;
}
@keyframes admin-users-spin {
to {
transform: rotate(360deg);
}
}
.admin-users-list-container {
background: var(--bg-secondary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-lg);
overflow: hidden;
}
.admin-users-list-loading,
.admin-users-loading {
display: flex;
align-items: center;
justify-content: center;
gap: 12px;
padding: 64px 24px;
color: var(--text-tertiary);
font-size: 0.9375rem;
}
.admin-users-spinner {
width: 20px;
height: 20px;
border: 2px solid var(--border-secondary);
border-top-color: var(--accent-primary);
border-radius: 50%;
animation: admin-users-spin 0.6s linear infinite;
}
.admin-users-empty {
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
padding: 64px 24px;
text-align: center;
}
.admin-users-empty-icon {
color: var(--text-muted);
margin-bottom: 16px;
opacity: 0.5;
}
.admin-users-empty h3 {
font-size: 1.125rem;
font-weight: 600;
color: var(--text-primary);
margin-bottom: 8px;
}
.admin-users-empty p {
color: var(--text-tertiary);
font-size: 0.875rem;
}
.admin-users-list {
display: flex;
flex-direction: column;
}
.admin-users-list-header {
display: grid;
grid-template-columns: 2fr 100px 140px 140px 1fr;
gap: 16px;
padding: 14px 20px;
background: var(--bg-tertiary);
border-bottom: 1px solid var(--border-primary);
font-size: 0.75rem;
font-weight: 600;
color: var(--text-tertiary);
text-transform: uppercase;
letter-spacing: 0.04em;
}
.admin-users-list-item {
display: grid;
grid-template-columns: 2fr 100px 140px 140px 1fr;
gap: 16px;
padding: 16px 20px;
align-items: center;
border-bottom: 1px solid var(--border-primary);
transition: background var(--transition-fast);
}
.admin-users-list-item:last-child {
border-bottom: none;
}
.admin-users-list-item:hover {
background: var(--bg-tertiary);
}
.admin-users-list-item.admin-users-inactive {
opacity: 0.6;
}
.admin-users-col-user {
display: flex;
align-items: center;
gap: 12px;
}
.admin-users-item-avatar {
width: 36px;
height: 36px;
border-radius: 50%;
background: var(--accent-gradient);
display: flex;
align-items: center;
justify-content: center;
color: white;
font-weight: 600;
font-size: 0.875rem;
flex-shrink: 0;
}
.admin-users-item-info {
display: flex;
flex-direction: column;
min-width: 0;
}
.admin-users-item-username {
font-weight: 500;
color: var(--text-primary);
font-size: 0.9375rem;
display: flex;
align-items: center;
gap: 8px;
}
.admin-users-admin-badge {
display: inline-flex;
padding: 2px 8px;
background: linear-gradient(135deg, rgba(16, 185, 129, 0.15) 0%, rgba(5, 150, 105, 0.1) 100%);
border: 1px solid rgba(16, 185, 129, 0.3);
border-radius: 20px;
font-size: 0.6875rem;
font-weight: 600;
color: var(--accent-primary);
text-transform: uppercase;
letter-spacing: 0.03em;
}
.admin-users-item-email {
color: var(--text-tertiary);
font-size: 0.8125rem;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.admin-users-col-status {
display: flex;
align-items: center;
}
.admin-users-status-badge {
display: inline-flex;
padding: 4px 10px;
border-radius: 20px;
font-size: 0.75rem;
font-weight: 500;
}
.admin-users-status-badge.active {
background: var(--success-bg);
color: var(--success);
}
.admin-users-status-badge.inactive {
background: var(--error-bg);
color: var(--error);
}
.admin-users-col-created,
.admin-users-col-login {
color: var(--text-secondary);
font-size: 0.8125rem;
}
.admin-users-col-actions {
display: flex;
justify-content: flex-end;
}
.admin-users-actions-menu {
display: flex;
gap: 6px;
}
.admin-users-action-button {
display: flex;
align-items: center;
gap: 4px;
padding: 6px 10px;
background: var(--bg-tertiary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-sm);
font-size: 0.75rem;
font-weight: 500;
color: var(--text-secondary);
cursor: pointer;
transition: all var(--transition-fast);
white-space: nowrap;
}
.admin-users-action-button:hover:not(:disabled) {
background: var(--bg-hover);
border-color: var(--border-secondary);
color: var(--text-primary);
}
.admin-users-action-button:disabled {
opacity: 0.4;
cursor: not-allowed;
}
.admin-users-action-spinner {
width: 12px;
height: 12px;
border: 2px solid var(--border-secondary);
border-top-color: var(--accent-primary);
border-radius: 50%;
animation: admin-users-spin 0.6s linear infinite;
}
@media (max-width: 1024px) {
.admin-users-list-header {
grid-template-columns: 2fr 100px 1fr;
}
.admin-users-list-item {
grid-template-columns: 2fr 100px 1fr;
}
.admin-users-col-created,
.admin-users-col-login {
display: none;
}
.admin-users-list-header .admin-users-col-created,
.admin-users-list-header .admin-users-col-login {
display: none;
}
}
@media (max-width: 768px) {
.admin-users-header {
flex-direction: column;
align-items: stretch;
}
.admin-users-create-button {
align-self: flex-start;
}
.admin-users-list-header {
display: none;
}
.admin-users-list-item {
grid-template-columns: 1fr;
gap: 12px;
padding: 16px;
}
.admin-users-col-user {
order: 1;
}
.admin-users-col-status {
order: 2;
}
.admin-users-col-actions {
order: 3;
justify-content: flex-start;
}
.admin-users-actions-menu {
flex-wrap: wrap;
}
}
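
The .admin-users-checkbox-* rules above implement the standard hidden-native-checkbox pattern: the real <input type="checkbox"> keeps zero size and opacity so it stays keyboard- and screen-reader-accessible, while the sibling .admin-users-checkbox-custom span is what the user actually sees; the :checked + sibling selector paints the checkmark via ::after. The markup the CSS expects, as used in AdminUsersPage.tsx below:

<label className="admin-users-checkbox-label">
  <input
    type="checkbox"
    checked={createIsAdmin}
    onChange={(e) => setCreateIsAdmin(e.target.checked)}
  />
  {/* Styled box; the ::after pseudo-element draws the check when the input is :checked */}
  <span className="admin-users-checkbox-custom"></span>
  Grant admin privileges
</label>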


@@ -0,0 +1,529 @@
import { useState, useEffect } from 'react';
import { useNavigate } from 'react-router-dom';
import { useAuth } from '../contexts/AuthContext';
import { listUsers, createUser, updateUser, resetUserPassword } from '../api';
import { AdminUser } from '../types';
import './AdminUsersPage.css';
function AdminUsersPage() {
const { user, loading: authLoading } = useAuth();
const navigate = useNavigate();
const [users, setUsers] = useState<AdminUser[]>([]);
const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
const [showCreateForm, setShowCreateForm] = useState(false);
const [createUsername, setCreateUsername] = useState('');
const [createPassword, setCreatePassword] = useState('');
const [createEmail, setCreateEmail] = useState('');
const [createIsAdmin, setCreateIsAdmin] = useState(false);
const [isCreating, setIsCreating] = useState(false);
const [createError, setCreateError] = useState<string | null>(null);
const [resetPasswordUsername, setResetPasswordUsername] = useState<string | null>(null);
const [newPassword, setNewPassword] = useState('');
const [isResetting, setIsResetting] = useState(false);
const [togglingUser, setTogglingUser] = useState<string | null>(null);
const [successMessage, setSuccessMessage] = useState<string | null>(null);
useEffect(() => {
if (!authLoading && !user) {
navigate('/login', { state: { from: '/admin/users' } });
}
}, [user, authLoading, navigate]);
useEffect(() => {
if (user && user.is_admin) {
loadUsers();
}
}, [user]);
useEffect(() => {
if (successMessage) {
const timer = setTimeout(() => setSuccessMessage(null), 3000);
return () => clearTimeout(timer);
}
}, [successMessage]);
async function loadUsers() {
setLoading(true);
setError(null);
try {
const data = await listUsers();
setUsers(data);
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to load users');
} finally {
setLoading(false);
}
}
async function handleCreate(e: React.FormEvent) {
e.preventDefault();
if (!createUsername.trim()) {
setCreateError('Username is required');
return;
}
if (!createPassword.trim()) {
setCreateError('Password is required');
return;
}
setIsCreating(true);
setCreateError(null);
try {
await createUser({
username: createUsername.trim(),
password: createPassword,
email: createEmail.trim() || undefined,
is_admin: createIsAdmin,
});
setShowCreateForm(false);
setCreateUsername('');
setCreatePassword('');
setCreateEmail('');
setCreateIsAdmin(false);
setSuccessMessage('User created successfully');
await loadUsers();
} catch (err) {
setCreateError(err instanceof Error ? err.message : 'Failed to create user');
} finally {
setIsCreating(false);
}
}
async function handleToggleAdmin(targetUser: AdminUser) {
setTogglingUser(targetUser.username);
try {
await updateUser(targetUser.username, { is_admin: !targetUser.is_admin });
setSuccessMessage(`${targetUser.username} is ${!targetUser.is_admin ? 'now' : 'no longer'} an admin`);
await loadUsers();
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to update user');
} finally {
setTogglingUser(null);
}
}
async function handleToggleActive(targetUser: AdminUser) {
setTogglingUser(targetUser.username);
try {
await updateUser(targetUser.username, { is_active: !targetUser.is_active });
setSuccessMessage(`${targetUser.username} has been ${!targetUser.is_active ? 'enabled' : 'disabled'}`);
await loadUsers();
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to update user');
} finally {
setTogglingUser(null);
}
}
async function handleResetPassword(e: React.FormEvent) {
e.preventDefault();
if (!resetPasswordUsername || !newPassword.trim()) {
return;
}
setIsResetting(true);
try {
await resetUserPassword(resetPasswordUsername, newPassword);
setResetPasswordUsername(null);
setNewPassword('');
setSuccessMessage(`Password reset for ${resetPasswordUsername}`);
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to reset password');
} finally {
setIsResetting(false);
}
}
function formatDate(dateString: string | null): string {
if (!dateString) return 'Never';
return new Date(dateString).toLocaleDateString('en-US', {
year: 'numeric',
month: 'short',
day: 'numeric',
hour: '2-digit',
minute: '2-digit',
});
}
if (authLoading) {
return (
<div className="admin-users-page">
<div className="admin-users-loading">
<div className="admin-users-spinner"></div>
<span>Loading...</span>
</div>
</div>
);
}
if (!user) {
return null;
}
if (!user.is_admin) {
return (
<div className="admin-users-page">
<div className="admin-users-access-denied">
<div className="admin-users-access-denied-icon">
<svg width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="1.5">
<circle cx="12" cy="12" r="10"/>
<line x1="4.93" y1="4.93" x2="19.07" y2="19.07"/>
</svg>
</div>
<h2>Access Denied</h2>
<p>You do not have permission to access this page. Admin privileges are required.</p>
</div>
</div>
);
}
return (
<div className="admin-users-page">
<div className="admin-users-header">
<div className="admin-users-header-content">
<h1>User Management</h1>
<p className="admin-users-subtitle">
Manage user accounts and permissions
</p>
</div>
<button
className="admin-users-create-button"
onClick={() => setShowCreateForm(true)}
disabled={showCreateForm}
>
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<line x1="12" y1="5" x2="12" y2="19"/>
<line x1="5" y1="12" x2="19" y2="12"/>
</svg>
Create User
</button>
</div>
{successMessage && (
<div className="admin-users-success">
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M22 11.08V12a10 10 0 1 1-5.93-9.14"/>
<polyline points="22 4 12 14.01 9 11.01"/>
</svg>
<span>{successMessage}</span>
</div>
)}
{error && (
<div className="admin-users-error">
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<circle cx="12" cy="12" r="10"/>
<line x1="12" y1="8" x2="12" y2="12"/>
<line x1="12" y1="16" x2="12.01" y2="16"/>
</svg>
<span>{error}</span>
<button onClick={() => setError(null)} className="admin-users-error-dismiss">
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<line x1="18" y1="6" x2="6" y2="18"/>
<line x1="6" y1="6" x2="18" y2="18"/>
</svg>
</button>
</div>
)}
{showCreateForm && (
<div className="admin-users-create-form-card">
<div className="admin-users-create-form-header">
<h2>Create New User</h2>
<button
className="admin-users-create-form-close"
onClick={() => {
setShowCreateForm(false);
setCreateUsername('');
setCreatePassword('');
setCreateEmail('');
setCreateIsAdmin(false);
setCreateError(null);
}}
>
<svg width="18" height="18" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<line x1="18" y1="6" x2="6" y2="18"/>
<line x1="6" y1="6" x2="18" y2="18"/>
</svg>
</button>
</div>
{createError && (
<div className="admin-users-create-error">
{createError}
</div>
)}
<form onSubmit={handleCreate} className="admin-users-create-form">
<div className="admin-users-form-group">
<label htmlFor="username">Username</label>
<input
id="username"
type="text"
value={createUsername}
onChange={(e) => setCreateUsername(e.target.value)}
placeholder="Enter username"
autoFocus
disabled={isCreating}
/>
</div>
<div className="admin-users-form-group">
<label htmlFor="password">Password</label>
<input
id="password"
type="password"
value={createPassword}
onChange={(e) => setCreatePassword(e.target.value)}
placeholder="Enter password"
disabled={isCreating}
/>
</div>
<div className="admin-users-form-group">
<label htmlFor="email">Email (optional)</label>
<input
id="email"
type="email"
value={createEmail}
onChange={(e) => setCreateEmail(e.target.value)}
placeholder="user@example.com"
disabled={isCreating}
/>
</div>
<div className="admin-users-form-group admin-users-checkbox-group">
<label className="admin-users-checkbox-label">
<input
type="checkbox"
checked={createIsAdmin}
onChange={(e) => setCreateIsAdmin(e.target.checked)}
disabled={isCreating}
/>
<span className="admin-users-checkbox-custom"></span>
Grant admin privileges
</label>
</div>
<div className="admin-users-form-actions">
<button
type="button"
className="admin-users-cancel-button"
onClick={() => {
setShowCreateForm(false);
setCreateUsername('');
setCreatePassword('');
setCreateEmail('');
setCreateIsAdmin(false);
setCreateError(null);
}}
disabled={isCreating}
>
Cancel
</button>
<button
type="submit"
className="admin-users-submit-button"
disabled={isCreating || !createUsername.trim() || !createPassword.trim()}
>
{isCreating ? (
<>
<span className="admin-users-button-spinner"></span>
Creating...
</>
) : (
'Create User'
)}
</button>
</div>
</form>
</div>
)}
{resetPasswordUsername && (
<div className="admin-users-reset-password-card">
<div className="admin-users-reset-password-header">
<h2>Reset Password</h2>
<button
className="admin-users-create-form-close"
onClick={() => {
setResetPasswordUsername(null);
setNewPassword('');
}}
>
<svg width="18" height="18" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<line x1="18" y1="6" x2="6" y2="18"/>
<line x1="6" y1="6" x2="18" y2="18"/>
</svg>
</button>
</div>
<p className="admin-users-reset-password-info">
Set a new password for <strong>{resetPasswordUsername}</strong>
</p>
<form onSubmit={handleResetPassword} className="admin-users-reset-password-form">
<div className="admin-users-form-group">
<label htmlFor="new-password">New Password</label>
<input
id="new-password"
type="password"
value={newPassword}
onChange={(e) => setNewPassword(e.target.value)}
placeholder="Enter new password"
autoFocus
disabled={isResetting}
/>
</div>
<div className="admin-users-form-actions">
<button
type="button"
className="admin-users-cancel-button"
onClick={() => {
setResetPasswordUsername(null);
setNewPassword('');
}}
disabled={isResetting}
>
Cancel
</button>
<button
type="submit"
className="admin-users-submit-button"
disabled={isResetting || !newPassword.trim()}
>
{isResetting ? (
<>
<span className="admin-users-button-spinner"></span>
Resetting...
</>
) : (
'Reset Password'
)}
</button>
</div>
</form>
</div>
)}
<div className="admin-users-list-container">
{loading ? (
<div className="admin-users-list-loading">
<div className="admin-users-spinner"></div>
<span>Loading users...</span>
</div>
) : users.length === 0 ? (
<div className="admin-users-empty">
<div className="admin-users-empty-icon">
<svg width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="1.5">
<path d="M17 21v-2a4 4 0 0 0-4-4H5a4 4 0 0 0-4 4v2"/>
<circle cx="9" cy="7" r="4"/>
<path d="M23 21v-2a4 4 0 0 0-3-3.87"/>
<path d="M16 3.13a4 4 0 0 1 0 7.75"/>
</svg>
</div>
<h3>No Users</h3>
<p>Create a user to get started</p>
</div>
) : (
<div className="admin-users-list">
<div className="admin-users-list-header">
<span className="admin-users-col-user">User</span>
<span className="admin-users-col-status">Status</span>
<span className="admin-users-col-created">Created</span>
<span className="admin-users-col-login">Last Login</span>
<span className="admin-users-col-actions">Actions</span>
</div>
{users.map((u) => (
<div key={u.id} className={`admin-users-list-item ${!u.is_active ? 'admin-users-inactive' : ''}`}>
<div className="admin-users-col-user">
<div className="admin-users-item-avatar">
{u.username.charAt(0).toUpperCase()}
</div>
<div className="admin-users-item-info">
<div className="admin-users-item-username">
{u.username}
{u.is_admin && <span className="admin-users-admin-badge">Admin</span>}
</div>
{u.email && (
<div className="admin-users-item-email">{u.email}</div>
)}
</div>
</div>
<div className="admin-users-col-status">
<span className={`admin-users-status-badge ${u.is_active ? 'active' : 'inactive'}`}>
{u.is_active ? 'Active' : 'Disabled'}
</span>
</div>
<div className="admin-users-col-created">
{formatDate(u.created_at)}
</div>
<div className="admin-users-col-login">
{formatDate(u.last_login)}
</div>
<div className="admin-users-col-actions">
<div className="admin-users-actions-menu">
<button
className="admin-users-action-button"
onClick={() => handleToggleAdmin(u)}
disabled={togglingUser === u.username || u.username === user.username}
title={u.is_admin ? 'Remove admin' : 'Make admin'}
>
{togglingUser === u.username ? (
<span className="admin-users-action-spinner"></span>
) : (
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M12 22s8-4 8-10V5l-8-3-8 3v7c0 6 8 10 8 10z"/>
</svg>
)}
{u.is_admin ? 'Revoke' : 'Admin'}
</button>
<button
className="admin-users-action-button"
onClick={() => handleToggleActive(u)}
disabled={togglingUser === u.username || u.username === user.username}
title={u.is_active ? 'Disable user' : 'Enable user'}
>
{togglingUser === u.username ? (
<span className="admin-users-action-spinner"></span>
) : u.is_active ? (
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<circle cx="12" cy="12" r="10"/>
<line x1="4.93" y1="4.93" x2="19.07" y2="19.07"/>
</svg>
) : (
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M22 11.08V12a10 10 0 1 1-5.93-9.14"/>
<polyline points="22 4 12 14.01 9 11.01"/>
</svg>
)}
{u.is_active ? 'Disable' : 'Enable'}
</button>
<button
className="admin-users-action-button"
onClick={() => setResetPasswordUsername(u.username)}
disabled={togglingUser === u.username}
title="Reset password"
>
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<rect x="3" y="11" width="18" height="11" rx="2" ry="2"/>
<path d="M7 11V7a5 5 0 0 1 10 0v4"/>
</svg>
Reset
</button>
</div>
</div>
</div>
))}
</div>
)}
</div>
</div>
);
}
export default AdminUsersPage;
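
listUsers, createUser, updateUser, and resetUserPassword are imported from the shared api module, which this changeset does not show. A minimal sketch of what they could look like, assuming a /api/v1/admin/users REST surface and cookie-based sessions (both assumptions), with the AdminUser / UserCreate / UserUpdate types taken from types.ts:

import { AdminUser, UserCreate, UserUpdate } from './types';

async function adminFetch<T>(path: string, init?: RequestInit): Promise<T> {
  const res = await fetch(path, {
    credentials: 'include',
    headers: { 'Content-Type': 'application/json' },
    ...init,
  });
  if (!res.ok) throw new Error(`Request failed (${res.status})`);
  // Some mutations may return no body; treat 204 as undefined.
  return res.status === 204 ? (undefined as T) : res.json();
}

export function listUsers(): Promise<AdminUser[]> {
  return adminFetch('/api/v1/admin/users');
}

export function createUser(body: UserCreate): Promise<AdminUser> {
  return adminFetch('/api/v1/admin/users', { method: 'POST', body: JSON.stringify(body) });
}

export function updateUser(username: string, body: UserUpdate): Promise<AdminUser> {
  return adminFetch(`/api/v1/admin/users/${encodeURIComponent(username)}`, {
    method: 'PATCH',
    body: JSON.stringify(body),
  });
}

export function resetUserPassword(username: string, newPassword: string): Promise<void> {
  return adminFetch(`/api/v1/admin/users/${encodeURIComponent(username)}/password`, {
    method: 'POST',
    body: JSON.stringify({ password: newPassword }),
  });
}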


@@ -0,0 +1,156 @@
import { useState } from 'react';
import { useNavigate } from 'react-router-dom';
import { useAuth } from '../contexts/AuthContext';
import { changePassword } from '../api';
import './LoginPage.css';
function ChangePasswordPage() {
const [currentPassword, setCurrentPassword] = useState('');
const [newPassword, setNewPassword] = useState('');
const [confirmPassword, setConfirmPassword] = useState('');
const [isSubmitting, setIsSubmitting] = useState(false);
const [error, setError] = useState<string | null>(null);
const { user, refreshUser } = useAuth();
const navigate = useNavigate();
async function handleSubmit(e: React.FormEvent) {
e.preventDefault();
if (!currentPassword || !newPassword || !confirmPassword) {
setError('Please fill in all fields');
return;
}
if (newPassword !== confirmPassword) {
setError('New passwords do not match');
return;
}
if (newPassword.length < 8) {
setError('New password must be at least 8 characters');
return;
}
if (newPassword === currentPassword) {
setError('New password must be different from current password');
return;
}
setIsSubmitting(true);
setError(null);
try {
await changePassword(currentPassword, newPassword);
// Refresh user to clear must_change_password flag
await refreshUser();
navigate('/', { replace: true });
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to change password');
} finally {
setIsSubmitting(false);
}
}
return (
<div className="login-page">
<div className="login-container">
<div className="login-card">
<div className="login-header">
<div className="login-logo">
<svg width="48" height="48" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M6 14 Q6 8 3 8 Q6 4 6 4 Q6 4 9 8 Q6 8 6 14" fill="currentColor" opacity="0.6"/>
<rect x="5.25" y="13" width="1.5" height="4" fill="currentColor" opacity="0.6"/>
<path d="M12 12 Q12 5 8 5 Q12 1 12 1 Q12 1 16 5 Q12 5 12 12" fill="currentColor"/>
<rect x="11.25" y="11" width="1.5" height="5" fill="currentColor"/>
<path d="M18 14 Q18 8 15 8 Q18 4 18 4 Q18 4 21 8 Q18 8 18 14" fill="currentColor" opacity="0.6"/>
<rect x="17.25" y="13" width="1.5" height="4" fill="currentColor" opacity="0.6"/>
<ellipse cx="12" cy="19" rx="9" ry="1.5" fill="currentColor" opacity="0.3"/>
</svg>
</div>
<h1>Change Password</h1>
{user?.must_change_password && (
<p className="login-subtitle login-warning">
You must change your password before continuing
</p>
)}
</div>
{error && (
<div className="login-error">
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<circle cx="12" cy="12" r="10"/>
<line x1="12" y1="8" x2="12" y2="12"/>
<line x1="12" y1="16" x2="12.01" y2="16"/>
</svg>
<span>{error}</span>
</div>
)}
<form onSubmit={handleSubmit} className="login-form">
<div className="login-form-group">
<label htmlFor="currentPassword">Current Password</label>
<input
id="currentPassword"
type="password"
value={currentPassword}
onChange={(e) => setCurrentPassword(e.target.value)}
placeholder="Enter current password"
autoComplete="current-password"
autoFocus
disabled={isSubmitting}
/>
</div>
<div className="login-form-group">
<label htmlFor="newPassword">New Password</label>
<input
id="newPassword"
type="password"
value={newPassword}
onChange={(e) => setNewPassword(e.target.value)}
placeholder="Enter new password (min 8 characters)"
autoComplete="new-password"
disabled={isSubmitting}
/>
</div>
<div className="login-form-group">
<label htmlFor="confirmPassword">Confirm New Password</label>
<input
id="confirmPassword"
type="password"
value={confirmPassword}
onChange={(e) => setConfirmPassword(e.target.value)}
placeholder="Confirm new password"
autoComplete="new-password"
disabled={isSubmitting}
/>
</div>
<button
type="submit"
className="login-submit"
disabled={isSubmitting}
>
{isSubmitting ? (
<>
<span className="login-spinner"></span>
Changing password...
</>
) : (
'Change Password'
)}
</button>
</form>
</div>
<div className="login-footer">
<p>Artifact storage and management system</p>
</div>
</div>
</div>
);
}
export default ChangePasswordPage;
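
changePassword is this page's only API call. A sketch under the same assumptions as above (cookie session, JSON error body with a detail field; the endpoint path is hypothetical):

export async function changePassword(currentPassword: string, newPassword: string): Promise<void> {
  // Hypothetical endpoint; the diff imports changePassword from '../api' but does not show it.
  const res = await fetch('/api/v1/auth/change-password', {
    method: 'POST',
    credentials: 'include',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ current_password: currentPassword, new_password: newPassword }),
  });
  if (!res.ok) {
    const body = await res.json().catch(() => null);
    throw new Error(body?.detail ?? 'Failed to change password');
  }
}

Note that the client-side checks in the component (8-character minimum, new password must differ from the current one) are a convenience only; the server has to re-validate, since the endpoint is reachable without the form.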


@@ -474,3 +474,16 @@
margin-top: 4px;
font-size: 0.9375rem;
}
/* Lock icon for private projects */
.lock-icon {
color: var(--warning);
flex-shrink: 0;
}
/* Project badges container */
.project-badges {
display: flex;
gap: 6px;
flex-wrap: wrap;
}


@@ -7,8 +7,19 @@ import { SortDropdown, SortOption } from '../components/SortDropdown';
import { FilterDropdown, FilterOption } from '../components/FilterDropdown';
import { FilterChip, FilterChipGroup } from '../components/FilterChip';
import { Pagination } from '../components/Pagination';
import { useAuth } from '../contexts/AuthContext';
import './Home.css';
// Lock icon SVG component
function LockIcon() {
return (
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round" className="lock-icon">
<rect x="3" y="11" width="18" height="11" rx="2" ry="2" />
<path d="M7 11V7a5 5 0 0 1 10 0v4" />
</svg>
);
}
const SORT_OPTIONS: SortOption[] = [
{ value: 'name', label: 'Name' },
{ value: 'created_at', label: 'Created' },
@@ -23,6 +34,7 @@ const VISIBILITY_OPTIONS: FilterOption[] = [
function Home() {
const [searchParams, setSearchParams] = useSearchParams();
const { user } = useAuth();
const [projectsData, setProjectsData] = useState<PaginatedResponse<Project> | null>(null);
const [loading, setLoading] = useState(true);
@@ -117,9 +129,15 @@ function Home() {
<div className="home">
<div className="page-header">
<h1>Projects</h1>
{user ? (
<button className="btn btn-primary" onClick={() => setShowForm(!showForm)}>
{showForm ? 'Cancel' : '+ New Project'}
</button>
) : (
<Link to="/login" className="btn btn-secondary">
Login to create projects
</Link>
)}
</div>
{error && <div className="error-message">{error}</div>}
@@ -199,12 +217,32 @@ function Home() {
<div className="project-grid">
{projects.map((project) => (
<Link to={`/project/${project.name}`} key={project.id} className="project-card card">
-<h3>{project.name}</h3>
+<h3>
+  {!project.is_public && <LockIcon />}
+  {project.name}
+</h3>
{project.description && <p>{project.description}</p>}
<div className="project-meta">
<div className="project-badges">
<Badge variant={project.is_public ? 'public' : 'private'}>
{project.is_public ? 'Public' : 'Private'}
</Badge>
{user && project.access_level && (
<Badge
variant={
project.is_owner
? 'success'
: project.access_level === 'admin'
? 'success'
: project.access_level === 'write'
? 'info'
: 'default'
}
>
{project.is_owner ? 'Owner' : project.access_level.charAt(0).toUpperCase() + project.access_level.slice(1)}
</Badge>
)}
</div>
<div className="project-meta__dates">
<span className="date">Created {new Date(project.created_at).toLocaleDateString()}</span>
{project.updated_at !== project.created_at && (
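The access badge above picks its variant with a nested ternary; the equivalent logic reads more clearly as a helper. This extraction matches the inline version exactly (the BadgeVariant alias is illustrative, not the Badge component's actual prop type, which this diff does not show):

import { AccessLevel } from '../types';

type BadgeVariant = 'success' | 'info' | 'default'; // illustrative alias

function accessBadgeVariant(isOwner: boolean | undefined, level: AccessLevel): BadgeVariant {
  // Owners and project admins get the strongest badge; write maps to info;
  // read falls through to the default style.
  if (isOwner || level === 'admin') return 'success';
  if (level === 'write') return 'info';
  return 'default';
}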


@@ -0,0 +1,292 @@
/* Login Page - Full viewport centered layout */
.login-page {
min-height: 100vh;
display: flex;
align-items: center;
justify-content: center;
background: var(--bg-primary);
padding: 24px;
position: relative;
overflow: hidden;
}
/* Subtle background pattern */
.login-page::before {
content: '';
position: absolute;
inset: 0;
background:
radial-gradient(circle at 20% 50%, rgba(16, 185, 129, 0.08) 0%, transparent 50%),
radial-gradient(circle at 80% 50%, rgba(16, 185, 129, 0.05) 0%, transparent 50%);
pointer-events: none;
}
.login-container {
width: 100%;
max-width: 400px;
position: relative;
z-index: 1;
}
/* Card styling */
.login-card {
background: var(--bg-secondary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-xl);
padding: 40px;
box-shadow: var(--shadow-lg);
}
/* Header section */
.login-header {
text-align: center;
margin-bottom: 32px;
}
.login-logo {
display: inline-flex;
align-items: center;
justify-content: center;
width: 80px;
height: 80px;
background: var(--accent-gradient);
border-radius: var(--radius-lg);
color: white;
margin-bottom: 24px;
box-shadow: var(--shadow-glow);
}
.login-header h1 {
font-size: 1.5rem;
font-weight: 600;
color: var(--text-primary);
margin-bottom: 8px;
letter-spacing: -0.02em;
}
.login-subtitle {
color: var(--text-tertiary);
font-size: 0.875rem;
}
.login-subtitle.login-warning {
color: var(--warning);
font-weight: 500;
}
/* Error message */
.login-error {
display: flex;
align-items: center;
gap: 10px;
background: var(--error-bg);
border: 1px solid rgba(239, 68, 68, 0.2);
color: var(--error);
padding: 12px 16px;
border-radius: var(--radius-md);
margin-bottom: 24px;
font-size: 0.875rem;
}
.login-error svg {
flex-shrink: 0;
}
/* Form styling */
.login-form {
display: flex;
flex-direction: column;
gap: 20px;
}
.login-form-group {
display: flex;
flex-direction: column;
gap: 8px;
}
.login-form-group label {
font-size: 0.875rem;
font-weight: 500;
color: var(--text-secondary);
}
.login-form-group input {
width: 100%;
padding: 14px 16px;
background: var(--bg-tertiary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
font-size: 0.9375rem;
color: var(--text-primary);
transition: all var(--transition-fast);
}
.login-form-group input::placeholder {
color: var(--text-muted);
}
.login-form-group input:hover:not(:disabled) {
border-color: var(--border-secondary);
background: var(--bg-elevated);
}
.login-form-group input:focus {
outline: none;
border-color: var(--accent-primary);
box-shadow: 0 0 0 3px rgba(16, 185, 129, 0.15);
background: var(--bg-elevated);
}
.login-form-group input:disabled {
opacity: 0.6;
cursor: not-allowed;
}
/* Submit button */
.login-submit {
display: flex;
align-items: center;
justify-content: center;
gap: 10px;
width: 100%;
padding: 14px 20px;
background: var(--accent-gradient);
border: none;
border-radius: var(--radius-md);
font-size: 0.9375rem;
font-weight: 500;
color: white;
cursor: pointer;
transition: all var(--transition-fast);
margin-top: 8px;
box-shadow: var(--shadow-sm), 0 0 20px rgba(16, 185, 129, 0.2);
}
.login-submit:hover:not(:disabled) {
transform: translateY(-1px);
box-shadow: var(--shadow-md), 0 0 30px rgba(16, 185, 129, 0.3);
}
.login-submit:active:not(:disabled) {
transform: translateY(0);
}
.login-submit:disabled {
opacity: 0.7;
cursor: not-allowed;
transform: none;
}
/* Loading spinner */
.login-spinner {
width: 16px;
height: 16px;
border: 2px solid rgba(255, 255, 255, 0.3);
border-top-color: white;
border-radius: 50%;
animation: spin 0.6s linear infinite;
}
@keyframes spin {
to {
transform: rotate(360deg);
}
}
/* Loading state */
.login-loading {
text-align: center;
padding: 64px 32px;
color: var(--text-tertiary);
font-size: 0.9375rem;
}
/* Footer */
.login-footer {
text-align: center;
margin-top: 24px;
padding-top: 24px;
}
.login-footer p {
color: var(--text-muted);
font-size: 0.8125rem;
}
/* SSO Divider */
.login-divider {
display: flex;
align-items: center;
gap: 16px;
margin: 24px 0;
}
.login-divider::before,
.login-divider::after {
content: '';
flex: 1;
height: 1px;
background: var(--border-primary);
}
.login-divider span {
font-size: 0.8125rem;
color: var(--text-muted);
text-transform: lowercase;
}
/* SSO Button */
.login-sso-button {
display: flex;
align-items: center;
justify-content: center;
gap: 10px;
width: 100%;
padding: 14px 20px;
background: var(--bg-tertiary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
font-size: 0.9375rem;
font-weight: 500;
color: var(--text-primary);
text-decoration: none;
cursor: pointer;
transition: all var(--transition-fast);
}
.login-sso-button:hover {
background: var(--bg-hover);
border-color: var(--border-secondary);
transform: translateY(-1px);
box-shadow: var(--shadow-sm);
}
.login-sso-button:active {
transform: translateY(0);
}
.login-sso-button svg {
color: var(--accent-primary);
}
/* Responsive adjustments */
@media (max-width: 480px) {
.login-card {
padding: 32px 24px;
}
.login-logo {
width: 64px;
height: 64px;
}
.login-logo svg {
width: 36px;
height: 36px;
}
.login-header h1 {
font-size: 1.25rem;
}
}


@@ -0,0 +1,186 @@
import { useState, useEffect } from 'react';
import { useNavigate, useLocation, useSearchParams } from 'react-router-dom';
import { useAuth } from '../contexts/AuthContext';
import { getOIDCStatus, getOIDCLoginUrl } from '../api';
import { OIDCStatus } from '../types';
import './LoginPage.css';
function LoginPage() {
const [username, setUsername] = useState('');
const [password, setPassword] = useState('');
const [isSubmitting, setIsSubmitting] = useState(false);
const [error, setError] = useState<string | null>(null);
const [oidcStatus, setOidcStatus] = useState<OIDCStatus | null>(null);
const [searchParams] = useSearchParams();
const { user, login, loading: authLoading, refreshUser } = useAuth();
const navigate = useNavigate();
const location = useLocation();
// Get the return URL from location state, default to home
const from = (location.state as { from?: string })?.from || '/';
// Load OIDC status on mount
useEffect(() => {
getOIDCStatus()
.then(setOidcStatus)
.catch(() => setOidcStatus({ enabled: false }));
}, []);
// Handle SSO callback - check for oidc_success or oidc_error params
useEffect(() => {
const oidcSuccess = searchParams.get('oidc_success');
const oidcError = searchParams.get('oidc_error');
if (oidcSuccess === 'true') {
refreshUser().then(() => {
navigate(from, { replace: true });
});
} else if (oidcError) {
setError(decodeURIComponent(oidcError));
}
}, [searchParams, refreshUser, navigate, from]);
// Redirect if already logged in
useEffect(() => {
if (user && !authLoading) {
navigate(from, { replace: true });
}
}, [user, authLoading, navigate, from]);
async function handleSubmit(e: React.FormEvent) {
e.preventDefault();
if (!username.trim() || !password) {
setError('Please enter both username and password');
return;
}
setIsSubmitting(true);
setError(null);
try {
await login(username, password);
navigate(from, { replace: true });
} catch (err) {
setError(err instanceof Error ? err.message : 'Login failed. Please try again.');
} finally {
setIsSubmitting(false);
}
}
// Show loading while checking auth state
if (authLoading) {
return (
<div className="login-page">
<div className="login-container">
<div className="login-loading">Checking session...</div>
</div>
</div>
);
}
return (
<div className="login-page">
<div className="login-container">
<div className="login-card">
<div className="login-header">
<div className="login-logo">
<svg width="48" height="48" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M6 14 Q6 8 3 8 Q6 4 6 4 Q6 4 9 8 Q6 8 6 14" fill="currentColor" opacity="0.6"/>
<rect x="5.25" y="13" width="1.5" height="4" fill="currentColor" opacity="0.6"/>
<path d="M12 12 Q12 5 8 5 Q12 1 12 1 Q12 1 16 5 Q12 5 12 12" fill="currentColor"/>
<rect x="11.25" y="11" width="1.5" height="5" fill="currentColor"/>
<path d="M18 14 Q18 8 15 8 Q18 4 18 4 Q18 4 21 8 Q18 8 18 14" fill="currentColor" opacity="0.6"/>
<rect x="17.25" y="13" width="1.5" height="4" fill="currentColor" opacity="0.6"/>
<ellipse cx="12" cy="19" rx="9" ry="1.5" fill="currentColor" opacity="0.3"/>
</svg>
</div>
<h1>Sign in to Orchard</h1>
<p className="login-subtitle">Content-Addressable Storage</p>
</div>
{error && (
<div className="login-error">
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<circle cx="12" cy="12" r="10"/>
<line x1="12" y1="8" x2="12" y2="12"/>
<line x1="12" y1="16" x2="12.01" y2="16"/>
</svg>
<span>{error}</span>
</div>
)}
<form onSubmit={handleSubmit} className="login-form">
<div className="login-form-group">
<label htmlFor="username">Username</label>
<input
id="username"
type="text"
value={username}
onChange={(e) => setUsername(e.target.value)}
placeholder="Enter your username"
autoComplete="username"
autoFocus
disabled={isSubmitting}
/>
</div>
<div className="login-form-group">
<label htmlFor="password">Password</label>
<input
id="password"
type="password"
value={password}
onChange={(e) => setPassword(e.target.value)}
placeholder="Enter your password"
autoComplete="current-password"
disabled={isSubmitting}
/>
</div>
<button
type="submit"
className="login-submit"
disabled={isSubmitting}
>
{isSubmitting ? (
<>
<span className="login-spinner"></span>
Signing in...
</>
) : (
'Sign in'
)}
</button>
</form>
{oidcStatus?.enabled && (
<>
<div className="login-divider">
<span>or</span>
</div>
<a
href={getOIDCLoginUrl(from !== '/' ? from : undefined)}
className="login-sso-button"
>
<svg width="18" height="18" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
<path d="M15 3h4a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2h-4"/>
<polyline points="10 17 15 12 10 7"/>
<line x1="15" y1="12" x2="3" y2="12"/>
</svg>
Sign in with SSO
</a>
</>
)}
</div>
<div className="login-footer">
<p>Artifact storage and management system</p>
</div>
</div>
</div>
);
}
export default LoginPage;
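
Unlike the other api helpers, getOIDCLoginUrl has to return a plain URL for an <a> tag rather than issue a fetch: the SSO flow needs a top-level browser navigation so the identity provider can redirect back to the /api/v1/auth/oidc/callback URL shown on the admin page. A sketch, with the path and query parameter name as assumptions:

export function getOIDCLoginUrl(returnTo?: string): string {
  // Hypothetical path and param; the diff shows only the callback URL.
  const base = '/api/v1/auth/oidc/login';
  return returnTo ? `${base}?return_to=${encodeURIComponent(returnTo)}` : base;
}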


@@ -127,6 +127,58 @@ h2 {
font-size: 0.75rem;
}
/* Download by Artifact ID Section */
.download-by-id-section {
margin-top: 32px;
background: var(--bg-secondary);
}
.download-by-id-section h3 {
margin-bottom: 12px;
color: var(--text-primary);
font-size: 1rem;
font-weight: 600;
}
.download-by-id-form {
display: flex;
gap: 12px;
align-items: center;
}
.artifact-id-input {
flex: 1;
padding: 10px 16px;
background: var(--bg-tertiary);
border: 1px solid var(--border-primary);
border-radius: var(--radius-md);
font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace;
font-size: 0.8125rem;
color: var(--text-primary);
}
.artifact-id-input::placeholder {
color: var(--text-muted);
}
.artifact-id-input:focus {
outline: none;
border-color: var(--accent-primary);
}
.btn-disabled {
opacity: 0.5;
cursor: not-allowed;
pointer-events: none;
}
.validation-hint {
margin-top: 8px;
margin-bottom: 0;
font-size: 0.75rem;
color: var(--warning-color, #f59e0b);
}
/* Usage Section */
.usage-section {
margin-top: 32px;


@@ -1,7 +1,7 @@
-import { useState, useEffect, useRef, useCallback } from 'react';
-import { useParams, useSearchParams, useNavigate } from 'react-router-dom';
-import { TagDetail, Package, PaginatedResponse } from '../types';
-import { listTags, uploadArtifact, getDownloadUrl, getPackage } from '../api';
+import { useState, useEffect, useCallback } from 'react';
+import { useParams, useSearchParams, useNavigate, useLocation } from 'react-router-dom';
+import { TagDetail, Package, PaginatedResponse, AccessLevel } from '../types';
+import { listTags, getDownloadUrl, getPackage, getMyProjectAccess, UnauthorizedError, ForbiddenError } from '../api';
import { Breadcrumb } from '../components/Breadcrumb';
import { Badge } from '../components/Badge';
import { SearchInput } from '../components/SearchInput';
@@ -9,6 +9,8 @@ import { SortDropdown, SortOption } from '../components/SortDropdown';
import { FilterChip, FilterChipGroup } from '../components/FilterChip';
import { DataTable } from '../components/DataTable';
import { Pagination } from '../components/Pagination';
import { DragDropUpload, UploadResult } from '../components/DragDropUpload';
import { useAuth } from '../contexts/AuthContext';
import './Home.css';
import './PackagePage.css';
@@ -55,16 +57,22 @@ function CopyButton({ text }: { text: string }) {
function PackagePage() {
const { projectName, packageName } = useParams<{ projectName: string; packageName: string }>();
const navigate = useNavigate();
const location = useLocation();
const [searchParams, setSearchParams] = useSearchParams();
const { user } = useAuth();
const [pkg, setPkg] = useState<Package | null>(null);
const [tagsData, setTagsData] = useState<PaginatedResponse<TagDetail> | null>(null);
const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
-const [uploading, setUploading] = useState(false);
-const [uploadResult, setUploadResult] = useState<string | null>(null);
-const [tag, setTag] = useState('');
-const fileInputRef = useRef<HTMLInputElement>(null);
+const [accessDenied, setAccessDenied] = useState(false);
+const [uploadTag, setUploadTag] = useState('');
+const [uploadSuccess, setUploadSuccess] = useState<string | null>(null);
+const [artifactIdInput, setArtifactIdInput] = useState('');
+const [accessLevel, setAccessLevel] = useState<AccessLevel | null>(null);
+// Derived permissions
+const canWrite = accessLevel === 'write' || accessLevel === 'admin';
// Get params from URL
const page = parseInt(searchParams.get('page') || '1', 10);
@@ -92,19 +100,32 @@ function PackagePage() {
try {
setLoading(true);
-const [pkgData, tagsResult] = await Promise.all([
+setAccessDenied(false);
+const [pkgData, tagsResult, accessResult] = await Promise.all([
getPackage(projectName, packageName),
listTags(projectName, packageName, { page, search, sort, order }),
getMyProjectAccess(projectName),
]);
setPkg(pkgData);
setTagsData(tagsResult);
setAccessLevel(accessResult.access_level);
setError(null);
} catch (err) {
if (err instanceof UnauthorizedError) {
navigate('/login', { state: { from: location.pathname } });
return;
}
if (err instanceof ForbiddenError) {
setAccessDenied(true);
setError('You do not have access to this package');
setLoading(false);
return;
}
setError(err instanceof Error ? err.message : 'Failed to load data');
} finally {
setLoading(false);
}
-}, [projectName, packageName, page, search, sort, order]);
+}, [projectName, packageName, page, search, sort, order, navigate, location.pathname]);
useEffect(() => {
loadData();
@@ -122,30 +143,22 @@ function PackagePage() {
return () => window.removeEventListener('keydown', handleKeyDown);
}, [navigate, projectName]);
-async function handleUpload(e: React.FormEvent) {
-  e.preventDefault();
-  const file = fileInputRef.current?.files?.[0];
-  if (!file) {
-    setError('Please select a file');
-    return;
-  }
-  try {
-    setUploading(true);
-    setError(null);
-    const result = await uploadArtifact(projectName!, packageName!, file, tag || undefined);
-    setUploadResult(`Uploaded successfully! Artifact ID: ${result.artifact_id}`);
-    setTag('');
-    if (fileInputRef.current) {
-      fileInputRef.current.value = '';
-    }
-    loadData();
-  } catch (err) {
-    setError(err instanceof Error ? err.message : 'Upload failed');
-  } finally {
-    setUploading(false);
-  }
-}
+const handleUploadComplete = useCallback((results: UploadResult[]) => {
+  const count = results.length;
+  const message = count === 1
+    ? `Uploaded successfully! Artifact ID: ${results[0].artifact_id}`
+    : `${count} files uploaded successfully!`;
+  setUploadSuccess(message);
+  setUploadTag('');
+  loadData();
+  // Auto-dismiss success message after 5 seconds
+  setTimeout(() => setUploadSuccess(null), 5000);
+}, [loadData]);
+const handleUploadError = useCallback((errorMsg: string) => {
+  setError(errorMsg);
+}, []);
const handleSearchChange = (value: string) => {
updateParams({ search: value, page: '1' });
@@ -234,6 +247,28 @@ function PackagePage() {
return <div className="loading">Loading...</div>;
}
if (accessDenied) {
return (
<div className="home">
<Breadcrumb
items={[
{ label: 'Projects', href: '/' },
{ label: projectName!, href: `/project/${projectName}` },
]}
/>
<div className="error-message" style={{ textAlign: 'center', padding: '48px 24px' }}>
<h2>Access Denied</h2>
<p>You do not have permission to view this package.</p>
{!user && (
<p style={{ marginTop: '16px' }}>
<a href="/login" className="btn btn-primary">Sign in</a>
</p>
)}
</div>
</div>
);
}
return (
<div className="home">
<Breadcrumb
@@ -292,30 +327,43 @@ function PackagePage() {
</div>
{error && <div className="error-message">{error}</div>}
-{uploadResult && <div className="success-message">{uploadResult}</div>}
+{uploadSuccess && <div className="success-message">{uploadSuccess}</div>}
{user && (
<div className="upload-section card">
<h3>Upload Artifact</h3>
-<form onSubmit={handleUpload} className="upload-form">
-  <div className="form-group">
-    <label htmlFor="file">File</label>
-    <input id="file" type="file" ref={fileInputRef} required />
-  </div>
-  <div className="form-group">
-    <label htmlFor="tag">Tag (optional)</label>
-    <input
-      id="tag"
-      type="text"
-      value={tag}
-      onChange={(e) => setTag(e.target.value)}
-      placeholder="v1.0.0, latest, stable..."
-    />
-  </div>
-  <button type="submit" className="btn btn-primary" disabled={uploading}>
-    {uploading ? 'Uploading...' : 'Upload'}
-  </button>
-</form>
+{canWrite ? (
+  <div className="upload-form">
+    <div className="form-group">
+      <label htmlFor="upload-tag">Tag (optional)</label>
+      <input
+        id="upload-tag"
+        type="text"
+        value={uploadTag}
+        onChange={(e) => setUploadTag(e.target.value)}
+        placeholder="v1.0.0, latest, stable..."
+      />
+    </div>
+    <DragDropUpload
+      projectName={projectName!}
+      packageName={packageName!}
+      tag={uploadTag || undefined}
+      onUploadComplete={handleUploadComplete}
+      onUploadError={handleUploadError}
+    />
+  </div>
+) : (
+  <DragDropUpload
+    projectName={projectName!}
+    packageName={packageName!}
+    disabled={true}
+    disabledReason="You have read-only access to this project and cannot upload artifacts."
+    onUploadComplete={handleUploadComplete}
+    onUploadError={handleUploadError}
+  />
+)}
</div>
)}
<div className="section-header">
<h2>Tags / Versions</h2>
@@ -367,6 +415,34 @@ function PackagePage() {
/>
)}
<div className="download-by-id-section card">
<h3>Download by Artifact ID</h3>
<div className="download-by-id-form">
<input
type="text"
value={artifactIdInput}
onChange={(e) => setArtifactIdInput(e.target.value.toLowerCase().replace(/[^a-f0-9]/g, '').slice(0, 64))}
placeholder="Enter SHA256 artifact ID (64 hex characters)"
className="artifact-id-input"
/>
<a
href={artifactIdInput.length === 64 ? getDownloadUrl(projectName!, packageName!, `artifact:${artifactIdInput}`) : '#'}
className={`btn btn-primary ${artifactIdInput.length !== 64 ? 'btn-disabled' : ''}`}
download
onClick={(e) => {
if (artifactIdInput.length !== 64) {
e.preventDefault();
}
}}
>
Download
</a>
</div>
{artifactIdInput.length > 0 && artifactIdInput.length !== 64 && (
<p className="validation-hint">Artifact ID must be exactly 64 hex characters ({artifactIdInput.length}/64)</p>
)}
</div>
<div className="usage-section card">
<h3>Usage</h3>
<p>Download artifacts using:</p>
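
The artifact-ID input above enforces SHA-256 hygiene on every keystroke (lowercase, hex-only, capped at 64 characters) and only activates the download link at exactly 64. The same rules as standalone helpers, equivalent to the inline logic, should another page grow an artifact-ID field:

/** Lowercase, drop anything that is not a hex digit, cap at 64 chars. */
export function normalizeArtifactId(raw: string): string {
  return raw.toLowerCase().replace(/[^a-f0-9]/g, '').slice(0, 64);
}

/** A SHA-256 digest is exactly 64 hex characters. */
export function isCompleteArtifactId(id: string): boolean {
  return /^[a-f0-9]{64}$/.test(id);
}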


@@ -1,13 +1,15 @@
import { useState, useEffect, useCallback } from 'react';
-import { useParams, Link, useSearchParams, useNavigate } from 'react-router-dom';
-import { Project, Package, PaginatedResponse } from '../types';
-import { getProject, listPackages, createPackage } from '../api';
+import { useParams, Link, useSearchParams, useNavigate, useLocation } from 'react-router-dom';
+import { Project, Package, PaginatedResponse, AccessLevel } from '../types';
+import { getProject, listPackages, createPackage, getMyProjectAccess, UnauthorizedError, ForbiddenError } from '../api';
import { Breadcrumb } from '../components/Breadcrumb';
import { Badge } from '../components/Badge';
import { SearchInput } from '../components/SearchInput';
import { SortDropdown, SortOption } from '../components/SortDropdown';
import { FilterChip, FilterChipGroup } from '../components/FilterChip';
import { Pagination } from '../components/Pagination';
import { AccessManagement } from '../components/AccessManagement';
import { useAuth } from '../contexts/AuthContext';
import './Home.css';
const SORT_OPTIONS: SortOption[] = [
@@ -29,15 +31,24 @@ function formatBytes(bytes: number): string {
function ProjectPage() {
const { projectName } = useParams<{ projectName: string }>();
const navigate = useNavigate();
const location = useLocation();
const [searchParams, setSearchParams] = useSearchParams();
const { user } = useAuth();
const [project, setProject] = useState<Project | null>(null);
const [packagesData, setPackagesData] = useState<PaginatedResponse<Package> | null>(null);
const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
const [accessDenied, setAccessDenied] = useState(false);
const [showForm, setShowForm] = useState(false);
const [newPackage, setNewPackage] = useState({ name: '', description: '', format: 'generic', platform: 'any' });
const [creating, setCreating] = useState(false);
const [accessLevel, setAccessLevel] = useState<AccessLevel | null>(null);
const [isOwner, setIsOwner] = useState(false);
// Derived permissions
const canWrite = accessLevel === 'write' || accessLevel === 'admin';
const canAdmin = accessLevel === 'admin';
// Get params from URL
const page = parseInt(searchParams.get('page') || '1', 10);
@@ -66,19 +77,33 @@ function ProjectPage() {
try {
setLoading(true);
-const [projectData, packagesResult] = await Promise.all([
+setAccessDenied(false);
+const [projectData, packagesResult, accessResult] = await Promise.all([
getProject(projectName),
listPackages(projectName, { page, search, sort, order, format: format || undefined }),
getMyProjectAccess(projectName),
]);
setProject(projectData);
setPackagesData(packagesResult);
setAccessLevel(accessResult.access_level);
setIsOwner(accessResult.is_owner);
setError(null);
} catch (err) {
if (err instanceof UnauthorizedError) {
navigate('/login', { state: { from: location.pathname } });
return;
}
if (err instanceof ForbiddenError) {
setAccessDenied(true);
setError('You do not have access to this project');
setLoading(false);
return;
}
setError(err instanceof Error ? err.message : 'Failed to load data');
} finally {
setLoading(false);
}
-}, [projectName, page, search, sort, order, format]);
+}, [projectName, page, search, sort, order, format, navigate, location.pathname]);
useEffect(() => {
loadData();
@@ -139,6 +164,23 @@ function ProjectPage() {
return <div className="loading">Loading...</div>;
}
if (accessDenied) {
return (
<div className="home">
<Breadcrumb items={[{ label: 'Projects', href: '/' }]} />
<div className="error-message" style={{ textAlign: 'center', padding: '48px 24px' }}>
<h2>Access Denied</h2>
<p>You do not have permission to view this project.</p>
{!user && (
<p style={{ marginTop: '16px' }}>
<a href="/login" className="btn btn-primary">Sign in</a>
</p>
)}
</div>
</div>
);
}
if (!project) {
return <div className="error-message">Project not found</div>;
}
@@ -159,6 +201,11 @@ function ProjectPage() {
<Badge variant={project.is_public ? 'public' : 'private'}>
{project.is_public ? 'Public' : 'Private'}
</Badge>
{accessLevel && (
<Badge variant={accessLevel === 'admin' ? 'success' : accessLevel === 'write' ? 'info' : 'default'}>
{isOwner ? 'Owner' : accessLevel.charAt(0).toUpperCase() + accessLevel.slice(1)}
</Badge>
)}
</div>
{project.description && <p className="description">{project.description}</p>}
<div className="page-header__meta">
@@ -169,14 +216,20 @@ function ProjectPage() {
<span className="meta-item">by {project.created_by}</span>
</div>
</div>
{canWrite ? (
<button className="btn btn-primary" onClick={() => setShowForm(!showForm)}>
{showForm ? 'Cancel' : '+ New Package'}
</button>
) : user ? (
<span className="text-muted" title="You have read-only access to this project">
Read-only access
</span>
) : null}
</div>
{error && <div className="error-message">{error}</div>}
-{showForm && (
+{showForm && canWrite && (
<form className="form card" onSubmit={handleCreatePackage}>
<h3>Create New Package</h3>
<div className="form-row">
@@ -316,6 +369,10 @@ function ProjectPage() {
)}
</>
)}
{canAdmin && projectName && (
<AccessManagement projectName={projectName} />
)}
</div>
);
}
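
ProjectPage and PackagePage share one access pattern: call getMyProjectAccess alongside the data fetches, redirect to /login on UnauthorizedError (401), and render an inline access-denied view on ForbiddenError (403). A sketch of the helper and error classes consistent with that contract; the endpoint path is an assumption, but the { access_level, is_owner } response shape matches how both pages consume it:

import { AccessLevel } from './types';

export class UnauthorizedError extends Error {}
export class ForbiddenError extends Error {}

export interface MyProjectAccess {
  access_level: AccessLevel | null;
  is_owner: boolean;
}

export async function getMyProjectAccess(projectName: string): Promise<MyProjectAccess> {
  const res = await fetch(
    `/api/v1/projects/${encodeURIComponent(projectName)}/access/me`, // hypothetical path
    { credentials: 'include' },
  );
  if (res.status === 401) throw new UnauthorizedError('Not authenticated');
  if (res.status === 403) throw new ForbiddenError('No access to this project');
  if (!res.ok) throw new Error(`Failed to load access level (${res.status})`);
  return res.json();
}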


@@ -0,0 +1,37 @@
import '@testing-library/jest-dom';
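// jsdom implements neither DataTransfer nor window.matchMedia, so tests that
// exercise drag-and-drop or media-query hooks need the minimal shims below.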
class MockDataTransfer implements DataTransfer {
dropEffect: DataTransfer['dropEffect'] = 'none';
effectAllowed: DataTransfer['effectAllowed'] = 'all';
files: FileList = Object.assign([], { item: (i: number) => this.files[i] || null });
items: DataTransferItemList = Object.assign([], {
add: () => null,
remove: () => {},
clear: () => {},
item: () => null,
}) as unknown as DataTransferItemList;
types: readonly string[] = [];
clearData(): void {}
getData(): string { return ''; }
setData(): void {}
setDragImage(): void {}
}
Object.defineProperty(globalThis, 'DataTransfer', {
value: MockDataTransfer,
});
Object.defineProperty(window, 'matchMedia', {
writable: true,
value: (query: string) => ({
matches: false,
media: query,
onchange: null,
addListener: () => {},
removeListener: () => {},
addEventListener: () => {},
removeEventListener: () => {},
dispatchEvent: () => false,
}),
});
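
With DataTransfer and matchMedia stubbed above, drag-and-drop interactions can be simulated in component tests. A minimal usage sketch, assuming @testing-library/react; the component and test id here are hypothetical:

import { render, fireEvent, screen } from '@testing-library/react';

test('drop events carry the mocked DataTransfer', () => {
  render(<div data-testid="dropzone" />); // stand-in for the real upload component
  const dt = new DataTransfer();          // resolves to MockDataTransfer under jsdom
  fireEvent.drop(screen.getByTestId('dropzone'), { dataTransfer: dt });
});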

View File

@@ -1,3 +1,6 @@
// Access Control types (moved to top for use in Project interface)
export type AccessLevel = 'read' | 'write' | 'admin';
export interface Project {
id: string;
name: string;
@@ -6,6 +9,9 @@ export interface Project {
created_at: string;
updated_at: string;
created_by: string;
// Access level info (populated when listing projects)
access_level?: AccessLevel | null;
is_owner?: boolean;
}
export interface TagSummary {
@@ -225,3 +231,127 @@ export interface CrossProjectStats {
bytes_saved_cross_project: number;
duplicates: CrossProjectDuplicate[];
}
// Auth types
export interface User {
id: string;
username: string;
display_name: string | null;
is_admin: boolean;
must_change_password?: boolean;
}
export interface LoginCredentials {
username: string;
password: string;
}
// API Key types
export interface APIKey {
id: string;
name: string;
description: string | null;
scopes: string[];
created_at: string;
expires_at: string | null;
last_used: string | null;
}
export interface APIKeyCreate {
name: string;
description?: string;
}
export interface APIKeyCreateResponse {
id: string;
name: string;
description: string | null;
scopes: string[];
key: string;
created_at: string;
expires_at: string | null;
}
// Admin User Management types
export interface AdminUser {
id: string;
username: string;
email: string | null;
display_name: string | null;
is_admin: boolean;
is_active: boolean;
created_at: string;
last_login: string | null;
}
export interface UserCreate {
username: string;
password: string;
email?: string;
is_admin?: boolean;
}
export interface UserUpdate {
email?: string;
is_admin?: boolean;
is_active?: boolean;
}
// Access Permission types
export interface AccessPermission {
id: string;
project_id: string;
user_id: string;
level: AccessLevel;
created_at: string;
expires_at: string | null;
}
export interface AccessPermissionCreate {
username: string;
level: AccessLevel;
expires_at?: string;
}
export interface AccessPermissionUpdate {
level?: AccessLevel;
expires_at?: string | null;
}
// Extended Project with user's access level
export interface ProjectWithAccess extends Project {
user_access_level?: AccessLevel;
}
// Current user with permissions context
export interface CurrentUser extends User {
permissions?: {
[projectId: string]: AccessLevel;
};
}
// OIDC types
export interface OIDCConfig {
enabled: boolean;
issuer_url: string;
client_id: string;
has_client_secret: boolean;
scopes: string[];
auto_create_users: boolean;
admin_group: string;
}
export interface OIDCConfigUpdate {
enabled?: boolean;
issuer_url?: string;
client_id?: string;
client_secret?: string;
scopes?: string[];
auto_create_users?: boolean;
admin_group?: string;
}
export interface OIDCStatus {
enabled: boolean;
issuer_url?: string;
}
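
Rather than enumerating string comparisons at every call site (as the canWrite/canAdmin flags do in ProjectPage), consumers of AccessLevel can rank the levels once and compare. A hedged helper sketch; the function is an assumption, not something this module exports:

const LEVEL_RANK: Record<AccessLevel, number> = { read: 0, write: 1, admin: 2 };

export function hasAtLeast(level: AccessLevel | null | undefined, required: AccessLevel): boolean {
  // read < write < admin; a missing level means no access at all
  return level != null && LEVEL_RANK[level] >= LEVEL_RANK[required];
}

// Usage: hasAtLeast(accessLevel, 'write') replaces accessLevel === 'write' || accessLevel === 'admin'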

View File

@@ -1,3 +1,4 @@
/// <reference types="vitest" />
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'
@@ -9,5 +10,11 @@ export default defineConfig({
'/health': 'http://localhost:8080',
'/project': 'http://localhost:8080',
}
},
test: {
globals: true,
environment: 'jsdom',
setupFiles: './src/test/setup.ts',
css: true,
}
})
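
With globals, jsdom, and the setup file wired in, a run is just `npx vitest run`. A minimal smoke test sketch to confirm the wiring (file name is hypothetical):

// src/test/smoke.test.ts
import { describe, it, expect } from 'vitest';

describe('test environment', () => {
  it('provides a jsdom window and the DataTransfer mock from setup.ts', () => {
    expect(typeof window).toBe('object');
    expect(new DataTransfer().getData()).toBe(''); // MockDataTransfer stub
  });
});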

View File

@@ -0,0 +1,165 @@
# Values for feature branch deployments (ephemeral dev environments)
# Hostnames are overridden by CI pipeline via --set flags
replicaCount: 1
image:
repository: registry.global.bsf.tools/esv/bsf/bsf-integration/orchard/orchard-mvp
pullPolicy: Always
tag: "latest" # Overridden by CI
imagePullSecrets:
- name: orchard-pull-secret
initContainer:
image:
repository: containers.global.bsf.tools/busybox
tag: "1.36"
pullPolicy: IfNotPresent
serviceAccount:
create: true
automount: true
annotations: {}
name: "" # Auto-generated based on release name
podAnnotations: {}
podLabels: {}
podSecurityContext: {}
securityContext:
readOnlyRootFilesystem: false
runAsNonRoot: true
runAsUser: 1000
service:
type: ClusterIP
port: 8080
# Ingress - hostnames overridden by CI pipeline
ingress:
enabled: true
className: "nginx"
annotations:
cert-manager.io/cluster-issuer: "letsencrypt"
hosts:
- host: orchard-dev.common.global.bsf.tools # Overridden by CI
paths:
- path: /
pathType: Prefix
tls:
- secretName: orchard-tls # Overridden by CI
hosts:
- orchard-dev.common.global.bsf.tools # Overridden by CI
# Lighter resources for ephemeral environments
resources:
limits:
cpu: 250m
memory: 256Mi
requests:
cpu: 100m
memory: 128Mi
livenessProbe:
httpGet:
path: /health
port: http
initialDelaySeconds: 10
periodSeconds: 10
readinessProbe:
httpGet:
path: /health
port: http
initialDelaySeconds: 5
periodSeconds: 5
autoscaling:
enabled: false
nodeSelector: {}
tolerations: []
affinity: {}
orchard:
server:
host: "0.0.0.0"
port: 8080
database:
host: ""
port: 5432
user: orchard
password: ""
dbname: orchard
sslmode: disable
existingSecret: ""
existingSecretPasswordKey: "password"
s3:
endpoint: ""
region: us-east-1
bucket: orchard-artifacts
accessKeyId: ""
secretAccessKey: ""
usePathStyle: true
existingSecret: ""
existingSecretAccessKeyKey: "access-key-id"
existingSecretSecretKeyKey: "secret-access-key"
download:
mode: "presigned"
presignedUrlExpiry: 3600
# PostgreSQL - ephemeral, no persistence
postgresql:
enabled: true
image:
registry: containers.global.bsf.tools
repository: bitnami/postgresql
tag: "15"
pullPolicy: IfNotPresent
auth:
username: orchard
password: orchard-password
database: orchard
primary:
persistence:
enabled: false
# MinIO - ephemeral, no persistence
minio:
enabled: true
image:
registry: containers.global.bsf.tools
repository: bitnami/minio
tag: "latest"
pullPolicy: IfNotPresent
auth:
rootUser: minioadmin
rootPassword: minioadmin
defaultBuckets: "orchard-artifacts"
persistence:
enabled: false
# MinIO ingress - hostname overridden by CI
minioIngress:
enabled: true
className: "nginx"
annotations:
cert-manager.io/cluster-issuer: "letsencrypt"
nginx.ingress.kubernetes.io/proxy-body-size: "0"
host: "minio-dev.common.global.bsf.tools" # Overridden by CI
tls:
enabled: true
secretName: minio-tls # Overridden by CI
redis:
enabled: false
waitForDatabase: true
global:
security:
allowInsecureImages: true
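
A hedged sketch of the CI invocation these "Overridden by CI" comments anticipate; the release name, namespace, and SHA below are placeholders, not the pipeline's actual values:

helm upgrade --install orchard-feat-abc1234 ./helm/orchard \
  --namespace orch-feat-abc1234 \
  -f helm/orchard/values-dev.yaml \
  --set image.tag=abc1234 \
  --set "ingress.hosts[0].host=orchard-abc1234.common.global.bsf.tools" \
  --set "ingress.tls[0].secretName=orchard-abc1234-tls" \
  --set "ingress.tls[0].hosts[0]=orchard-abc1234.common.global.bsf.tools" \
  --set "minioIngress.host=minio-abc1234.common.global.bsf.tools"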

View File

@@ -0,0 +1,190 @@
# Default values for orchard
replicaCount: 1
image:
repository: registry.global.bsf.tools/esv/bsf/bsf-integration/orchard/orchard-mvp
pullPolicy: Always
tag: "latest" # Defaults to chart appVersion

imagePullSecrets:
- name: orchard-pull-secret
# Init container image (used for wait-for-db, wait-for-minio)
initContainer:
image:
repository: containers.global.bsf.tools/busybox
tag: "1.36"
pullPolicy: IfNotPresent
serviceAccount:
create: true
automount: true
annotations: {}
name: "orchard"
podAnnotations: {}
podLabels: {}
podSecurityContext: {}
securityContext:
readOnlyRootFilesystem: false # Python needs to write __pycache__
runAsNonRoot: true
runAsUser: 1000
service:
type: ClusterIP
port: 8080
ingress:
enabled: true
className: "nginx"
annotations:
cert-manager.io/cluster-issuer: "letsencrypt"
hosts:
- host: orchard-stage.common.global.bsf.tools
paths:
- path: /
pathType: Prefix
tls:
- secretName: orchard-tls
hosts:
- orchard-stage.common.global.bsf.tools
resources:
limits:
cpu: 500m
memory: 512Mi
requests:
cpu: 500m
memory: 512Mi
livenessProbe:
httpGet:
path: /health
port: http
initialDelaySeconds: 10
periodSeconds: 10
readinessProbe:
httpGet:
path: /health
port: http
initialDelaySeconds: 5
periodSeconds: 5
autoscaling:
enabled: false
minReplicas: 1
maxReplicas: 10
targetCPUUtilizationPercentage: 80
targetMemoryUtilizationPercentage: 80
nodeSelector: {}
tolerations: []
affinity: {}
# Orchard server configuration
orchard:
server:
host: "0.0.0.0"
port: 8080
# Database configuration (used when postgresql.enabled is false)
database:
host: ""
port: 5432
user: orchard
password: ""
dbname: orchard
sslmode: disable
existingSecret: ""
existingSecretPasswordKey: "password"
# S3 configuration (used when minio.enabled is false)
s3:
endpoint: ""
region: us-east-1
bucket: orchard-artifacts
accessKeyId: ""
secretAccessKey: ""
usePathStyle: true
existingSecret: ""
existingSecretAccessKeyKey: "access-key-id"
existingSecretSecretKeyKey: "secret-access-key"
# Download configuration
download:
mode: "presigned" # presigned, redirect, or proxy
presignedUrlExpiry: 3600 # Presigned URL expiry in seconds
# PostgreSQL subchart configuration
postgresql:
enabled: true
image:
registry: containers.global.bsf.tools
repository: bitnami/postgresql
tag: "15"
pullPolicy: IfNotPresent
auth:
username: orchard
password: orchard-password
database: orchard
primary:
persistence:
enabled: false
size: 10Gi
# MinIO subchart configuration
minio:
enabled: true
image:
registry: containers.global.bsf.tools
repository: bitnami/minio
tag: "latest"
pullPolicy: IfNotPresent
auth:
rootUser: minioadmin
rootPassword: minioadmin
defaultBuckets: "orchard-artifacts"
persistence:
enabled: false
size: 50Gi
# MinIO external ingress for presigned URL access (separate from subchart ingress)
minioIngress:
enabled: true
className: "nginx"
annotations:
cert-manager.io/cluster-issuer: "letsencrypt"
nginx.ingress.kubernetes.io/proxy-body-size: "0" # Disable body size limit for uploads
host: "minio-orch-stage.common.global.bsf.tools"
tls:
enabled: true
secretName: minio-tls
# Redis subchart configuration (for future caching)
redis:
enabled: false
image:
registry: containers.global.bsf.tools
repository: bitnami/redis
tag: "7.2"
pullPolicy: IfNotPresent
auth:
enabled: true
password: redis-password
architecture: standalone
master:
persistence:
enabled: true
size: 1Gi
# Wait for database before starting (SQLAlchemy creates tables on startup)
waitForDatabase: true
global:
security:
allowInsecureImages: true
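
For longer-lived installs, the comments above anticipate disabling the bundled subcharts and pointing the chart at managed services through the existingSecret hooks. A hedged override sketch; host and secret names are placeholders:

postgresql:
  enabled: false
minio:
  enabled: false
orchard:
  database:
    host: postgres.internal.example      # external PostgreSQL
    existingSecret: orchard-db-creds     # password read from key "password"
  s3:
    endpoint: https://s3.internal.example
    existingSecret: orchard-s3-creds     # keys "access-key-id" / "secret-access-key"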

View File

@@ -0,0 +1,98 @@
-- Migration 004: Project and Package History Tables
-- Adds history tracking tables for project and package metadata changes
-- ============================================
-- Project History Table
-- ============================================
CREATE TABLE IF NOT EXISTS project_history (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
field_name VARCHAR(100) NOT NULL,
old_value TEXT,
new_value TEXT,
changed_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
changed_by VARCHAR(255) NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_project_history_project_id ON project_history(project_id);
CREATE INDEX IF NOT EXISTS idx_project_history_changed_at ON project_history(changed_at);
CREATE INDEX IF NOT EXISTS idx_project_history_project_changed_at ON project_history(project_id, changed_at);
-- ============================================
-- Package History Table
-- ============================================
CREATE TABLE IF NOT EXISTS package_history (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
package_id UUID NOT NULL REFERENCES packages(id) ON DELETE CASCADE,
field_name VARCHAR(100) NOT NULL,
old_value TEXT,
new_value TEXT,
changed_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
changed_by VARCHAR(255) NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_package_history_package_id ON package_history(package_id);
CREATE INDEX IF NOT EXISTS idx_package_history_changed_at ON package_history(changed_at);
CREATE INDEX IF NOT EXISTS idx_package_history_package_changed_at ON package_history(package_id, changed_at);
-- ============================================
-- Project Update Trigger
-- ============================================
CREATE OR REPLACE FUNCTION log_project_changes()
RETURNS TRIGGER AS $$
BEGIN
-- Log description change
IF OLD.description IS DISTINCT FROM NEW.description THEN
INSERT INTO project_history (project_id, field_name, old_value, new_value, changed_by)
VALUES (NEW.id, 'description', OLD.description, NEW.description, COALESCE(current_setting('app.current_user', true), 'system'));
END IF;
-- Log is_public change
IF OLD.is_public IS DISTINCT FROM NEW.is_public THEN
INSERT INTO project_history (project_id, field_name, old_value, new_value, changed_by)
VALUES (NEW.id, 'is_public', OLD.is_public::text, NEW.is_public::text, COALESCE(current_setting('app.current_user', true), 'system'));
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
DROP TRIGGER IF EXISTS project_changes_trigger ON projects;
CREATE TRIGGER project_changes_trigger
AFTER UPDATE ON projects
FOR EACH ROW
EXECUTE FUNCTION log_project_changes();
-- ============================================
-- Package Update Trigger
-- ============================================
CREATE OR REPLACE FUNCTION log_package_changes()
RETURNS TRIGGER AS $$
BEGIN
-- Log description change
IF OLD.description IS DISTINCT FROM NEW.description THEN
INSERT INTO package_history (package_id, field_name, old_value, new_value, changed_by)
VALUES (NEW.id, 'description', OLD.description, NEW.description, COALESCE(current_setting('app.current_user', true), 'system'));
END IF;
-- Log format change
IF OLD.format IS DISTINCT FROM NEW.format THEN
INSERT INTO package_history (package_id, field_name, old_value, new_value, changed_by)
VALUES (NEW.id, 'format', OLD.format, NEW.format, COALESCE(current_setting('app.current_user', true), 'system'));
END IF;
-- Log platform change
IF OLD.platform IS DISTINCT FROM NEW.platform THEN
INSERT INTO package_history (package_id, field_name, old_value, new_value, changed_by)
VALUES (NEW.id, 'platform', OLD.platform, NEW.platform, COALESCE(current_setting('app.current_user', true), 'system'));
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
DROP TRIGGER IF EXISTS package_changes_trigger ON packages;
CREATE TRIGGER package_changes_trigger
AFTER UPDATE ON packages
FOR EACH ROW
EXECUTE FUNCTION log_package_changes();
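
Because the triggers attribute changes to current_setting('app.current_user', true), a caller can tag the transaction before writing. An illustrative usage sketch, not part of the migration; the username and UUID are placeholders:

BEGIN;
SET LOCAL app.current_user = 'dmoss';
UPDATE projects SET description = 'updated description'
WHERE id = '00000000-0000-0000-0000-000000000001';
COMMIT;

-- Read the change log back, newest first:
SELECT field_name, old_value, new_value, changed_by, changed_at
FROM project_history
WHERE project_id = '00000000-0000-0000-0000-000000000001'
ORDER BY changed_at DESC;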

View File

@@ -0,0 +1,83 @@
-- Migration 005: Upload Workflow Enhancements
-- Adds status tracking and error handling for uploads
-- ============================================
-- Add status column to uploads table
-- ============================================
DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
WHERE table_name = 'uploads' AND column_name = 'status') THEN
ALTER TABLE uploads ADD COLUMN status VARCHAR(20) DEFAULT 'completed' NOT NULL;
END IF;
END $$;
-- ============================================
-- Add error_message column for failed uploads
-- ============================================
DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
WHERE table_name = 'uploads' AND column_name = 'error_message') THEN
ALTER TABLE uploads ADD COLUMN error_message TEXT;
END IF;
END $$;
-- ============================================
-- Add client_checksum column for verification
-- ============================================
DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
WHERE table_name = 'uploads' AND column_name = 'client_checksum') THEN
ALTER TABLE uploads ADD COLUMN client_checksum VARCHAR(64);
END IF;
END $$;
-- ============================================
-- Add indexes for upload status queries
-- ============================================
CREATE INDEX IF NOT EXISTS idx_uploads_status ON uploads(status);
CREATE INDEX IF NOT EXISTS idx_uploads_status_uploaded_at ON uploads(status, uploaded_at);
-- ============================================
-- Add constraint to validate status values
-- ============================================
DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM information_schema.constraint_column_usage
WHERE constraint_name = 'check_upload_status') THEN
ALTER TABLE uploads ADD CONSTRAINT check_upload_status
CHECK (status IN ('pending', 'completed', 'failed'));
END IF;
END $$;
-- ============================================
-- Create table for tracking in-progress uploads (for 409 conflict detection)
-- ============================================
CREATE TABLE IF NOT EXISTS upload_locks (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
sha256_hash VARCHAR(64) NOT NULL,
package_id UUID NOT NULL REFERENCES packages(id) ON DELETE CASCADE,
locked_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
locked_by VARCHAR(255) NOT NULL,
expires_at TIMESTAMP WITH TIME ZONE NOT NULL,
UNIQUE(sha256_hash, package_id)
);
CREATE INDEX IF NOT EXISTS idx_upload_locks_expires_at ON upload_locks(expires_at);
CREATE INDEX IF NOT EXISTS idx_upload_locks_hash_package ON upload_locks(sha256_hash, package_id);
-- ============================================
-- Function to clean up expired upload locks
-- ============================================
CREATE OR REPLACE FUNCTION cleanup_expired_upload_locks()
RETURNS INTEGER AS $$
DECLARE
deleted_count INTEGER;
BEGIN
DELETE FROM upload_locks WHERE expires_at < NOW();
GET DIAGNOSTICS deleted_count = ROW_COUNT;
RETURN deleted_count;
END;
$$ LANGUAGE plpgsql;
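
A hedged sketch of how upload_locks supports the 409 flow noted above; all values are placeholders. An uploader inserts a row with a TTL; a concurrent uploader's insert hits the UNIQUE constraint and affects zero rows, signalling a conflict; stale rows are reaped by the cleanup function:

-- Try to take the lock; zero rows inserted means another upload is in flight (409).
INSERT INTO upload_locks (sha256_hash, package_id, locked_by, expires_at)
VALUES ('e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
        '00000000-0000-0000-0000-000000000001',
        'ci-runner',
        NOW() + INTERVAL '15 minutes')
ON CONFLICT (sha256_hash, package_id) DO NOTHING;

-- Reap expired locks, e.g. from a periodic job:
SELECT cleanup_expired_upload_locks();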

View File

@@ -0,0 +1,86 @@
-- Authentication Tables Migration
-- Adds users table and updates api_keys with foreign key
-- Users table
CREATE TABLE IF NOT EXISTS users (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
username VARCHAR(255) UNIQUE NOT NULL,
password_hash VARCHAR(255),
email VARCHAR(255),
is_admin BOOLEAN DEFAULT FALSE,
is_active BOOLEAN DEFAULT TRUE,
must_change_password BOOLEAN DEFAULT FALSE,
oidc_subject VARCHAR(255),
oidc_issuer VARCHAR(512),
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
last_login TIMESTAMP WITH TIME ZONE
);
CREATE INDEX IF NOT EXISTS idx_users_username ON users(username);
CREATE INDEX IF NOT EXISTS idx_users_email ON users(email) WHERE email IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_users_oidc_subject ON users(oidc_subject) WHERE oidc_subject IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_users_is_active ON users(is_active) WHERE is_active = TRUE;
-- Sessions table for web login
CREATE TABLE IF NOT EXISTS sessions (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
token_hash VARCHAR(64) NOT NULL UNIQUE,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
expires_at TIMESTAMP WITH TIME ZONE NOT NULL,
last_accessed TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
user_agent VARCHAR(512),
ip_address VARCHAR(45)
);
CREATE INDEX IF NOT EXISTS idx_sessions_user_id ON sessions(user_id);
CREATE INDEX IF NOT EXISTS idx_sessions_token_hash ON sessions(token_hash);
CREATE INDEX IF NOT EXISTS idx_sessions_expires_at ON sessions(expires_at);
-- Auth settings for OIDC configuration (future use)
CREATE TABLE IF NOT EXISTS auth_settings (
key VARCHAR(255) PRIMARY KEY,
value TEXT NOT NULL,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);
-- Add owner_id foreign key to api_keys table (links keys to users)
-- First add the column (nullable initially)
ALTER TABLE api_keys ADD COLUMN IF NOT EXISTS owner_id UUID REFERENCES users(id) ON DELETE CASCADE;
-- Add scopes column for API key permissions
ALTER TABLE api_keys ADD COLUMN IF NOT EXISTS scopes TEXT[] DEFAULT ARRAY['read', 'write'];
-- Add description column
ALTER TABLE api_keys ADD COLUMN IF NOT EXISTS description TEXT;
-- Create index for owner_id
CREATE INDEX IF NOT EXISTS idx_api_keys_owner_id ON api_keys(owner_id) WHERE owner_id IS NOT NULL;
-- Trigger to update users.updated_at (reuses the update_updated_at_column() function this migration assumes already exists)
DROP TRIGGER IF EXISTS users_updated_at_trigger ON users;
CREATE TRIGGER users_updated_at_trigger
BEFORE UPDATE ON users
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();
-- Trigger function for bumping sessions.last_accessed (only the function is defined here; no trigger is attached in this migration)
CREATE OR REPLACE FUNCTION update_session_last_accessed()
RETURNS TRIGGER AS $$
BEGIN
NEW.last_accessed = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Function to clean up expired sessions (can be called periodically)
CREATE OR REPLACE FUNCTION cleanup_expired_sessions()
RETURNS INTEGER AS $$
DECLARE
deleted_count INTEGER;
BEGIN
DELETE FROM sessions WHERE expires_at < NOW();
GET DIAGNOSTICS deleted_count = ROW_COUNT;
RETURN deleted_count;
END;
$$ LANGUAGE plpgsql;
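
Two follow-ups this migration leaves open, sketched here as assumptions rather than shipped behavior: attaching the last_accessed trigger function defined above, and invoking session cleanup on a schedule.

-- Attach the trigger only if every UPDATE on sessions should bump last_accessed:
DROP TRIGGER IF EXISTS sessions_last_accessed_trigger ON sessions;
CREATE TRIGGER sessions_last_accessed_trigger
BEFORE UPDATE ON sessions
FOR EACH ROW
EXECUTE FUNCTION update_session_last_accessed();

-- Reap expired sessions periodically:
SELECT cleanup_expired_sessions();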