Add post-deployment integration tests
Adds integration test jobs that run after deployment to verify the deployed application is functioning correctly. Tests cover:

- Health endpoint
- Project creation
- Package creation
- Artifact upload
- Artifact download (with content verification)
- Artifact listing

Each test run creates isolated resources (using unique IDs) and cleans up after itself. Tests run against the deployed URL for both stage (main branch) and feature branch deployments.
This commit is contained in:
137
.gitlab-ci.yml
137
.gitlab-ci.yml
@@ -7,13 +7,148 @@ variables:
|
||||
# renovate: datasource=gitlab-tags depName=esv/bsf/pypi/prosper versioning=semver registryUrl=https://gitlab.global.bsf.tools
|
||||
PROSPER_VERSION: v0.64.1
|
||||
|
||||
# Pipeline stage order; the integration stage runs post-deployment smoke tests.
stages:
  - build
  - test
  - deploy
  - integration # Post-deployment integration tests
|
||||
|
||||
# IaC scan findings are reported but do not block the pipeline.
kics:
  allow_failure: true
|
||||
|
||||
# Dockerfile lint findings are reported but do not block the pipeline.
hadolint:
  allow_failure: true
|
||||
|
||||
# secrets job is a blocking check - real credential leaks should fail the pipeline
|
||||
|
||||
# Post-deployment integration tests template.
# Smoke-tests the freshly deployed application at BASE_URL (supplied by the
# concrete jobs below). Each run uses unique resource names and deletes its
# test project afterwards, so concurrent pipelines cannot collide.
# Fixes over the previous version: transport-level httpx errors are reported
# in the summary instead of crashing with a traceback, the HTTP client is
# always closed, and a failing cleanup request can no longer mask results.
.integration_test_template: &integration_test_template
  stage: integration
  image: deps.global.bsf.tools/docker/python:3.12-slim
  timeout: 10m
  before_script:
    - pip install httpx
  script:
    - |
      python - <<'PYTEST_SCRIPT'
      import hashlib
      import os
      import sys
      import uuid

      import httpx

      BASE_URL = os.environ.get("BASE_URL")
      if not BASE_URL:
          print("ERROR: BASE_URL not set")
          sys.exit(1)

      print(f"Running integration tests against {BASE_URL}")

      # Unique names per run keep concurrent pipelines isolated.
      test_id = uuid.uuid4().hex[:8]
      project_name = f"ci-test-{test_id}"
      package_name = f"test-package-{test_id}"
      test_content = f"Test content from CI pipeline {test_id}"
      # Artifacts are addressed by content hash, so it can be precomputed.
      expected_hash = hashlib.sha256(test_content.encode()).hexdigest()

      errors = []

      # Context manager guarantees the client is closed even on errors.
      with httpx.Client(base_url=BASE_URL, timeout=30.0) as client:
          try:
              # Test 1: Health endpoint
              print("\n=== Test 1: Health endpoint ===")
              r = client.get("/health")
              if r.status_code == 200:
                  print("PASS: Health check passed")
              else:
                  errors.append(f"Health check failed: {r.status_code}")

              # Test 2: Create project
              print("\n=== Test 2: Create project ===")
              r = client.post("/api/v1/projects", json={"name": project_name})
              if r.status_code == 201:
                  print(f"PASS: Created project: {project_name}")
              else:
                  errors.append(f"Failed to create project: {r.status_code} - {r.text}")

              # Test 3: Create package
              print("\n=== Test 3: Create package ===")
              r = client.post(f"/api/v1/project/{project_name}/packages", json={"name": package_name})
              if r.status_code == 201:
                  print(f"PASS: Created package: {package_name}")
              else:
                  errors.append(f"Failed to create package: {r.status_code} - {r.text}")

              # Test 4: Upload artifact
              print("\n=== Test 4: Upload artifact ===")
              files = {"file": ("test.txt", test_content.encode(), "text/plain")}
              r = client.post(f"/api/v1/project/{project_name}/{package_name}/upload", files=files)
              if r.status_code == 201:
                  upload_data = r.json()
                  print(f"PASS: Uploaded artifact: {upload_data.get('artifact_id', 'unknown')[:16]}...")
              else:
                  errors.append(f"Failed to upload: {r.status_code} - {r.text}")

              # Test 5: Download artifact by hash; content must round-trip exactly.
              print("\n=== Test 5: Download artifact ===")
              r = client.get(f"/api/v1/project/{project_name}/{package_name}/+/artifact:{expected_hash}", follow_redirects=True)
              if r.status_code == 200:
                  if r.content.decode() == test_content:
                      print("PASS: Downloaded content matches uploaded content")
                  else:
                      errors.append(f"Content mismatch: got '{r.content.decode()}'")
              else:
                  errors.append(f"Failed to download: {r.status_code}")

              # Test 6: List artifacts
              print("\n=== Test 6: List artifacts ===")
              r = client.get(f"/api/v1/project/{project_name}/{package_name}/artifacts")
              if r.status_code == 200:
                  artifacts = r.json()
                  print(f"PASS: Found {len(artifacts)} artifact(s)")
              else:
                  errors.append(f"Failed to list artifacts: {r.status_code}")

          except (httpx.HTTPError, ValueError) as exc:
              # A transport failure (connection refused, timeout) or malformed
              # JSON aborts the remaining tests, but must still appear in the
              # summary instead of crashing with an unhandled traceback.
              errors.append(f"Aborted by unexpected error: {exc!r}")

          finally:
              # Cleanup: best-effort delete of the test project. Guarded so a
              # failing cleanup request cannot mask the test results above.
              print("\n=== Cleanup ===")
              try:
                  r = client.delete(f"/api/v1/project/{project_name}")
              except httpx.HTTPError as exc:
                  print(f"Warning: Failed to cleanup project: {exc!r}")
              else:
                  if r.status_code in (200, 204):
                      print(f"PASS: Cleaned up project: {project_name}")
                  else:
                      print(f"Warning: Failed to cleanup project: {r.status_code}")

      # Report results
      print("\n" + "=" * 50)
      if errors:
          print(f"FAILED: {len(errors)} error(s)")
          for e in errors:
              print(f"  FAIL: {e}")
          sys.exit(1)
      print("SUCCESS: All integration tests passed!")
      sys.exit(0)
      PYTEST_SCRIPT
|
||||
|
||||
# Integration tests for stage deployment (main branch only).
integration_test_stage:
  <<: *integration_test_template
  needs: [deploy_stage]
  variables:
    BASE_URL: https://orchard-stage.common.global.bsf.tools
  rules:
    - if: '$CI_COMMIT_BRANCH == "main"'
      # on_success (not always): never run integration tests against a
      # deployment that failed to roll out — the failures would be misleading.
      when: on_success
|
||||
|
||||
# Integration tests for feature branch deployments (any non-main branch).
integration_test_feature:
  <<: *integration_test_template
  needs: [deploy_feature]
  variables:
    # Feature deployments are served at a per-branch slug subdomain.
    BASE_URL: https://orchard-$CI_COMMIT_REF_SLUG.common.global.bsf.tools
  rules:
    - if: '$CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != "main"'
      # on_success (not always): never run integration tests against a
      # deployment that failed to roll out — the failures would be misleading.
      when: on_success
|
||||
|
||||
# Run Python backend tests
|
||||
python_tests:
|
||||
|
||||
Reference in New Issue
Block a user