3 Commits

Author SHA1 Message Date
8d47c0c378 [NX-501 Issue] Add wait for PostgreSQL in e2e API smoke workflow
Some checks failed
Container CVE Scan (development) / Scan backend/frontend images for CVEs (push) Successful in 2m20s
E2E API Smoke / Core API E2E Smoke (push) Failing after 2m8s
PostgreSQL Compatibility Matrix / PG14 smoke (push) Successful in 8s
PostgreSQL Compatibility Matrix / PG15 smoke (push) Successful in 7s
PostgreSQL Compatibility Matrix / PG16 smoke (push) Successful in 7s
PostgreSQL Compatibility Matrix / PG17 smoke (push) Successful in 8s
PostgreSQL Compatibility Matrix / PG18 smoke (push) Successful in 8s
This change introduces a step in the e2e API smoke workflow to wait for PostgreSQL readiness before executing further steps. It retries the connection multiple times to ensure the database is available, reducing potential errors caused by service unavailability.
2026-02-15 20:07:35 +01:00
7f7cf9179f Remove Trivy scans from container CVE scan workflow
Trivy-based scanning steps and their summaries have been removed from the GitHub Actions workflow. This change focuses on streamlining the workflow by reducing redundancy and relying on alternate scanning methods.
2026-02-15 20:04:20 +01:00
3e317abda8 [NX-501 Issue] Add E2E API smoke test workflow and related test suite
Some checks failed
Container CVE Scan (development) / Scan backend/frontend images for CVEs (push) Successful in 2m45s
E2E API Smoke / Core API E2E Smoke (push) Failing after 24s
PostgreSQL Compatibility Matrix / PG14 smoke (push) Successful in 7s
PostgreSQL Compatibility Matrix / PG15 smoke (push) Successful in 8s
PostgreSQL Compatibility Matrix / PG16 smoke (push) Successful in 7s
PostgreSQL Compatibility Matrix / PG17 smoke (push) Successful in 8s
PostgreSQL Compatibility Matrix / PG18 smoke (push) Successful in 8s
Proxy Profile Validation / validate (push) Successful in 3s
Python Dependency Security / pip-audit (block high/critical) (push) Successful in 26s
This commit introduces a GitHub Actions workflow for running E2E API smoke tests on main branches and pull requests. It includes a test suite covering authentication, CRUD operations, metrics access, and alerts status. The README is updated with instructions for running the tests locally.
2026-02-15 19:44:33 +01:00
4 changed files with 274 additions and 46 deletions

View File

@@ -55,50 +55,6 @@ jobs:
provenance: false provenance: false
sbom: false sbom: false
- name: Trivy scan (backend)
uses: aquasecurity/trivy-action@0.24.0
with:
image-ref: nexapg-backend:dev-scan
format: json
output: trivy-backend.json
severity: UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL
ignore-unfixed: false
exit-code: 0
- name: Trivy scan (frontend)
uses: aquasecurity/trivy-action@0.24.0
with:
image-ref: nexapg-frontend:dev-scan
format: json
output: trivy-frontend.json
severity: UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL
ignore-unfixed: false
exit-code: 0
- name: Summarize Trivy severities
run: |
python - <<'PY'
import json
from collections import Counter
def summarize(path):
c = Counter()
with open(path, "r", encoding="utf-8") as f:
data = json.load(f)
for result in data.get("Results", []):
for v in result.get("Vulnerabilities", []) or []:
c[v.get("Severity", "UNKNOWN")] += 1
for sev in ["CRITICAL", "HIGH", "MEDIUM", "LOW", "UNKNOWN"]:
c.setdefault(sev, 0)
return c
for label, path in [("backend", "trivy-backend.json"), ("frontend", "trivy-frontend.json")]:
s = summarize(path)
print(f"===== Trivy {label} =====")
print(f"CRITICAL={s['CRITICAL']} HIGH={s['HIGH']} MEDIUM={s['MEDIUM']} LOW={s['LOW']} UNKNOWN={s['UNKNOWN']}")
print()
PY
- name: Docker Scout scan (backend) - name: Docker Scout scan (backend)
continue-on-error: true continue-on-error: true
run: | run: |
@@ -152,7 +108,5 @@ jobs:
with: with:
name: container-cve-scan-reports name: container-cve-scan-reports
path: | path: |
trivy-backend.json
trivy-frontend.json
scout-backend.txt scout-backend.txt
scout-frontend.txt scout-frontend.txt

104
.github/workflows/e2e-api-smoke.yml vendored Normal file
View File

@@ -0,0 +1,104 @@
---
# E2E API smoke workflow: boots a throwaway PostgreSQL 16 service
# container, installs the backend, waits for the database to accept
# connections, runs Alembic migrations, then executes the core API
# smoke test suite.
name: E2E API Smoke

# NOTE: generic YAML 1.1 parsers read the bare `on` key as boolean true;
# the Actions loader handles it correctly (suppress yamllint `truthy`).
on:
  push:
    branches: ["main", "master", "development"]
    paths:
      - "backend/**"
      - ".github/workflows/e2e-api-smoke.yml"
  pull_request:
    paths:
      - "backend/**"
      - ".github/workflows/e2e-api-smoke.yml"
  workflow_dispatch:

jobs:
  e2e-smoke:
    name: Core API E2E Smoke
    runs-on: ubuntu-latest

    services:
      postgres:
        image: postgres:16
        env:
          POSTGRES_DB: nexapg
          POSTGRES_USER: nexapg
          POSTGRES_PASSWORD: nexapg
        ports:
          # Quoted: digit:digit pairs are sexagesimal integers to
          # YAML 1.1 parsers, so an unquoted mapping can silently
          # become an int. The string form is unambiguous everywhere.
          - "5432:5432"
        options: >-
          --health-cmd "pg_isready -U nexapg -d nexapg"
          --health-interval 5s
          --health-timeout 5s
          --health-retries 20

    # Backend configuration for the test run. Numeric-looking values
    # are quoted so they stay strings under any YAML parser (Actions
    # env vars are strings regardless).
    env:
      APP_NAME: NexaPG Monitor
      ENVIRONMENT: test
      LOG_LEVEL: INFO
      DB_HOST: 127.0.0.1
      DB_PORT: "5432"
      DB_NAME: nexapg
      DB_USER: nexapg
      DB_PASSWORD: nexapg
      # CI-only credentials/keys; never reused outside this workflow.
      JWT_SECRET_KEY: smoke_jwt_secret_for_ci_only
      JWT_ALGORITHM: HS256
      JWT_ACCESS_TOKEN_MINUTES: "15"
      JWT_REFRESH_TOKEN_MINUTES: "10080"
      ENCRYPTION_KEY: 5fLf8HSTbEUeo1c4DnWnvkXxU6v8XJ8iW58wNw5vJ8s=
      CORS_ORIGINS: http://localhost:5173
      POLL_INTERVAL_SECONDS: "30"
      INIT_ADMIN_EMAIL: admin@example.com
      INIT_ADMIN_PASSWORD: ChangeMe123!

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.13"

      - name: Install backend dependencies + test tooling
        run: |
          python -m pip install --upgrade pip
          pip install -r backend/requirements.txt
          pip install pytest

      # The service health check covers container start, but we retry
      # here as well so a postgres that is slow to accept connections
      # cannot fail the migration step (up to 60 attempts, 2s apart).
      - name: Wait for PostgreSQL service
        run: |
          python - <<'PY'
          import asyncio

          import asyncpg


          async def wait_for_db():
              dsn = "postgresql://nexapg:nexapg@127.0.0.1:5432/nexapg?sslmode=disable"
              last_err = None
              for attempt in range(1, 61):
                  try:
                      conn = await asyncpg.connect(dsn=dsn, timeout=3)
                      try:
                          await conn.execute("SELECT 1")
                      finally:
                          await conn.close()
                      print(f"PostgreSQL ready after {attempt} attempt(s).")
                      return
                  except Exception as exc:
                      last_err = exc
                      await asyncio.sleep(2)
              raise RuntimeError(f"PostgreSQL not ready after retries: {last_err}")


          asyncio.run(wait_for_db())
          PY

      - name: Run Alembic migrations
        working-directory: backend
        run: alembic upgrade head

      - name: Run core API smoke suite
        env:
          PYTHONPATH: backend
        run: pytest -q backend/tests/e2e/test_api_smoke.py

View File

@@ -22,6 +22,7 @@ It combines FastAPI, React, and PostgreSQL in a Docker Compose stack with RBAC,
- [Reverse Proxy / SSL Guidance](#reverse-proxy--ssl-guidance) - [Reverse Proxy / SSL Guidance](#reverse-proxy--ssl-guidance)
- [Production Proxy Profile](#production-proxy-profile) - [Production Proxy Profile](#production-proxy-profile)
- [PostgreSQL Compatibility Smoke Test](#postgresql-compatibility-smoke-test) - [PostgreSQL Compatibility Smoke Test](#postgresql-compatibility-smoke-test)
- [E2E API Smoke Test](#e2e-api-smoke-test)
- [Dependency Exception Flow](#dependency-exception-flow) - [Dependency Exception Flow](#dependency-exception-flow)
- [Secret Management (Production)](#secret-management-production) - [Secret Management (Production)](#secret-management-production)
- [Troubleshooting](#troubleshooting) - [Troubleshooting](#troubleshooting)
@@ -405,6 +406,22 @@ PG_DSN_CANDIDATES='postgresql://postgres:postgres@postgres:5432/compatdb?sslmode
python backend/scripts/pg_compat_smoke.py python backend/scripts/pg_compat_smoke.py
``` ```
## E2E API Smoke Test
Core API smoke suite covers:
- auth login + `/me`
- targets CRUD
- metrics access
- alerts status
- admin users CRUD
Run locally (with backend env vars set and DB migrated):
```bash
PYTHONPATH=backend pytest -q backend/tests/e2e/test_api_smoke.py
```
## Dependency Exception Flow ## Dependency Exception Flow
Python dependency vulnerabilities are enforced by CI via `pip-audit`. Python dependency vulnerabilities are enforced by CI via `pip-audit`.

View File

@@ -0,0 +1,153 @@
import asyncio
import os
from datetime import datetime, timedelta, timezone
from uuid import uuid4
from fastapi.testclient import TestClient
from app.core.db import SessionLocal
from app.main import app
from app.models.models import Metric
def _admin_credentials() -> tuple[str, str]:
return (
os.getenv("INIT_ADMIN_EMAIL", "admin@example.com"),
os.getenv("INIT_ADMIN_PASSWORD", "ChangeMe123!"),
)
def _auth_headers(access_token: str) -> dict[str, str]:
return {"Authorization": f"Bearer {access_token}"}
async def _insert_metric(target_id: int, metric_name: str, value: float) -> None:
    """Persist a single Metric row for `target_id`, timestamped now (UTC).

    Opens a fresh async session, adds the row with empty labels, and
    commits. Used by the smoke test to guarantee at least one metric
    exists before querying the metrics endpoint.
    """
    row = Metric(
        target_id=target_id,
        ts=datetime.now(timezone.utc),
        metric_name=metric_name,
        value=value,
        labels={},
    )
    async with SessionLocal() as session:
        session.add(row)
        await session.commit()
def test_core_api_smoke_suite() -> None:
    """End-to-end smoke test of the core API surface, as one linear scenario.

    Exercises: admin login + /me, targets CRUD, metrics read (after
    inserting one row directly into the DB), alerts status, and admin
    users CRUD, then cleans up the created target. Requires a migrated
    database and the seeded admin account (see _admin_credentials).
    The TestClient context manager runs the app's lifespan events.
    """
    admin_email, admin_password = _admin_credentials()
    # Unique suffix so repeated runs don't collide on names/emails.
    unique = uuid4().hex[:8]
    target_name = f"smoke-target-{unique}"
    user_email = f"smoke-user-{unique}@example.com"
    with TestClient(app) as client:
        # Auth: login — expects access and refresh tokens in the payload.
        login_res = client.post(
            "/api/v1/auth/login",
            json={"email": admin_email, "password": admin_password},
        )
        assert login_res.status_code == 200, login_res.text
        tokens = login_res.json()
        assert tokens.get("access_token")
        assert tokens.get("refresh_token")
        headers = _auth_headers(tokens["access_token"])
        # Auth: me — identity round-trips through the bearer token.
        me_res = client.get("/api/v1/me", headers=headers)
        assert me_res.status_code == 200, me_res.text
        assert me_res.json()["email"] == admin_email
        # Targets: create a monitoring target pointing at the CI postgres.
        create_target_res = client.post(
            "/api/v1/targets",
            headers=headers,
            json={
                "name": target_name,
                "host": "127.0.0.1",
                "port": 5432,
                "dbname": "postgres",
                "username": "postgres",
                "password": "postgres",
                "sslmode": "disable",
                "use_pg_stat_statements": False,
                "owner_user_ids": [],
                "tags": {"suite": "e2e-smoke"},
            },
        )
        assert create_target_res.status_code == 201, create_target_res.text
        target = create_target_res.json()
        target_id = target["id"]
        # Targets: list/get/update — the new target must be visible.
        list_targets_res = client.get("/api/v1/targets", headers=headers)
        assert list_targets_res.status_code == 200, list_targets_res.text
        assert any(item["id"] == target_id for item in list_targets_res.json())
        get_target_res = client.get(f"/api/v1/targets/{target_id}", headers=headers)
        assert get_target_res.status_code == 200, get_target_res.text
        update_target_res = client.put(
            f"/api/v1/targets/{target_id}",
            headers=headers,
            json={"name": f"{target_name}-updated"},
        )
        assert update_target_res.status_code == 200, update_target_res.text
        assert update_target_res.json()["name"].endswith("-updated")
        # Metrics access: seed one row directly, then query a +/-5 min
        # window around now so the inserted point is always included.
        asyncio.run(_insert_metric(target_id, "connections_total", 7.0))
        now = datetime.now(timezone.utc)
        from_ts = (now - timedelta(minutes=5)).isoformat()
        to_ts = (now + timedelta(minutes=5)).isoformat()
        metrics_res = client.get(
            f"/api/v1/targets/{target_id}/metrics",
            headers=headers,
            params={"metric": "connections_total", "from": from_ts, "to": to_ts},
        )
        assert metrics_res.status_code == 200, metrics_res.text
        assert isinstance(metrics_res.json(), list)
        assert len(metrics_res.json()) >= 1
        # Alerts status — only the payload shape is asserted here.
        alerts_status_res = client.get("/api/v1/alerts/status", headers=headers)
        assert alerts_status_res.status_code == 200, alerts_status_res.text
        payload = alerts_status_res.json()
        assert "warnings" in payload
        assert "alerts" in payload
        # Admin users: list/create/update/delete full round-trip.
        users_res = client.get("/api/v1/admin/users", headers=headers)
        assert users_res.status_code == 200, users_res.text
        assert isinstance(users_res.json(), list)
        create_user_res = client.post(
            "/api/v1/admin/users",
            headers=headers,
            json={
                "email": user_email,
                "first_name": "Smoke",
                "last_name": "User",
                "password": "SmokePass123!",
                "role": "viewer",
            },
        )
        assert create_user_res.status_code == 201, create_user_res.text
        created_user_id = create_user_res.json()["id"]
        update_user_res = client.put(
            f"/api/v1/admin/users/{created_user_id}",
            headers=headers,
            json={"role": "operator", "first_name": "SmokeUpdated"},
        )
        assert update_user_res.status_code == 200, update_user_res.text
        assert update_user_res.json()["role"] == "operator"
        delete_user_res = client.delete(f"/api/v1/admin/users/{created_user_id}", headers=headers)
        assert delete_user_res.status_code == 200, delete_user_res.text
        assert delete_user_res.json().get("status") == "deleted"
        # Cleanup target so repeated runs leave no residue.
        delete_target_res = client.delete(f"/api/v1/targets/{target_id}", headers=headers)
        assert delete_target_res.status_code == 200, delete_target_res.text
        assert delete_target_res.json().get("status") == "deleted"