diff --git a/.env.example b/.env.example
new file mode 100644
index 0000000..4d6446c
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,27 @@
+# App
+APP_NAME=NexaPG Monitor
+ENVIRONMENT=dev
+LOG_LEVEL=INFO
+
+# Core DB
+DB_NAME=nexapg
+DB_USER=nexapg
+DB_PASSWORD=nexapg
+DB_PORT=5433
+
+# Backend
+BACKEND_PORT=8000
+JWT_SECRET_KEY=change_this_super_secret
+JWT_ALGORITHM=HS256
+JWT_ACCESS_TOKEN_MINUTES=15
+JWT_REFRESH_TOKEN_MINUTES=10080
+# Generate with: python -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())"
+ENCRYPTION_KEY=REPLACE_WITH_FERNET_KEY
+CORS_ORIGINS=http://localhost:5173,http://localhost:8080
+POLL_INTERVAL_SECONDS=30
+INIT_ADMIN_EMAIL=admin@example.com
+INIT_ADMIN_PASSWORD=ChangeMe123!
+
+# Frontend
+FRONTEND_PORT=5173
+VITE_API_URL=http://localhost:8000/api/v1
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..d529d31
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,14 @@
+.env
+.venv/
+__pycache__/
+*.pyc
+*.pyo
+*.pyd
+.pytest_cache/
+.mypy_cache/
+node_modules/
+dist/
+coverage/
+backend/.venv/
+frontend/node_modules/
+frontend/dist/
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..524bdc9
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,13 @@
+.PHONY: up down logs migrate
+
+up:
+	docker compose up -d --build
+
+down:
+	docker compose down
+
+logs:
+	docker compose logs -f --tail=200
+
+migrate:
+	docker compose exec backend alembic upgrade head
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..f65259e
--- /dev/null
+++ b/README.md
@@ -0,0 +1,113 @@
+# NexaPG - PostgreSQL Monitoring Stack
+
+Docker-basierte Monitoring-Loesung fuer mehrere PostgreSQL-Targets mit FastAPI + React.
+
+## Features
+
+- Multi-target PostgreSQL Monitoring (remote)
+- Polling Collector fuer:
+ - `pg_stat_database`
+ - `pg_stat_activity`
+ - `pg_stat_bgwriter`
+ - `pg_locks`
+ - `pg_stat_statements` (falls auf Target aktiviert)
+- Core-DB fuer:
+ - User/Auth/RBAC (`admin`, `operator`, `viewer`)
+ - Targets (Credentials verschluesselt via Fernet)
+ - Metrics / Query Stats
+ - Audit Logs
+- Auth mit JWT Access/Refresh Tokens
+- FastAPI + SQLAlchemy async + Alembic
+- React (Vite) Frontend mit:
+ - Login/Logout
+ - Dashboard
+ - Target Detail mit Charts
+ - Query Insights
+ - Admin User Management
+- Health Endpoints:
+ - `/api/v1/healthz`
+ - `/api/v1/readyz`
+
+## Struktur
+
+- `backend/` FastAPI App
+- `frontend/` React (Vite) App
+- `ops/` Scripts
+- `docker-compose.yml` Stack
+- `.env.example` Konfigurationsvorlage
+
+## Schnellstart
+
+1. Env-Datei erstellen:
+
+```bash
+cp .env.example .env
+```
+
+2. Fernet Key setzen:
+
+```bash
+python -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())"
+```
+
+Wert in `.env` bei `ENCRYPTION_KEY` eintragen.
+
+3. Stack starten:
+
+```bash
+make up
+```
+
+4. URLs:
+
+- Frontend: `http://localhost:5173`
+- Backend API: `http://localhost:8000/api/v1`
+- OpenAPI: `http://localhost:8000/docs`
+
+Default Admin (aus `.env`):
+- Email: `admin@example.com`
+- Passwort: `ChangeMe123!`
+
+## Commands
+
+```bash
+make up
+make down
+make logs
+make migrate
+```
+
+## API (Minimum)
+
+- `POST /api/v1/auth/login`
+- `POST /api/v1/auth/refresh`
+- `POST /api/v1/auth/logout`
+- `GET /api/v1/me`
+- CRUD: `GET/POST/PUT/DELETE /api/v1/targets`
+- `GET /api/v1/targets/{id}/metrics?from=&to=&metric=`
+- `GET /api/v1/targets/{id}/locks`
+- `GET /api/v1/targets/{id}/activity`
+- `GET /api/v1/targets/{id}/top-queries`
+- Admin-only CRUD users:
+ - `GET /api/v1/admin/users`
+ - `POST /api/v1/admin/users`
+ - `PUT /api/v1/admin/users/{user_id}`
+ - `DELETE /api/v1/admin/users/{user_id}`
+
+## Security Notes
+
+- Keine Secrets im Code oder in versionierten Configs hardcoded (Konfiguration ausschliesslich via `.env`)
+- Passwoerter als Argon2 Hash
+- Target-Credentials verschluesselt (Fernet)
+- CORS via Env steuerbar
+- Audit Logs fuer Login / Logout / Target- und User-Aenderungen
+- Rate limiting: Platzhalter (kann spaeter middleware-basiert ergaenzt werden)
+
+## Wichtiger Hinweis zu `pg_stat_statements`
+
+Auf jedem ueberwachten Target muss `pg_stat_statements` aktiv sein: die Extension anlegen UND `shared_preload_libraries = 'pg_stat_statements'` in der `postgresql.conf` setzen (erfordert einen Server-Neustart), sonst bleiben die Query Insights leer.
+Beispiel:
+
+```sql
+CREATE EXTENSION IF NOT EXISTS pg_stat_statements;
+```
diff --git a/backend/Dockerfile b/backend/Dockerfile
new file mode 100644
index 0000000..bb316eb
--- /dev/null
+++ b/backend/Dockerfile
@@ -0,0 +1,24 @@
+FROM python:3.12-slim AS base
+
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONUNBUFFERED=1
+ENV PIP_NO_CACHE_DIR=1
+
+WORKDIR /app
+
+RUN addgroup --system app && adduser --system --ingroup app app
+
+COPY requirements.txt /app/requirements.txt
+RUN pip install --upgrade pip && pip install -r /app/requirements.txt
+
+COPY . /app
+
+RUN chmod +x /app/entrypoint.sh
+
+USER app
+
+EXPOSE 8000
+
+HEALTHCHECK --interval=30s --timeout=5s --retries=5 CMD python -c "import urllib.request; urllib.request.urlopen('http://127.0.0.1:8000/api/v1/healthz')" || exit 1
+
+CMD ["/app/entrypoint.sh"]
diff --git a/backend/alembic.ini b/backend/alembic.ini
new file mode 100644
index 0000000..7f0e5c3
--- /dev/null
+++ b/backend/alembic.ini
@@ -0,0 +1,37 @@
+[alembic]
+script_location = alembic
+prepend_sys_path = .
+sqlalchemy.url = postgresql+asyncpg://user:pass@localhost:5432/dbname
+
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers = console
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/backend/alembic/env.py b/backend/alembic/env.py
new file mode 100644
index 0000000..92322b6
--- /dev/null
+++ b/backend/alembic/env.py
@@ -0,0 +1,48 @@
+from logging.config import fileConfig
+from sqlalchemy import pool
+from sqlalchemy.engine import Connection
+from sqlalchemy.ext.asyncio import async_engine_from_config
+from alembic import context
+from app.core.config import get_settings
+from app.core.db import Base
+from app.models import models # noqa: F401
+
+config = context.config
+if config.config_file_name is not None:
+ fileConfig(config.config_file_name)
+
+settings = get_settings()
+config.set_main_option("sqlalchemy.url", settings.database_url)
+target_metadata = Base.metadata
+
+
+def run_migrations_offline() -> None:
+ url = config.get_main_option("sqlalchemy.url")
+ context.configure(url=url, target_metadata=target_metadata, literal_binds=True, dialect_opts={"paramstyle": "named"})
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+def do_run_migrations(connection: Connection) -> None:
+ context.configure(connection=connection, target_metadata=target_metadata)
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+async def run_migrations_online() -> None:
+ connectable = async_engine_from_config(
+ config.get_section(config.config_ini_section, {}),
+ prefix="sqlalchemy.",
+ poolclass=pool.NullPool,
+ )
+ async with connectable.connect() as connection:
+ await connection.run_sync(do_run_migrations)
+ await connectable.dispose()
+
+
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ import asyncio
+
+ asyncio.run(run_migrations_online())
diff --git a/backend/alembic/script.py.mako b/backend/alembic/script.py.mako
new file mode 100644
index 0000000..33ffb45
--- /dev/null
+++ b/backend/alembic/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+"""
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+ ${downgrades if downgrades else "pass"}
diff --git a/backend/alembic/versions/0001_init.py b/backend/alembic/versions/0001_init.py
new file mode 100644
index 0000000..42d6523
--- /dev/null
+++ b/backend/alembic/versions/0001_init.py
@@ -0,0 +1,98 @@
+"""init schema
+
+Revision ID: 0001_init
+Revises:
+Create Date: 2026-02-12
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+revision = "0001_init"
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ op.create_table(
+ "users",
+ sa.Column("id", sa.Integer(), primary_key=True),
+ sa.Column("email", sa.String(length=255), nullable=False, unique=True),
+ sa.Column("password_hash", sa.String(length=255), nullable=False),
+ sa.Column("role", sa.String(length=20), nullable=False),
+ sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
+ )
+ op.create_index("ix_users_email", "users", ["email"], unique=True)
+
+ op.create_table(
+ "targets",
+ sa.Column("id", sa.Integer(), primary_key=True),
+ sa.Column("name", sa.String(length=120), nullable=False, unique=True),
+ sa.Column("host", sa.String(length=255), nullable=False),
+ sa.Column("port", sa.Integer(), nullable=False, server_default="5432"),
+ sa.Column("dbname", sa.String(length=120), nullable=False),
+ sa.Column("username", sa.String(length=120), nullable=False),
+ sa.Column("encrypted_password", sa.Text(), nullable=False),
+ sa.Column("sslmode", sa.String(length=20), nullable=False, server_default="prefer"),
+ sa.Column("tags", sa.JSON(), nullable=False, server_default="{}"),
+ sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
+ )
+ op.create_index("ix_targets_name", "targets", ["name"], unique=True)
+
+ op.create_table(
+ "metrics",
+ sa.Column("id", sa.Integer(), primary_key=True),
+ sa.Column("target_id", sa.Integer(), sa.ForeignKey("targets.id", ondelete="CASCADE"), nullable=False),
+ sa.Column("ts", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
+ sa.Column("metric_name", sa.String(length=120), nullable=False),
+ sa.Column("value", sa.Float(), nullable=False),
+ sa.Column("labels", sa.JSON(), nullable=False, server_default="{}"),
+ )
+ op.create_index("ix_metrics_target_id", "metrics", ["target_id"])
+ op.create_index("ix_metrics_ts", "metrics", ["ts"])
+ op.create_index("ix_metrics_metric_name", "metrics", ["metric_name"])
+
+ op.create_table(
+ "query_stats",
+ sa.Column("id", sa.Integer(), primary_key=True),
+ sa.Column("target_id", sa.Integer(), sa.ForeignKey("targets.id", ondelete="CASCADE"), nullable=False),
+ sa.Column("ts", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
+ sa.Column("queryid", sa.String(length=100), nullable=False),
+ sa.Column("calls", sa.Integer(), nullable=False, server_default="0"),
+ sa.Column("total_time", sa.Float(), nullable=False, server_default="0"),
+ sa.Column("mean_time", sa.Float(), nullable=False, server_default="0"),
+ sa.Column("rows", sa.Integer(), nullable=False, server_default="0"),
+ sa.Column("query_text", sa.Text(), nullable=True),
+ )
+ op.create_index("ix_query_stats_target_id", "query_stats", ["target_id"])
+ op.create_index("ix_query_stats_ts", "query_stats", ["ts"])
+
+ op.create_table(
+ "audit_logs",
+ sa.Column("id", sa.Integer(), primary_key=True),
+ sa.Column("ts", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
+ sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id"), nullable=True),
+ sa.Column("action", sa.String(length=120), nullable=False),
+ sa.Column("payload", sa.JSON(), nullable=False, server_default="{}"),
+ )
+ op.create_index("ix_audit_logs_ts", "audit_logs", ["ts"])
+ op.create_index("ix_audit_logs_user_id", "audit_logs", ["user_id"])
+
+
+def downgrade() -> None:
+ op.drop_index("ix_audit_logs_user_id", table_name="audit_logs")
+ op.drop_index("ix_audit_logs_ts", table_name="audit_logs")
+ op.drop_table("audit_logs")
+ op.drop_index("ix_query_stats_ts", table_name="query_stats")
+ op.drop_index("ix_query_stats_target_id", table_name="query_stats")
+ op.drop_table("query_stats")
+ op.drop_index("ix_metrics_metric_name", table_name="metrics")
+ op.drop_index("ix_metrics_ts", table_name="metrics")
+ op.drop_index("ix_metrics_target_id", table_name="metrics")
+ op.drop_table("metrics")
+ op.drop_index("ix_targets_name", table_name="targets")
+ op.drop_table("targets")
+ op.drop_index("ix_users_email", table_name="users")
+ op.drop_table("users")
diff --git a/backend/app/__init__.py b/backend/app/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/backend/app/__init__.py
@@ -0,0 +1 @@
+
diff --git a/backend/app/api/__init__.py b/backend/app/api/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/backend/app/api/__init__.py
@@ -0,0 +1 @@
+
diff --git a/backend/app/api/router.py b/backend/app/api/router.py
new file mode 100644
index 0000000..e30229e
--- /dev/null
+++ b/backend/app/api/router.py
@@ -0,0 +1,9 @@
+from fastapi import APIRouter
+from app.api.routes import admin_users, auth, health, me, targets
+
+api_router = APIRouter()
+api_router.include_router(health.router, tags=["health"])
+api_router.include_router(auth.router, prefix="/auth", tags=["auth"])
+api_router.include_router(me.router, tags=["auth"])
+api_router.include_router(targets.router, prefix="/targets", tags=["targets"])
+api_router.include_router(admin_users.router, prefix="/admin/users", tags=["admin"])
diff --git a/backend/app/api/routes/__init__.py b/backend/app/api/routes/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/backend/app/api/routes/__init__.py
@@ -0,0 +1 @@
+
diff --git a/backend/app/api/routes/admin_users.py b/backend/app/api/routes/admin_users.py
new file mode 100644
index 0000000..b98eecd
--- /dev/null
+++ b/backend/app/api/routes/admin_users.py
@@ -0,0 +1,65 @@
+from fastapi import APIRouter, Depends, HTTPException, status
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import AsyncSession
+from app.core.db import get_db
+from app.core.deps import require_roles
+from app.core.security import hash_password
+from app.models.models import User
+from app.schemas.user import UserCreate, UserOut, UserUpdate
+from app.services.audit import write_audit_log
+
+router = APIRouter()
+
+
+@router.get("", response_model=list[UserOut])
+async def list_users(admin: User = Depends(require_roles("admin")), db: AsyncSession = Depends(get_db)) -> list[UserOut]:
+ users = (await db.scalars(select(User).order_by(User.id.asc()))).all()
+ _ = admin
+ return [UserOut.model_validate(user) for user in users]
+
+
+@router.post("", response_model=UserOut, status_code=status.HTTP_201_CREATED)
+async def create_user(payload: UserCreate, admin: User = Depends(require_roles("admin")), db: AsyncSession = Depends(get_db)) -> UserOut:
+ exists = await db.scalar(select(User).where(User.email == payload.email))
+ if exists:
+ raise HTTPException(status_code=409, detail="Email already exists")
+ user = User(email=payload.email, password_hash=hash_password(payload.password), role=payload.role)
+ db.add(user)
+ await db.commit()
+ await db.refresh(user)
+ await write_audit_log(db, "admin.user.create", admin.id, {"created_user_id": user.id})
+ return UserOut.model_validate(user)
+
+
+@router.put("/{user_id}", response_model=UserOut)
+async def update_user(
+ user_id: int,
+ payload: UserUpdate,
+ admin: User = Depends(require_roles("admin")),
+ db: AsyncSession = Depends(get_db),
+) -> UserOut:
+ user = await db.scalar(select(User).where(User.id == user_id))
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+ update_data = payload.model_dump(exclude_unset=True)
+ if "password" in update_data and update_data["password"]:
+ user.password_hash = hash_password(update_data.pop("password"))
+ for key, value in update_data.items():
+ setattr(user, key, value)
+ await db.commit()
+ await db.refresh(user)
+ await write_audit_log(db, "admin.user.update", admin.id, {"updated_user_id": user.id})
+ return UserOut.model_validate(user)
+
+
+@router.delete("/{user_id}")
+async def delete_user(user_id: int, admin: User = Depends(require_roles("admin")), db: AsyncSession = Depends(get_db)) -> dict:
+ if user_id == admin.id:
+ raise HTTPException(status_code=400, detail="Cannot delete yourself")
+ user = await db.scalar(select(User).where(User.id == user_id))
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+ await db.delete(user)
+ await db.commit()
+ await write_audit_log(db, "admin.user.delete", admin.id, {"deleted_user_id": user_id})
+ return {"status": "deleted"}
diff --git a/backend/app/api/routes/auth.py b/backend/app/api/routes/auth.py
new file mode 100644
index 0000000..b9b442f
--- /dev/null
+++ b/backend/app/api/routes/auth.py
@@ -0,0 +1,54 @@
+from fastapi import APIRouter, Depends, HTTPException, status
+from jose import JWTError, jwt
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import AsyncSession
+from app.core.config import get_settings
+from app.core.db import get_db
+from app.core.deps import get_current_user
+from app.core.security import create_access_token, create_refresh_token, verify_password
+from app.models.models import User
+from app.schemas.auth import LoginRequest, RefreshRequest, TokenResponse
+from app.schemas.user import UserOut
+from app.services.audit import write_audit_log
+
+router = APIRouter()
+settings = get_settings()
+
+
+@router.post("/login", response_model=TokenResponse)
+async def login(payload: LoginRequest, db: AsyncSession = Depends(get_db)) -> TokenResponse:
+ user = await db.scalar(select(User).where(User.email == payload.email))
+ if not user or not verify_password(payload.password, user.password_hash):
+ raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials")
+
+ await write_audit_log(db, action="auth.login", user_id=user.id, payload={"email": user.email})
+ return TokenResponse(access_token=create_access_token(str(user.id)), refresh_token=create_refresh_token(str(user.id)))
+
+
+@router.post("/refresh", response_model=TokenResponse)
+async def refresh(payload: RefreshRequest, db: AsyncSession = Depends(get_db)) -> TokenResponse:
+ try:
+ token_payload = jwt.decode(payload.refresh_token, settings.jwt_secret_key, algorithms=[settings.jwt_algorithm])
+ except JWTError as exc:
+ raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid refresh token") from exc
+
+ if token_payload.get("type") != "refresh":
+ raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid refresh token type")
+ user_id = token_payload.get("sub")
+ user = await db.scalar(select(User).where(User.id == int(user_id)))
+ if not user:
+ raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="User not found")
+
+ await write_audit_log(db, action="auth.refresh", user_id=user.id, payload={})
+ return TokenResponse(access_token=create_access_token(str(user.id)), refresh_token=create_refresh_token(str(user.id)))
+
+
+@router.post("/logout")
+async def logout(user: User = Depends(get_current_user), db: AsyncSession = Depends(get_db)) -> dict:
+ await write_audit_log(db, action="auth.logout", user_id=user.id, payload={})
+ return {"status": "ok"}
+
+
+@router.get("/me", response_model=UserOut)
+async def me(user: User = Depends(get_current_user)) -> UserOut:
+ return UserOut.model_validate(user)
diff --git a/backend/app/api/routes/health.py b/backend/app/api/routes/health.py
new file mode 100644
index 0000000..050580a
--- /dev/null
+++ b/backend/app/api/routes/health.py
@@ -0,0 +1,17 @@
+from fastapi import APIRouter
+from sqlalchemy import text
+from app.core.db import SessionLocal
+
+router = APIRouter()
+
+
+@router.get("/healthz")
+async def healthz() -> dict:
+ return {"status": "ok"}
+
+
+@router.get("/readyz")
+async def readyz() -> dict:
+ async with SessionLocal() as session:
+ await session.execute(text("SELECT 1"))
+ return {"status": "ready"}
diff --git a/backend/app/api/routes/me.py b/backend/app/api/routes/me.py
new file mode 100644
index 0000000..bd9e0dd
--- /dev/null
+++ b/backend/app/api/routes/me.py
@@ -0,0 +1,11 @@
+from fastapi import APIRouter, Depends
+from app.core.deps import get_current_user
+from app.models.models import User
+from app.schemas.user import UserOut
+
+router = APIRouter()
+
+
+@router.get("/me", response_model=UserOut)
+async def me(user: User = Depends(get_current_user)) -> UserOut:
+ return UserOut.model_validate(user)
diff --git a/backend/app/api/routes/targets.py b/backend/app/api/routes/targets.py
new file mode 100644
index 0000000..ed2a89c
--- /dev/null
+++ b/backend/app/api/routes/targets.py
@@ -0,0 +1,181 @@
+from datetime import datetime
+import asyncpg
+from fastapi import APIRouter, Depends, HTTPException, Query, status
+from sqlalchemy import and_, desc, select
+from sqlalchemy.ext.asyncio import AsyncSession
+from app.core.db import get_db
+from app.core.deps import get_current_user, require_roles
+from app.models.models import Metric, QueryStat, Target, User
+from app.schemas.metric import MetricOut, QueryStatOut
+from app.schemas.target import TargetCreate, TargetOut, TargetUpdate
+from app.services.audit import write_audit_log
+from app.services.collector import build_target_dsn
+from app.services.crypto import encrypt_secret
+
+router = APIRouter()
+
+
+@router.get("", response_model=list[TargetOut])
+async def list_targets(user: User = Depends(get_current_user), db: AsyncSession = Depends(get_db)) -> list[TargetOut]:
+ targets = (await db.scalars(select(Target).order_by(Target.id.desc()))).all()
+ return [TargetOut.model_validate(item) for item in targets]
+
+
+@router.post("", response_model=TargetOut, status_code=status.HTTP_201_CREATED)
+async def create_target(
+ payload: TargetCreate,
+ user: User = Depends(require_roles("admin", "operator")),
+ db: AsyncSession = Depends(get_db),
+) -> TargetOut:
+ target = Target(
+ name=payload.name,
+ host=payload.host,
+ port=payload.port,
+ dbname=payload.dbname,
+ username=payload.username,
+ encrypted_password=encrypt_secret(payload.password),
+ sslmode=payload.sslmode,
+ tags=payload.tags,
+ )
+ db.add(target)
+ await db.commit()
+ await db.refresh(target)
+ await write_audit_log(db, "target.create", user.id, {"target_id": target.id, "name": target.name})
+ return TargetOut.model_validate(target)
+
+
+@router.get("/{target_id}", response_model=TargetOut)
+async def get_target(target_id: int, user: User = Depends(get_current_user), db: AsyncSession = Depends(get_db)) -> TargetOut:
+ target = await db.scalar(select(Target).where(Target.id == target_id))
+ if not target:
+ raise HTTPException(status_code=404, detail="Target not found")
+ return TargetOut.model_validate(target)
+
+
+@router.put("/{target_id}", response_model=TargetOut)
+async def update_target(
+ target_id: int,
+ payload: TargetUpdate,
+ user: User = Depends(require_roles("admin", "operator")),
+ db: AsyncSession = Depends(get_db),
+) -> TargetOut:
+ target = await db.scalar(select(Target).where(Target.id == target_id))
+ if not target:
+ raise HTTPException(status_code=404, detail="Target not found")
+
+ updates = payload.model_dump(exclude_unset=True)
+ if "password" in updates:
+ target.encrypted_password = encrypt_secret(updates.pop("password"))
+ for key, value in updates.items():
+ setattr(target, key, value)
+ await db.commit()
+ await db.refresh(target)
+ await write_audit_log(db, "target.update", user.id, {"target_id": target.id})
+ return TargetOut.model_validate(target)
+
+
+@router.delete("/{target_id}")
+async def delete_target(
+ target_id: int,
+ user: User = Depends(require_roles("admin", "operator")),
+ db: AsyncSession = Depends(get_db),
+) -> dict:
+ target = await db.scalar(select(Target).where(Target.id == target_id))
+ if not target:
+ raise HTTPException(status_code=404, detail="Target not found")
+ await db.delete(target)
+ await db.commit()
+ await write_audit_log(db, "target.delete", user.id, {"target_id": target_id})
+ return {"status": "deleted"}
+
+
+@router.get("/{target_id}/metrics", response_model=list[MetricOut])
+async def get_metrics(
+ target_id: int,
+ metric: str = Query(...),
+ from_ts: datetime = Query(alias="from"),
+ to_ts: datetime = Query(alias="to"),
+ user: User = Depends(get_current_user),
+ db: AsyncSession = Depends(get_db),
+) -> list[MetricOut]:
+ _ = user
+ rows = (
+ await db.scalars(
+ select(Metric).where(
+ and_(Metric.target_id == target_id, Metric.metric_name == metric, Metric.ts >= from_ts, Metric.ts <= to_ts)
+ ).order_by(Metric.ts.asc())
+ )
+ ).all()
+ return [MetricOut(ts=r.ts, metric_name=r.metric_name, value=r.value, labels=r.labels) for r in rows]
+
+
+async def _live_conn(target: Target) -> asyncpg.Connection:
+ return await asyncpg.connect(dsn=build_target_dsn(target))
+
+
+@router.get("/{target_id}/locks")
+async def get_locks(target_id: int, user: User = Depends(get_current_user), db: AsyncSession = Depends(get_db)) -> list[dict]:
+ _ = user
+ target = await db.scalar(select(Target).where(Target.id == target_id))
+ if not target:
+ raise HTTPException(status_code=404, detail="Target not found")
+ conn = await _live_conn(target)
+ try:
+ rows = await conn.fetch(
+ """
+ SELECT locktype, mode, granted, relation::regclass::text AS relation, pid
+ FROM pg_locks
+ ORDER BY granted ASC, mode
+ LIMIT 500
+ """
+ )
+ return [dict(r) for r in rows]
+ finally:
+ await conn.close()
+
+
+@router.get("/{target_id}/activity")
+async def get_activity(target_id: int, user: User = Depends(get_current_user), db: AsyncSession = Depends(get_db)) -> list[dict]:
+ _ = user
+ target = await db.scalar(select(Target).where(Target.id == target_id))
+ if not target:
+ raise HTTPException(status_code=404, detail="Target not found")
+ conn = await _live_conn(target)
+ try:
+ rows = await conn.fetch(
+ """
+ SELECT pid, usename, application_name, client_addr::text, state, wait_event_type, wait_event, now() - query_start AS running_for, left(query, 300) AS query
+ FROM pg_stat_activity
+ WHERE datname = current_database()
+ ORDER BY query_start NULLS LAST
+ LIMIT 200
+ """
+ )
+ return [dict(r) for r in rows]
+ finally:
+ await conn.close()
+
+
+@router.get("/{target_id}/top-queries", response_model=list[QueryStatOut])
+async def get_top_queries(target_id: int, user: User = Depends(get_current_user), db: AsyncSession = Depends(get_db)) -> list[QueryStatOut]:
+ _ = user
+ rows = (
+ await db.scalars(
+ select(QueryStat)
+ .where(QueryStat.target_id == target_id)
+ .order_by(desc(QueryStat.ts))
+ .limit(100)
+ )
+ ).all()
+ return [
+ QueryStatOut(
+ ts=r.ts,
+ queryid=r.queryid,
+ calls=r.calls,
+ total_time=r.total_time,
+ mean_time=r.mean_time,
+ rows=r.rows,
+ query_text=r.query_text,
+ )
+ for r in rows
+ ]
diff --git a/backend/app/core/__init__.py b/backend/app/core/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/backend/app/core/__init__.py
@@ -0,0 +1 @@
+
diff --git a/backend/app/core/config.py b/backend/app/core/config.py
new file mode 100644
index 0000000..21c0ba1
--- /dev/null
+++ b/backend/app/core/config.py
@@ -0,0 +1,53 @@
+from functools import lru_cache
+from pydantic import field_validator
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+class Settings(BaseSettings):
+ model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", extra="ignore")
+
+ app_name: str = "NexaPG Monitor"
+ environment: str = "dev"
+ api_v1_prefix: str = "/api/v1"
+ log_level: str = "INFO"
+
+ db_host: str = "db"
+ db_port: int = 5432
+ db_name: str = "nexapg"
+ db_user: str = "nexapg"
+ db_password: str = "nexapg"
+
+ jwt_secret_key: str
+ jwt_algorithm: str = "HS256"
+ jwt_access_token_minutes: int = 15
+ jwt_refresh_token_minutes: int = 60 * 24 * 7
+
+ encryption_key: str
+ cors_origins: str = "http://localhost:5173"
+ poll_interval_seconds: int = 30
+ init_admin_email: str = "admin@example.com"
+ init_admin_password: str = "ChangeMe123!"
+
+ @property
+ def database_url(self) -> str:
+ return (
+ f"postgresql+asyncpg://{self.db_user}:{self.db_password}"
+ f"@{self.db_host}:{self.db_port}/{self.db_name}"
+ )
+
+ @property
+ def cors_origins_list(self) -> list[str]:
+ return [item.strip() for item in self.cors_origins.split(",") if item.strip()]
+
+ @field_validator("environment")
+ @classmethod
+ def validate_environment(cls, value: str) -> str:
+ allowed = {"dev", "staging", "prod", "test"}
+ if value not in allowed:
+ raise ValueError(f"environment must be one of {allowed}")
+ return value
+
+
+@lru_cache
+def get_settings() -> Settings:
+ return Settings()
diff --git a/backend/app/core/db.py b/backend/app/core/db.py
new file mode 100644
index 0000000..565a178
--- /dev/null
+++ b/backend/app/core/db.py
@@ -0,0 +1,18 @@
+from collections.abc import AsyncGenerator
+from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
+from sqlalchemy.orm import DeclarativeBase
+from app.core.config import get_settings
+
+settings = get_settings()
+
+engine = create_async_engine(settings.database_url, future=True, pool_pre_ping=True)
+SessionLocal = async_sessionmaker(bind=engine, expire_on_commit=False, class_=AsyncSession)
+
+
+class Base(DeclarativeBase):
+ pass
+
+
+async def get_db() -> AsyncGenerator[AsyncSession, None]:
+ async with SessionLocal() as session:
+ yield session
diff --git a/backend/app/core/deps.py b/backend/app/core/deps.py
new file mode 100644
index 0000000..125150b
--- /dev/null
+++ b/backend/app/core/deps.py
@@ -0,0 +1,42 @@
+from fastapi import Depends, HTTPException, status
+from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
+from jose import JWTError, jwt
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import AsyncSession
+from app.core.config import get_settings
+from app.core.db import get_db
+from app.models.models import User
+
+settings = get_settings()
+bearer = HTTPBearer(auto_error=False)
+
+
+async def get_current_user(
+ credentials: HTTPAuthorizationCredentials | None = Depends(bearer),
+ db: AsyncSession = Depends(get_db),
+) -> User:
+ if not credentials:
+ raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Missing token")
+ token = credentials.credentials
+ try:
+ payload = jwt.decode(token, settings.jwt_secret_key, algorithms=[settings.jwt_algorithm])
+ except JWTError as exc:
+ raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token") from exc
+
+ if payload.get("type") != "access":
+ raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token type")
+
+ user_id = payload.get("sub")
+ user = await db.scalar(select(User).where(User.id == int(user_id)))
+ if not user:
+ raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="User not found")
+ return user
+
+
+def require_roles(*roles: str):
+ async def role_dependency(user: User = Depends(get_current_user)) -> User:
+ if user.role not in roles:
+ raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
+ return user
+
+ return role_dependency
diff --git a/backend/app/core/logging.py b/backend/app/core/logging.py
new file mode 100644
index 0000000..d268890
--- /dev/null
+++ b/backend/app/core/logging.py
@@ -0,0 +1,22 @@
+import json
+import logging
+from datetime import datetime, timezone
+
+
+class JsonFormatter(logging.Formatter):
+ def format(self, record: logging.LogRecord) -> str:
+ payload = {
+ "ts": datetime.now(timezone.utc).isoformat(),
+ "level": record.levelname,
+ "logger": record.name,
+ "msg": record.getMessage(),
+ }
+ if record.exc_info:
+ payload["exc_info"] = self.formatException(record.exc_info)
+ return json.dumps(payload, ensure_ascii=True)
+
+
+def configure_logging(level: str) -> None:
+ handler = logging.StreamHandler()
+ handler.setFormatter(JsonFormatter())
+ logging.basicConfig(level=level, handlers=[handler], force=True)
diff --git a/backend/app/core/security.py b/backend/app/core/security.py
new file mode 100644
index 0000000..9884738
--- /dev/null
+++ b/backend/app/core/security.py
@@ -0,0 +1,30 @@
+from datetime import datetime, timedelta, timezone
+from jose import jwt
+from passlib.context import CryptContext
+from app.core.config import get_settings
+
+settings = get_settings()
+pwd_context = CryptContext(schemes=["argon2"], deprecated="auto")
+
+
+def hash_password(password: str) -> str:
+    """Hash a plaintext password with argon2 (see ``pwd_context``)."""
+    return pwd_context.hash(password)
+
+
+def verify_password(password: str, password_hash: str) -> bool:
+    """Check a plaintext password against a stored hash; returns True on match."""
+    return pwd_context.verify(password, password_hash)
+
+
+def create_token(subject: str, token_type: str, expires_minutes: int) -> str:
+    """Create a signed JWT with ``sub``, ``type``, ``iat`` and ``exp`` claims.
+
+    ``iat``/``exp`` are integer unix timestamps derived from UTC now.
+    ``token_type`` is checked by get_current_user ("access" vs "refresh").
+    """
+    now = datetime.now(timezone.utc)
+    exp = now + timedelta(minutes=expires_minutes)
+    payload = {"sub": subject, "type": token_type, "iat": int(now.timestamp()), "exp": int(exp.timestamp())}
+    return jwt.encode(payload, settings.jwt_secret_key, algorithm=settings.jwt_algorithm)
+
+
+def create_access_token(subject: str) -> str:
+    """Short-lived token used for API authorization."""
+    return create_token(subject, "access", settings.jwt_access_token_minutes)
+
+
+def create_refresh_token(subject: str) -> str:
+    """Long-lived token used only to obtain new access tokens."""
+    return create_token(subject, "refresh", settings.jwt_refresh_token_minutes)
diff --git a/backend/app/main.py b/backend/app/main.py
new file mode 100644
index 0000000..82e53d9
--- /dev/null
+++ b/backend/app/main.py
@@ -0,0 +1,59 @@
+import asyncio
+import logging
+from contextlib import asynccontextmanager
+from fastapi import FastAPI
+from fastapi.middleware.cors import CORSMiddleware
+from sqlalchemy import select
+from app.api.router import api_router
+from app.core.config import get_settings
+from app.core.db import SessionLocal
+from app.core.logging import configure_logging
+from app.core.security import hash_password
+from app.models.models import User
+from app.services.collector import collector_loop
+
+settings = get_settings()
+configure_logging(settings.log_level)
+logger = logging.getLogger(__name__)
+
+collector_task: asyncio.Task | None = None
+collector_stop_event = asyncio.Event()
+
+
+async def ensure_admin_user() -> None:
+    """Create the bootstrap admin account on first start (idempotent).
+
+    If a user with INIT_ADMIN_EMAIL already exists nothing is changed, so a
+    password changed through the UI is not overwritten on restart.
+    """
+    async with SessionLocal() as db:
+        admin = await db.scalar(select(User).where(User.email == settings.init_admin_email))
+        if admin:
+            return
+        user = User(
+            email=settings.init_admin_email,
+            password_hash=hash_password(settings.init_admin_password),
+            role="admin",
+        )
+        db.add(user)
+        await db.commit()
+    logger.info("created initial admin user")
+
+
+@asynccontextmanager
+async def lifespan(_: FastAPI):
+ global collector_task
+ await ensure_admin_user()
+ collector_task = asyncio.create_task(collector_loop(collector_stop_event))
+ try:
+ yield
+ finally:
+ collector_stop_event.set()
+ if collector_task:
+ await collector_task
+
+
+app = FastAPI(title=settings.app_name, lifespan=lifespan)
+# CORS is restricted to the configured origins; credentials are allowed so
+# browser clients can send the Authorization header cross-origin.
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=settings.cors_origins_list,
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+# All routes are mounted under the versioned API prefix (e.g. /api/v1).
+app.include_router(api_router, prefix=settings.api_v1_prefix)
diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py
new file mode 100644
index 0000000..6ea25b6
--- /dev/null
+++ b/backend/app/models/__init__.py
@@ -0,0 +1,3 @@
+from app.models.models import AuditLog, Metric, QueryStat, Target, User
+
+__all__ = ["User", "Target", "Metric", "QueryStat", "AuditLog"]
diff --git a/backend/app/models/models.py b/backend/app/models/models.py
new file mode 100644
index 0000000..ce90997
--- /dev/null
+++ b/backend/app/models/models.py
@@ -0,0 +1,75 @@
+from datetime import datetime
+from sqlalchemy import JSON, DateTime, Float, ForeignKey, Integer, String, Text, func
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+from app.core.db import Base
+
+
+class User(Base):
+    """Application account; ``role`` is one of admin/operator/viewer (RBAC)."""
+
+    __tablename__ = "users"
+
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    email: Mapped[str] = mapped_column(String(255), unique=True, index=True, nullable=False)
+    # Argon2 hash produced by app.core.security.hash_password.
+    password_hash: Mapped[str] = mapped_column(String(255), nullable=False)
+    role: Mapped[str] = mapped_column(String(20), nullable=False, default="viewer")
+    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now(), nullable=False)
+
+    audit_logs: Mapped[list["AuditLog"]] = relationship(back_populates="user")
+
+
+class Target(Base):
+    """A monitored PostgreSQL instance; the password is stored Fernet-encrypted."""
+
+    __tablename__ = "targets"
+
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    name: Mapped[str] = mapped_column(String(120), unique=True, index=True, nullable=False)
+    host: Mapped[str] = mapped_column(String(255), nullable=False)
+    port: Mapped[int] = mapped_column(Integer, nullable=False, default=5432)
+    dbname: Mapped[str] = mapped_column(String(120), nullable=False)
+    username: Mapped[str] = mapped_column(String(120), nullable=False)
+    # Ciphertext from app.services.crypto.encrypt_secret; never store plaintext.
+    encrypted_password: Mapped[str] = mapped_column(Text, nullable=False)
+    sslmode: Mapped[str] = mapped_column(String(20), nullable=False, default="prefer")
+    tags: Mapped[dict] = mapped_column(JSON, nullable=False, default=dict)
+    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now(), nullable=False)
+
+    # Deleting a target removes its collected metrics and query stats.
+    metrics: Mapped[list["Metric"]] = relationship(back_populates="target", cascade="all, delete-orphan")
+    query_stats: Mapped[list["QueryStat"]] = relationship(back_populates="target", cascade="all, delete-orphan")
+
+
+class Metric(Base):
+    """One collected time-series sample (name/value/labels) for a target.
+
+    NOTE(review): this table grows on every poll; an Integer PK may eventually
+    overflow -- consider BigInteger plus a retention job. Needs a migration.
+    """
+
+    __tablename__ = "metrics"
+
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    target_id: Mapped[int] = mapped_column(ForeignKey("targets.id", ondelete="CASCADE"), nullable=False, index=True)
+    ts: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now(), index=True)
+    metric_name: Mapped[str] = mapped_column(String(120), nullable=False, index=True)
+    value: Mapped[float] = mapped_column(Float, nullable=False)
+    labels: Mapped[dict] = mapped_column(JSON, nullable=False, default=dict)
+
+    target: Mapped[Target] = relationship(back_populates="metrics")
+
+
+class QueryStat(Base):
+    """Snapshot of a pg_stat_statements row for a target at poll time.
+
+    NOTE(review): ``calls`` and ``rows`` are bigint on the PostgreSQL side;
+    Integer columns may overflow for long-lived servers -- verify and consider
+    BigInteger (requires a migration).
+    """
+
+    __tablename__ = "query_stats"
+
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    target_id: Mapped[int] = mapped_column(ForeignKey("targets.id", ondelete="CASCADE"), nullable=False, index=True)
+    ts: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now(), index=True)
+    queryid: Mapped[str] = mapped_column(String(100), nullable=False)
+    calls: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
+    total_time: Mapped[float] = mapped_column(Float, nullable=False, default=0.0)
+    mean_time: Mapped[float] = mapped_column(Float, nullable=False, default=0.0)
+    rows: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
+    # Query text is truncated to 2000 chars by the collector before insert.
+    query_text: Mapped[str | None] = mapped_column(Text, nullable=True)
+
+    target: Mapped[Target] = relationship(back_populates="query_stats")
+
+
+class AuditLog(Base):
+    """Audit trail entry; ``user_id`` is nullable so logs survive user deletion."""
+
+    __tablename__ = "audit_logs"
+
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    ts: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now(), index=True)
+    user_id: Mapped[int | None] = mapped_column(ForeignKey("users.id"), nullable=True, index=True)
+    action: Mapped[str] = mapped_column(String(120), nullable=False)
+    payload: Mapped[dict] = mapped_column(JSON, nullable=False, default=dict)
+
+    user: Mapped[User | None] = relationship(back_populates="audit_logs")
diff --git a/backend/app/schemas/__init__.py b/backend/app/schemas/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/backend/app/schemas/__init__.py
@@ -0,0 +1 @@
+
diff --git a/backend/app/schemas/auth.py b/backend/app/schemas/auth.py
new file mode 100644
index 0000000..0affd78
--- /dev/null
+++ b/backend/app/schemas/auth.py
@@ -0,0 +1,16 @@
+from pydantic import BaseModel, EmailStr
+
+
+class LoginRequest(BaseModel):
+    """Credentials payload for POST /auth/login."""
+
+    email: EmailStr
+    password: str
+
+
+class RefreshRequest(BaseModel):
+    """Payload carrying a refresh token to exchange for new tokens."""
+
+    refresh_token: str
+
+
+class TokenResponse(BaseModel):
+    """Token pair returned by login/refresh endpoints."""
+
+    access_token: str
+    refresh_token: str
+    token_type: str = "bearer"
diff --git a/backend/app/schemas/metric.py b/backend/app/schemas/metric.py
new file mode 100644
index 0000000..2c9b6ad
--- /dev/null
+++ b/backend/app/schemas/metric.py
@@ -0,0 +1,19 @@
+from datetime import datetime
+from pydantic import BaseModel
+
+
+class MetricOut(BaseModel):
+    """Serialized time-series point returned by the metrics endpoints."""
+
+    ts: datetime
+    metric_name: str
+    value: float
+    labels: dict
+
+
+class QueryStatOut(BaseModel):
+    """Serialized pg_stat_statements snapshot row (times in milliseconds)."""
+
+    ts: datetime
+    queryid: str
+    calls: int
+    total_time: float
+    mean_time: float
+    rows: int
+    query_text: str | None
diff --git a/backend/app/schemas/target.py b/backend/app/schemas/target.py
new file mode 100644
index 0000000..a28a0bc
--- /dev/null
+++ b/backend/app/schemas/target.py
@@ -0,0 +1,34 @@
+from datetime import datetime
+from pydantic import BaseModel, Field
+
+
+class TargetBase(BaseModel):
+    """Shared target fields; excludes the password, which is write-only."""
+
+    name: str
+    host: str
+    port: int = 5432
+    dbname: str
+    username: str
+    sslmode: str = "prefer"
+    tags: dict = Field(default_factory=dict)
+
+
+class TargetCreate(TargetBase):
+    """Creation payload: base fields plus the plaintext password (encrypted server-side)."""
+
+    password: str
+
+
+class TargetUpdate(BaseModel):
+    """Partial update; every field optional so only supplied values change."""
+
+    name: str | None = None
+    host: str | None = None
+    port: int | None = None
+    dbname: str | None = None
+    username: str | None = None
+    password: str | None = None
+    sslmode: str | None = None
+    tags: dict | None = None
+
+
+class TargetOut(TargetBase):
+    """Read model for targets; never exposes the (encrypted) password."""
+
+    id: int
+    created_at: datetime
+
+    model_config = {"from_attributes": True}
diff --git a/backend/app/schemas/user.py b/backend/app/schemas/user.py
new file mode 100644
index 0000000..0c52067
--- /dev/null
+++ b/backend/app/schemas/user.py
@@ -0,0 +1,23 @@
+from datetime import datetime
+from pydantic import BaseModel, EmailStr
+
+
+class UserOut(BaseModel):
+    """Read model for users; password hash is intentionally omitted."""
+
+    id: int
+    email: EmailStr
+    role: str
+    created_at: datetime
+
+    model_config = {"from_attributes": True}
+
+
+class UserCreate(BaseModel):
+    """Admin payload for creating a user; role defaults to least privilege."""
+
+    email: EmailStr
+    password: str
+    role: str = "viewer"
+
+
+class UserUpdate(BaseModel):
+    """Partial user update; only non-None fields are applied."""
+
+    email: EmailStr | None = None
+    password: str | None = None
+    role: str | None = None
diff --git a/backend/app/services/__init__.py b/backend/app/services/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/backend/app/services/__init__.py
@@ -0,0 +1 @@
+
diff --git a/backend/app/services/audit.py b/backend/app/services/audit.py
new file mode 100644
index 0000000..19724a7
--- /dev/null
+++ b/backend/app/services/audit.py
@@ -0,0 +1,7 @@
+from sqlalchemy.ext.asyncio import AsyncSession
+from app.models.models import AuditLog
+
+
+async def write_audit_log(db: AsyncSession, action: str, user_id: int | None, payload: dict | None = None) -> None:
+    """Insert one audit entry and commit.
+
+    NOTE(review): this commits the caller's session, flushing any other pending
+    changes on it as well -- confirm callers expect that.
+    """
+    db.add(AuditLog(action=action, user_id=user_id, payload=payload or {}))
+    await db.commit()
diff --git a/backend/app/services/collector.py b/backend/app/services/collector.py
new file mode 100644
index 0000000..88b81e2
--- /dev/null
+++ b/backend/app/services/collector.py
@@ -0,0 +1,149 @@
+import asyncio
+import logging
+from datetime import datetime, timezone
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.exc import SQLAlchemyError
+from app.core.config import get_settings
+from app.core.db import SessionLocal
+from app.models.models import Metric, QueryStat, Target
+from app.services.crypto import decrypt_secret
+
+import asyncpg
+
+logger = logging.getLogger(__name__)
+settings = get_settings()
+
+
+def build_target_dsn(target: Target) -> str:
+ password = decrypt_secret(target.encrypted_password)
+ return (
+ f"postgresql://{target.username}:{password}"
+ f"@{target.host}:{target.port}/{target.dbname}?sslmode={target.sslmode}"
+ )
+
+
+async def _store_metric(db: AsyncSession, target_id: int, name: str, value: float, labels: dict | None = None) -> None:
+    """Queue one Metric row on the session; the caller is responsible for commit.
+
+    Declared async for call-site uniformity even though it performs no await.
+    """
+    db.add(
+        Metric(
+            target_id=target_id,
+            ts=datetime.now(timezone.utc),
+            metric_name=name,
+            value=float(value),
+            labels=labels or {},
+        )
+    )
+
+
+async def collect_target(target: Target) -> None:
+    """Poll one target over asyncpg and persist metrics + top queries.
+
+    Reads pg_stat_database / pg_stat_activity / pg_stat_bgwriter / pg_locks,
+    and pg_stat_statements when available, then writes everything to the core
+    DB in a single commit. Connection errors propagate to the caller.
+
+    NOTE(review): the pg_stat_bgwriter columns queried here were moved to
+    pg_stat_checkpointer in newer PostgreSQL releases -- verify against the
+    oldest/newest server versions this is expected to monitor.
+    """
+    dsn = build_target_dsn(target)
+    conn = await asyncpg.connect(dsn=dsn)
+    try:
+        stat_db = await conn.fetchrow(
+            """
+            SELECT numbackends, xact_commit, xact_rollback, blks_hit, blks_read, tup_returned, tup_fetched
+            FROM pg_stat_database
+            WHERE datname = current_database()
+            """
+        )
+        activity = await conn.fetchrow(
+            """
+            SELECT
+                count(*) FILTER (WHERE state = 'active') AS active_connections,
+                count(*) AS total_connections
+            FROM pg_stat_activity
+            WHERE datname = current_database()
+            """
+        )
+        bgwriter = await conn.fetchrow(
+            """
+            SELECT checkpoints_timed, checkpoints_req, buffers_checkpoint, buffers_clean, maxwritten_clean
+            FROM pg_stat_bgwriter
+            """
+        )
+
+        # Substitute zeroed defaults when a view returned no row, so the
+        # arithmetic below never has to branch on None.
+        if stat_db is None:
+            stat_db = {
+                "numbackends": 0,
+                "xact_commit": 0,
+                "xact_rollback": 0,
+                "blks_hit": 0,
+                "blks_read": 0,
+                "tup_returned": 0,
+                "tup_fetched": 0,
+            }
+        if activity is None:
+            activity = {"active_connections": 0, "total_connections": 0}
+        if bgwriter is None:
+            bgwriter = {
+                "checkpoints_timed": 0,
+                "checkpoints_req": 0,
+                "buffers_checkpoint": 0,
+                "buffers_clean": 0,
+                "maxwritten_clean": 0,
+            }
+
+        lock_count = await conn.fetchval("SELECT count(*) FROM pg_locks")
+        # Guard against division by zero on a freshly started server.
+        cache_hit_ratio = 0.0
+        if stat_db and (stat_db["blks_hit"] + stat_db["blks_read"]) > 0:
+            cache_hit_ratio = stat_db["blks_hit"] / (stat_db["blks_hit"] + stat_db["blks_read"])
+
+        query_rows = []
+        try:
+            query_rows = await conn.fetch(
+                """
+                SELECT queryid::text, calls, total_exec_time, mean_exec_time, rows, left(query, 2000) AS query_text
+                FROM pg_stat_statements
+                ORDER BY total_exec_time DESC
+                LIMIT 20
+                """
+            )
+        except Exception:
+            # Extension may be disabled on monitored instance.
+            query_rows = []
+
+        # All rows for this poll are written in one transaction on the core DB.
+        async with SessionLocal() as db:
+            await _store_metric(db, target.id, "connections_total", activity["total_connections"], {})
+            await _store_metric(db, target.id, "connections_active", activity["active_connections"], {})
+            await _store_metric(db, target.id, "xacts_total", stat_db["xact_commit"] + stat_db["xact_rollback"], {})
+            await _store_metric(db, target.id, "cache_hit_ratio", cache_hit_ratio, {})
+            await _store_metric(db, target.id, "locks_total", lock_count, {})
+            await _store_metric(db, target.id, "checkpoints_timed", bgwriter["checkpoints_timed"], {})
+            await _store_metric(db, target.id, "checkpoints_req", bgwriter["checkpoints_req"], {})
+
+            for row in query_rows:
+                db.add(
+                    QueryStat(
+                        target_id=target.id,
+                        ts=datetime.now(timezone.utc),
+                        queryid=row["queryid"] or "0",
+                        calls=row["calls"] or 0,
+                        total_time=row["total_exec_time"] or 0.0,
+                        mean_time=row["mean_exec_time"] or 0.0,
+                        rows=row["rows"] or 0,
+                        query_text=row["query_text"],
+                    )
+                )
+            await db.commit()
+    finally:
+        await conn.close()
+
+
+async def collect_once() -> None:
+ async with SessionLocal() as db:
+ targets = (await db.scalars(select(Target))).all()
+
+ for target in targets:
+ try:
+ await collect_target(target)
+ except (OSError, SQLAlchemyError, asyncpg.PostgresError, Exception) as exc:
+ logger.exception("collector_error target=%s err=%s", target.id, exc)
+
+
+async def collector_loop(stop_event: asyncio.Event) -> None:
+ while not stop_event.is_set():
+ await collect_once()
+ try:
+ await asyncio.wait_for(stop_event.wait(), timeout=settings.poll_interval_seconds)
+ except asyncio.TimeoutError:
+ pass
diff --git a/backend/app/services/crypto.py b/backend/app/services/crypto.py
new file mode 100644
index 0000000..f32e93d
--- /dev/null
+++ b/backend/app/services/crypto.py
@@ -0,0 +1,13 @@
+from cryptography.fernet import Fernet
+from app.core.config import get_settings
+
+settings = get_settings()
+fernet = Fernet(settings.encryption_key.encode("utf-8"))
+
+
+def encrypt_secret(value: str) -> str:
+    """Encrypt a secret with the configured Fernet key; returns ASCII ciphertext."""
+    return fernet.encrypt(value.encode("utf-8")).decode("utf-8")
+
+
+def decrypt_secret(value: str) -> str:
+    """Decrypt ciphertext produced by encrypt_secret; raises InvalidToken on tampering/key mismatch."""
+    return fernet.decrypt(value.encode("utf-8")).decode("utf-8")
diff --git a/backend/app/wait_for_db.py b/backend/app/wait_for_db.py
new file mode 100644
index 0000000..2882cb8
--- /dev/null
+++ b/backend/app/wait_for_db.py
@@ -0,0 +1,20 @@
+import asyncio
+import sys
+from sqlalchemy import text
+from app.core.db import SessionLocal
+
+
+async def main() -> int:
+    """Poll the core DB until it answers SELECT 1.
+
+    Returns 0 (success) as a process exit code, or 1 after ~60s of failures
+    (30 attempts, 2s apart), letting entrypoint.sh abort startup.
+    """
+    retries = 30
+    for _ in range(retries):
+        try:
+            async with SessionLocal() as session:
+                await session.execute(text("SELECT 1"))
+            return 0
+        except Exception:
+            # DB not ready yet (connection refused / still starting) - retry.
+            await asyncio.sleep(2)
+    return 1
+
+
+if __name__ == "__main__":
+    raise SystemExit(asyncio.run(main()))
diff --git a/backend/entrypoint.sh b/backend/entrypoint.sh
new file mode 100644
index 0000000..da3e229
--- /dev/null
+++ b/backend/entrypoint.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env sh
+set -e
+
+echo "Waiting for database..."
+python -m app.wait_for_db
+
+echo "Running migrations..."
+alembic upgrade head
+
+echo "Starting API..."
+exec gunicorn app.main:app \
+ -k uvicorn.workers.UvicornWorker \
+ --bind 0.0.0.0:8000 \
+ --workers 2 \
+ --timeout 60
diff --git a/backend/requirements.txt b/backend/requirements.txt
new file mode 100644
index 0000000..63bb0c7
--- /dev/null
+++ b/backend/requirements.txt
@@ -0,0 +1,13 @@
+fastapi==0.116.1
+uvicorn[standard]==0.35.0
+gunicorn==23.0.0
+sqlalchemy[asyncio]==2.0.44
+asyncpg==0.30.0
+alembic==1.16.5
+pydantic==2.11.7
+pydantic-settings==2.11.0
+email-validator==2.2.0
+python-jose[cryptography]==3.5.0
+passlib[argon2]==1.7.4
+cryptography==45.0.7
+python-multipart==0.0.20
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..05bb7db
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,62 @@
+services:
+ db:
+ image: postgres:16
+ container_name: nexapg-db
+ restart: unless-stopped
+ environment:
+ POSTGRES_DB: ${DB_NAME}
+ POSTGRES_USER: ${DB_USER}
+ POSTGRES_PASSWORD: ${DB_PASSWORD}
+ ports:
+ - "${DB_PORT}:5432"
+ volumes:
+ - pg_data:/var/lib/postgresql/data
+ healthcheck:
+ test: ["CMD-SHELL", "pg_isready -U ${DB_USER} -d ${DB_NAME}"]
+ interval: 10s
+ timeout: 5s
+ retries: 10
+
+ backend:
+ build:
+ context: ./backend
+ container_name: nexapg-backend
+ restart: unless-stopped
+ environment:
+ APP_NAME: ${APP_NAME}
+ ENVIRONMENT: ${ENVIRONMENT}
+ LOG_LEVEL: ${LOG_LEVEL}
+ DB_HOST: db
+ DB_PORT: 5432
+ DB_NAME: ${DB_NAME}
+ DB_USER: ${DB_USER}
+ DB_PASSWORD: ${DB_PASSWORD}
+ JWT_SECRET_KEY: ${JWT_SECRET_KEY}
+ JWT_ALGORITHM: ${JWT_ALGORITHM}
+ JWT_ACCESS_TOKEN_MINUTES: ${JWT_ACCESS_TOKEN_MINUTES}
+ JWT_REFRESH_TOKEN_MINUTES: ${JWT_REFRESH_TOKEN_MINUTES}
+ ENCRYPTION_KEY: ${ENCRYPTION_KEY}
+ CORS_ORIGINS: ${CORS_ORIGINS}
+ POLL_INTERVAL_SECONDS: ${POLL_INTERVAL_SECONDS}
+ INIT_ADMIN_EMAIL: ${INIT_ADMIN_EMAIL}
+ INIT_ADMIN_PASSWORD: ${INIT_ADMIN_PASSWORD}
+ depends_on:
+ db:
+ condition: service_healthy
+ ports:
+ - "${BACKEND_PORT}:8000"
+
+ frontend:
+ build:
+ context: ./frontend
+ args:
+ VITE_API_URL: ${VITE_API_URL}
+ container_name: nexapg-frontend
+ restart: unless-stopped
+ depends_on:
+ - backend
+ ports:
+ - "${FRONTEND_PORT}:80"
+
+volumes:
+ pg_data:
diff --git a/frontend/Dockerfile b/frontend/Dockerfile
new file mode 100644
index 0000000..5fc14c2
--- /dev/null
+++ b/frontend/Dockerfile
@@ -0,0 +1,14 @@
+FROM node:22-alpine AS build
+WORKDIR /app
+COPY package.json package-lock.json* ./
+RUN npm install
+COPY . .
+ARG VITE_API_URL=/api/v1
+ENV VITE_API_URL=${VITE_API_URL}
+RUN npm run build
+
+FROM nginx:1.29-alpine
+COPY nginx.conf /etc/nginx/conf.d/default.conf
+COPY --from=build /app/dist /usr/share/nginx/html
+EXPOSE 80
+HEALTHCHECK --interval=30s --timeout=3s --retries=5 CMD wget -qO- http://127.0.0.1/ || exit 1
diff --git a/frontend/index.html b/frontend/index.html
new file mode 100644
index 0000000..1a5c08b
--- /dev/null
+++ b/frontend/index.html
@@ -0,0 +1,12 @@
+
+
+
+
+
+ NexaPG Monitor
+
+
+
+
+
+
diff --git a/frontend/nginx.conf b/frontend/nginx.conf
new file mode 100644
index 0000000..79fd959
--- /dev/null
+++ b/frontend/nginx.conf
@@ -0,0 +1,11 @@
+server {
+    listen 80;
+    server_name _;
+
+    root /usr/share/nginx/html;
+    index index.html;
+
+    # SPA fallback: unknown paths serve index.html so client-side routing
+    # (react-router) can resolve them.
+    location / {
+        try_files $uri $uri/ /index.html;
+    }
+}
diff --git a/frontend/package.json b/frontend/package.json
new file mode 100644
index 0000000..cb709bf
--- /dev/null
+++ b/frontend/package.json
@@ -0,0 +1,21 @@
+{
+ "name": "nexapg-frontend",
+ "version": "0.1.0",
+ "private": true,
+ "type": "module",
+ "scripts": {
+ "dev": "vite --host 0.0.0.0 --port 5173",
+ "build": "vite build",
+ "preview": "vite preview --host 0.0.0.0 --port 5173"
+ },
+ "dependencies": {
+ "react": "^18.3.1",
+ "react-dom": "^18.3.1",
+ "react-router-dom": "^6.30.1",
+ "recharts": "^2.15.4"
+ },
+ "devDependencies": {
+ "@vitejs/plugin-react": "^5.0.2",
+ "vite": "^7.1.5"
+ }
+}
diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx
new file mode 100644
index 0000000..51d539c
--- /dev/null
+++ b/frontend/src/App.jsx
@@ -0,0 +1,63 @@
+import React from "react";
+import { Link, Navigate, Route, Routes, useLocation } from "react-router-dom";
+import { useAuth } from "./state";
+import { LoginPage } from "./pages/LoginPage";
+import { DashboardPage } from "./pages/DashboardPage";
+import { TargetsPage } from "./pages/TargetsPage";
+import { TargetDetailPage } from "./pages/TargetDetailPage";
+import { QueryInsightsPage } from "./pages/QueryInsightsPage";
+import { AdminUsersPage } from "./pages/AdminUsersPage";
+
+function Protected({ children }) {
+ const { tokens } = useAuth();
+ const location = useLocation();
+ if (!tokens?.accessToken) return ;
+ return children;
+}
+
+function Layout({ children }) {
+ const { me, logout } = useAuth();
+ return (
+
+
+
{children}
+
+ );
+}
+
+export function App() {
+ return (
+
+ } />
+
+
+
+ } />
+ } />
+ } />
+ } />
+ } />
+
+
+
+ }
+ />
+
+ );
+}
diff --git a/frontend/src/api.js b/frontend/src/api.js
new file mode 100644
index 0000000..a97e22f
--- /dev/null
+++ b/frontend/src/api.js
@@ -0,0 +1,28 @@
+const API_URL = import.meta.env.VITE_API_URL || "http://localhost:8000/api/v1";
+
+/**
+ * Thin fetch wrapper for the backend API.
+ *
+ * Adds JSON + Authorization headers, and on a 401 calls `onUnauthorized`
+ * (the token-refresh callback) exactly once, then retries the request with
+ * the refreshed access token. Throws Error with the response text on any
+ * non-ok status; returns null for 204, parsed JSON otherwise.
+ */
+export async function apiFetch(path, options = {}, tokens, onUnauthorized) {
+  const headers = {
+    "Content-Type": "application/json",
+    ...(options.headers || {}),
+  };
+  if (tokens?.accessToken) {
+    headers.Authorization = `Bearer ${tokens.accessToken}`;
+  }
+
+  let res = await fetch(`${API_URL}${path}`, { ...options, headers });
+  if (res.status === 401 && tokens?.refreshToken && onUnauthorized) {
+    // Single refresh-and-retry; a second 401 falls through to the error path.
+    const refreshed = await onUnauthorized();
+    if (refreshed) {
+      headers.Authorization = `Bearer ${refreshed.accessToken}`;
+      res = await fetch(`${API_URL}${path}`, { ...options, headers });
+    }
+  }
+  if (!res.ok) {
+    const txt = await res.text();
+    throw new Error(txt || `HTTP ${res.status}`);
+  }
+  if (res.status === 204) return null;
+  return res.json();
+}
+
+export { API_URL };
diff --git a/frontend/src/main.jsx b/frontend/src/main.jsx
new file mode 100644
index 0000000..9b3390b
--- /dev/null
+++ b/frontend/src/main.jsx
@@ -0,0 +1,16 @@
+import React from "react";
+import ReactDOM from "react-dom/client";
+import { BrowserRouter } from "react-router-dom";
+import { App } from "./App";
+import { AuthProvider } from "./state";
+import "./styles.css";
+
+ReactDOM.createRoot(document.getElementById("root")).render(
+
+
+
+
+
+
+
+);
diff --git a/frontend/src/pages/AdminUsersPage.jsx b/frontend/src/pages/AdminUsersPage.jsx
new file mode 100644
index 0000000..81e64ec
--- /dev/null
+++ b/frontend/src/pages/AdminUsersPage.jsx
@@ -0,0 +1,84 @@
+import React, { useEffect, useState } from "react";
+import { apiFetch } from "../api";
+import { useAuth } from "../state";
+
+export function AdminUsersPage() {
+ const { tokens, refresh, me } = useAuth();
+ const [users, setUsers] = useState([]);
+ const [form, setForm] = useState({ email: "", password: "", role: "viewer" });
+ const [error, setError] = useState("");
+
+ const load = async () => {
+ setUsers(await apiFetch("/admin/users", {}, tokens, refresh));
+ };
+
+ useEffect(() => {
+ if (me?.role === "admin") load().catch((e) => setError(String(e.message || e)));
+ }, [me]);
+
+ if (me?.role !== "admin") return Nur fuer Admin.
;
+
+ const create = async (e) => {
+ e.preventDefault();
+ try {
+ await apiFetch("/admin/users", { method: "POST", body: JSON.stringify(form) }, tokens, refresh);
+ setForm({ email: "", password: "", role: "viewer" });
+ await load();
+ } catch (e) {
+ setError(String(e.message || e));
+ }
+ };
+
+ const remove = async (id) => {
+ try {
+ await apiFetch(`/admin/users/${id}`, { method: "DELETE" }, tokens, refresh);
+ await load();
+ } catch (e) {
+ setError(String(e.message || e));
+ }
+ };
+
+ return (
+
+
Admin Users
+ {error &&
{error}
}
+
+
+
+
+
+ | ID |
+ Email |
+ Role |
+ Action |
+
+
+
+ {users.map((u) => (
+
+ | {u.id} |
+ {u.email} |
+ {u.role} |
+ {u.id !== me.id && } |
+
+ ))}
+
+
+
+
+ );
+}
diff --git a/frontend/src/pages/DashboardPage.jsx b/frontend/src/pages/DashboardPage.jsx
new file mode 100644
index 0000000..461ecbb
--- /dev/null
+++ b/frontend/src/pages/DashboardPage.jsx
@@ -0,0 +1,74 @@
+import React, { useEffect, useState } from "react";
+import { Link } from "react-router-dom";
+import { apiFetch } from "../api";
+import { useAuth } from "../state";
+
+export function DashboardPage() {
+ const { tokens, refresh } = useAuth();
+ const [targets, setTargets] = useState([]);
+ const [loading, setLoading] = useState(true);
+ const [error, setError] = useState("");
+
+ useEffect(() => {
+ let active = true;
+ (async () => {
+ try {
+ const data = await apiFetch("/targets", {}, tokens, refresh);
+ if (active) setTargets(data);
+ } catch (e) {
+ if (active) setError(String(e.message || e));
+ } finally {
+ if (active) setLoading(false);
+ }
+ })();
+ return () => {
+ active = false;
+ };
+ }, [tokens, refresh]);
+
+ if (loading) return Lade Dashboard...
;
+ if (error) return {error}
;
+
+ return (
+
+
Dashboard Overview
+
+
+ {targets.length}
+ Targets
+
+
+ {targets.length}
+ Status OK (placeholder)
+
+
+ 0
+ Alerts (placeholder)
+
+
+
+
Targets
+
+
+
+ | Name |
+ Host |
+ DB |
+ Aktion |
+
+
+
+ {targets.map((t) => (
+
+ | {t.name} |
+ {t.host}:{t.port} |
+ {t.dbname} |
+ Details |
+
+ ))}
+
+
+
+
+ );
+}
diff --git a/frontend/src/pages/LoginPage.jsx b/frontend/src/pages/LoginPage.jsx
new file mode 100644
index 0000000..1679f70
--- /dev/null
+++ b/frontend/src/pages/LoginPage.jsx
@@ -0,0 +1,40 @@
+import React, { useState } from "react";
+import { useNavigate } from "react-router-dom";
+import { useAuth } from "../state";
+
+export function LoginPage() {
+ const { login } = useAuth();
+ const navigate = useNavigate();
+ const [email, setEmail] = useState("admin@example.com");
+ const [password, setPassword] = useState("ChangeMe123!");
+ const [error, setError] = useState("");
+ const [loading, setLoading] = useState(false);
+
+ const submit = async (e) => {
+ e.preventDefault();
+ setError("");
+ setLoading(true);
+ try {
+ await login(email, password);
+ navigate("/");
+ } catch {
+ setError("Login fehlgeschlagen");
+ } finally {
+ setLoading(false);
+ }
+ };
+
+ return (
+
+ );
+}
diff --git a/frontend/src/pages/QueryInsightsPage.jsx b/frontend/src/pages/QueryInsightsPage.jsx
new file mode 100644
index 0000000..1752a6c
--- /dev/null
+++ b/frontend/src/pages/QueryInsightsPage.jsx
@@ -0,0 +1,79 @@
+import React, { useEffect, useState } from "react";
+import { apiFetch } from "../api";
+import { useAuth } from "../state";
+
+export function QueryInsightsPage() {
+ const { tokens, refresh } = useAuth();
+ const [targets, setTargets] = useState([]);
+ const [targetId, setTargetId] = useState("");
+ const [rows, setRows] = useState([]);
+ const [error, setError] = useState("");
+
+ useEffect(() => {
+ (async () => {
+ try {
+ const t = await apiFetch("/targets", {}, tokens, refresh);
+ setTargets(t);
+ if (t.length > 0) setTargetId(String(t[0].id));
+ } catch (e) {
+ setError(String(e.message || e));
+ }
+ })();
+ }, []);
+
+ useEffect(() => {
+ if (!targetId) return;
+ (async () => {
+ try {
+ const data = await apiFetch(`/targets/${targetId}/top-queries`, {}, tokens, refresh);
+ setRows(data);
+ } catch (e) {
+ setError(String(e.message || e));
+ }
+ })();
+ }, [targetId, tokens, refresh]);
+
+ return (
+
+
Query Insights
+
Hinweis: Benötigt aktivierte Extension pg_stat_statements auf dem Zielsystem.
+ {error &&
{error}
}
+
+
+
+
+
+
+
+
+ | Time |
+ Calls |
+ Total ms |
+ Mean ms |
+ Rows |
+ Query |
+
+
+
+ {rows.map((r, i) => (
+
+ | {new Date(r.ts).toLocaleString()} |
+ {r.calls} |
+ {r.total_time.toFixed(2)} |
+ {r.mean_time.toFixed(2)} |
+ {r.rows} |
+ {r.query_text || "-"} |
+
+ ))}
+
+
+
+
+ );
+}
diff --git a/frontend/src/pages/TargetDetailPage.jsx b/frontend/src/pages/TargetDetailPage.jsx
new file mode 100644
index 0000000..fa1f0c9
--- /dev/null
+++ b/frontend/src/pages/TargetDetailPage.jsx
@@ -0,0 +1,157 @@
+import React, { useEffect, useMemo, useState } from "react";
+import { useParams } from "react-router-dom";
+import { Line, LineChart, ResponsiveContainer, Tooltip, XAxis, YAxis } from "recharts";
+import { apiFetch } from "../api";
+import { useAuth } from "../state";
+
+const ranges = {
+ "15m": 15 * 60 * 1000,
+ "1h": 60 * 60 * 1000,
+ "24h": 24 * 60 * 60 * 1000,
+ "7d": 7 * 24 * 60 * 60 * 1000,
+};
+
+function toQueryRange(range) {
+ const to = new Date();
+ const from = new Date(to.getTime() - ranges[range]);
+ return { from: from.toISOString(), to: to.toISOString() };
+}
+
+async function loadMetric(targetId, metric, range, tokens, refresh) {
+ const { from, to } = toQueryRange(range);
+ return apiFetch(
+ `/targets/${targetId}/metrics?metric=${encodeURIComponent(metric)}&from=${encodeURIComponent(from)}&to=${encodeURIComponent(to)}`,
+ {},
+ tokens,
+ refresh
+ );
+}
+
+export function TargetDetailPage() {
+ const { id } = useParams();
+ const { tokens, refresh } = useAuth();
+ const [range, setRange] = useState("1h");
+ const [series, setSeries] = useState({});
+ const [locks, setLocks] = useState([]);
+ const [activity, setActivity] = useState([]);
+ const [error, setError] = useState("");
+ const [loading, setLoading] = useState(true);
+
+ useEffect(() => {
+ let active = true;
+ (async () => {
+ setLoading(true);
+ try {
+ const [connections, xacts, cache, locksTable, activityTable] = await Promise.all([
+ loadMetric(id, "connections_total", range, tokens, refresh),
+ loadMetric(id, "xacts_total", range, tokens, refresh),
+ loadMetric(id, "cache_hit_ratio", range, tokens, refresh),
+ apiFetch(`/targets/${id}/locks`, {}, tokens, refresh),
+ apiFetch(`/targets/${id}/activity`, {}, tokens, refresh),
+ ]);
+ if (!active) return;
+ setSeries({ connections, xacts, cache });
+ setLocks(locksTable);
+ setActivity(activityTable);
+ setError("");
+ } catch (e) {
+ if (active) setError(String(e.message || e));
+ } finally {
+ if (active) setLoading(false);
+ }
+ })();
+ return () => {
+ active = false;
+ };
+ }, [id, range, tokens, refresh]);
+
+ const chartData = useMemo(
+ () =>
+ (series.connections || []).map((point, idx) => ({
+ ts: new Date(point.ts).toLocaleTimeString(),
+ connections: point.value,
+ xacts: series.xacts?.[idx]?.value || 0,
+ cache: series.cache?.[idx]?.value || 0,
+ })),
+ [series]
+ );
+
+ if (loading) return Lade Target Detail...
;
+ if (error) return {error}
;
+
+ return (
+
+
Target Detail #{id}
+
+ {Object.keys(ranges).map((r) => (
+
+ ))}
+
+
+
Connections / TPS approx / Cache hit ratio
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Locks
+
+
+
+ | Type |
+ Mode |
+ Granted |
+ Relation |
+ PID |
+
+
+
+ {locks.map((l, i) => (
+
+ | {l.locktype} |
+ {l.mode} |
+ {String(l.granted)} |
+ {l.relation || "-"} |
+ {l.pid} |
+
+ ))}
+
+
+
+
+
Activity
+
+
+
+ | PID |
+ User |
+ State |
+ Wait |
+
+
+
+ {activity.map((a) => (
+
+ | {a.pid} |
+ {a.usename} |
+ {a.state} |
+ {a.wait_event_type || "-"} |
+
+ ))}
+
+
+
+
+
+ );
+}
diff --git a/frontend/src/pages/TargetsPage.jsx b/frontend/src/pages/TargetsPage.jsx
new file mode 100644
index 0000000..9769f02
--- /dev/null
+++ b/frontend/src/pages/TargetsPage.jsx
@@ -0,0 +1,114 @@
+import React, { useEffect, useState } from "react";
+import { Link } from "react-router-dom";
+import { apiFetch } from "../api";
+import { useAuth } from "../state";
+
+// Blank values used to initialise and reset the create-target form
+// (5432 is the conventional PostgreSQL port; credentials are sent to the
+// backend, which stores them encrypted per the project README).
+const emptyForm = {
+  name: "",
+  host: "",
+  port: 5432,
+  dbname: "",
+  username: "",
+  password: "",
+  sslmode: "prefer",
+  tags: {},
+};
+
+// Targets management page: lists monitoring targets, and lets privileged
+// roles ("admin"/"operator") create and delete them.
+export function TargetsPage() {
+  const { tokens, refresh, me } = useAuth();
+  const [targets, setTargets] = useState([]);
+  const [form, setForm] = useState(emptyForm);
+  const [error, setError] = useState("");
+  const [loading, setLoading] = useState(true);
+
+  // Mutating actions (create/delete) are gated on the caller's role.
+  const canManage = me?.role === "admin" || me?.role === "operator";
+
+  // Fetch the target list; on failure keep the old list and show a message.
+  const load = async () => {
+    setLoading(true);
+    try {
+      setTargets(await apiFetch("/targets", {}, tokens, refresh));
+      setError("");
+    } catch (e) {
+      setError(String(e.message || e));
+    } finally {
+      setLoading(false);
+    }
+  };
+
+  // Initial load only; `load` is intentionally omitted from the deps.
+  useEffect(() => {
+    load();
+  }, []);
+
+  // Create-form submit handler: POST, reset the form, then reload the list.
+  const createTarget = async (e) => {
+    e.preventDefault();
+    try {
+      await apiFetch("/targets", { method: "POST", body: JSON.stringify(form) }, tokens, refresh);
+      setForm(emptyForm);
+      await load();
+    } catch (e) {
+      setError(String(e.message || e));
+    }
+  };
+
+  // Delete after a native confirm() prompt, then reload the list.
+  const deleteTarget = async (id) => {
+    if (!confirm("Target löschen?")) return;
+    try {
+      await apiFetch(`/targets/${id}`, { method: "DELETE" }, tokens, refresh);
+      await load();
+    } catch (e) {
+      setError(String(e.message || e));
+    }
+  };
+
+  // NOTE(review): the JSX below appears corrupted in this hunk (element tags
+  // stripped; some lines lost their diff "+" prefix). Verify against the
+  // original frontend/src/pages/TargetsPage.jsx before applying this patch.
+  return (
+
+
 Targets Management
+ {error &&
 {error}
 }
+ {canManage && (
+
+ )}
+
+ {loading ? (
+
 Lade Targets...
+ ) : (
+
+
+
+ | Name |
+ Host |
+ DB |
+ Aktionen |
+
+
+
+ {targets.map((t) => (
+
+ | {t.name} |
+ {t.host}:{t.port} |
+ {t.dbname} |
+
+ Details{" "}
+ {canManage && }
+ |
+
+ ))}
+
+
+ )}
+
+
+ );
+}
diff --git a/frontend/src/state.jsx b/frontend/src/state.jsx
new file mode 100644
index 0000000..b2b215f
--- /dev/null
+++ b/frontend/src/state.jsx
@@ -0,0 +1,89 @@
+import React, { createContext, useContext, useMemo, useState } from "react";
+import { API_URL } from "./api";
+
+const AuthCtx = createContext(null);
+
+function loadStorage() {
+ try {
+ return JSON.parse(localStorage.getItem("nexapg_auth") || "null");
+ } catch {
+ return null;
+ }
+}
+
+export function AuthProvider({ children }) {
+ const initial = loadStorage();
+ const [tokens, setTokens] = useState(initial?.tokens || null);
+ const [me, setMe] = useState(initial?.me || null);
+
+ const persist = (nextTokens, nextMe) => {
+ if (nextTokens && nextMe) {
+ localStorage.setItem("nexapg_auth", JSON.stringify({ tokens: nextTokens, me: nextMe }));
+ } else {
+ localStorage.removeItem("nexapg_auth");
+ }
+ };
+
+ const refresh = async () => {
+ if (!tokens?.refreshToken) return null;
+ const res = await fetch(`${API_URL}/auth/refresh`, {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({ refresh_token: tokens.refreshToken }),
+ });
+ if (!res.ok) {
+ setTokens(null);
+ setMe(null);
+ persist(null, null);
+ return null;
+ }
+ const data = await res.json();
+ const nextTokens = { accessToken: data.access_token, refreshToken: data.refresh_token };
+ setTokens(nextTokens);
+ persist(nextTokens, me);
+ return nextTokens;
+ };
+
+ const login = async (email, password) => {
+ const res = await fetch(`${API_URL}/auth/login`, {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({ email, password }),
+ });
+ if (!res.ok) throw new Error("Login failed");
+ const data = await res.json();
+ const nextTokens = { accessToken: data.access_token, refreshToken: data.refresh_token };
+ const meRes = await fetch(`${API_URL}/me`, {
+ headers: { Authorization: `Bearer ${nextTokens.accessToken}` },
+ });
+ if (!meRes.ok) throw new Error("Could not load user profile");
+ const profile = await meRes.json();
+ setTokens(nextTokens);
+ setMe(profile);
+ persist(nextTokens, profile);
+ };
+
+ const logout = async () => {
+ try {
+ if (tokens?.accessToken) {
+ await fetch(`${API_URL}/auth/logout`, {
+ method: "POST",
+ headers: { Authorization: `Bearer ${tokens.accessToken}` },
+ });
+ }
+ } finally {
+ setTokens(null);
+ setMe(null);
+ persist(null, null);
+ }
+ };
+
+ const value = useMemo(() => ({ tokens, me, login, logout, refresh }), [tokens, me]);
+ return {children};
+}
+
+export function useAuth() {
+ const ctx = useContext(AuthCtx);
+ if (!ctx) throw new Error("useAuth must be used inside AuthProvider");
+ return ctx;
+}
diff --git a/frontend/src/styles.css b/frontend/src/styles.css
new file mode 100644
index 0000000..add6f40
--- /dev/null
+++ b/frontend/src/styles.css
@@ -0,0 +1,163 @@
+/* Design tokens shared by the whole UI. */
+:root {
+  --bg: #0b1020;
+  --bg2: #131a30;
+  --card: #1b233d;
+  --text: #e5edf7;
+  --muted: #98a6c0;
+  --accent: #38bdf8;
+  --danger: #ef4444;
+}
+
+* {
+  box-sizing: border-box;
+}
+
+body {
+  margin: 0;
+  font-family: "Space Grotesk", "Segoe UI", sans-serif;
+  color: var(--text);
+  background: radial-gradient(circle at top right, #1d335f, #0b1020 55%);
+}
+
+a {
+  color: var(--accent);
+  text-decoration: none;
+}
+
+/* App shell: fixed-width sidebar + fluid main column (stacked on small screens). */
+.shell {
+  display: grid;
+  grid-template-columns: 260px 1fr;
+  min-height: 100vh;
+}
+
+.sidebar {
+  background: linear-gradient(180deg, #10182f, #0a1022);
+  border-right: 1px solid #223056;
+  padding: 20px;
+  display: flex;
+  flex-direction: column;
+  gap: 20px;
+}
+
+.sidebar nav {
+  display: flex;
+  flex-direction: column;
+  gap: 10px;
+}
+
+/* Pinned to the bottom of the sidebar via margin-top: auto. */
+.profile {
+  margin-top: auto;
+  border-top: 1px solid #223056;
+  padding-top: 16px;
+}
+
+.role {
+  color: var(--muted);
+  margin-bottom: 10px;
+}
+
+.main {
+  padding: 24px;
+}
+
+.card {
+  background: color-mix(in oklab, var(--card), black 10%);
+  border: 1px solid #2a3a66;
+  border-radius: 14px;
+  padding: 16px;
+  margin-bottom: 16px;
+}
+
+/* Card grids; the media query below collapses them to one column. */
+.grid {
+  display: grid;
+  gap: 12px;
+}
+
+.grid.two {
+  grid-template-columns: repeat(2, minmax(0, 1fr));
+}
+
+.grid.three {
+  grid-template-columns: repeat(3, minmax(0, 1fr));
+}
+
+.stat strong {
+  font-size: 28px;
+  display: block;
+}
+
+/* Shared look for all form controls. */
+input,
+select,
+button {
+  background: #10182f;
+  color: var(--text);
+  border: 1px solid #2b3f74;
+  border-radius: 10px;
+  padding: 10px;
+}
+
+button {
+  cursor: pointer;
+}
+
+table {
+  width: 100%;
+  border-collapse: collapse;
+  font-size: 14px;
+}
+
+th,
+td {
+  text-align: left;
+  border-bottom: 1px solid #223056;
+  padding: 8px 6px;
+}
+
+.error {
+  color: #fecaca;
+  border-color: #7f1d1d;
+}
+
+.range-picker {
+  display: flex;
+  gap: 8px;
+  margin-bottom: 10px;
+}
+
+.range-picker .active {
+  border-color: var(--accent);
+}
+
+/* Full-viewport centering for the login screen. */
+.login-wrap {
+  min-height: 100vh;
+  display: grid;
+  place-items: center;
+}
+
+.login-card {
+  width: min(420px, 90vw);
+  display: grid;
+  gap: 8px;
+}
+
+/* Single-line clamp with ellipsis for long cell content. */
+.query {
+  max-width: 400px;
+  white-space: nowrap;
+  overflow: hidden;
+  text-overflow: ellipsis;
+}
+
+/* Narrow screens: stack the shell, keep the sidebar visible while scrolling. */
+@media (max-width: 980px) {
+  .shell {
+    grid-template-columns: 1fr;
+  }
+  .sidebar {
+    position: sticky;
+    top: 0;
+    z-index: 2;
+  }
+  .grid.two,
+  .grid.three {
+    grid-template-columns: 1fr;
+  }
+}
diff --git a/frontend/vite.config.js b/frontend/vite.config.js
new file mode 100644
index 0000000..081c8d9
--- /dev/null
+++ b/frontend/vite.config.js
@@ -0,0 +1,6 @@
+import { defineConfig } from "vite";
+import react from "@vitejs/plugin-react";
+
+// Vite configuration: registers the official React plugin; all other
+// settings use Vite defaults.
+export default defineConfig({
+  plugins: [react()],
+});
diff --git a/ops/.env.example b/ops/.env.example
new file mode 100644
index 0000000..4d6446c
--- /dev/null
+++ b/ops/.env.example
@@ -0,0 +1,27 @@
+# App
+APP_NAME=NexaPG Monitor
+ENVIRONMENT=dev
+LOG_LEVEL=INFO
+
+# Core DB
+DB_NAME=nexapg
+DB_USER=nexapg
+DB_PASSWORD=nexapg
+DB_PORT=5433
+
+# Backend
+BACKEND_PORT=8000
+JWT_SECRET_KEY=change_this_super_secret
+JWT_ALGORITHM=HS256
+JWT_ACCESS_TOKEN_MINUTES=15
+JWT_REFRESH_TOKEN_MINUTES=10080
+# Generate with: python -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())"
+ENCRYPTION_KEY=REPLACE_WITH_FERNET_KEY
+CORS_ORIGINS=http://localhost:5173,http://localhost:8080
+POLL_INTERVAL_SECONDS=30
+INIT_ADMIN_EMAIL=admin@example.com
+INIT_ADMIN_PASSWORD=ChangeMe123!
+
+# Frontend
+FRONTEND_PORT=5173
+VITE_API_URL=http://localhost:8000/api/v1
diff --git a/ops/scripts/down.sh b/ops/scripts/down.sh
new file mode 100644
index 0000000..6689568
--- /dev/null
+++ b/ops/scripts/down.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env sh
+set -e
+docker compose down
diff --git a/ops/scripts/logs.sh b/ops/scripts/logs.sh
new file mode 100644
index 0000000..e43bf16
--- /dev/null
+++ b/ops/scripts/logs.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env sh
+set -e
+docker compose logs -f --tail=200
diff --git a/ops/scripts/migrate.sh b/ops/scripts/migrate.sh
new file mode 100644
index 0000000..3b5143b
--- /dev/null
+++ b/ops/scripts/migrate.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env sh
+set -e
+docker compose exec backend alembic upgrade head
diff --git a/ops/scripts/up.sh b/ops/scripts/up.sh
new file mode 100644
index 0000000..bfc49c3
--- /dev/null
+++ b/ops/scripts/up.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env sh
+set -e
+docker compose up -d --build