From 8c4a555b880ce0d91849a6b26d4edc89e6f1e28b Mon Sep 17 00:00:00 2001 From: matteoscrugli Date: Wed, 17 Dec 2025 22:27:32 +0100 Subject: [PATCH] Add comprehensive backend features and mobile UI improvements MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Backend: - Add 2FA authentication with TOTP support - Add API keys management system - Add audit logging for security events - Add file upload/management system - Add notifications system with preferences - Add session management - Add webhooks integration - Add analytics endpoints - Add export functionality - Add password policy enforcement - Add new database migrations for core tables Frontend: - Add module position system (top/bottom sidebar sections) - Add search and notifications module configuration tabs - Add mobile logo replacing hamburger menu - Center page title absolutely when no tabs present - Align sidebar footer toggles with navigation items - Add lighter icon color in dark theme for mobile - Add API keys management page - Add notifications page with context - Add admin analytics and audit logs pages πŸ€– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- Dockerfile | 2 +- README.md | 30 +- backend/alembic/env.py | 6 + .../alembic/versions/005_add_core_tables.py | 255 ++++++++++++ backend/app/api/v1/analytics.py | 280 +++++++++++++ backend/app/api/v1/api_keys.py | 246 +++++++++++ backend/app/api/v1/audit.py | 162 ++++++++ backend/app/api/v1/auth.py | 381 ++++++++++++++++- backend/app/api/v1/export.py | 370 +++++++++++++++++ backend/app/api/v1/files.py | 377 +++++++++++++++++ backend/app/api/v1/health.py | 96 ++++- backend/app/api/v1/notifications.py | 244 +++++++++++ backend/app/api/v1/router.py | 11 +- backend/app/api/v1/sessions.py | 259 ++++++++++++ backend/app/api/v1/two_factor.py | 361 ++++++++++++++++ backend/app/api/v1/webhooks.py | 380 +++++++++++++++++ backend/app/core/password_policy.py | 240 +++++++++++ 
backend/app/core/settings_registry.py | 51 +++ backend/app/crud/__init__.py | 8 +- backend/app/crud/api_key.py | 184 ++++++++ backend/app/crud/audit_log.py | 228 ++++++++++ backend/app/crud/file.py | 264 ++++++++++++ backend/app/crud/notification.py | 233 +++++++++++ backend/app/crud/session.py | 274 ++++++++++++ backend/app/crud/webhook.py | 345 +++++++++++++++ backend/app/dependencies.py | 60 ++- backend/app/main.py | 17 + backend/app/models/__init__.py | 8 +- backend/app/models/api_key.py | 69 +++ backend/app/models/audit_log.py | 48 +++ backend/app/models/file.py | 43 ++ backend/app/models/notification.py | 51 +++ backend/app/models/session.py | 46 ++ backend/app/models/user.py | 23 + backend/app/models/webhook.py | 63 +++ backend/app/schemas/__init__.py | 41 +- backend/app/schemas/api_key.py | 55 +++ backend/app/schemas/audit_log.py | 65 +++ backend/app/schemas/auth.py | 17 + backend/app/schemas/file.py | 87 ++++ backend/app/schemas/notification.py | 75 ++++ backend/app/schemas/session.py | 55 +++ backend/app/schemas/webhook.py | 98 +++++ backend/migrate_db.py | 261 ++++++++++++ backend/requirements.txt | 8 + docker-compose.yml | 2 +- frontend/src/App.tsx | 26 +- frontend/src/api/client.ts | 210 +++++++++- frontend/src/components/Sidebar.tsx | 47 ++- frontend/src/components/UserMenu.tsx | 24 ++ frontend/src/contexts/AuthContext.tsx | 49 ++- frontend/src/contexts/ModulesContext.tsx | 59 ++- .../src/contexts/NotificationsContext.tsx | 53 +++ frontend/src/locales/en.json | 107 ++++- frontend/src/locales/it.json | 107 ++++- frontend/src/modules/index.ts | 18 + frontend/src/pages/APIKeys.tsx | 189 +++++++++ frontend/src/pages/Login.tsx | 86 +++- frontend/src/pages/Notifications.tsx | 166 ++++++++ frontend/src/pages/Settings.tsx | 393 +++++++++++++++++- frontend/src/pages/admin/Analytics.tsx | 155 +++++++ frontend/src/pages/admin/AuditLogs.tsx | 167 ++++++++ frontend/src/pages/admin/Features.tsx | 234 ++++++++--- frontend/src/styles/APIKeys.css | 154 +++++++ 
frontend/src/styles/AdminAnalytics.css | 196 +++++++++ frontend/src/styles/AdminAudit.css | 149 +++++++ frontend/src/styles/AdminPanel.css | 341 ++++++++++----- frontend/src/styles/Layout.css | 59 ++- frontend/src/styles/Login.css | 2 +- frontend/src/styles/Notifications.css | 202 +++++++++ frontend/src/styles/SettingsPage.css | 252 +++++++++-- frontend/src/styles/Sidebar.css | 115 ++++- frontend/src/styles/ThemeSettings.css | 7 +- frontend/src/styles/Users.css | 14 +- frontend/src/styles/theme/index.css | 6 + frontend/src/types/index.ts | 8 +- 76 files changed, 9751 insertions(+), 323 deletions(-) create mode 100644 backend/alembic/versions/005_add_core_tables.py create mode 100644 backend/app/api/v1/analytics.py create mode 100644 backend/app/api/v1/api_keys.py create mode 100644 backend/app/api/v1/audit.py create mode 100644 backend/app/api/v1/export.py create mode 100644 backend/app/api/v1/files.py create mode 100644 backend/app/api/v1/notifications.py create mode 100644 backend/app/api/v1/sessions.py create mode 100644 backend/app/api/v1/two_factor.py create mode 100644 backend/app/api/v1/webhooks.py create mode 100644 backend/app/core/password_policy.py create mode 100644 backend/app/crud/api_key.py create mode 100644 backend/app/crud/audit_log.py create mode 100644 backend/app/crud/file.py create mode 100644 backend/app/crud/notification.py create mode 100644 backend/app/crud/session.py create mode 100644 backend/app/crud/webhook.py create mode 100644 backend/app/models/api_key.py create mode 100644 backend/app/models/audit_log.py create mode 100644 backend/app/models/file.py create mode 100644 backend/app/models/notification.py create mode 100644 backend/app/models/session.py create mode 100644 backend/app/models/webhook.py create mode 100644 backend/app/schemas/api_key.py create mode 100644 backend/app/schemas/audit_log.py create mode 100644 backend/app/schemas/file.py create mode 100644 backend/app/schemas/notification.py create mode 100644 
backend/app/schemas/session.py create mode 100644 backend/app/schemas/webhook.py create mode 100644 backend/migrate_db.py create mode 100644 frontend/src/contexts/NotificationsContext.tsx create mode 100644 frontend/src/pages/APIKeys.tsx create mode 100644 frontend/src/pages/Notifications.tsx create mode 100644 frontend/src/pages/admin/Analytics.tsx create mode 100644 frontend/src/pages/admin/AuditLogs.tsx create mode 100644 frontend/src/styles/APIKeys.css create mode 100644 frontend/src/styles/AdminAnalytics.css create mode 100644 frontend/src/styles/AdminAudit.css create mode 100644 frontend/src/styles/Notifications.css diff --git a/Dockerfile b/Dockerfile index 1a86b21..2ae3987 100644 --- a/Dockerfile +++ b/Dockerfile @@ -59,7 +59,7 @@ EXPOSE 8000 # Health check HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \ - CMD python -c "import urllib.request; urllib.request.urlopen('http://localhost:8000/health')" || exit 1 + CMD python -c "import urllib.request; urllib.request.urlopen('http://localhost:8000/api/v1/health')" || exit 1 # Run database migrations and start the application CMD ["sh", "-c", "alembic upgrade head && uvicorn app.main:app --host 0.0.0.0 --port 8000"] diff --git a/README.md b/README.md index c26cc19..2d0469a 100644 --- a/README.md +++ b/README.md @@ -2,14 +2,17 @@ Modern web application for service management. Built with React, FastAPI, and SQLite. 
- ## Features +## Features - - πŸ‘₯ User Management - - πŸ” Authentication & Authorization - - 🎨 Modern, responsive UI with dark mode - - 🐳 Fully containerized with Docker + - User management (admin) + - Authentication (JWT) + 2FA (TOTP) + API keys + - Active sessions (view/revoke) + - Audit log + analytics (admin) + - In-app notifications + - Modern, responsive UI with theming + - Fully containerized with Docker - ## Technology Stack +## Technology Stack ### Frontend - React 19 + TypeScript @@ -17,7 +20,7 @@ - React Router - Axios - ### Backend +### Backend - FastAPI (Python 3.11+) - SQLAlchemy 2.0 (ORM) - SQLite (Database) @@ -86,15 +89,20 @@ └── scripts/ # Utility scripts ``` - ## Configuration +## Configuration - ### Environment Variables +### Environment Variables - See `.env.example` for all available configuration options. +See `.env.example` for all available configuration options. - Key variables: +Key variables: - `SECRET_KEY`: JWT secret key - `ALLOWED_HOSTS`: CORS configuration + +### Persistent Data + +- SQLite database: `sqlite:////config/config.db` (bind-mounted via `./config:/config`) +- Uploads: `/config/uploads` by default (can be overridden with `FILE_STORAGE_PATH`) ## Contributing diff --git a/backend/alembic/env.py b/backend/alembic/env.py index 2256e23..e9eea82 100644 --- a/backend/alembic/env.py +++ b/backend/alembic/env.py @@ -16,6 +16,12 @@ from app.config import settings # Import all models so Alembic can detect them for auto-generating migrations from app.models.user import User # noqa from app.models.settings import Settings # noqa +from app.models.audit_log import AuditLog # noqa +from app.models.api_key import APIKey # noqa +from app.models.notification import Notification # noqa +from app.models.session import UserSession # noqa +from app.models.webhook import Webhook, WebhookDelivery # noqa +from app.models.file import StoredFile # noqa # this is the Alembic Config object, which provides # access to the values within the .ini file 
in use. diff --git a/backend/alembic/versions/005_add_core_tables.py b/backend/alembic/versions/005_add_core_tables.py new file mode 100644 index 0000000..39edd39 --- /dev/null +++ b/backend/alembic/versions/005_add_core_tables.py @@ -0,0 +1,255 @@ +"""Add core tables (audit, sessions, notifications, webhooks, files, api keys, 2FA fields) + +Revision ID: 005 +Revises: 004 +Create Date: 2025-12-15 00:00:00.000000 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy import inspect + + +# revision identifiers, used by Alembic. +revision: str = "005" +down_revision: Union[str, None] = "004" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def _has_column(inspector, table_name: str, column_name: str) -> bool: + try: + return any(col.get("name") == column_name for col in inspector.get_columns(table_name)) + except Exception: + return False + + +def upgrade() -> None: + bind = op.get_bind() + inspector = inspect(bind) + existing_tables = set(inspector.get_table_names()) + + # ------------------------------------------------------------------------- + # Users table: add 2FA columns if missing + # ------------------------------------------------------------------------- + if "users" in existing_tables: + if not _has_column(inspector, "users", "totp_secret"): + op.add_column("users", sa.Column("totp_secret", sa.String(length=32), nullable=True)) + if not _has_column(inspector, "users", "totp_enabled"): + op.add_column( + "users", + sa.Column("totp_enabled", sa.Boolean(), nullable=False, server_default="0"), + ) + if not _has_column(inspector, "users", "totp_backup_codes"): + op.add_column("users", sa.Column("totp_backup_codes", sa.Text(), nullable=True)) + + # ------------------------------------------------------------------------- + # Audit logs + # ------------------------------------------------------------------------- + if "audit_logs" not in 
existing_tables: + op.create_table( + "audit_logs", + sa.Column("id", sa.String(length=36), primary_key=True), + sa.Column("user_id", sa.String(length=36), sa.ForeignKey("users.id", ondelete="SET NULL"), nullable=True), + sa.Column("username", sa.String(length=100), nullable=True), + sa.Column("action", sa.String(length=50), nullable=False), + sa.Column("resource_type", sa.String(length=50), nullable=True), + sa.Column("resource_id", sa.String(length=255), nullable=True), + sa.Column("details", sa.Text(), nullable=True), + sa.Column("ip_address", sa.String(length=45), nullable=True), + sa.Column("user_agent", sa.String(length=500), nullable=True), + sa.Column("status", sa.String(length=20), nullable=False, server_default=sa.text("'success'")), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("(datetime('now'))"), nullable=False), + ) + op.create_index("ix_audit_logs_user_id", "audit_logs", ["user_id"]) + op.create_index("ix_audit_logs_action", "audit_logs", ["action"]) + op.create_index("ix_audit_logs_resource_type", "audit_logs", ["resource_type"]) + op.create_index("ix_audit_logs_created_at", "audit_logs", ["created_at"]) + op.create_index("ix_audit_user_action", "audit_logs", ["user_id", "action"]) + op.create_index("ix_audit_resource", "audit_logs", ["resource_type", "resource_id"]) + + # ------------------------------------------------------------------------- + # User sessions + # ------------------------------------------------------------------------- + if "user_sessions" not in existing_tables: + op.create_table( + "user_sessions", + sa.Column("id", sa.String(length=36), primary_key=True), + sa.Column("user_id", sa.String(length=36), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False), + sa.Column("token_hash", sa.String(length=64), nullable=False), + sa.Column("device_name", sa.String(length=200), nullable=True), + sa.Column("device_type", sa.String(length=50), nullable=True), + sa.Column("browser", sa.String(length=100), 
nullable=True), + sa.Column("os", sa.String(length=100), nullable=True), + sa.Column("user_agent", sa.String(length=500), nullable=True), + sa.Column("ip_address", sa.String(length=45), nullable=True), + sa.Column("location", sa.String(length=200), nullable=True), + sa.Column("is_active", sa.Boolean(), nullable=False, server_default="1"), + sa.Column("is_current", sa.Boolean(), nullable=False, server_default="0"), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("(datetime('now'))"), nullable=False), + sa.Column("last_active_at", sa.DateTime(), server_default=sa.text("(datetime('now'))"), nullable=False), + sa.Column("expires_at", sa.DateTime(), nullable=True), + sa.Column("revoked_at", sa.DateTime(), nullable=True), + ) + op.create_index("ix_user_sessions_user_id", "user_sessions", ["user_id"]) + op.create_index("ix_user_sessions_token_hash", "user_sessions", ["token_hash"], unique=True) + op.create_index("ix_user_sessions_created_at", "user_sessions", ["created_at"]) + + # ------------------------------------------------------------------------- + # Notifications + # ------------------------------------------------------------------------- + if "notifications" not in existing_tables: + op.create_table( + "notifications", + sa.Column("id", sa.String(length=36), primary_key=True), + sa.Column("user_id", sa.String(length=36), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False), + sa.Column("title", sa.String(length=200), nullable=False), + sa.Column("message", sa.Text(), nullable=True), + sa.Column("type", sa.String(length=50), nullable=False, server_default=sa.text("'info'")), + sa.Column("link", sa.String(length=500), nullable=True), + sa.Column("metadata", sa.Text(), nullable=True), + sa.Column("is_read", sa.Boolean(), nullable=False, server_default="0"), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("(datetime('now'))"), nullable=False), + sa.Column("read_at", sa.DateTime(), nullable=True), + ) + 
op.create_index("ix_notifications_user_id", "notifications", ["user_id"]) + op.create_index("ix_notifications_is_read", "notifications", ["is_read"]) + op.create_index("ix_notifications_created_at", "notifications", ["created_at"]) + + # ------------------------------------------------------------------------- + # API keys + # ------------------------------------------------------------------------- + if "api_keys" not in existing_tables: + op.create_table( + "api_keys", + sa.Column("id", sa.String(length=36), primary_key=True), + sa.Column("user_id", sa.String(length=36), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False), + sa.Column("name", sa.String(length=100), nullable=False), + sa.Column("key_hash", sa.String(length=255), nullable=False), + sa.Column("key_prefix", sa.String(length=20), nullable=False), + sa.Column("scopes", sa.Text(), nullable=True), + sa.Column("is_active", sa.Boolean(), nullable=False, server_default="1"), + sa.Column("last_used_at", sa.DateTime(), nullable=True), + sa.Column("last_used_ip", sa.String(length=45), nullable=True), + sa.Column("usage_count", sa.String(length=20), nullable=False, server_default="0"), + sa.Column("expires_at", sa.DateTime(), nullable=True), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("(datetime('now'))"), nullable=False), + sa.Column("updated_at", sa.DateTime(), server_default=sa.text("(datetime('now'))"), nullable=False), + ) + op.create_index("ix_api_keys_user_id", "api_keys", ["user_id"]) + op.create_index("ix_api_keys_key_hash", "api_keys", ["key_hash"], unique=True) + + # ------------------------------------------------------------------------- + # Webhooks + # ------------------------------------------------------------------------- + if "webhooks" not in existing_tables: + op.create_table( + "webhooks", + sa.Column("id", sa.String(length=36), primary_key=True), + sa.Column("name", sa.String(length=100), nullable=False), + sa.Column("url", sa.String(length=500), 
nullable=False), + sa.Column("secret", sa.String(length=64), nullable=True), + sa.Column("events", sa.Text(), nullable=False, server_default=sa.text("'[\"*\"]'")), + sa.Column("is_active", sa.Boolean(), nullable=False, server_default="1"), + sa.Column("retry_count", sa.Integer(), nullable=False, server_default="3"), + sa.Column("timeout_seconds", sa.Integer(), nullable=False, server_default="30"), + sa.Column("created_by", sa.String(length=36), sa.ForeignKey("users.id", ondelete="SET NULL"), nullable=True), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("(datetime('now'))"), nullable=False), + sa.Column("updated_at", sa.DateTime(), server_default=sa.text("(datetime('now'))"), nullable=False), + sa.Column("last_triggered_at", sa.DateTime(), nullable=True), + sa.Column("success_count", sa.Integer(), nullable=False, server_default="0"), + sa.Column("failure_count", sa.Integer(), nullable=False, server_default="0"), + ) + + if "webhook_deliveries" not in existing_tables: + op.create_table( + "webhook_deliveries", + sa.Column("id", sa.String(length=36), primary_key=True), + sa.Column("webhook_id", sa.String(length=36), sa.ForeignKey("webhooks.id", ondelete="CASCADE"), nullable=False), + sa.Column("event_type", sa.String(length=50), nullable=False), + sa.Column("payload", sa.Text(), nullable=False), + sa.Column("status", sa.String(length=20), nullable=False, server_default=sa.text("'pending'")), + sa.Column("status_code", sa.Integer(), nullable=True), + sa.Column("response_body", sa.Text(), nullable=True), + sa.Column("error_message", sa.Text(), nullable=True), + sa.Column("attempt_count", sa.Integer(), nullable=False, server_default="0"), + sa.Column("next_retry_at", sa.DateTime(), nullable=True), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("(datetime('now'))"), nullable=False), + sa.Column("delivered_at", sa.DateTime(), nullable=True), + ) + op.create_index("ix_webhook_deliveries_webhook_id", "webhook_deliveries", ["webhook_id"]) + 
op.create_index("ix_webhook_deliveries_status", "webhook_deliveries", ["status"]) + + # ------------------------------------------------------------------------- + # Stored files + # ------------------------------------------------------------------------- + if "stored_files" not in existing_tables: + op.create_table( + "stored_files", + sa.Column("id", sa.String(length=36), primary_key=True), + sa.Column("original_filename", sa.String(length=255), nullable=False), + sa.Column("content_type", sa.String(length=100), nullable=True), + sa.Column("size_bytes", sa.BigInteger(), nullable=False), + sa.Column("storage_path", sa.String(length=500), nullable=False), + sa.Column("storage_type", sa.String(length=20), nullable=False, server_default=sa.text("'local'")), + sa.Column("description", sa.Text(), nullable=True), + sa.Column("tags", sa.Text(), nullable=True), + sa.Column("is_public", sa.Boolean(), nullable=False, server_default="0"), + sa.Column("uploaded_by", sa.String(length=36), sa.ForeignKey("users.id", ondelete="SET NULL"), nullable=True), + sa.Column("file_hash", sa.String(length=64), nullable=True), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("(datetime('now'))"), nullable=False), + sa.Column("updated_at", sa.DateTime(), server_default=sa.text("(datetime('now'))"), nullable=False), + sa.Column("is_deleted", sa.Boolean(), nullable=False, server_default="0"), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + ) + op.create_index("ix_stored_files_file_hash", "stored_files", ["file_hash"]) + + +def downgrade() -> None: + # Best-effort downgrade (may not be supported on SQLite for some operations). 
+ bind = op.get_bind() + inspector = inspect(bind) + existing_tables = set(inspector.get_table_names()) + + if "stored_files" in existing_tables: + op.drop_index("ix_stored_files_file_hash", table_name="stored_files") + op.drop_table("stored_files") + + if "webhook_deliveries" in existing_tables: + op.drop_index("ix_webhook_deliveries_status", table_name="webhook_deliveries") + op.drop_index("ix_webhook_deliveries_webhook_id", table_name="webhook_deliveries") + op.drop_table("webhook_deliveries") + + if "webhooks" in existing_tables: + op.drop_table("webhooks") + + if "api_keys" in existing_tables: + op.drop_index("ix_api_keys_key_hash", table_name="api_keys") + op.drop_index("ix_api_keys_user_id", table_name="api_keys") + op.drop_table("api_keys") + + if "notifications" in existing_tables: + op.drop_index("ix_notifications_created_at", table_name="notifications") + op.drop_index("ix_notifications_is_read", table_name="notifications") + op.drop_index("ix_notifications_user_id", table_name="notifications") + op.drop_table("notifications") + + if "user_sessions" in existing_tables: + op.drop_index("ix_user_sessions_created_at", table_name="user_sessions") + op.drop_index("ix_user_sessions_token_hash", table_name="user_sessions") + op.drop_index("ix_user_sessions_user_id", table_name="user_sessions") + op.drop_table("user_sessions") + + if "audit_logs" in existing_tables: + op.drop_index("ix_audit_resource", table_name="audit_logs") + op.drop_index("ix_audit_user_action", table_name="audit_logs") + op.drop_index("ix_audit_logs_created_at", table_name="audit_logs") + op.drop_index("ix_audit_logs_resource_type", table_name="audit_logs") + op.drop_index("ix_audit_logs_action", table_name="audit_logs") + op.drop_index("ix_audit_logs_user_id", table_name="audit_logs") + op.drop_table("audit_logs") + + # Columns on users are intentionally not removed (SQLite limitations). 
diff --git a/backend/app/api/v1/analytics.py b/backend/app/api/v1/analytics.py new file mode 100644 index 0000000..bc4a694 --- /dev/null +++ b/backend/app/api/v1/analytics.py @@ -0,0 +1,280 @@ +"""Dashboard analytics endpoints.""" + +import json +from datetime import datetime, timedelta +from typing import Any, Optional +from fastapi import APIRouter, Depends, Query +from sqlalchemy.orm import Session +from sqlalchemy import func, desc + +from app.dependencies import get_db, get_current_superuser +from app.models.user import User +from app.models.audit_log import AuditLog +from app.models.session import UserSession +from app.models.notification import Notification +from app.models.api_key import APIKey +from app import crud + + +router = APIRouter() + + +@router.get("/overview") +def get_analytics_overview( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +) -> Any: + """ + Get dashboard overview analytics. + Returns summary statistics for the admin dashboard. 
+ """ + now = datetime.utcnow() + today_start = now.replace(hour=0, minute=0, second=0, microsecond=0) + week_start = today_start - timedelta(days=today_start.weekday()) + month_start = today_start.replace(day=1) + + # User statistics + total_users = db.query(func.count(User.id)).scalar() or 0 + active_users = db.query(func.count(User.id)).filter(User.is_active == True).scalar() or 0 + new_users_today = db.query(func.count(User.id)).filter(User.created_at >= today_start).scalar() or 0 + new_users_week = db.query(func.count(User.id)).filter(User.created_at >= week_start).scalar() or 0 + new_users_month = db.query(func.count(User.id)).filter(User.created_at >= month_start).scalar() or 0 + + # Active sessions + active_sessions = db.query(func.count(UserSession.id))\ + .filter(UserSession.is_active == True).scalar() or 0 + + # API Keys + total_api_keys = db.query(func.count(APIKey.id)).scalar() or 0 + active_api_keys = db.query(func.count(APIKey.id)).filter(APIKey.is_active == True).scalar() or 0 + + # Recent logins (last 24h) + logins_24h = db.query(func.count(AuditLog.id))\ + .filter(AuditLog.action == "login")\ + .filter(AuditLog.status == "success")\ + .filter(AuditLog.created_at >= now - timedelta(hours=24))\ + .scalar() or 0 + + # Failed logins (last 24h) + failed_logins_24h = db.query(func.count(AuditLog.id))\ + .filter(AuditLog.action == "login")\ + .filter(AuditLog.status == "failure")\ + .filter(AuditLog.created_at >= now - timedelta(hours=24))\ + .scalar() or 0 + + # Notifications stats + unread_notifications = db.query(func.count(Notification.id))\ + .filter(Notification.is_read == False).scalar() or 0 + + return { + "users": { + "total": total_users, + "active": active_users, + "new_today": new_users_today, + "new_this_week": new_users_week, + "new_this_month": new_users_month + }, + "sessions": { + "active": active_sessions + }, + "api_keys": { + "total": total_api_keys, + "active": active_api_keys + }, + "security": { + "logins_24h": logins_24h, + 
"failed_logins_24h": failed_logins_24h + }, + "notifications": { + "unread_total": unread_notifications + }, + "generated_at": now.isoformat() + } + + +@router.get("/activity") +def get_recent_activity( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), + limit: int = Query(20, ge=1, le=100) +) -> Any: + """ + Get recent activity from audit logs. + """ + logs = db.query(AuditLog)\ + .order_by(desc(AuditLog.created_at))\ + .limit(limit)\ + .all() + + return { + "items": [ + { + "id": log.id, + "user_id": log.user_id, + "username": log.username, + "action": log.action, + "resource_type": log.resource_type, + "resource_id": log.resource_id, + "status": log.status, + "ip_address": log.ip_address, + "created_at": log.created_at.isoformat() + } + for log in logs + ] + } + + +@router.get("/users/activity") +def get_user_activity_stats( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), + days: int = Query(7, ge=1, le=90) +) -> Any: + """ + Get user activity statistics over time. + Returns daily active users and new registrations. 
+ """ + now = datetime.utcnow() + start_date = now - timedelta(days=days) + + # Get daily stats + daily_stats = [] + for i in range(days): + day_start = (start_date + timedelta(days=i)).replace(hour=0, minute=0, second=0, microsecond=0) + day_end = day_start + timedelta(days=1) + + # Active users (users who logged in that day) + active = db.query(func.count(func.distinct(AuditLog.user_id)))\ + .filter(AuditLog.action == "login")\ + .filter(AuditLog.status == "success")\ + .filter(AuditLog.created_at >= day_start)\ + .filter(AuditLog.created_at < day_end)\ + .scalar() or 0 + + # New registrations + new_users = db.query(func.count(User.id))\ + .filter(User.created_at >= day_start)\ + .filter(User.created_at < day_end)\ + .scalar() or 0 + + daily_stats.append({ + "date": day_start.strftime("%Y-%m-%d"), + "active_users": active, + "new_users": new_users + }) + + return {"daily_stats": daily_stats} + + +@router.get("/actions/breakdown") +def get_actions_breakdown( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), + hours: int = Query(24, ge=1, le=168) +) -> Any: + """ + Get breakdown of actions by type. + """ + since = datetime.utcnow() - timedelta(hours=hours) + + # Group by action + actions = db.query( + AuditLog.action, + func.count(AuditLog.id).label('count') + ).filter(AuditLog.created_at >= since)\ + .group_by(AuditLog.action)\ + .order_by(desc('count'))\ + .all() + + return { + "period_hours": hours, + "actions": [{"action": action, "count": count} for action, count in actions] + } + + +@router.get("/top-users") +def get_top_users( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), + limit: int = Query(10, ge=1, le=50), + days: int = Query(7, ge=1, le=90) +) -> Any: + """ + Get most active users by action count. 
+ """ + since = datetime.utcnow() - timedelta(days=days) + + top_users = db.query( + AuditLog.user_id, + AuditLog.username, + func.count(AuditLog.id).label('action_count') + ).filter(AuditLog.created_at >= since)\ + .filter(AuditLog.user_id.isnot(None))\ + .group_by(AuditLog.user_id, AuditLog.username)\ + .order_by(desc('action_count'))\ + .limit(limit)\ + .all() + + return { + "period_days": days, + "users": [ + {"user_id": uid, "username": uname, "action_count": count} + for uid, uname, count in top_users + ] + } + + +@router.get("/security/failed-logins") +def get_failed_logins( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), + hours: int = Query(24, ge=1, le=168) +) -> Any: + """ + Get failed login attempts grouped by IP address. + """ + since = datetime.utcnow() - timedelta(hours=hours) + + # Failed logins by IP + by_ip = db.query( + AuditLog.ip_address, + func.count(AuditLog.id).label('count') + ).filter(AuditLog.action == "login")\ + .filter(AuditLog.status == "failure")\ + .filter(AuditLog.created_at >= since)\ + .group_by(AuditLog.ip_address)\ + .order_by(desc('count'))\ + .limit(20)\ + .all() + + # Recent failed login details + recent = db.query(AuditLog)\ + .filter(AuditLog.action == "login")\ + .filter(AuditLog.status == "failure")\ + .filter(AuditLog.created_at >= since)\ + .order_by(desc(AuditLog.created_at))\ + .limit(50)\ + .all() + + recent_items = [] + for log in recent: + attempted_username = log.username + if not attempted_username and log.details: + try: + parsed = json.loads(log.details) + except json.JSONDecodeError: + parsed = None + if isinstance(parsed, dict): + attempted_username = parsed.get("username") + + recent_items.append( + { + "id": log.id, + "username": attempted_username, + "ip_address": log.ip_address, + "user_agent": log.user_agent, + "created_at": log.created_at.isoformat(), + } + ) + + return {"period_hours": hours, "by_ip": [{"ip": ip, "count": count} for ip, count in by_ip], 
"recent": recent_items} diff --git a/backend/app/api/v1/api_keys.py b/backend/app/api/v1/api_keys.py new file mode 100644 index 0000000..0a2e49f --- /dev/null +++ b/backend/app/api/v1/api_keys.py @@ -0,0 +1,246 @@ +"""API Key management endpoints.""" + +import json +from typing import Any +from fastapi import APIRouter, Depends, HTTPException, status, Request +from sqlalchemy.orm import Session + +from app.dependencies import get_db, get_current_user +from app.models.user import User +from app import crud +from app.schemas.api_key import ( + APIKey, + APIKeyCreate, + APIKeyUpdate, + APIKeyWithSecret, + APIKeyList +) + + +router = APIRouter() + +MAX_KEYS_PER_USER = 10 # Limit API keys per user + + +def get_client_ip(request: Request) -> str: + """Extract client IP from request.""" + forwarded = request.headers.get("X-Forwarded-For") + if forwarded: + return forwarded.split(",")[0].strip() + return request.client.host if request.client else "unknown" + + +def serialize_api_key(db_obj) -> dict: + """Serialize API key for response.""" + scopes = None + if db_obj.scopes: + try: + scopes = json.loads(db_obj.scopes) + except json.JSONDecodeError: + scopes = [] + + return { + "id": db_obj.id, + "user_id": db_obj.user_id, + "name": db_obj.name, + "key_prefix": db_obj.key_prefix, + "scopes": scopes, + "is_active": db_obj.is_active, + "last_used_at": db_obj.last_used_at, + "last_used_ip": db_obj.last_used_ip, + "usage_count": int(db_obj.usage_count or "0"), + "expires_at": db_obj.expires_at, + "created_at": db_obj.created_at, + "updated_at": db_obj.updated_at + } + + +@router.post("", response_model=APIKeyWithSecret, status_code=status.HTTP_201_CREATED) +def create_api_key( + request: Request, + *, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), + key_in: APIKeyCreate +) -> Any: + """ + Create a new API key. + The actual key is only returned once on creation. 
+ """ + # Check key limit + key_count = crud.api_key.count_by_user(db, current_user.id) + if key_count >= MAX_KEYS_PER_USER: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Maximum {MAX_KEYS_PER_USER} API keys allowed per user" + ) + + db_obj, plain_key = crud.api_key.create(db, obj_in=key_in, user_id=current_user.id) + + # Log the action + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="create", + resource_type="api_key", + resource_id=db_obj.id, + details={"name": key_in.name}, + ip_address=get_client_ip(request), + user_agent=request.headers.get("User-Agent", "")[:500], + status="success" + ) + + result = serialize_api_key(db_obj) + result["key"] = plain_key + return result + + +@router.get("", response_model=APIKeyList) +def list_api_keys( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +) -> Any: + """ + List all API keys for the current user. + """ + keys = crud.api_key.get_multi_by_user(db, user_id=current_user.id) + total = crud.api_key.count_by_user(db, current_user.id) + + return { + "items": [serialize_api_key(k) for k in keys], + "total": total + } + + +@router.get("/{key_id}", response_model=APIKey) +def get_api_key( + key_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +) -> Any: + """ + Get a specific API key. + """ + db_obj = crud.api_key.get(db, id=key_id) + if not db_obj or db_obj.user_id != current_user.id: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="API key not found" + ) + + return serialize_api_key(db_obj) + + +@router.patch("/{key_id}", response_model=APIKey) +def update_api_key( + request: Request, + key_id: str, + *, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), + key_in: APIKeyUpdate +) -> Any: + """ + Update an API key. 
+ """ + db_obj = crud.api_key.get(db, id=key_id) + if not db_obj or db_obj.user_id != current_user.id: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="API key not found" + ) + + db_obj = crud.api_key.update(db, db_obj=db_obj, obj_in=key_in) + + # Log the action + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="update", + resource_type="api_key", + resource_id=db_obj.id, + details={"name": db_obj.name, "changes": key_in.model_dump(exclude_unset=True)}, + ip_address=get_client_ip(request), + user_agent=request.headers.get("User-Agent", "")[:500], + status="success" + ) + + return serialize_api_key(db_obj) + + +@router.post("/{key_id}/revoke", response_model=APIKey) +def revoke_api_key( + request: Request, + key_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +) -> Any: + """ + Revoke (deactivate) an API key. + """ + db_obj = crud.api_key.get(db, id=key_id) + if not db_obj or db_obj.user_id != current_user.id: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="API key not found" + ) + + db_obj = crud.api_key.revoke(db, id=key_id) + + # Log the action + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="revoke", + resource_type="api_key", + resource_id=db_obj.id, + details={"name": db_obj.name}, + ip_address=get_client_ip(request), + user_agent=request.headers.get("User-Agent", "")[:500], + status="success" + ) + + return serialize_api_key(db_obj) + + +@router.delete("/{key_id}", status_code=status.HTTP_204_NO_CONTENT) +def delete_api_key( + request: Request, + key_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +) -> None: + """ + Delete an API key. 
+ """ + db_obj = crud.api_key.get(db, id=key_id) + if not db_obj or db_obj.user_id != current_user.id: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="API key not found" + ) + + key_name = db_obj.name + + if not crud.api_key.delete(db, id=key_id): + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to delete API key" + ) + + # Log the action + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="delete", + resource_type="api_key", + resource_id=key_id, + details={"name": key_name}, + ip_address=get_client_ip(request), + user_agent=request.headers.get("User-Agent", "")[:500], + status="success" + ) diff --git a/backend/app/api/v1/audit.py b/backend/app/api/v1/audit.py new file mode 100644 index 0000000..98b2161 --- /dev/null +++ b/backend/app/api/v1/audit.py @@ -0,0 +1,162 @@ +"""Audit log API endpoints.""" + +from datetime import datetime +from typing import Optional +from fastapi import APIRouter, Depends, Query, HTTPException, status +from sqlalchemy.orm import Session + +from app.dependencies import get_db, get_current_superuser +from app.models.user import User +from app import crud +from app.schemas.audit_log import ( + AuditLog, + AuditLogList, + AuditLogFilter, + AuditLogStats +) + + +router = APIRouter() + + +@router.get("", response_model=AuditLogList) +def get_audit_logs( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), + page: int = Query(1, ge=1), + page_size: int = Query(50, ge=1, le=100), + user_id: Optional[str] = None, + username: Optional[str] = None, + action: Optional[str] = None, + resource_type: Optional[str] = None, + resource_id: Optional[str] = None, + status_filter: Optional[str] = Query(None, alias="status"), + start_date: Optional[datetime] = None, + end_date: Optional[datetime] = None, +): + """ + Get paginated audit logs with optional filtering. + Requires superuser authentication. 
+ """ + filters = AuditLogFilter( + user_id=user_id, + username=username, + action=action, + resource_type=resource_type, + resource_id=resource_id, + status=status_filter, + start_date=start_date, + end_date=end_date + ) + + skip = (page - 1) * page_size + items, total = crud.audit_log.get_multi( + db, skip=skip, limit=page_size, filters=filters + ) + + total_pages = (total + page_size - 1) // page_size + + return AuditLogList( + items=items, + total=total, + page=page, + page_size=page_size, + total_pages=total_pages + ) + + +@router.get("/stats", response_model=AuditLogStats) +def get_audit_stats( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +): + """ + Get audit log statistics. + Requires superuser authentication. + """ + return crud.audit_log.get_stats(db) + + +@router.get("/actions") +def get_distinct_actions( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +): + """ + Get list of distinct action types for filtering. + """ + return {"actions": crud.audit_log.get_distinct_actions(db)} + + +@router.get("/resource-types") +def get_distinct_resource_types( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +): + """ + Get list of distinct resource types for filtering. + """ + return {"resource_types": crud.audit_log.get_distinct_resource_types(db)} + + +@router.get("/user/{user_id}", response_model=AuditLogList) +def get_user_audit_logs( + user_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), + page: int = Query(1, ge=1), + page_size: int = Query(50, ge=1, le=100), +): + """ + Get audit logs for a specific user. + Requires superuser authentication. 
+ """ + filters = AuditLogFilter(user_id=user_id) + skip = (page - 1) * page_size + items, total = crud.audit_log.get_multi( + db, skip=skip, limit=page_size, filters=filters + ) + + total_pages = (total + page_size - 1) // page_size + + return AuditLogList( + items=items, + total=total, + page=page, + page_size=page_size, + total_pages=total_pages + ) + + +@router.get("/{log_id}", response_model=AuditLog) +def get_audit_log( + log_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +): + """ + Get a specific audit log entry. + Requires superuser authentication. + """ + log = crud.audit_log.get(db, id=log_id) + if not log: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Audit log not found" + ) + return log + + +@router.delete("/cleanup") +def cleanup_old_logs( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), + days: int = Query(90, ge=1, le=365), +): + """ + Delete audit logs older than specified days. + Requires superuser authentication. + Default: 90 days. 
+ """ + deleted = crud.audit_log.delete_old(db, days=days) + return {"deleted": deleted, "days_threshold": days} diff --git a/backend/app/api/v1/auth.py b/backend/app/api/v1/auth.py index 4c21836..1caa0ac 100644 --- a/backend/app/api/v1/auth.py +++ b/backend/app/api/v1/auth.py @@ -2,11 +2,14 @@ from datetime import datetime, timedelta from typing import Any -from fastapi import APIRouter, Depends, HTTPException, status +from fastapi import APIRouter, Depends, HTTPException, status, Request +from fastapi.security import HTTPAuthorizationCredentials from sqlalchemy.orm import Session +from slowapi import Limiter +from slowapi.util import get_remote_address from app import crud, schemas -from app.dependencies import get_db, get_current_user +from app.dependencies import get_db, get_current_user, security from app.core.security import create_access_token from app.config import settings from app.models.user import User @@ -14,9 +17,22 @@ from app.models.user import User router = APIRouter() +# Rate limiter for auth endpoints +limiter = Limiter(key_func=get_remote_address) + + +def get_client_ip(request: Request) -> str: + """Extract client IP from request, considering proxies.""" + forwarded = request.headers.get("X-Forwarded-For") + if forwarded: + return forwarded.split(",")[0].strip() + return request.client.host if request.client else "unknown" + @router.post("/register", response_model=schemas.User, status_code=status.HTTP_201_CREATED) +@limiter.limit("5/minute") # Limit registration attempts def register( + request: Request, *, db: Session = Depends(get_db), user_in: schemas.RegisterRequest @@ -27,32 +43,54 @@ def register( Creates a new user account with the provided credentials. Registration can be disabled by administrators via settings. 
""" + ip_address = get_client_ip(request) + user_agent = request.headers.get("User-Agent", "")[:500] + # Check if this is the first user (always allow for initial setup) user_count = db.query(User).count() is_first_user = user_count == 0 - + # If not the first user, check if registration is enabled if not is_first_user: registration_enabled_raw = crud.settings.get_setting_value( - db, - key="registration_enabled", + db, + key="registration_enabled", default=True # Default to enabled if setting doesn't exist ) - + if isinstance(registration_enabled_raw, str): registration_enabled = registration_enabled_raw.strip().lower() in ("true", "1") else: registration_enabled = bool(registration_enabled_raw) if not registration_enabled: + # Log failed registration attempt + crud.audit_log.log_action( + db, + action="register", + resource_type="user", + details={"username": user_in.username, "reason": "registration_disabled"}, + ip_address=ip_address, + user_agent=user_agent, + status="failure" + ) raise HTTPException( status_code=status.HTTP_403_FORBIDDEN, detail="User registration is currently disabled" ) - + # Check if username already exists user = crud.user.get_by_username(db, username=user_in.username) if user: + crud.audit_log.log_action( + db, + action="register", + resource_type="user", + details={"username": user_in.username, "reason": "username_exists"}, + ip_address=ip_address, + user_agent=user_agent, + status="failure" + ) raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail="Username already registered" @@ -61,11 +99,44 @@ def register( # Check if email already exists user = crud.user.get_by_email(db, email=user_in.email) if user: + crud.audit_log.log_action( + db, + action="register", + resource_type="user", + details={"username": user_in.username, "reason": "email_exists"}, + ip_address=ip_address, + user_agent=user_agent, + status="failure" + ) raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail="Email already registered" ) + # 
Validate password against policy + from app.core.password_policy import get_password_policy, validate_password + policy = get_password_policy(db) + password_errors = validate_password( + user_in.password, + policy=policy, + username=user_in.username, + email=user_in.email + ) + if password_errors: + crud.audit_log.log_action( + db, + action="register", + resource_type="user", + details={"username": user_in.username, "reason": "weak_password"}, + ip_address=ip_address, + user_agent=user_agent, + status="failure" + ) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail={"message": "Password does not meet requirements", "errors": password_errors} + ) + # Create new user (first user becomes superuser) user_create = schemas.UserCreate( username=user_in.username, @@ -76,11 +147,28 @@ def register( ) user = crud.user.create(db, obj_in=user_create) + + # Log successful registration + crud.audit_log.log_action( + db, + user_id=user.id, + username=user.username, + action="register", + resource_type="user", + resource_id=user.id, + details={"is_first_user": is_first_user}, + ip_address=ip_address, + user_agent=user_agent, + status="success" + ) + return user -@router.post("/login", response_model=schemas.Token) +@router.post("/login") +@limiter.limit("10/minute") # Stricter limit for login attempts def login( + request: Request, *, db: Session = Depends(get_db), credentials: schemas.LoginRequest @@ -89,7 +177,11 @@ def login( Login and get access token. Authenticates user and returns a JWT access token. + If 2FA is enabled, returns requires_2fa=True with a temp_token. 
""" + ip_address = get_client_ip(request) + user_agent = request.headers.get("User-Agent", "")[:500] + # Authenticate user user = crud.user.authenticate( db, @@ -98,6 +190,16 @@ def login( ) if not user: + # Log failed login attempt + crud.audit_log.log_action( + db, + action="login", + resource_type="auth", + details={"username": credentials.username, "reason": "invalid_credentials"}, + ip_address=ip_address, + user_agent=user_agent, + status="failure" + ) raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail="Incorrect username or password", @@ -105,11 +207,70 @@ def login( ) if not crud.user.is_active(user): + # Log inactive user login attempt + crud.audit_log.log_action( + db, + user_id=user.id, + username=user.username, + action="login", + resource_type="auth", + details={"reason": "inactive_user"}, + ip_address=ip_address, + user_agent=user_agent, + status="failure" + ) raise HTTPException( status_code=status.HTTP_403_FORBIDDEN, detail="Inactive user" ) + # Check if 2FA is enabled + if user.totp_enabled: + # If TOTP code provided, verify it + if credentials.totp_code: + from app.api.v1.two_factor import verify_totp_or_backup + if not verify_totp_or_backup(user, credentials.totp_code, db): + crud.audit_log.log_action( + db, + user_id=user.id, + username=user.username, + action="login", + resource_type="auth", + details={"reason": "invalid_2fa_code"}, + ip_address=ip_address, + user_agent=user_agent, + status="failure" + ) + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid 2FA code" + ) + else: + # No code provided, return temp token for 2FA verification + temp_token_expires = timedelta(minutes=5) # 5 minute expiry + temp_token = create_access_token( + data={"sub": user.id, "temp": True, "purpose": "2fa"}, + expires_delta=temp_token_expires + ) + + crud.audit_log.log_action( + db, + user_id=user.id, + username=user.username, + action="login_2fa_required", + resource_type="auth", + ip_address=ip_address, + 
user_agent=user_agent, + status="pending" + ) + + return { + "access_token": None, + "token_type": "bearer", + "requires_2fa": True, + "temp_token": temp_token + } + # Update last_login timestamp user.last_login = datetime.utcnow() db.add(user) @@ -122,6 +283,135 @@ def login( expires_delta=access_token_expires ) + # Track session for revocation / active sessions + session = crud.session.create( + db, + user_id=user.id, + token=access_token, + user_agent=user_agent, + ip_address=ip_address, + expires_at=datetime.utcnow() + access_token_expires, + ) + crud.session.mark_as_current(db, session_id=session.id, user_id=user.id) + + # Log successful login + crud.audit_log.log_action( + db, + user_id=user.id, + username=user.username, + action="login", + resource_type="auth", + details={"session_id": session.id}, + ip_address=ip_address, + user_agent=user_agent, + status="success" + ) + + return { + "access_token": access_token, + "token_type": "bearer", + "requires_2fa": False, + "temp_token": None + } + + +@router.post("/verify-2fa", response_model=schemas.Token) +def verify_2fa_login( + request: Request, + *, + db: Session = Depends(get_db), + verify_request: schemas.Verify2FARequest +) -> Any: + """ + Complete login by verifying 2FA code. + + Use the temp_token from the login response along with the TOTP code. 
+ """ + from jose import JWTError + from app.core.security import decode_access_token + from app.api.v1.two_factor import verify_totp_or_backup + + ip_address = get_client_ip(request) + user_agent = request.headers.get("User-Agent", "")[:500] + + try: + payload = decode_access_token(verify_request.temp_token) + user_id = payload.get("sub") + is_temp = payload.get("temp") + purpose = payload.get("purpose") + + if not user_id or not is_temp or purpose != "2fa": + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid temporary token" + ) + except JWTError: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid or expired temporary token" + ) + + user = crud.user.get(db, id=user_id) + if not user: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="User not found" + ) + + # Verify TOTP code + if not verify_totp_or_backup(user, verify_request.code, db): + crud.audit_log.log_action( + db, + user_id=user.id, + username=user.username, + action="login_2fa_verify", + resource_type="auth", + details={"reason": "invalid_code"}, + ip_address=ip_address, + user_agent=user_agent, + status="failure" + ) + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid 2FA code" + ) + + # Update last_login timestamp + user.last_login = datetime.utcnow() + db.add(user) + db.commit() + + # Create full access token + access_token_expires = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES) + access_token = create_access_token( + data={"sub": user.id}, + expires_delta=access_token_expires + ) + + # Track session for revocation / active sessions + session = crud.session.create( + db, + user_id=user.id, + token=access_token, + user_agent=user_agent, + ip_address=ip_address, + expires_at=datetime.utcnow() + access_token_expires, + ) + crud.session.mark_as_current(db, session_id=session.id, user_id=user.id) + + # Log successful login + crud.audit_log.log_action( + db, + 
user_id=user.id, + username=user.username, + action="login", + resource_type="auth", + details={"method": "2fa", "session_id": session.id}, + ip_address=ip_address, + user_agent=user_agent, + status="success" + ) + return { "access_token": access_token, "token_type": "bearer" @@ -141,6 +431,41 @@ def read_users_me( return current_user +@router.post("/logout") +def logout( + request: Request, + credentials: HTTPAuthorizationCredentials = Depends(security), + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +) -> Any: + """ + Logout by revoking the current session. + + This invalidates the current access token server-side (session revocation). + """ + ip_address = get_client_ip(request) + user_agent = request.headers.get("User-Agent", "")[:500] + token = credentials.credentials + + session = crud.session.get_by_token(db, token) + if session: + crud.session.revoke(db, id=session.id, user_id=current_user.id) + + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="logout", + resource_type="auth", + details={"session_id": session.id if session else None}, + ip_address=ip_address, + user_agent=user_agent, + status="success", + ) + + return {"message": "Logged out"} + + @router.get("/registration-status") def get_registration_status( db: Session = Depends(get_db) @@ -163,3 +488,43 @@ def get_registration_status( registration_enabled = bool(registration_enabled_raw) return {"registration_enabled": registration_enabled} + + +@router.get("/password-requirements") +def get_password_requirements( + db: Session = Depends(get_db) +) -> Any: + """ + Get password requirements/policy. + + This is a public endpoint that returns the password policy + for display during registration. 
+ """ + from app.core.password_policy import get_password_policy, get_password_requirements as get_reqs + + policy = get_password_policy(db) + return get_reqs(policy) + + +@router.post("/check-password-strength") +def check_password_strength( + password_data: dict, + db: Session = Depends(get_db) +) -> Any: + """ + Check password strength. + + This is a public endpoint that returns password strength analysis + for real-time feedback during registration. + """ + from app.core.password_policy import check_password_strength as check_strength + + password = password_data.get("password", "") + if not password: + return { + "score": 0, + "level": "weak", + "feedback": ["Password is required"] + } + + return check_strength(password) diff --git a/backend/app/api/v1/export.py b/backend/app/api/v1/export.py new file mode 100644 index 0000000..0fb0097 --- /dev/null +++ b/backend/app/api/v1/export.py @@ -0,0 +1,370 @@ +"""Data export/import endpoints.""" + +import csv +import io +import json +from datetime import datetime +from typing import Any, List +from fastapi import APIRouter, Depends, HTTPException, status, UploadFile, File, Response +from fastapi.responses import StreamingResponse +from sqlalchemy.orm import Session +from pydantic import BaseModel + +from app.dependencies import get_db, get_current_superuser +from app.models.user import User +from app.models.audit_log import AuditLog +from app import crud, schemas + + +router = APIRouter() + + +class ImportResult(BaseModel): + """Import operation result.""" + success: int = 0 + failed: int = 0 + errors: List[str] = [] + + +# ============================================================================= +# EXPORT ENDPOINTS +# ============================================================================= + +@router.get("/users/csv") +def export_users_csv( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +): + """ + Export all users to CSV format. 
+ """ + users = db.query(User).all() + + # Create CSV in memory + output = io.StringIO() + writer = csv.writer(output) + + # Header + writer.writerow([ + "id", "username", "email", "is_active", "is_superuser", + "totp_enabled", "created_at", "last_login" + ]) + + # Data rows + for user in users: + writer.writerow([ + user.id, + user.username, + user.email, + user.is_active, + user.is_superuser, + user.totp_enabled, + user.created_at.isoformat() if user.created_at else "", + user.last_login.isoformat() if user.last_login else "" + ]) + + output.seek(0) + + # Log export + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="export", + resource_type="users", + details={"format": "csv", "count": len(users)}, + status="success" + ) + + return StreamingResponse( + iter([output.getvalue()]), + media_type="text/csv", + headers={ + "Content-Disposition": f"attachment; filename=users_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}.csv" + } + ) + + +@router.get("/users/json") +def export_users_json( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +): + """ + Export all users to JSON format. 
+ """ + users = db.query(User).all() + + data = { + "exported_at": datetime.utcnow().isoformat(), + "exported_by": current_user.username, + "count": len(users), + "users": [ + { + "id": user.id, + "username": user.username, + "email": user.email, + "is_active": user.is_active, + "is_superuser": user.is_superuser, + "totp_enabled": user.totp_enabled, + "permissions": user.permissions, + "created_at": user.created_at.isoformat() if user.created_at else None, + "last_login": user.last_login.isoformat() if user.last_login else None + } + for user in users + ] + } + + # Log export + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="export", + resource_type="users", + details={"format": "json", "count": len(users)}, + status="success" + ) + + content = json.dumps(data, indent=2) + + return Response( + content=content, + media_type="application/json", + headers={ + "Content-Disposition": f"attachment; filename=users_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}.json" + } + ) + + +@router.get("/settings/json") +def export_settings_json( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +): + """ + Export all settings to JSON format. 
+ """ + from app.models.settings import Settings + + settings_list = db.query(Settings).all() + + data = { + "exported_at": datetime.utcnow().isoformat(), + "exported_by": current_user.username, + "count": len(settings_list), + "settings": { + setting.key: setting.value + for setting in settings_list + } + } + + # Log export + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="export", + resource_type="settings", + details={"format": "json", "count": len(settings_list)}, + status="success" + ) + + content = json.dumps(data, indent=2) + + return Response( + content=content, + media_type="application/json", + headers={ + "Content-Disposition": f"attachment; filename=settings_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}.json" + } + ) + + +@router.get("/audit/csv") +def export_audit_csv( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), + days: int = 30 +): + """ + Export audit logs to CSV format. + """ + from datetime import timedelta + + since = datetime.utcnow() - timedelta(days=days) + logs = db.query(AuditLog).filter(AuditLog.created_at >= since).all() + + # Create CSV in memory + output = io.StringIO() + writer = csv.writer(output) + + # Header + writer.writerow([ + "id", "user_id", "username", "action", "resource_type", + "resource_id", "status", "ip_address", "created_at" + ]) + + # Data rows + for log in logs: + writer.writerow([ + log.id, + log.user_id, + log.username, + log.action, + log.resource_type, + log.resource_id, + log.status, + log.ip_address, + log.created_at.isoformat() if log.created_at else "" + ]) + + output.seek(0) + + return StreamingResponse( + iter([output.getvalue()]), + media_type="text/csv", + headers={ + "Content-Disposition": f"attachment; filename=audit_logs_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}.csv" + } + ) + + +# ============================================================================= +# IMPORT ENDPOINTS +# 
============================================================================= + +@router.post("/users/json", response_model=ImportResult) +async def import_users_json( + file: UploadFile = File(...), + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +): + """ + Import users from JSON file. + Only creates new users, does not update existing ones. + """ + result = ImportResult() + + try: + content = await file.read() + data = json.loads(content.decode()) + except json.JSONDecodeError as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Invalid JSON file: {str(e)}" + ) + + users_data = data.get("users", []) + if not users_data: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="No users found in file" + ) + + for user_data in users_data: + try: + username = user_data.get("username") + email = user_data.get("email") + + # Check if user already exists + if crud.user.get_by_username(db, username=username): + result.errors.append(f"User '{username}' already exists") + result.failed += 1 + continue + + if crud.user.get_by_email(db, email=email): + result.errors.append(f"Email '{email}' already exists") + result.failed += 1 + continue + + # Create user with a default password (must be changed) + import secrets + temp_password = secrets.token_urlsafe(16) + + user_create = schemas.UserCreate( + username=username, + email=email, + password=temp_password, + is_active=user_data.get("is_active", True), + is_superuser=user_data.get("is_superuser", False), + permissions=user_data.get("permissions") + ) + + crud.user.create(db, obj_in=user_create) + result.success += 1 + + except Exception as e: + result.errors.append(f"Error importing user: {str(e)}") + result.failed += 1 + + # Log import + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="import", + resource_type="users", + details={ + "format": "json", + "success": result.success, + 
"failed": result.failed + }, + status="success" if result.failed == 0 else "partial" + ) + + return result + + +@router.post("/settings/json", response_model=ImportResult) +async def import_settings_json( + file: UploadFile = File(...), + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +): + """ + Import settings from JSON file. + Updates existing settings and creates new ones. + """ + result = ImportResult() + + try: + content = await file.read() + data = json.loads(content.decode()) + except json.JSONDecodeError as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Invalid JSON file: {str(e)}" + ) + + settings_data = data.get("settings", {}) + if not settings_data: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="No settings found in file" + ) + + for key, value in settings_data.items(): + try: + crud.settings.update_setting(db, key=key, value=value) + result.success += 1 + except Exception as e: + result.errors.append(f"Error importing setting '{key}': {str(e)}") + result.failed += 1 + + # Log import + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="import", + resource_type="settings", + details={ + "format": "json", + "success": result.success, + "failed": result.failed + }, + status="success" if result.failed == 0 else "partial" + ) + + return result diff --git a/backend/app/api/v1/files.py b/backend/app/api/v1/files.py new file mode 100644 index 0000000..890fb67 --- /dev/null +++ b/backend/app/api/v1/files.py @@ -0,0 +1,377 @@ +"""File storage endpoints.""" + +import json +from typing import Any, List, Optional +from fastapi import APIRouter, Depends, HTTPException, status, UploadFile, File, Query +from fastapi.responses import FileResponse +from sqlalchemy.orm import Session + +from app.dependencies import get_db, get_current_user, get_current_superuser +from app.models.user import User +from app import crud 
+from app.schemas.file import ( + StoredFile as StoredFileSchema, + FileCreate, + FileUpdate, + FileUploadResponse, + FileListResponse, + ALLOWED_CONTENT_TYPES, + MAX_FILE_SIZE, +) + + +router = APIRouter() + + +def file_to_schema(db_file) -> dict: + """Convert a StoredFile model to schema dict.""" + return { + "id": db_file.id, + "original_filename": db_file.original_filename, + "content_type": db_file.content_type, + "size_bytes": db_file.size_bytes, + "storage_type": db_file.storage_type, + "description": db_file.description, + "tags": json.loads(db_file.tags) if db_file.tags else None, + "is_public": db_file.is_public, + "uploaded_by": db_file.uploaded_by, + "file_hash": db_file.file_hash, + "created_at": db_file.created_at, + "updated_at": db_file.updated_at, + } + + +@router.post("/upload", response_model=FileUploadResponse, status_code=status.HTTP_201_CREATED) +async def upload_file( + file: UploadFile = File(...), + description: Optional[str] = None, + tags: Optional[str] = None, # Comma-separated tags + is_public: bool = False, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """ + Upload a file. + Returns file metadata with download URL. 
+ """ + # Read file content + content = await file.read() + size = len(content) + + # Validate upload + is_valid, error = crud.file_storage.validate_upload( + content_type=file.content_type, + size_bytes=size + ) + if not is_valid: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=error + ) + + # Parse tags + tag_list = None + if tags: + tag_list = [t.strip() for t in tags.split(",") if t.strip()] + + # Create file metadata + metadata = FileCreate( + description=description, + tags=tag_list, + is_public=is_public + ) + + # Reset file position for reading + await file.seek(0) + + # Save file + import io + stored_file = crud.file_storage.create( + db, + file=io.BytesIO(content), + filename=file.filename, + content_type=file.content_type, + size_bytes=size, + uploaded_by=current_user.id, + metadata=metadata + ) + + # Log the action + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="upload", + resource_type="file", + resource_id=stored_file.id, + details={"filename": file.filename, "size": size}, + status="success" + ) + + return { + "id": stored_file.id, + "original_filename": stored_file.original_filename, + "content_type": stored_file.content_type, + "size_bytes": stored_file.size_bytes, + "download_url": f"/api/v1/files/{stored_file.id}/download" + } + + +@router.get("/", response_model=FileListResponse) +def list_files( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), + page: int = Query(1, ge=1), + page_size: int = Query(20, ge=1, le=100), + mine_only: bool = False, + is_public: Optional[bool] = None, + content_type: Optional[str] = None, +): + """ + List files with pagination and filtering. + Regular users can only see their own files and public files. + Superusers can see all files. 
+ """ + skip = (page - 1) * page_size + + # Filter by ownership for non-superusers + if not current_user.is_superuser: + if mine_only: + uploaded_by = current_user.id + is_public = None + else: + # Show user's files and public files + own_files = crud.file_storage.get_multi( + db, + skip=0, + limit=1000, # Get all for filtering + uploaded_by=current_user.id + ) + public_files = crud.file_storage.get_multi( + db, + skip=0, + limit=1000, + is_public=True + ) + # Combine and deduplicate + all_files = {f.id: f for f in own_files} + all_files.update({f.id: f for f in public_files}) + files_list = list(all_files.values()) + # Sort by created_at desc + files_list.sort(key=lambda x: x.created_at, reverse=True) + # Paginate + total = len(files_list) + files = files_list[skip:skip + page_size] + + return { + "files": [file_to_schema(f) for f in files], + "total": total, + "page": page, + "page_size": page_size + } + + uploaded_by = current_user.id if mine_only else None + else: + uploaded_by = current_user.id if mine_only else None + + files = crud.file_storage.get_multi( + db, + skip=skip, + limit=page_size, + uploaded_by=uploaded_by, + is_public=is_public, + content_type=content_type + ) + + total = crud.file_storage.count( + db, + uploaded_by=uploaded_by, + is_public=is_public + ) + + return { + "files": [file_to_schema(f) for f in files], + "total": total, + "page": page, + "page_size": page_size + } + + +@router.get("/{file_id}", response_model=StoredFileSchema) +def get_file( + file_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """ + Get file metadata. + Users can only access their own files or public files. 
+ """ + stored_file = crud.file_storage.get(db, id=file_id) + if not stored_file: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="File not found" + ) + + # Check access + if not stored_file.is_public and stored_file.uploaded_by != current_user.id and not current_user.is_superuser: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Access denied" + ) + + return file_to_schema(stored_file) + + +@router.get("/{file_id}/download") +def download_file( + file_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """ + Download a file. + Users can only download their own files or public files. + """ + stored_file = crud.file_storage.get(db, id=file_id) + if not stored_file: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="File not found" + ) + + # Check access + if not stored_file.is_public and stored_file.uploaded_by != current_user.id and not current_user.is_superuser: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Access denied" + ) + + # Get file path + file_path = crud.file_storage.get_file_content(stored_file) + if not file_path: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="File content not found" + ) + + return FileResponse( + path=file_path, + filename=stored_file.original_filename, + media_type=stored_file.content_type + ) + + +@router.put("/{file_id}", response_model=StoredFileSchema) +def update_file( + file_id: str, + file_in: FileUpdate, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """ + Update file metadata. + Users can only update their own files. 
+ """ + stored_file = crud.file_storage.get(db, id=file_id) + if not stored_file: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="File not found" + ) + + # Check ownership + if stored_file.uploaded_by != current_user.id and not current_user.is_superuser: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Access denied" + ) + + updated = crud.file_storage.update(db, db_obj=stored_file, obj_in=file_in) + + # Log the action + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="update", + resource_type="file", + resource_id=file_id, + details={"filename": stored_file.original_filename}, + status="success" + ) + + return file_to_schema(updated) + + +@router.delete("/{file_id}", status_code=status.HTTP_204_NO_CONTENT) +def delete_file( + file_id: str, + permanent: bool = False, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """ + Delete a file. + Users can only delete their own files. + Superusers can permanently delete files. 
+ """ + stored_file = crud.file_storage.get(db, id=file_id) + if not stored_file: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="File not found" + ) + + # Check ownership + if stored_file.uploaded_by != current_user.id and not current_user.is_superuser: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Access denied" + ) + + # Log the action + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="delete", + resource_type="file", + resource_id=file_id, + details={ + "filename": stored_file.original_filename, + "permanent": permanent + }, + status="success" + ) + + if permanent and current_user.is_superuser: + crud.file_storage.hard_delete(db, id=file_id) + else: + crud.file_storage.soft_delete(db, id=file_id) + + return None + + +@router.get("/allowed-types/", response_model=List[str]) +def get_allowed_types( + current_user: User = Depends(get_current_user), +): + """ + Get list of allowed file types for upload. + """ + return ALLOWED_CONTENT_TYPES + + +@router.get("/max-size/", response_model=dict) +def get_max_size( + current_user: User = Depends(get_current_user), +): + """ + Get maximum allowed file size. 
+ """ + return { + "max_size_bytes": MAX_FILE_SIZE, + "max_size_mb": MAX_FILE_SIZE / (1024 * 1024) + } diff --git a/backend/app/api/v1/health.py b/backend/app/api/v1/health.py index af3ac23..d5098bf 100644 --- a/backend/app/api/v1/health.py +++ b/backend/app/api/v1/health.py @@ -1,20 +1,65 @@ """Health check endpoints.""" +import os +import platform +import psutil +from datetime import datetime from fastapi import APIRouter, Depends from sqlalchemy.orm import Session from sqlalchemy import text -from app.dependencies import get_db +from app.dependencies import get_db, get_current_user, get_current_superuser from app.config import settings +from app.models.user import User router = APIRouter() +# Store app start time +APP_START_TIME = datetime.utcnow() + + +def get_system_info() -> dict: + """Get detailed system information.""" + # Memory info + memory = psutil.virtual_memory() + + # Disk info + disk = psutil.disk_usage('/') + + # CPU info + cpu_percent = psutil.cpu_percent(interval=0.1) + + return { + "memory": { + "total_gb": round(memory.total / (1024**3), 2), + "used_gb": round(memory.used / (1024**3), 2), + "available_gb": round(memory.available / (1024**3), 2), + "percent": memory.percent + }, + "disk": { + "total_gb": round(disk.total / (1024**3), 2), + "used_gb": round(disk.used / (1024**3), 2), + "free_gb": round(disk.free / (1024**3), 2), + "percent": disk.percent + }, + "cpu": { + "percent": cpu_percent, + "cores": psutil.cpu_count() + }, + "platform": { + "system": platform.system(), + "release": platform.release(), + "python": platform.python_version() + } + } + @router.get("") def health_check(db: Session = Depends(get_db)): """ Health check endpoint that verifies database connectivity. + Public endpoint for monitoring services. 
@router.get("/detailed")
def health_check_detailed(
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_superuser)
):
    """
    Detailed health check with system information.
    Requires superuser authentication.

    Returns:
        Dictionary with health status, database connectivity, process
        uptime (raw seconds and a human-readable form), host system
        stats, and selected environment flags.
    """
    # Basic health check: a trivial query proves DB connectivity.
    try:
        db.execute(text("SELECT 1"))
        db_status = "connected"
    except Exception as e:
        db_status = f"error: {str(e)}"

    # Calculate uptime since module import (APP_START_TIME) and break it
    # down into days/hours/minutes/seconds for the formatted string.
    uptime_seconds = (datetime.utcnow() - APP_START_TIME).total_seconds()
    days, remainder = divmod(int(uptime_seconds), 86400)
    hours, remainder = divmod(remainder, 3600)
    minutes, seconds = divmod(remainder, 60)

    return {
        "status": "healthy" if db_status == "connected" else "unhealthy",
        "app": settings.APP_NAME,
        "version": settings.APP_VERSION,
        "database": db_status,
        "uptime": {
            "seconds": int(uptime_seconds),
            "formatted": f"{days}d {hours}h {minutes}m {seconds}s"
        },
        "started_at": APP_START_TIME.isoformat(),
        "system": get_system_info(),
        "environment": {
            "debug": settings.DEBUG,
            "log_level": settings.LOG_LEVEL
        }
    }
def serialize_notification(db_obj) -> dict:
    """Render a notification ORM object as a response-ready dict.

    The metadata column holds a JSON string; it is decoded for the
    response, and empty or malformed metadata becomes None.
    """
    raw_metadata = db_obj.metadata
    try:
        decoded_metadata = json.loads(raw_metadata) if raw_metadata else None
    except json.JSONDecodeError:
        # Corrupt metadata is tolerated rather than failing the request.
        decoded_metadata = None

    return {
        "id": db_obj.id,
        "user_id": db_obj.user_id,
        "title": db_obj.title,
        "message": db_obj.message,
        "type": db_obj.type,
        "link": db_obj.link,
        "metadata": decoded_metadata,
        "is_read": db_obj.is_read,
        "created_at": db_obj.created_at,
        "read_at": db_obj.read_at,
    }
+ """ + count = crud.notification.count_unread_by_user(db, current_user.id) + return {"unread_count": count} + + +@router.get("/stats", response_model=NotificationStats) +def get_notification_stats( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +) -> Any: + """ + Get notification statistics for the current user. + """ + return crud.notification.get_stats_by_user(db, current_user.id) + + +@router.get("/{notification_id}", response_model=Notification) +def get_notification( + notification_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +) -> Any: + """ + Get a specific notification. + """ + db_obj = crud.notification.get(db, id=notification_id) + if not db_obj or db_obj.user_id != current_user.id: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Notification not found" + ) + + return serialize_notification(db_obj) + + +@router.post("/{notification_id}/read", response_model=Notification) +def mark_as_read( + notification_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +) -> Any: + """ + Mark a notification as read. + """ + db_obj = crud.notification.mark_as_read( + db, id=notification_id, user_id=current_user.id + ) + if not db_obj: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Notification not found" + ) + + return serialize_notification(db_obj) + + +@router.post("/read-all") +def mark_all_as_read( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +) -> Any: + """ + Mark all notifications as read. + """ + count = crud.notification.mark_all_as_read(db, user_id=current_user.id) + return {"marked_as_read": count} + + +@router.post("/read-multiple") +def mark_multiple_as_read( + action: NotificationBulkAction, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +) -> Any: + """ + Mark multiple notifications as read. 
+ """ + count = crud.notification.mark_multiple_as_read( + db, + user_id=current_user.id, + notification_ids=action.notification_ids + ) + return {"marked_as_read": count} + + +@router.delete("/{notification_id}", status_code=status.HTTP_204_NO_CONTENT) +def delete_notification( + notification_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +) -> None: + """ + Delete a notification. + """ + if not crud.notification.delete(db, id=notification_id, user_id=current_user.id): + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Notification not found" + ) + + +@router.delete("/read/all") +def delete_all_read( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +) -> Any: + """ + Delete all read notifications. + """ + count = crud.notification.delete_all_read(db, user_id=current_user.id) + return {"deleted": count} + + +@router.post("/delete-multiple") +def delete_multiple( + action: NotificationBulkAction, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +) -> Any: + """ + Delete multiple notifications. + """ + count = crud.notification.delete_multiple( + db, + user_id=current_user.id, + notification_ids=action.notification_ids + ) + return {"deleted": count} + + +# Admin endpoints + +@router.post("/admin/send", status_code=status.HTTP_201_CREATED) +def send_notification_to_user( + notification_in: NotificationCreate, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +) -> Any: + """ + Send a notification to a specific user (admin only). 
+ """ + db_obj = crud.notification.create(db, obj_in=notification_in) + return serialize_notification(db_obj) + + +@router.post("/admin/broadcast") +def broadcast_notification( + notification_in: NotificationCreateForUser, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +) -> Any: + """ + Send a notification to all users (admin only). + """ + count = crud.notification.create_for_all_users( + db, + title=notification_in.title, + message=notification_in.message, + type=notification_in.type, + link=notification_in.link, + metadata=notification_in.metadata + ) + return {"sent_to": count} diff --git a/backend/app/api/v1/router.py b/backend/app/api/v1/router.py index ac75a9b..c4ff6f0 100644 --- a/backend/app/api/v1/router.py +++ b/backend/app/api/v1/router.py @@ -2,7 +2,7 @@ from fastapi import APIRouter -from app.api.v1 import health, auth, users, settings +from app.api.v1 import health, auth, users, settings, audit, api_keys, notifications, two_factor, sessions, analytics, export, webhooks, files # Create main API v1 router @@ -11,5 +11,14 @@ router = APIRouter() # Include all sub-routers router.include_router(health.router, prefix="/health", tags=["Health"]) router.include_router(auth.router, prefix="/auth", tags=["Authentication"]) +router.include_router(two_factor.router, prefix="/2fa", tags=["Two-Factor Auth"]) +router.include_router(sessions.router, prefix="/sessions", tags=["Sessions"]) router.include_router(users.router, prefix="/users", tags=["Users"]) router.include_router(settings.router, prefix="/settings", tags=["Settings"]) +router.include_router(audit.router, prefix="/audit", tags=["Audit"]) +router.include_router(api_keys.router, prefix="/api-keys", tags=["API Keys"]) +router.include_router(notifications.router, prefix="/notifications", tags=["Notifications"]) +router.include_router(analytics.router, prefix="/analytics", tags=["Analytics"]) +router.include_router(export.router, prefix="/export", 
tags=["Export/Import"]) +router.include_router(webhooks.router, prefix="/webhooks", tags=["Webhooks"]) +router.include_router(files.router, prefix="/files", tags=["Files"]) diff --git a/backend/app/api/v1/sessions.py b/backend/app/api/v1/sessions.py new file mode 100644 index 0000000..9251a71 --- /dev/null +++ b/backend/app/api/v1/sessions.py @@ -0,0 +1,259 @@ +"""User Session management endpoints.""" + +from typing import Any +from fastapi import APIRouter, Depends, HTTPException, status, Request +from fastapi.security import HTTPAuthorizationCredentials +from sqlalchemy.orm import Session + +from app.dependencies import get_db, get_current_user, security +from app.models.user import User +from app import crud +from app.schemas.session import ( + Session as SessionSchema, + SessionList, + SessionRevokeRequest +) + + +router = APIRouter() + + +def get_client_ip(request: Request) -> str: + """Extract client IP from request.""" + forwarded = request.headers.get("X-Forwarded-For") + if forwarded: + return forwarded.split(",")[0].strip() + return request.client.host if request.client else "unknown" + + +def serialize_session(db_obj, current_token_hash: str = None) -> dict: + """Serialize session for response.""" + from app.crud.session import hash_token + + is_current = False + if current_token_hash: + is_current = db_obj.token_hash == current_token_hash + + return { + "id": db_obj.id, + "user_id": db_obj.user_id, + "device_name": db_obj.device_name, + "device_type": db_obj.device_type, + "browser": db_obj.browser, + "os": db_obj.os, + "ip_address": db_obj.ip_address, + "location": db_obj.location, + "is_active": db_obj.is_active, + "is_current": is_current or db_obj.is_current, + "created_at": db_obj.created_at, + "last_active_at": db_obj.last_active_at, + "expires_at": db_obj.expires_at + } + + +@router.get("", response_model=SessionList) +def get_sessions( + request: Request, + credentials: HTTPAuthorizationCredentials = Depends(security), + db: Session = 
@router.get("/all", response_model=SessionList)
def get_all_sessions(
    request: Request,
    credentials: HTTPAuthorizationCredentials = Depends(security),
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user),
) -> Any:
    """
    Get all sessions (including inactive/revoked) for the current user.

    The caller's bearer token is hashed so the matching session can be
    flagged as "current" in the serialized items.
    """
    from app.crud.session import hash_token

    current_token = credentials.credentials
    current_token_hash = hash_token(current_token)

    sessions = crud.session.get_multi_by_user(db, user_id=current_user.id, active_only=False)
    total = len(sessions)
    active_count = sum(1 for s in sessions if s.is_active)

    return {
        "items": [serialize_session(s, current_token_hash) for s in sessions],
        "total": total,
        "active_count": active_count
    }
+ """ + from app.crud.session import hash_token + + current_token = credentials.credentials + session = crud.session.get_by_token(db, current_token) + + if not session: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Current session not found" + ) + + return serialize_session(session, hash_token(current_token)) + + +@router.post("/{session_id}/revoke", response_model=SessionSchema) +def revoke_session( + request: Request, + session_id: str, + credentials: HTTPAuthorizationCredentials = Depends(security), + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +) -> Any: + """ + Revoke a specific session. + """ + from app.crud.session import hash_token + + current_token = credentials.credentials + current_token_hash = hash_token(current_token) + + # Get the session to check if it's the current one + target_session = crud.session.get(db, id=session_id) + if not target_session or target_session.user_id != current_user.id: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Session not found" + ) + + # Don't allow revoking the current session through this endpoint + if target_session.token_hash == current_token_hash: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Cannot revoke current session. Use logout instead." 
@router.post("/revoke-all")
def revoke_all_sessions(
    request: Request,
    credentials: HTTPAuthorizationCredentials = Depends(security),
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user),
) -> Any:
    """
    Revoke all of the current user's sessions except the current one.

    Returns:
        ``{"revoked": <number of sessions revoked>}``
    """
    # NOTE(review): hash_token is imported but unused in this handler.
    from app.crud.session import hash_token

    current_token = credentials.credentials
    current_session = crud.session.get_by_token(db, current_token)

    # If the current session cannot be resolved, every session is revoked.
    except_id = current_session.id if current_session else None
    count = crud.session.revoke_all_except(
        db,
        user_id=current_user.id,
        except_session_id=except_id
    )

    # Log the action
    crud.audit_log.log_action(
        db,
        user_id=current_user.id,
        username=current_user.username,
        action="sessions_revoke_all",
        resource_type="session",
        details={"revoked_count": count},
        ip_address=get_client_ip(request),
        user_agent=request.headers.get("User-Agent", "")[:500],
        status="success"
    )

    return {"revoked": count}
+ """ + from app.crud.session import hash_token + + current_token = credentials.credentials + current_session = crud.session.get_by_token(db, current_token) + + # Filter out current session if included + session_ids = [ + sid for sid in revoke_request.session_ids + if not current_session or sid != current_session.id + ] + + count = crud.session.revoke_multiple( + db, + user_id=current_user.id, + session_ids=session_ids + ) + + # Log the action + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="sessions_revoke_multiple", + resource_type="session", + details={"revoked_count": count, "requested_ids": len(revoke_request.session_ids)}, + ip_address=get_client_ip(request), + user_agent=request.headers.get("User-Agent", "")[:500], + status="success" + ) + + return {"revoked": count} diff --git a/backend/app/api/v1/two_factor.py b/backend/app/api/v1/two_factor.py new file mode 100644 index 0000000..db4f335 --- /dev/null +++ b/backend/app/api/v1/two_factor.py @@ -0,0 +1,361 @@ +"""Two-factor authentication (2FA) endpoints.""" + +import io +import secrets +import base64 +from typing import Any +from fastapi import APIRouter, Depends, HTTPException, status, Request +from fastapi.responses import StreamingResponse +from sqlalchemy.orm import Session +from pydantic import BaseModel, Field +import pyotp +import qrcode + +from app.dependencies import get_db, get_current_user +from app.models.user import User +from app import crud +from app.config import settings + + +router = APIRouter() + + +class TOTPSetupResponse(BaseModel): + """Response for TOTP setup initiation.""" + secret: str + uri: str + qr_code: str # Base64 encoded QR code image + + +class TOTPVerifyRequest(BaseModel): + """Request to verify TOTP code.""" + code: str = Field(..., min_length=6, max_length=6) + + +class TOTPDisableRequest(BaseModel): + """Request to disable TOTP.""" + password: str + code: str = Field(..., min_length=6, max_length=6) + + +class 
def generate_backup_codes(count: int = 10) -> list[str]:
    """Create *count* single-use 2FA recovery codes.

    Each code is 8 uppercase hexadecimal characters drawn from a
    cryptographically secure source.
    """
    codes = []
    for _ in range(count):
        # token_hex(4) yields 8 hex chars; uppercased for readability.
        codes.append(secrets.token_hex(4).upper())
    return codes
+ """ + if current_user.totp_enabled: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="2FA is already enabled" + ) + + # Generate new secret + secret = pyotp.random_base32() + + # Store secret temporarily (not enabled yet) + current_user.totp_secret = secret + db.add(current_user) + db.commit() + + # Generate URI for authenticator app + totp = pyotp.TOTP(secret) + uri = totp.provisioning_uri( + name=current_user.email, + issuer_name=settings.APP_NAME + ) + + # Generate QR code + qr_code = generate_qr_code(uri) + + # Log the action + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="2fa_setup_initiated", + resource_type="user", + resource_id=current_user.id, + ip_address=get_client_ip(request), + user_agent=request.headers.get("User-Agent", "")[:500], + status="success" + ) + + return { + "secret": secret, + "uri": uri, + "qr_code": qr_code + } + + +@router.post("/verify") +def verify_and_enable_2fa( + request: Request, + verify_request: TOTPVerifyRequest, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +) -> Any: + """ + Verify TOTP code and enable 2FA. + This must be called after setup to complete the 2FA activation. + """ + if current_user.totp_enabled: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="2FA is already enabled" + ) + + if not current_user.totp_secret: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="2FA setup not initiated. Call /setup first." 
+ ) + + # Verify the code + totp = pyotp.TOTP(current_user.totp_secret) + if not totp.verify(verify_request.code, valid_window=1): + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="2fa_verify", + resource_type="user", + resource_id=current_user.id, + details={"reason": "invalid_code"}, + ip_address=get_client_ip(request), + user_agent=request.headers.get("User-Agent", "")[:500], + status="failure" + ) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Invalid verification code" + ) + + # Generate backup codes + backup_codes = generate_backup_codes() + + # Enable 2FA + current_user.totp_enabled = True + current_user.backup_codes = backup_codes + db.add(current_user) + db.commit() + + # Log the action + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="2fa_enabled", + resource_type="user", + resource_id=current_user.id, + ip_address=get_client_ip(request), + user_agent=request.headers.get("User-Agent", "")[:500], + status="success" + ) + + return { + "message": "2FA enabled successfully", + "backup_codes": backup_codes + } + + +@router.post("/disable") +def disable_2fa( + request: Request, + disable_request: TOTPDisableRequest, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +) -> Any: + """ + Disable 2FA. Requires password and current TOTP code. 
+ """ + if not current_user.totp_enabled: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="2FA is not enabled" + ) + + # Verify password + from app.core.security import verify_password + if not verify_password(disable_request.password, current_user.hashed_password): + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="2fa_disable", + resource_type="user", + resource_id=current_user.id, + details={"reason": "invalid_password"}, + ip_address=get_client_ip(request), + user_agent=request.headers.get("User-Agent", "")[:500], + status="failure" + ) + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid password" + ) + + # Verify TOTP code + totp = pyotp.TOTP(current_user.totp_secret) + if not totp.verify(disable_request.code, valid_window=1): + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="2fa_disable", + resource_type="user", + resource_id=current_user.id, + details={"reason": "invalid_code"}, + ip_address=get_client_ip(request), + user_agent=request.headers.get("User-Agent", "")[:500], + status="failure" + ) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Invalid verification code" + ) + + # Disable 2FA + current_user.totp_enabled = False + current_user.totp_secret = None + current_user.totp_backup_codes = None + db.add(current_user) + db.commit() + + # Log the action + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="2fa_disabled", + resource_type="user", + resource_id=current_user.id, + ip_address=get_client_ip(request), + user_agent=request.headers.get("User-Agent", "")[:500], + status="success" + ) + + return {"message": "2FA disabled successfully"} + + +@router.post("/regenerate-backup-codes", response_model=BackupCodesResponse) +def regenerate_backup_codes( + request: Request, + verify_request: TOTPVerifyRequest, + 
db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +) -> Any: + """ + Regenerate backup codes. Requires current TOTP code. + """ + if not current_user.totp_enabled: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="2FA is not enabled" + ) + + # Verify TOTP code + totp = pyotp.TOTP(current_user.totp_secret) + if not totp.verify(verify_request.code, valid_window=1): + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Invalid verification code" + ) + + # Generate new backup codes + backup_codes = generate_backup_codes() + current_user.backup_codes = backup_codes + db.add(current_user) + db.commit() + + # Log the action + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="2fa_backup_codes_regenerated", + resource_type="user", + resource_id=current_user.id, + ip_address=get_client_ip(request), + user_agent=request.headers.get("User-Agent", "")[:500], + status="success" + ) + + return {"backup_codes": backup_codes} + + +def verify_totp_or_backup(user: User, code: str, db: Session) -> bool: + """ + Verify TOTP code or backup code. + Returns True if valid, False otherwise. + If backup code is used, it's removed from the list. 
+ """ + if not user.totp_enabled or not user.totp_secret: + return True # 2FA not enabled + + # Try TOTP verification first + totp = pyotp.TOTP(user.totp_secret) + if totp.verify(code, valid_window=1): + return True + + # Try backup code + backup_codes = user.backup_codes + code_upper = code.upper().replace("-", "") + if code_upper in backup_codes: + backup_codes.remove(code_upper) + user.backup_codes = backup_codes + db.add(user) + db.commit() + return True + + return False diff --git a/backend/app/api/v1/webhooks.py b/backend/app/api/v1/webhooks.py new file mode 100644 index 0000000..6ae8e5d --- /dev/null +++ b/backend/app/api/v1/webhooks.py @@ -0,0 +1,380 @@ +"""Webhook management endpoints.""" + +import json +from typing import Any, List +from fastapi import APIRouter, Depends, HTTPException, status, BackgroundTasks +from sqlalchemy.orm import Session + +from app.dependencies import get_db, get_current_superuser +from app.models.user import User +from app import crud +from app.schemas.webhook import ( + WebhookCreate, + WebhookUpdate, + Webhook as WebhookSchema, + WebhookWithSecret, + WebhookDelivery as WebhookDeliverySchema, + WebhookTest, + WEBHOOK_EVENTS, +) + + +router = APIRouter() + + +@router.get("/", response_model=List[WebhookSchema]) +def list_webhooks( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), + skip: int = 0, + limit: int = 100, + is_active: bool = None +): + """ + List all webhooks. + Requires superuser permissions. 
+ """ + webhooks = crud.webhook.get_multi(db, skip=skip, limit=limit, is_active=is_active) + # Convert events from JSON string to list + result = [] + for webhook in webhooks: + webhook_dict = { + "id": webhook.id, + "name": webhook.name, + "url": webhook.url, + "secret": webhook.secret, + "events": json.loads(webhook.events) if webhook.events else [], + "is_active": webhook.is_active, + "retry_count": webhook.retry_count, + "timeout_seconds": webhook.timeout_seconds, + "created_by": webhook.created_by, + "created_at": webhook.created_at, + "updated_at": webhook.updated_at, + "last_triggered_at": webhook.last_triggered_at, + "success_count": webhook.success_count, + "failure_count": webhook.failure_count, + } + result.append(webhook_dict) + return result + + +@router.post("/", response_model=WebhookWithSecret, status_code=status.HTTP_201_CREATED) +def create_webhook( + webhook_in: WebhookCreate, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +): + """ + Create a new webhook. + Returns the webhook with its secret (only shown once at creation). + Requires superuser permissions. 
+ """ + webhook = crud.webhook.create(db, obj_in=webhook_in, created_by=current_user.id) + + # Log the action + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="create", + resource_type="webhook", + resource_id=webhook.id, + details={"name": webhook.name, "url": webhook.url}, + status="success" + ) + + return { + "id": webhook.id, + "name": webhook.name, + "url": webhook.url, + "secret": webhook.secret, + "events": json.loads(webhook.events) if webhook.events else [], + "is_active": webhook.is_active, + "retry_count": webhook.retry_count, + "timeout_seconds": webhook.timeout_seconds, + "created_by": webhook.created_by, + "created_at": webhook.created_at, + "updated_at": webhook.updated_at, + "last_triggered_at": webhook.last_triggered_at, + "success_count": webhook.success_count, + "failure_count": webhook.failure_count, + } + + +@router.get("/events", response_model=List[str]) +def list_webhook_events( + current_user: User = Depends(get_current_superuser), +): + """ + List all available webhook event types. + """ + return WEBHOOK_EVENTS + + +@router.get("/{webhook_id}", response_model=WebhookSchema) +def get_webhook( + webhook_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +): + """ + Get a specific webhook. + Requires superuser permissions. 
+ """ + webhook = crud.webhook.get(db, id=webhook_id) + if not webhook: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Webhook not found" + ) + + return { + "id": webhook.id, + "name": webhook.name, + "url": webhook.url, + "secret": webhook.secret, + "events": json.loads(webhook.events) if webhook.events else [], + "is_active": webhook.is_active, + "retry_count": webhook.retry_count, + "timeout_seconds": webhook.timeout_seconds, + "created_by": webhook.created_by, + "created_at": webhook.created_at, + "updated_at": webhook.updated_at, + "last_triggered_at": webhook.last_triggered_at, + "success_count": webhook.success_count, + "failure_count": webhook.failure_count, + } + + +@router.put("/{webhook_id}", response_model=WebhookSchema) +def update_webhook( + webhook_id: str, + webhook_in: WebhookUpdate, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +): + """ + Update a webhook. + Requires superuser permissions. + """ + webhook = crud.webhook.get(db, id=webhook_id) + if not webhook: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Webhook not found" + ) + + webhook = crud.webhook.update(db, db_obj=webhook, obj_in=webhook_in) + + # Log the action + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="update", + resource_type="webhook", + resource_id=webhook.id, + details={"name": webhook.name}, + status="success" + ) + + return { + "id": webhook.id, + "name": webhook.name, + "url": webhook.url, + "secret": webhook.secret, + "events": json.loads(webhook.events) if webhook.events else [], + "is_active": webhook.is_active, + "retry_count": webhook.retry_count, + "timeout_seconds": webhook.timeout_seconds, + "created_by": webhook.created_by, + "created_at": webhook.created_at, + "updated_at": webhook.updated_at, + "last_triggered_at": webhook.last_triggered_at, + "success_count": webhook.success_count, + "failure_count": 
webhook.failure_count, + } + + +@router.delete("/{webhook_id}", status_code=status.HTTP_204_NO_CONTENT) +def delete_webhook( + webhook_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +): + """ + Delete a webhook. + Requires superuser permissions. + """ + webhook = crud.webhook.get(db, id=webhook_id) + if not webhook: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Webhook not found" + ) + + # Log the action + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="delete", + resource_type="webhook", + resource_id=webhook_id, + details={"name": webhook.name}, + status="success" + ) + + crud.webhook.delete(db, id=webhook_id) + return None + + +@router.post("/{webhook_id}/regenerate-secret", response_model=WebhookWithSecret) +def regenerate_webhook_secret( + webhook_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +): + """ + Regenerate the secret for a webhook. + Returns the new secret (only shown once). + Requires superuser permissions. 
+ """ + webhook = crud.webhook.get(db, id=webhook_id) + if not webhook: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Webhook not found" + ) + + webhook = crud.webhook.regenerate_secret(db, db_obj=webhook) + + # Log the action + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="regenerate_secret", + resource_type="webhook", + resource_id=webhook.id, + details={"name": webhook.name}, + status="success" + ) + + return { + "id": webhook.id, + "name": webhook.name, + "url": webhook.url, + "secret": webhook.secret, + "events": json.loads(webhook.events) if webhook.events else [], + "is_active": webhook.is_active, + "retry_count": webhook.retry_count, + "timeout_seconds": webhook.timeout_seconds, + "created_by": webhook.created_by, + "created_at": webhook.created_at, + "updated_at": webhook.updated_at, + "last_triggered_at": webhook.last_triggered_at, + "success_count": webhook.success_count, + "failure_count": webhook.failure_count, + } + + +@router.post("/{webhook_id}/test", response_model=WebhookDeliverySchema) +async def test_webhook( + webhook_id: str, + test_data: WebhookTest = None, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +): + """ + Send a test delivery to a webhook. + Requires superuser permissions. 
+ """ + webhook = crud.webhook.get(db, id=webhook_id) + if not webhook: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Webhook not found" + ) + + event_type = test_data.event_type if test_data else "test.ping" + payload = test_data.payload if test_data and test_data.payload else None + + delivery = await crud.webhook_service.test_webhook( + db, + webhook=webhook, + event_type=event_type, + payload=payload + ) + + # Log the action + crud.audit_log.log_action( + db, + user_id=current_user.id, + username=current_user.username, + action="test", + resource_type="webhook", + resource_id=webhook.id, + details={"status": delivery.status}, + status="success" + ) + + return delivery + + +@router.get("/{webhook_id}/deliveries", response_model=List[WebhookDeliverySchema]) +def list_webhook_deliveries( + webhook_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), + skip: int = 0, + limit: int = 50 +): + """ + List deliveries for a specific webhook. + Requires superuser permissions. + """ + webhook = crud.webhook.get(db, id=webhook_id) + if not webhook: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Webhook not found" + ) + + return crud.webhook_delivery.get_by_webhook( + db, + webhook_id=webhook_id, + skip=skip, + limit=limit + ) + + +@router.post("/{webhook_id}/deliveries/{delivery_id}/retry", response_model=WebhookDeliverySchema) +async def retry_webhook_delivery( + webhook_id: str, + delivery_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_superuser), +): + """ + Retry a failed webhook delivery. + Requires superuser permissions. 
+ """ + webhook = crud.webhook.get(db, id=webhook_id) + if not webhook: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Webhook not found" + ) + + delivery = crud.webhook_delivery.get(db, id=delivery_id) + if not delivery or delivery.webhook_id != webhook_id: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Delivery not found" + ) + + await crud.webhook_service.deliver(db, webhook, delivery) + + db.refresh(delivery) + return delivery diff --git a/backend/app/core/password_policy.py b/backend/app/core/password_policy.py new file mode 100644 index 0000000..bbeaf9e --- /dev/null +++ b/backend/app/core/password_policy.py @@ -0,0 +1,240 @@ +"""Password policy validation and enforcement.""" + +import re +from typing import List, Optional +from dataclasses import dataclass +from sqlalchemy.orm import Session + +from app import crud + + +@dataclass +class PasswordPolicy: + """Password policy configuration.""" + min_length: int = 8 + max_length: int = 128 + require_uppercase: bool = True + require_lowercase: bool = True + require_digit: bool = True + require_special: bool = False + special_characters: str = "!@#$%^&*()_+-=[]{}|;:,.<>?" 
+ disallow_username: bool = True + disallow_email: bool = True + disallow_common: bool = True + + +# Common passwords to disallow +COMMON_PASSWORDS = { + "password", "123456", "12345678", "qwerty", "abc123", + "monkey", "1234567", "letmein", "trustno1", "dragon", + "baseball", "iloveyou", "master", "sunshine", "ashley", + "bailey", "passw0rd", "shadow", "123123", "654321", + "superman", "qazwsx", "michael", "football", "password1", + "password123", "welcome", "jesus", "ninja", "mustang", + "admin", "admin123", "root", "toor", "administrator" +} + + +def get_password_policy(db: Optional[Session] = None) -> PasswordPolicy: + """Get password policy from settings or use defaults.""" + policy = PasswordPolicy() + + if db: + try: + # Load settings from database + min_length = crud.settings.get_setting_value(db, "password_min_length") + if min_length is not None: + policy.min_length = int(min_length) + + require_uppercase = crud.settings.get_setting_value(db, "password_require_uppercase") + if require_uppercase is not None: + policy.require_uppercase = str(require_uppercase).lower() in ("true", "1") + + require_lowercase = crud.settings.get_setting_value(db, "password_require_lowercase") + if require_lowercase is not None: + policy.require_lowercase = str(require_lowercase).lower() in ("true", "1") + + require_digit = crud.settings.get_setting_value(db, "password_require_digit") + if require_digit is not None: + policy.require_digit = str(require_digit).lower() in ("true", "1") + + require_special = crud.settings.get_setting_value(db, "password_require_special") + if require_special is not None: + policy.require_special = str(require_special).lower() in ("true", "1") + + except Exception: + pass # Use defaults on error + + return policy + + +def validate_password( + password: str, + policy: Optional[PasswordPolicy] = None, + username: Optional[str] = None, + email: Optional[str] = None +) -> List[str]: + """ + Validate password against policy. 
+ Returns list of validation errors (empty list if valid). + """ + if policy is None: + policy = PasswordPolicy() + + errors = [] + + # Length checks + if len(password) < policy.min_length: + errors.append(f"Password must be at least {policy.min_length} characters long") + + if len(password) > policy.max_length: + errors.append(f"Password must not exceed {policy.max_length} characters") + + # Character requirements + if policy.require_uppercase and not re.search(r"[A-Z]", password): + errors.append("Password must contain at least one uppercase letter") + + if policy.require_lowercase and not re.search(r"[a-z]", password): + errors.append("Password must contain at least one lowercase letter") + + if policy.require_digit and not re.search(r"\d", password): + errors.append("Password must contain at least one digit") + + if policy.require_special: + special_pattern = f"[{re.escape(policy.special_characters)}]" + if not re.search(special_pattern, password): + errors.append("Password must contain at least one special character") + + # Disallow username/email in password + password_lower = password.lower() + + if policy.disallow_username and username: + if username.lower() in password_lower: + errors.append("Password must not contain your username") + + if policy.disallow_email and email: + email_parts = email.lower().split("@") + if email_parts[0] in password_lower: + errors.append("Password must not contain your email address") + + # Disallow common passwords + if policy.disallow_common: + if password_lower in COMMON_PASSWORDS: + errors.append("Password is too common. 
Please choose a stronger password") + + return errors + + +def get_password_requirements(policy: Optional[PasswordPolicy] = None) -> dict: + """Get password requirements as a dictionary (for frontend display).""" + if policy is None: + policy = PasswordPolicy() + + requirements = { + "min_length": policy.min_length, + "max_length": policy.max_length, + "require_uppercase": policy.require_uppercase, + "require_lowercase": policy.require_lowercase, + "require_digit": policy.require_digit, + "require_special": policy.require_special, + } + + # Generate human-readable description + rules = [f"At least {policy.min_length} characters"] + + if policy.require_uppercase: + rules.append("At least one uppercase letter (A-Z)") + if policy.require_lowercase: + rules.append("At least one lowercase letter (a-z)") + if policy.require_digit: + rules.append("At least one digit (0-9)") + if policy.require_special: + rules.append(f"At least one special character ({policy.special_characters[:10]}...)") + + requirements["rules"] = rules + + return requirements + + +def check_password_strength(password: str) -> dict: + """ + Check password strength and return a score. + Returns dict with score (0-100), level (weak/medium/strong/very_strong), and feedback. 
+ """ + score = 0 + feedback = [] + + # Length scoring + length = len(password) + if length >= 8: + score += 20 + if length >= 12: + score += 10 + if length >= 16: + score += 10 + + # Character variety scoring + has_upper = bool(re.search(r"[A-Z]", password)) + has_lower = bool(re.search(r"[a-z]", password)) + has_digit = bool(re.search(r"\d", password)) + has_special = bool(re.search(r"[!@#$%^&*()_+\-=\[\]{}|;:,.<>?]", password)) + + if has_upper: + score += 15 + if has_lower: + score += 15 + if has_digit: + score += 15 + if has_special: + score += 15 + + # Bonus for mixing character types + variety = sum([has_upper, has_lower, has_digit, has_special]) + if variety >= 3: + score += 10 + if variety == 4: + score += 10 + + # Penalties + if password.lower() in COMMON_PASSWORDS: + score = min(score, 20) + feedback.append("This is a commonly used password") + + if re.search(r"(.)\1{2,}", password): + score -= 10 + feedback.append("Avoid repeated characters") + + if re.search(r"(012|123|234|345|456|567|678|789|890|abc|bcd|cde|def)", password.lower()): + score -= 10 + feedback.append("Avoid sequential characters") + + # Ensure score is in valid range + score = max(0, min(100, score)) + + # Determine strength level + if score >= 80: + level = "very_strong" + elif score >= 60: + level = "strong" + elif score >= 40: + level = "medium" + else: + level = "weak" + + # Add suggestions + if not has_upper: + feedback.append("Add uppercase letters") + if not has_lower: + feedback.append("Add lowercase letters") + if not has_digit: + feedback.append("Add numbers") + if not has_special: + feedback.append("Add special characters") + if length < 12: + feedback.append("Use a longer password") + + return { + "score": score, + "level": level, + "feedback": feedback[:3] # Limit to top 3 suggestions + } diff --git a/backend/app/core/settings_registry.py b/backend/app/core/settings_registry.py index b564d68..018ecfa 100644 --- a/backend/app/core/settings_registry.py +++ 
b/backend/app/core/settings_registry.py @@ -310,6 +310,57 @@ register_setting(SettingDefinition( category="auth" )) +# Password policy settings +register_setting(SettingDefinition( + key="password_min_length", + type=SettingType.INTEGER, + scope=SettingScope.GLOBAL, + storage=SettingStorage.DATABASE, + default=8, + description="Minimum password length", + category="security" +)) + +register_setting(SettingDefinition( + key="password_require_uppercase", + type=SettingType.BOOLEAN, + scope=SettingScope.GLOBAL, + storage=SettingStorage.DATABASE, + default=True, + description="Require uppercase letters in passwords", + category="security" +)) + +register_setting(SettingDefinition( + key="password_require_lowercase", + type=SettingType.BOOLEAN, + scope=SettingScope.GLOBAL, + storage=SettingStorage.DATABASE, + default=True, + description="Require lowercase letters in passwords", + category="security" +)) + +register_setting(SettingDefinition( + key="password_require_digit", + type=SettingType.BOOLEAN, + scope=SettingScope.GLOBAL, + storage=SettingStorage.DATABASE, + default=True, + description="Require digits in passwords", + category="security" +)) + +register_setting(SettingDefinition( + key="password_require_special", + type=SettingType.BOOLEAN, + scope=SettingScope.GLOBAL, + storage=SettingStorage.DATABASE, + default=False, + description="Require special characters in passwords", + category="security" +)) + # ============================================================================= # UI/LAYOUT SETTINGS (Global, Database) diff --git a/backend/app/crud/__init__.py b/backend/app/crud/__init__.py index 09af0e7..ad00837 100644 --- a/backend/app/crud/__init__.py +++ b/backend/app/crud/__init__.py @@ -2,5 +2,11 @@ from app.crud.user import user from app.crud import settings +from app.crud.audit_log import audit_log +from app.crud.api_key import api_key +from app.crud.notification import notification +from app.crud.session import session +from app.crud.webhook import 
webhook, webhook_delivery, webhook_service +from app.crud.file import file_storage -__all__ = ["user", "settings"] +__all__ = ["user", "settings", "audit_log", "api_key", "notification", "session", "webhook", "webhook_delivery", "webhook_service", "file_storage"] diff --git a/backend/app/crud/api_key.py b/backend/app/crud/api_key.py new file mode 100644 index 0000000..64d74db --- /dev/null +++ b/backend/app/crud/api_key.py @@ -0,0 +1,184 @@ +"""CRUD operations for API Key model.""" + +import json +import hashlib +from datetime import datetime +from typing import Optional, List, Tuple +from sqlalchemy.orm import Session + +from app.models.api_key import APIKey, generate_api_key, generate_key_prefix +from app.schemas.api_key import APIKeyCreate, APIKeyUpdate + + +def hash_api_key(key: str) -> str: + """Hash an API key for secure storage.""" + return hashlib.sha256(key.encode()).hexdigest() + + +class CRUDAPIKey: + """CRUD operations for API Key model.""" + + def create( + self, + db: Session, + *, + obj_in: APIKeyCreate, + user_id: str + ) -> Tuple[APIKey, str]: + """ + Create a new API key. + Returns both the database object and the plain key (shown only once). 
+ """ + # Generate the actual key + plain_key = generate_api_key() + key_hash = hash_api_key(plain_key) + key_prefix = generate_key_prefix(plain_key) + + # Serialize scopes to JSON + scopes_json = json.dumps(obj_in.scopes) if obj_in.scopes else None + + db_obj = APIKey( + user_id=user_id, + name=obj_in.name, + key_hash=key_hash, + key_prefix=key_prefix, + scopes=scopes_json, + expires_at=obj_in.expires_at, + is_active=True, + usage_count="0" + ) + db.add(db_obj) + db.commit() + db.refresh(db_obj) + + return db_obj, plain_key + + def get(self, db: Session, id: str) -> Optional[APIKey]: + """Get an API key by ID.""" + return db.query(APIKey).filter(APIKey.id == id).first() + + def get_by_key(self, db: Session, plain_key: str) -> Optional[APIKey]: + """Get an API key by the plain key (for authentication).""" + key_hash = hash_api_key(plain_key) + return db.query(APIKey).filter(APIKey.key_hash == key_hash).first() + + def get_multi_by_user( + self, + db: Session, + *, + user_id: str, + skip: int = 0, + limit: int = 100 + ) -> List[APIKey]: + """Get all API keys for a user.""" + return db.query(APIKey)\ + .filter(APIKey.user_id == user_id)\ + .order_by(APIKey.created_at.desc())\ + .offset(skip)\ + .limit(limit)\ + .all() + + def count_by_user(self, db: Session, user_id: str) -> int: + """Count API keys for a user.""" + return db.query(APIKey).filter(APIKey.user_id == user_id).count() + + def update( + self, + db: Session, + *, + db_obj: APIKey, + obj_in: APIKeyUpdate + ) -> APIKey: + """Update an API key.""" + update_data = obj_in.model_dump(exclude_unset=True) + + # Handle scopes serialization + if "scopes" in update_data: + update_data["scopes"] = json.dumps(update_data["scopes"]) if update_data["scopes"] else None + + for field, value in update_data.items(): + setattr(db_obj, field, value) + + db.add(db_obj) + db.commit() + db.refresh(db_obj) + return db_obj + + def delete(self, db: Session, *, id: str) -> bool: + """Delete an API key.""" + obj = 
db.query(APIKey).filter(APIKey.id == id).first() + if obj: + db.delete(obj) + db.commit() + return True + return False + + def delete_by_user(self, db: Session, *, user_id: str) -> int: + """Delete all API keys for a user.""" + count = db.query(APIKey).filter(APIKey.user_id == user_id).delete() + db.commit() + return count + + def revoke(self, db: Session, *, id: str) -> Optional[APIKey]: + """Revoke (deactivate) an API key.""" + obj = db.query(APIKey).filter(APIKey.id == id).first() + if obj: + obj.is_active = False + db.add(obj) + db.commit() + db.refresh(obj) + return obj + + def record_usage( + self, + db: Session, + *, + db_obj: APIKey, + ip_address: Optional[str] = None + ) -> APIKey: + """Record API key usage.""" + db_obj.last_used_at = datetime.utcnow() + db_obj.last_used_ip = ip_address + db_obj.usage_count = str(int(db_obj.usage_count or "0") + 1) + db.add(db_obj) + db.commit() + db.refresh(db_obj) + return db_obj + + def authenticate( + self, + db: Session, + *, + plain_key: str, + ip_address: Optional[str] = None + ) -> Optional[APIKey]: + """ + Authenticate with an API key. + Returns the key if valid, None otherwise. + Also records usage on successful auth. 
+ """ + api_key = self.get_by_key(db, plain_key) + + if not api_key: + return None + + if not api_key.is_valid: + return None + + # Record usage + self.record_usage(db, db_obj=api_key, ip_address=ip_address) + + return api_key + + def get_scopes(self, api_key: APIKey) -> List[str]: + """Get scopes for an API key.""" + if api_key.scopes: + try: + return json.loads(api_key.scopes) + except json.JSONDecodeError: + return [] + return [] + + +# Create instance +api_key = CRUDAPIKey() diff --git a/backend/app/crud/audit_log.py b/backend/app/crud/audit_log.py new file mode 100644 index 0000000..9f9d683 --- /dev/null +++ b/backend/app/crud/audit_log.py @@ -0,0 +1,228 @@ +"""CRUD operations for Audit Log model.""" + +import json +from datetime import datetime, timedelta +from typing import Optional, List, Any +from sqlalchemy.orm import Session +from sqlalchemy import func, desc + +from app.models.audit_log import AuditLog +from app.schemas.audit_log import AuditLogCreate, AuditLogFilter + + +class CRUDAuditLog: + """CRUD operations for Audit Log model.""" + + def create( + self, + db: Session, + *, + obj_in: AuditLogCreate + ) -> AuditLog: + """Create a new audit log entry.""" + db_obj = AuditLog( + user_id=obj_in.user_id, + username=obj_in.username, + action=obj_in.action, + resource_type=obj_in.resource_type, + resource_id=obj_in.resource_id, + details=obj_in.details, + ip_address=obj_in.ip_address, + user_agent=obj_in.user_agent, + status=obj_in.status + ) + db.add(db_obj) + db.commit() + db.refresh(db_obj) + return db_obj + + def log_action( + self, + db: Session, + *, + user_id: Optional[str] = None, + username: Optional[str] = None, + action: str, + resource_type: Optional[str] = None, + resource_id: Optional[str] = None, + details: Optional[dict] = None, + ip_address: Optional[str] = None, + user_agent: Optional[str] = None, + status: str = "success" + ) -> AuditLog: + """Convenience method to log an action.""" + details_str = json.dumps(details) if details else 
class CRUDAuditLog:
    """Query and maintenance operations for AuditLog entries.

    NOTE(review): this patch chunk begins mid-class; the creation helpers
    (``create`` and the convenience wrapper that builds an ``AuditLogCreate``)
    are defined above this excerpt in the original file and are not repeated
    here.
    """

    def get(self, db: Session, id: str) -> Optional[AuditLog]:
        """Return a single audit log entry by ID, or None if absent."""
        return db.query(AuditLog).filter(AuditLog.id == id).first()

    def get_multi(
        self,
        db: Session,
        *,
        skip: int = 0,
        limit: int = 100,
        filters: Optional[AuditLogFilter] = None
    ) -> tuple[List[AuditLog], int]:
        """Return ``(items, total)`` matching *filters*, newest first."""
        query = db.query(AuditLog)

        if filters:
            # Equality filters; username is a case-insensitive substring match.
            if filters.user_id:
                query = query.filter(AuditLog.user_id == filters.user_id)
            if filters.username:
                query = query.filter(AuditLog.username.ilike(f"%{filters.username}%"))
            if filters.action:
                query = query.filter(AuditLog.action == filters.action)
            if filters.resource_type:
                query = query.filter(AuditLog.resource_type == filters.resource_type)
            if filters.resource_id:
                query = query.filter(AuditLog.resource_id == filters.resource_id)
            if filters.status:
                query = query.filter(AuditLog.status == filters.status)
            if filters.start_date:
                query = query.filter(AuditLog.created_at >= filters.start_date)
            if filters.end_date:
                query = query.filter(AuditLog.created_at <= filters.end_date)

        # Count before pagination so the total reflects the filtered set.
        total = query.count()
        items = query.order_by(desc(AuditLog.created_at)).offset(skip).limit(limit).all()

        return items, total

    def get_by_user(
        self,
        db: Session,
        *,
        user_id: str,
        skip: int = 0,
        limit: int = 100
    ) -> List[AuditLog]:
        """Return audit logs for one user, newest first."""
        return (
            db.query(AuditLog)
            .filter(AuditLog.user_id == user_id)
            .order_by(desc(AuditLog.created_at))
            .offset(skip)
            .limit(limit)
            .all()
        )

    def get_recent(
        self,
        db: Session,
        *,
        hours: int = 24,
        limit: int = 100
    ) -> List[AuditLog]:
        """Return audit logs created within the last *hours* hours."""
        since = datetime.utcnow() - timedelta(hours=hours)
        return (
            db.query(AuditLog)
            .filter(AuditLog.created_at >= since)
            .order_by(desc(AuditLog.created_at))
            .limit(limit)
            .all()
        )

    def get_stats(self, db: Session) -> dict[str, Any]:
        """Return aggregate statistics: totals, per-action breakdown, top users
        and recent failures.
        """
        now = datetime.utcnow()
        today_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
        week_start = today_start - timedelta(days=today_start.weekday())
        month_start = today_start.replace(day=1)

        # Total entries
        total = db.query(func.count(AuditLog.id)).scalar()

        # Entries today / this week / this month (calendar-based windows)
        entries_today = (
            db.query(func.count(AuditLog.id))
            .filter(AuditLog.created_at >= today_start)
            .scalar()
        )
        entries_week = (
            db.query(func.count(AuditLog.id))
            .filter(AuditLog.created_at >= week_start)
            .scalar()
        )
        entries_month = (
            db.query(func.count(AuditLog.id))
            .filter(AuditLog.created_at >= month_start)
            .scalar()
        )

        # Per-action counts.
        actions_query = db.query(
            AuditLog.action,
            func.count(AuditLog.id).label('count')
        ).group_by(AuditLog.action).all()
        actions_breakdown = {action: count for action, count in actions_query}

        # Ten most active users by number of logged actions.
        top_users_query = (
            db.query(
                AuditLog.user_id,
                AuditLog.username,
                func.count(AuditLog.id).label('count')
            )
            .filter(AuditLog.user_id.isnot(None))
            .group_by(AuditLog.user_id, AuditLog.username)
            .order_by(desc('count'))
            .limit(10)
            .all()
        )
        top_users = [
            {"user_id": uid, "username": uname, "count": count}
            for uid, uname, count in top_users_query
        ]

        # Failures in the last rolling 24 hours.
        # FIX: the previous cutoff was `today_start - timedelta(days=1)`
        # (yesterday's midnight), a 24-48h window depending on the time of
        # day, while the field is documented as "last 24h".
        recent_failures = (
            db.query(func.count(AuditLog.id))
            .filter(AuditLog.status == "failure")
            .filter(AuditLog.created_at >= now - timedelta(hours=24))
            .scalar()
        )

        return {
            "total_entries": total or 0,
            "entries_today": entries_today or 0,
            "entries_this_week": entries_week or 0,
            "entries_this_month": entries_month or 0,
            "actions_breakdown": actions_breakdown,
            "top_users": top_users,
            "recent_failures": recent_failures or 0
        }

    def delete_old(
        self,
        db: Session,
        *,
        days: int = 90
    ) -> int:
        """Delete audit logs older than *days* days; return the number removed."""
        cutoff = datetime.utcnow() - timedelta(days=days)
        count = (
            db.query(AuditLog)
            .filter(AuditLog.created_at < cutoff)
            .delete()
        )
        db.commit()
        return count

    def get_distinct_actions(self, db: Session) -> List[str]:
        """Return the distinct action types present in the log."""
        return [row[0] for row in db.query(AuditLog.action).distinct().all()]

    def get_distinct_resource_types(self, db: Session) -> List[str]:
        """Return the distinct non-null resource types present in the log."""
        rows = (
            db.query(AuditLog.resource_type)
            .filter(AuditLog.resource_type.isnot(None))
            .distinct()
            .all()
        )
        return [row[0] for row in rows]


# Create instance
audit_log = CRUDAuditLog()
class FileStorageService:
    """Local-filesystem storage backend for uploaded files."""

    def __init__(self, storage_path: Optional[str] = None):
        """Resolve the storage root.

        Precedence: explicit argument > FILE_STORAGE_PATH env var >
        /config/uploads (container bind mount) > ./uploads.
        """
        configured_path = storage_path or os.getenv("FILE_STORAGE_PATH")
        if configured_path:
            self.storage_path = Path(configured_path)
        else:
            # Prefer persistent storage when running in the container (bind-mounted /config).
            self.storage_path = Path("/config/uploads") if Path("/config").exists() else Path("./uploads")
        self.storage_path.mkdir(parents=True, exist_ok=True)

    def _get_file_path(self, file_id: str, filename: str) -> Path:
        """Build a YYYY/MM-sharded destination path keyed by file id + original extension."""
        date_prefix = datetime.utcnow().strftime("%Y/%m")
        dir_path = self.storage_path / date_prefix
        dir_path.mkdir(parents=True, exist_ok=True)

        ext = Path(filename).suffix
        return dir_path / f"{file_id}{ext}"

    def _calculate_hash(self, file: BinaryIO) -> str:
        """Return the SHA-256 hex digest of the stream; rewinds it afterwards."""
        sha256 = hashlib.sha256()
        for chunk in iter(lambda: file.read(8192), b""):
            sha256.update(chunk)
        file.seek(0)  # Reset file position for the subsequent copy
        return sha256.hexdigest()

    def save_file(
        self,
        file: BinaryIO,
        filename: str,
        file_id: str
    ) -> tuple[str, str]:
        """
        Save a file to storage.
        Returns (relative_path, file_hash).
        """
        file_hash = self._calculate_hash(file)

        file_path = self._get_file_path(file_id, filename)
        relative_path = str(file_path.relative_to(self.storage_path))

        with open(file_path, "wb") as f:
            shutil.copyfileobj(file, f)

        return relative_path, file_hash

    def get_file_path(self, relative_path: str) -> Path:
        """Resolve *relative_path* inside the storage root.

        Raises ValueError if the resolved path would escape the storage root
        (defense-in-depth against traversal via tampered stored paths).
        """
        base = self.storage_path.resolve()
        candidate = (base / relative_path).resolve()
        if not candidate.is_relative_to(base):
            raise ValueError("relative_path escapes the storage root")
        return candidate

    def delete_file(self, relative_path: str) -> bool:
        """Delete a file from storage; True if a file was removed."""
        try:
            file_path = self.get_file_path(relative_path)
            if file_path.exists():
                file_path.unlink()
                return True
            return False
        except Exception:
            # Best-effort delete: traversal attempts and FS errors report False.
            return False

    def file_exists(self, relative_path: str) -> bool:
        """Check whether a file exists inside the storage root."""
        try:
            return self.get_file_path(relative_path).exists()
        except ValueError:
            return False


class CRUDFile:
    """CRUD operations for stored files (DB records + physical storage)."""

    def __init__(self):
        self.storage = FileStorageService()

    def get(self, db: Session, id: str) -> Optional[StoredFile]:
        """Get a non-deleted file by ID."""
        return db.query(StoredFile).filter(
            StoredFile.id == id,
            StoredFile.is_deleted == False  # noqa: E712 — SQLAlchemy column comparison
        ).first()

    def get_by_hash(self, db: Session, file_hash: str) -> Optional[StoredFile]:
        """Get a file by its content hash (for deduplication)."""
        return db.query(StoredFile).filter(
            StoredFile.file_hash == file_hash,
            StoredFile.is_deleted == False  # noqa: E712
        ).first()

    def get_multi(
        self,
        db: Session,
        *,
        skip: int = 0,
        limit: int = 100,
        uploaded_by: Optional[str] = None,
        is_public: Optional[bool] = None,
        content_type: Optional[str] = None
    ) -> List[StoredFile]:
        """List non-deleted files, newest first, with optional filters.

        *content_type* is a prefix match (e.g. "image/" matches all images).
        """
        query = db.query(StoredFile).filter(StoredFile.is_deleted == False)  # noqa: E712

        if uploaded_by:
            query = query.filter(StoredFile.uploaded_by == uploaded_by)
        if is_public is not None:
            query = query.filter(StoredFile.is_public == is_public)
        if content_type:
            query = query.filter(StoredFile.content_type.like(f"{content_type}%"))

        return query.order_by(StoredFile.created_at.desc()).offset(skip).limit(limit).all()

    def count(
        self,
        db: Session,
        *,
        uploaded_by: Optional[str] = None,
        is_public: Optional[bool] = None
    ) -> int:
        """Count non-deleted files with optional filtering."""
        query = db.query(StoredFile).filter(StoredFile.is_deleted == False)  # noqa: E712

        if uploaded_by:
            query = query.filter(StoredFile.uploaded_by == uploaded_by)
        if is_public is not None:
            query = query.filter(StoredFile.is_public == is_public)

        return query.count()

    def create(
        self,
        db: Session,
        *,
        file: BinaryIO,
        filename: str,
        content_type: Optional[str],
        size_bytes: int,
        uploaded_by: Optional[str] = None,
        metadata: Optional[FileCreate] = None
    ) -> StoredFile:
        """Persist the file to storage and create its database record."""
        file_id = str(uuid.uuid4())

        # Save file to storage first; the DB row references the stored path.
        storage_path, file_hash = self.storage.save_file(file, filename, file_id)

        db_obj = StoredFile(
            id=file_id,
            original_filename=filename,
            content_type=content_type,
            size_bytes=size_bytes,
            storage_path=storage_path,
            storage_type="local",
            file_hash=file_hash,
            uploaded_by=uploaded_by,
            description=metadata.description if metadata else None,
            tags=json.dumps(metadata.tags) if metadata and metadata.tags else None,
            is_public=metadata.is_public if metadata else False
        )

        db.add(db_obj)
        db.commit()
        db.refresh(db_obj)
        return db_obj

    def update(
        self,
        db: Session,
        *,
        db_obj: StoredFile,
        obj_in: FileUpdate
    ) -> StoredFile:
        """Update file metadata (tags are stored as a JSON string)."""
        update_data = obj_in.model_dump(exclude_unset=True)

        if "tags" in update_data and update_data["tags"] is not None:
            update_data["tags"] = json.dumps(update_data["tags"])

        for field, value in update_data.items():
            setattr(db_obj, field, value)

        db.add(db_obj)
        db.commit()
        db.refresh(db_obj)
        return db_obj

    def soft_delete(self, db: Session, *, id: str) -> Optional[StoredFile]:
        """Soft delete a file (marks as deleted but keeps record and bytes)."""
        obj = db.query(StoredFile).filter(StoredFile.id == id).first()
        if obj:
            obj.is_deleted = True
            obj.deleted_at = datetime.utcnow()
            db.add(obj)
            db.commit()
            db.refresh(obj)
        return obj

    def hard_delete(self, db: Session, *, id: str) -> bool:
        """Permanently delete a file's bytes and its database record."""
        obj = db.query(StoredFile).filter(StoredFile.id == id).first()
        if obj:
            self.storage.delete_file(obj.storage_path)
            db.delete(obj)
            db.commit()
            return True
        return False

    def get_file_content(self, db_obj: StoredFile) -> Optional[Path]:
        """Return the path to the physical file, or None if it is missing."""
        file_path = self.storage.get_file_path(db_obj.storage_path)
        if file_path.exists():
            return file_path
        return None

    def validate_upload(
        self,
        content_type: Optional[str],
        size_bytes: int,
        allowed_types: Optional[List[str]] = None,
        max_size: Optional[int] = None
    ) -> tuple[bool, Optional[str]]:
        """
        Validate a file upload.
        Returns (is_valid, error_message).
        """
        allowed = allowed_types or ALLOWED_CONTENT_TYPES
        # Use a separate local rather than reassigning the parameter.
        size_limit = max_size or MAX_FILE_SIZE

        if size_bytes > size_limit:
            return False, f"File size exceeds maximum allowed ({size_limit // (1024*1024)} MB)"

        if content_type and content_type not in allowed:
            return False, f"File type '{content_type}' is not allowed"

        return True, None


# Singleton instances
file_storage = CRUDFile()
class CRUDNotification:
    """CRUD operations for Notification model."""

    def create(
        self,
        db: Session,
        *,
        obj_in: NotificationCreate
    ) -> Notification:
        """Create a single notification (extra_data stored as a JSON string)."""
        extra_data_str = json.dumps(obj_in.extra_data) if obj_in.extra_data else None

        db_obj = Notification(
            user_id=obj_in.user_id,
            title=obj_in.title,
            message=obj_in.message,
            type=obj_in.type,
            link=obj_in.link,
            extra_data=extra_data_str,
            is_read=False
        )
        db.add(db_obj)
        db.commit()
        db.refresh(db_obj)
        return db_obj

    def create_for_user(
        self,
        db: Session,
        *,
        user_id: str,
        title: str,
        message: Optional[str] = None,
        type: str = "info",
        link: Optional[str] = None,
        extra_data: Optional[dict] = None
    ) -> Notification:
        """Convenience wrapper to create a notification for one user."""
        obj_in = NotificationCreate(
            user_id=user_id,
            title=title,
            message=message,
            type=type,
            link=link,
            extra_data=extra_data
        )
        return self.create(db, obj_in=obj_in)

    def create_for_all_users(
        self,
        db: Session,
        *,
        title: str,
        message: Optional[str] = None,
        type: str = "system",
        link: Optional[str] = None,
        extra_data: Optional[dict] = None
    ) -> int:
        """Create a notification for all active users (system broadcast).

        FIX: previously issued one commit per user (N round-trips); now
        stages every row and commits once. Returns the number created.
        """
        from app.models.user import User

        extra_data_str = json.dumps(extra_data) if extra_data else None
        users = db.query(User).filter(User.is_active == True).all()  # noqa: E712

        for user in users:
            db.add(Notification(
                user_id=user.id,
                title=title,
                message=message,
                type=type,
                link=link,
                extra_data=extra_data_str,
                is_read=False
            ))
        db.commit()

        return len(users)

    def get(self, db: Session, id: str) -> Optional[Notification]:
        """Get a notification by ID."""
        return db.query(Notification).filter(Notification.id == id).first()

    def get_multi_by_user(
        self,
        db: Session,
        *,
        user_id: str,
        skip: int = 0,
        limit: int = 50,
        unread_only: bool = False
    ) -> List[Notification]:
        """Get a user's notifications, newest first; optionally unread only."""
        query = db.query(Notification).filter(Notification.user_id == user_id)

        if unread_only:
            query = query.filter(Notification.is_read == False)  # noqa: E712

        return (
            query.order_by(desc(Notification.created_at))
            .offset(skip)
            .limit(limit)
            .all()
        )

    def count_by_user(self, db: Session, user_id: str) -> int:
        """Count total notifications for a user."""
        return db.query(Notification).filter(Notification.user_id == user_id).count()

    def count_unread_by_user(self, db: Session, user_id: str) -> int:
        """Count unread notifications for a user."""
        return (
            db.query(Notification)
            .filter(Notification.user_id == user_id)
            .filter(Notification.is_read == False)  # noqa: E712
            .count()
        )

    def mark_as_read(self, db: Session, *, id: str, user_id: str) -> Optional[Notification]:
        """Mark one of the user's notifications as read (idempotent)."""
        db_obj = (
            db.query(Notification)
            .filter(Notification.id == id)
            .filter(Notification.user_id == user_id)
            .first()
        )

        if db_obj and not db_obj.is_read:
            db_obj.is_read = True
            db_obj.read_at = datetime.utcnow()
            db.add(db_obj)
            db.commit()
            db.refresh(db_obj)

        return db_obj

    def mark_all_as_read(self, db: Session, *, user_id: str) -> int:
        """Mark all of a user's notifications as read; return the count updated."""
        count = (
            db.query(Notification)
            .filter(Notification.user_id == user_id)
            .filter(Notification.is_read == False)  # noqa: E712
            .update({
                "is_read": True,
                "read_at": datetime.utcnow()
            })
        )
        db.commit()
        return count

    def mark_multiple_as_read(
        self,
        db: Session,
        *,
        user_id: str,
        notification_ids: List[str]
    ) -> int:
        """Mark the given notifications as read (only those owned by *user_id*)."""
        count = (
            db.query(Notification)
            .filter(Notification.id.in_(notification_ids))
            .filter(Notification.user_id == user_id)
            .filter(Notification.is_read == False)  # noqa: E712
            .update({
                "is_read": True,
                "read_at": datetime.utcnow()
            }, synchronize_session=False)
        )
        db.commit()
        return count

    def delete(self, db: Session, *, id: str, user_id: str) -> bool:
        """Delete one of the user's notifications; True if it existed."""
        obj = (
            db.query(Notification)
            .filter(Notification.id == id)
            .filter(Notification.user_id == user_id)
            .first()
        )

        if obj:
            db.delete(obj)
            db.commit()
            return True
        return False

    def delete_all_read(self, db: Session, *, user_id: str) -> int:
        """Delete all read notifications for a user; return the count removed."""
        count = (
            db.query(Notification)
            .filter(Notification.user_id == user_id)
            .filter(Notification.is_read == True)  # noqa: E712
            .delete()
        )
        db.commit()
        return count

    def delete_multiple(
        self,
        db: Session,
        *,
        user_id: str,
        notification_ids: List[str]
    ) -> int:
        """Delete the given notifications (only those owned by *user_id*)."""
        count = (
            db.query(Notification)
            .filter(Notification.id.in_(notification_ids))
            .filter(Notification.user_id == user_id)
            .delete(synchronize_session=False)
        )
        db.commit()
        return count

    def get_stats_by_user(self, db: Session, user_id: str) -> dict:
        """Return {total, unread, by_type} notification counts for a user."""
        total = self.count_by_user(db, user_id)
        unread = self.count_unread_by_user(db, user_id)

        type_counts = (
            db.query(
                Notification.type,
                func.count(Notification.id).label('count')
            )
            .filter(Notification.user_id == user_id)
            .group_by(Notification.type)
            .all()
        )
        by_type = {t: c for t, c in type_counts}

        return {
            "total": total,
            "unread": unread,
            "by_type": by_type
        }


# Create instance
notification = CRUDNotification()
def hash_token(token: str) -> str:
    """Return the SHA-256 hex digest of *token* (sessions store hashes, never raw tokens)."""
    return hashlib.sha256(token.encode()).hexdigest()


def parse_user_agent(user_agent: str) -> dict:
    """Best-effort extraction of device type, browser and OS from a User-Agent string.

    Heuristic substring matching only; empty/unrecognized agents yield the
    desktop/Unknown defaults.
    """
    result = {
        "device_type": "desktop",
        "browser": "Unknown",
        "os": "Unknown"
    }

    if not user_agent:
        return result

    ua_lower = user_agent.lower()

    # Device type.
    # FIX: the original `"mobile" in ua or "android" in ua and "mobile" in ua`
    # reduced to just `"mobile" in ua` because of operator precedence; the
    # redundant clause is removed.
    if "mobile" in ua_lower:
        result["device_type"] = "mobile"
    elif "tablet" in ua_lower or "ipad" in ua_lower:
        result["device_type"] = "tablet"

    # Detect OS.
    # FIX: iOS/Android must be checked *before* macOS/Linux — iPhone UAs
    # contain "like Mac OS X" and Android UAs contain "Linux", so the
    # previous ordering misclassified them.
    if "iphone" in ua_lower or "ipad" in ua_lower:
        result["os"] = "iOS"
    elif "android" in ua_lower:
        result["os"] = "Android"
    elif "windows" in ua_lower:
        result["os"] = "Windows"
    elif "mac os" in ua_lower or "macintosh" in ua_lower:
        result["os"] = "macOS"
    elif "linux" in ua_lower:
        result["os"] = "Linux"

    # Detect browser — Edge before Chrome (Edge UAs contain "Chrome"),
    # Chrome before Safari (Chrome UAs contain "Safari").
    if "firefox" in ua_lower:
        result["browser"] = "Firefox"
    elif "edg" in ua_lower:
        result["browser"] = "Edge"
    elif "chrome" in ua_lower:
        result["browser"] = "Chrome"
    elif "safari" in ua_lower:
        result["browser"] = "Safari"
    elif "opera" in ua_lower:
        result["browser"] = "Opera"

    return result


class CRUDSession:
    """CRUD operations for User Session model."""

    def create(
        self,
        db: Session,
        *,
        user_id: str,
        token: str,
        user_agent: Optional[str] = None,
        ip_address: Optional[str] = None,
        expires_at: Optional[datetime] = None
    ) -> UserSession:
        """Create a session row; the token is stored only as a SHA-256 hash."""
        token_hash = hash_token(token)
        parsed_ua = parse_user_agent(user_agent or "")

        # Human-readable device label shown in the sessions UI.
        device_name = f"{parsed_ua['browser']} on {parsed_ua['os']}"

        db_obj = UserSession(
            user_id=user_id,
            token_hash=token_hash,
            device_name=device_name,
            device_type=parsed_ua["device_type"],
            browser=parsed_ua["browser"],
            os=parsed_ua["os"],
            user_agent=user_agent[:500] if user_agent else None,  # column size cap
            ip_address=ip_address,
            expires_at=expires_at,
            is_active=True
        )
        db.add(db_obj)
        db.commit()
        db.refresh(db_obj)
        return db_obj

    def get(self, db: Session, id: str) -> Optional[UserSession]:
        """Get a session by ID."""
        return db.query(UserSession).filter(UserSession.id == id).first()

    def get_by_token(self, db: Session, token: str) -> Optional[UserSession]:
        """Get a session by the raw token (looked up via its hash)."""
        token_hash = hash_token(token)
        return db.query(UserSession).filter(UserSession.token_hash == token_hash).first()

    def get_multi_by_user(
        self,
        db: Session,
        *,
        user_id: str,
        active_only: bool = True
    ) -> List[UserSession]:
        """Get a user's sessions, most recently active first."""
        query = db.query(UserSession).filter(UserSession.user_id == user_id)

        if active_only:
            query = query.filter(UserSession.is_active == True)  # noqa: E712

        return query.order_by(desc(UserSession.last_active_at)).all()

    def count_by_user(self, db: Session, user_id: str, active_only: bool = True) -> int:
        """Count sessions for a user."""
        query = db.query(UserSession).filter(UserSession.user_id == user_id)
        if active_only:
            query = query.filter(UserSession.is_active == True)  # noqa: E712
        return query.count()

    def count_active_by_user(self, db: Session, user_id: str) -> int:
        """Count active sessions for a user."""
        return self.count_by_user(db, user_id, active_only=True)

    def update_activity(
        self,
        db: Session,
        *,
        token: str,
        ip_address: Optional[str] = None
    ) -> Optional[UserSession]:
        """Refresh last_active_at (and IP, if given) for an active session."""
        token_hash = hash_token(token)
        db_obj = db.query(UserSession).filter(UserSession.token_hash == token_hash).first()

        if db_obj and db_obj.is_active:
            db_obj.last_active_at = datetime.utcnow()
            if ip_address:
                db_obj.ip_address = ip_address
            db.add(db_obj)
            db.commit()
            db.refresh(db_obj)

        return db_obj

    def mark_as_current(
        self,
        db: Session,
        *,
        session_id: str,
        user_id: str
    ) -> None:
        """Mark one session as current and unmark all the user's others."""
        db.query(UserSession)\
            .filter(UserSession.user_id == user_id)\
            .update({"is_current": False})

        db.query(UserSession)\
            .filter(UserSession.id == session_id)\
            .update({"is_current": True})

        db.commit()

    def revoke(self, db: Session, *, id: str, user_id: str) -> Optional[UserSession]:
        """Revoke one of the user's sessions."""
        db_obj = (
            db.query(UserSession)
            .filter(UserSession.id == id)
            .filter(UserSession.user_id == user_id)
            .first()
        )

        if db_obj:
            db_obj.is_active = False
            db_obj.revoked_at = datetime.utcnow()
            db.add(db_obj)
            db.commit()
            db.refresh(db_obj)

        return db_obj

    def revoke_all_except(
        self,
        db: Session,
        *,
        user_id: str,
        except_session_id: Optional[str] = None
    ) -> int:
        """Revoke all of a user's active sessions except the given one."""
        query = (
            db.query(UserSession)
            .filter(UserSession.user_id == user_id)
            .filter(UserSession.is_active == True)  # noqa: E712
        )

        if except_session_id:
            query = query.filter(UserSession.id != except_session_id)

        count = query.update({
            "is_active": False,
            "revoked_at": datetime.utcnow()
        })
        db.commit()
        return count

    def revoke_multiple(
        self,
        db: Session,
        *,
        user_id: str,
        session_ids: List[str]
    ) -> int:
        """Revoke the given sessions (only those owned by *user_id*)."""
        count = (
            db.query(UserSession)
            .filter(UserSession.id.in_(session_ids))
            .filter(UserSession.user_id == user_id)
            .filter(UserSession.is_active == True)  # noqa: E712
            .update({
                "is_active": False,
                "revoked_at": datetime.utcnow()
            }, synchronize_session=False)
        )
        db.commit()
        return count

    def cleanup_expired(self, db: Session) -> int:
        """Deactivate sessions whose expires_at has passed; return the count."""
        now = datetime.utcnow()
        count = (
            db.query(UserSession)
            .filter(UserSession.expires_at < now)
            .filter(UserSession.is_active == True)  # noqa: E712
            .update({
                "is_active": False,
                "revoked_at": now
            })
        )
        db.commit()
        return count

    def is_valid(self, db: Session, token: str) -> bool:
        """True if the token maps to an active, unexpired session."""
        session = self.get_by_token(db, token)
        if not session:
            return False
        if not session.is_active:
            return False
        if session.expires_at and session.expires_at < datetime.utcnow():
            return False
        return True

    def delete_old_inactive(self, db: Session, days: int = 30) -> int:
        """Delete sessions revoked more than *days* days ago; return the count."""
        cutoff = datetime.utcnow() - timedelta(days=days)
        count = (
            db.query(UserSession)
            .filter(UserSession.is_active == False)  # noqa: E712
            .filter(UserSession.revoked_at < cutoff)
            .delete()
        )
        db.commit()
        return count


# Create instance
session = CRUDSession()
class CRUDWebhook:
    """CRUD operations for webhooks."""

    def get(self, db: Session, id: str) -> Optional[Webhook]:
        """Get a webhook by ID."""
        return db.query(Webhook).filter(Webhook.id == id).first()

    def get_multi(
        self,
        db: Session,
        *,
        skip: int = 0,
        limit: int = 100,
        is_active: Optional[bool] = None
    ) -> List[Webhook]:
        """List webhooks, newest first, optionally filtered by active state."""
        query = db.query(Webhook)
        if is_active is not None:
            query = query.filter(Webhook.is_active == is_active)
        return query.order_by(Webhook.created_at.desc()).offset(skip).limit(limit).all()

    def get_by_event(self, db: Session, event_type: str) -> List[Webhook]:
        """Return active webhooks subscribed to *event_type* (or to "*")."""
        webhooks = db.query(Webhook).filter(Webhook.is_active == True).all()  # noqa: E712
        matching = []
        for webhook in webhooks:
            # FIX: guard against corrupt subscription JSON so one bad row
            # cannot break dispatch for every event.
            try:
                events = json.loads(webhook.events) if webhook.events else []
            except (TypeError, ValueError):
                events = []
            if "*" in events or event_type in events:
                matching.append(webhook)
        return matching

    def create(
        self,
        db: Session,
        *,
        obj_in: WebhookCreate,
        created_by: Optional[str] = None
    ) -> Webhook:
        """Create a webhook with a freshly generated signing secret."""
        secret = secrets.token_hex(32)

        db_obj = Webhook(
            name=obj_in.name,
            url=obj_in.url,
            secret=secret,
            events=json.dumps(obj_in.events),
            is_active=obj_in.is_active,
            retry_count=obj_in.retry_count,
            timeout_seconds=obj_in.timeout_seconds,
            created_by=created_by
        )
        db.add(db_obj)
        db.commit()
        db.refresh(db_obj)
        return db_obj

    def update(
        self,
        db: Session,
        *,
        db_obj: Webhook,
        obj_in: WebhookUpdate
    ) -> Webhook:
        """Update a webhook (event list is serialized to JSON)."""
        update_data = obj_in.model_dump(exclude_unset=True)

        if "events" in update_data:
            update_data["events"] = json.dumps(update_data["events"])

        for field, value in update_data.items():
            setattr(db_obj, field, value)

        db.add(db_obj)
        db.commit()
        db.refresh(db_obj)
        return db_obj

    def delete(self, db: Session, *, id: str) -> Optional[Webhook]:
        """Delete a webhook; returns the deleted row or None."""
        obj = db.query(Webhook).filter(Webhook.id == id).first()
        if obj:
            db.delete(obj)
            db.commit()
        return obj

    def regenerate_secret(self, db: Session, *, db_obj: Webhook) -> Webhook:
        """Rotate the webhook's signing secret."""
        db_obj.secret = secrets.token_hex(32)
        db.add(db_obj)
        db.commit()
        db.refresh(db_obj)
        return db_obj

    def count(self, db: Session) -> int:
        """Count total webhooks."""
        return db.query(Webhook).count()


class CRUDWebhookDelivery:
    """CRUD operations for webhook deliveries."""

    def get(self, db: Session, id: str) -> Optional[WebhookDelivery]:
        """Get a delivery by ID."""
        return db.query(WebhookDelivery).filter(WebhookDelivery.id == id).first()

    def get_by_webhook(
        self,
        db: Session,
        *,
        webhook_id: str,
        skip: int = 0,
        limit: int = 50
    ) -> List[WebhookDelivery]:
        """Get deliveries for a specific webhook, newest first."""
        return (
            db.query(WebhookDelivery)
            .filter(WebhookDelivery.webhook_id == webhook_id)
            .order_by(WebhookDelivery.created_at.desc())
            .offset(skip)
            .limit(limit)
            .all()
        )

    def get_pending_retries(self, db: Session) -> List[WebhookDelivery]:
        """Get failed deliveries whose retry time has arrived."""
        now = datetime.utcnow()
        return (
            db.query(WebhookDelivery)
            .filter(
                WebhookDelivery.status == "failed",
                WebhookDelivery.next_retry_at <= now
            )
            .all()
        )

    def create(
        self,
        db: Session,
        *,
        webhook_id: str,
        event_type: str,
        payload: dict
    ) -> WebhookDelivery:
        """Create a pending delivery record (payload stored as JSON)."""
        db_obj = WebhookDelivery(
            webhook_id=webhook_id,
            event_type=event_type,
            payload=json.dumps(payload),
            status="pending"
        )
        db.add(db_obj)
        db.commit()
        db.refresh(db_obj)
        return db_obj

    def update_status(
        self,
        db: Session,
        *,
        db_obj: WebhookDelivery,
        status: str,
        status_code: Optional[int] = None,
        response_body: Optional[str] = None,
        error_message: Optional[str] = None,
        schedule_retry: bool = False,
        max_retries: int = 3
    ) -> WebhookDelivery:
        """Record a delivery attempt's outcome and, on failure, schedule a retry."""
        db_obj.status = status
        db_obj.status_code = status_code
        # Cap the stored response body to keep rows small.
        db_obj.response_body = response_body[:1000] if response_body else None
        db_obj.error_message = error_message
        db_obj.attempt_count += 1

        if status == "success":
            db_obj.delivered_at = datetime.utcnow()
            db_obj.next_retry_at = None
        elif status == "failed" and schedule_retry and db_obj.attempt_count < max_retries:
            # Exponential backoff: 1min, 5min, 30min
            delays = [60, 300, 1800]
            delay = delays[min(db_obj.attempt_count - 1, len(delays) - 1)]
            db_obj.next_retry_at = datetime.utcnow() + timedelta(seconds=delay)
        else:
            db_obj.next_retry_at = None

        db.add(db_obj)
        db.commit()
        db.refresh(db_obj)
        return db_obj


class WebhookService:
    """Service for triggering and delivering webhooks."""

    def __init__(self):
        self.webhook_crud = CRUDWebhook()
        self.delivery_crud = CRUDWebhookDelivery()

    def generate_signature(self, payload: str, secret: str) -> str:
        """Generate the HMAC-SHA256 hex signature for *payload*."""
        return hmac.new(
            secret.encode(),
            payload.encode(),
            hashlib.sha256
        ).hexdigest()

    async def trigger_event(
        self,
        db: Session,
        event_type: str,
        payload: dict
    ) -> List[WebhookDelivery]:
        """Create delivery records for all subscribed webhooks and attempt delivery."""
        webhooks = self.webhook_crud.get_by_event(db, event_type)
        deliveries = []

        for webhook in webhooks:
            delivery = self.delivery_crud.create(
                db,
                webhook_id=webhook.id,
                event_type=event_type,
                payload=payload
            )
            deliveries.append(delivery)

            await self.deliver(db, webhook, delivery)

        return deliveries

    async def deliver(
        self,
        db: Session,
        webhook: Webhook,
        delivery: WebhookDelivery
    ) -> bool:
        """POST a delivery to its endpoint; returns True on 2xx.

        Failures update per-webhook counters and may schedule a retry.
        """
        payload_str = delivery.payload
        signature = self.generate_signature(payload_str, webhook.secret)

        headers = {
            "Content-Type": "application/json",
            "X-Webhook-Signature": signature,
            "X-Webhook-Event": delivery.event_type,
            "X-Webhook-Delivery-Id": delivery.id
        }

        try:
            async with httpx.AsyncClient(timeout=webhook.timeout_seconds) as client:
                response = await client.post(
                    webhook.url,
                    content=payload_str,
                    headers=headers
                )

            if 200 <= response.status_code < 300:
                self.delivery_crud.update_status(
                    db,
                    db_obj=delivery,
                    status="success",
                    status_code=response.status_code,
                    response_body=response.text
                )
                webhook.success_count += 1
                webhook.last_triggered_at = datetime.utcnow()
                db.add(webhook)
                db.commit()
                return True

            self.delivery_crud.update_status(
                db,
                db_obj=delivery,
                status="failed",
                status_code=response.status_code,
                response_body=response.text,
                error_message=f"HTTP {response.status_code}",
                schedule_retry=True,
                max_retries=webhook.retry_count
            )
            webhook.failure_count += 1
            db.add(webhook)
            db.commit()
            return False

        except Exception as e:
            # Network/timeout/etc.: record the error and schedule a retry.
            self.delivery_crud.update_status(
                db,
                db_obj=delivery,
                status="failed",
                error_message=str(e),
                schedule_retry=True,
                max_retries=webhook.retry_count
            )
            webhook.failure_count += 1
            db.add(webhook)
            db.commit()
            return False

    async def test_webhook(
        self,
        db: Session,
        webhook: Webhook,
        event_type: str = "test.ping",
        payload: Optional[dict] = None
    ) -> WebhookDelivery:
        """Send a test delivery to a webhook and return the delivery record."""
        if payload is None:
            payload = {
                "event": event_type,
                "timestamp": datetime.utcnow().isoformat(),
                "test": True,
                "message": "This is a test webhook delivery"
            }

        delivery = self.delivery_crud.create(
            db,
            webhook_id=webhook.id,
            event_type=event_type,
            payload=payload
        )

        await self.deliver(db, webhook, delivery)
        return delivery


# Singleton instances
webhook = CRUDWebhook()
webhook_delivery = CRUDWebhookDelivery()
webhook_service = WebhookService()
b/backend/app/dependencies.py index 07a157f..77d2609 100644 --- a/backend/app/dependencies.py +++ b/backend/app/dependencies.py @@ -1,7 +1,8 @@ """Shared dependencies for FastAPI dependency injection.""" -from typing import Generator, Optional -from fastapi import Depends, HTTPException, status +from datetime import datetime, timezone +from typing import Generator +from fastapi import Depends, HTTPException, status, Request from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials from sqlalchemy.orm import Session from jose import JWTError, jwt @@ -28,8 +29,15 @@ def get_db() -> Generator[Session, None, None]: # Security security = HTTPBearer() +def _get_client_ip(request: Request) -> str: + forwarded = request.headers.get("X-Forwarded-For") + if forwarded: + return forwarded.split(",")[0].strip() + return request.client.host if request.client else "unknown" + def get_current_user( + request: Request, credentials: HTTPAuthorizationCredentials = Depends(security), db: Session = Depends(get_db) ) -> models.User: @@ -44,10 +52,25 @@ def get_current_user( ) try: - payload = decode_access_token(credentials.credentials) + token = credentials.credentials + + # API key authentication (programmatic access) + if token.startswith("sk_"): + api_key = crud.api_key.authenticate(db, plain_key=token, ip_address=_get_client_ip(request)) + if not api_key: + raise credentials_exception + + user = crud.user.get(db, id=api_key.user_id) + if user is None or not user.is_active: + raise credentials_exception + return user + + payload = decode_access_token(token) user_id: str = payload.get("sub") if user_id is None: raise credentials_exception + if payload.get("temp") is True: + raise credentials_exception except JWTError: raise credentials_exception @@ -61,6 +84,37 @@ def get_current_user( detail="Inactive user" ) + session = crud.session.get_by_token(db, token) + if not session: + # Backward compatibility: if a valid JWT exists without a session row, + # create a session 
record on first use so it can be managed/revoked. + exp = payload.get("exp") + expires_at = None + if isinstance(exp, (int, float)): + expires_at = datetime.fromtimestamp(exp, tz=timezone.utc).replace(tzinfo=None) + + created = crud.session.create( + db, + user_id=user.id, + token=token, + user_agent=request.headers.get("User-Agent", "")[:500], + ip_address=_get_client_ip(request), + expires_at=expires_at, + ) + crud.session.mark_as_current(db, session_id=created.id, user_id=user.id) + else: + if not session.is_active: + raise credentials_exception + if session.expires_at and session.expires_at < datetime.utcnow(): + raise credentials_exception + + # Update last seen opportunistically to reduce write amplification. + now = datetime.utcnow() + if session.last_active_at and (now - session.last_active_at).total_seconds() >= 60: + crud.session.update_activity(db, token=token, ip_address=_get_client_ip(request)) + if not session.is_current: + crud.session.mark_as_current(db, session_id=session.id, user_id=user.id) + return user diff --git a/backend/app/main.py b/backend/app/main.py index 0f09fcb..79ad1bf 100644 --- a/backend/app/main.py +++ b/backend/app/main.py @@ -8,6 +8,9 @@ from fastapi import FastAPI, Request from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import JSONResponse, FileResponse from fastapi.staticfiles import StaticFiles +from slowapi import Limiter, _rate_limit_exceeded_handler +from slowapi.util import get_remote_address +from slowapi.errors import RateLimitExceeded import logging import time @@ -17,6 +20,12 @@ from app.api.v1 import router as api_v1_router from app.db.session import engine from app.db.base import Base +# Import all models so they're registered with Base.metadata before create_all +from app.models import ( # noqa: F401 + User, Settings, AuditLog, APIKey, Notification, + UserSession, Webhook, WebhookDelivery, StoredFile +) + # Static files path STATIC_DIR = Path(__file__).parent.parent / "static" @@ -29,6 
+38,10 @@ logging.basicConfig( logger = logging.getLogger(__name__) +# Rate limiter setup +limiter = Limiter(key_func=get_remote_address, default_limits=["200/minute"]) + + # Create FastAPI application app = FastAPI( title=settings.APP_NAME, @@ -39,6 +52,10 @@ app = FastAPI( openapi_url=f"{settings.API_V1_PREFIX}/openapi.json" ) +# Add rate limiter to app state +app.state.limiter = limiter +app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler) + # CORS middleware app.add_middleware( diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py index 42502d0..b7c195a 100644 --- a/backend/app/models/__init__.py +++ b/backend/app/models/__init__.py @@ -2,5 +2,11 @@ from app.models.user import User from app.models.settings import Settings +from app.models.audit_log import AuditLog +from app.models.api_key import APIKey +from app.models.notification import Notification +from app.models.session import UserSession +from app.models.webhook import Webhook, WebhookDelivery +from app.models.file import StoredFile -__all__ = ["User", "Settings"] +__all__ = ["User", "Settings", "AuditLog", "APIKey", "Notification", "UserSession", "Webhook", "WebhookDelivery", "StoredFile"] diff --git a/backend/app/models/api_key.py b/backend/app/models/api_key.py new file mode 100644 index 0000000..2ba9362 --- /dev/null +++ b/backend/app/models/api_key.py @@ -0,0 +1,69 @@ +"""API Key database model for programmatic access.""" + +import uuid +import secrets +from datetime import datetime +from sqlalchemy import Column, String, Boolean, ForeignKey, Text +from sqlalchemy.sql import func +from sqlalchemy.types import DateTime + +from app.db.base import Base + + +def generate_api_key() -> str: + """Generate a secure API key.""" + return f"sk_{secrets.token_urlsafe(32)}" + + +def generate_key_prefix(key: str) -> str: + """Generate a display prefix for the key (first 8 chars after sk_).""" + return key[:11] + "..." 
if len(key) > 11 else key + + +class APIKey(Base): + """API Key model for programmatic authentication.""" + + __tablename__ = "api_keys" + + id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + + # Owner + user_id = Column(String(36), ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True) + + # Key info + name = Column(String(100), nullable=False) # User-friendly name + key_hash = Column(String(255), nullable=False, unique=True) # Hashed key (for lookup) + key_prefix = Column(String(20), nullable=False) # First chars for display (sk_xxx...) + + # Permissions & scope + scopes = Column(Text, nullable=True) # JSON array of allowed scopes/permissions + + # Status + is_active = Column(Boolean, default=True, nullable=False) + + # Usage tracking + last_used_at = Column(DateTime, nullable=True) + last_used_ip = Column(String(45), nullable=True) + usage_count = Column(String(20), default="0") # String for SQLite compatibility + + # Expiration + expires_at = Column(DateTime, nullable=True) # null = never expires + + # Timestamps + created_at = Column(DateTime, server_default=func.now(), nullable=False) + updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now(), nullable=False) + + def __repr__(self): + return f"" + + @property + def is_expired(self) -> bool: + """Check if key is expired.""" + if self.expires_at is None: + return False + return datetime.utcnow() > self.expires_at + + @property + def is_valid(self) -> bool: + """Check if key is valid (active and not expired).""" + return self.is_active and not self.is_expired diff --git a/backend/app/models/audit_log.py b/backend/app/models/audit_log.py new file mode 100644 index 0000000..816b11a --- /dev/null +++ b/backend/app/models/audit_log.py @@ -0,0 +1,48 @@ +"""Audit log database model for tracking user actions.""" + +import uuid +from sqlalchemy import Column, String, Text, ForeignKey, Index +from sqlalchemy.sql import func +from sqlalchemy.types import 
DateTime + +from app.db.base import Base + + +class AuditLog(Base): + """Audit log model for tracking user actions and system events.""" + + __tablename__ = "audit_logs" + + id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + + # Who performed the action + user_id = Column(String(36), ForeignKey("users.id", ondelete="SET NULL"), nullable=True, index=True) + username = Column(String(100), nullable=True) # Stored for history even if user deleted + + # What action was performed + action = Column(String(50), nullable=False, index=True) # login, logout, create, update, delete, etc. + resource_type = Column(String(50), nullable=True, index=True) # user, setting, api_key, etc. + resource_id = Column(String(255), nullable=True) # ID of affected resource + + # Additional details + details = Column(Text, nullable=True) # JSON string with extra info + + # Request context + ip_address = Column(String(45), nullable=True) # IPv6 max length + user_agent = Column(String(500), nullable=True) + + # Status + status = Column(String(20), default="success") # success, failure, error + + # Timestamp + created_at = Column(DateTime, server_default=func.now(), nullable=False, index=True) + + # Composite indexes for common queries + __table_args__ = ( + Index('ix_audit_user_action', 'user_id', 'action'), + Index('ix_audit_resource', 'resource_type', 'resource_id'), + Index('ix_audit_created_at_desc', created_at.desc()), + ) + + def __repr__(self): + return f"" diff --git a/backend/app/models/file.py b/backend/app/models/file.py new file mode 100644 index 0000000..182becd --- /dev/null +++ b/backend/app/models/file.py @@ -0,0 +1,43 @@ +"""File storage model.""" + +import uuid +from sqlalchemy import Column, String, Boolean, DateTime, BigInteger, ForeignKey, Text +from sqlalchemy.sql import func + +from app.db.base import Base + + +class StoredFile(Base): + """Model for tracking uploaded files.""" + + __tablename__ = "stored_files" + + id = Column(String(36), 
primary_key=True, default=lambda: str(uuid.uuid4())) + + # Original file info + original_filename = Column(String(255), nullable=False) + content_type = Column(String(100), nullable=True) + size_bytes = Column(BigInteger, nullable=False) + + # Storage info + storage_path = Column(String(500), nullable=False) # Relative path in storage + storage_type = Column(String(20), default="local", nullable=False) # local, s3, etc. + + # Metadata + description = Column(Text, nullable=True) + tags = Column(Text, nullable=True) # JSON array as text + + # Access control + is_public = Column(Boolean, default=False, nullable=False) + uploaded_by = Column(String(36), ForeignKey("users.id", ondelete="SET NULL"), nullable=True) + + # Hash for deduplication/integrity + file_hash = Column(String(64), nullable=True, index=True) # SHA-256 + + # Timestamps + created_at = Column(DateTime, server_default=func.now(), nullable=False) + updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now(), nullable=False) + + # Soft delete + is_deleted = Column(Boolean, default=False, nullable=False) + deleted_at = Column(DateTime, nullable=True) diff --git a/backend/app/models/notification.py b/backend/app/models/notification.py new file mode 100644 index 0000000..b8d2cb4 --- /dev/null +++ b/backend/app/models/notification.py @@ -0,0 +1,51 @@ +"""Notification database model for in-app notifications.""" + +import json +import uuid +from sqlalchemy import Column, String, Boolean, ForeignKey, Text +from sqlalchemy.sql import func +from sqlalchemy.types import DateTime + +from app.db.base import Base + + +class Notification(Base): + """Notification model for in-app user notifications.""" + + __tablename__ = "notifications" + + id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + + # Recipient + user_id = Column(String(36), ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True) + + # Notification content + title = Column(String(200), nullable=False) 
+ message = Column(Text, nullable=True) + type = Column(String(50), default="info") # info, success, warning, error, system + + # Link (optional) - where to navigate when clicked + link = Column(String(500), nullable=True) + + # Extra data (JSON string) + extra_data = Column(Text, nullable=True) + + # Status + is_read = Column(Boolean, default=False, nullable=False, index=True) + + # Timestamps + created_at = Column(DateTime, server_default=func.now(), nullable=False, index=True) + read_at = Column(DateTime, nullable=True) + + @property + def parsed_extra_data(self) -> dict | None: + """Get extra_data as parsed dict.""" + if self.extra_data: + try: + return json.loads(self.extra_data) + except json.JSONDecodeError: + return None + return None + + def __repr__(self): + return f"" diff --git a/backend/app/models/session.py b/backend/app/models/session.py new file mode 100644 index 0000000..56507a1 --- /dev/null +++ b/backend/app/models/session.py @@ -0,0 +1,46 @@ +"""User session database model for tracking active sessions.""" + +import uuid +from sqlalchemy import Column, String, Boolean, ForeignKey, Text +from sqlalchemy.sql import func +from sqlalchemy.types import DateTime + +from app.db.base import Base + + +class UserSession(Base): + """User session model for tracking active sessions.""" + + __tablename__ = "user_sessions" + + id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + + # User + user_id = Column(String(36), ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True) + + # Token hash (to identify and revoke specific sessions) + token_hash = Column(String(64), unique=True, nullable=False, index=True) + + # Device/client info + device_name = Column(String(200), nullable=True) # User-friendly name + device_type = Column(String(50), nullable=True) # desktop, mobile, tablet + browser = Column(String(100), nullable=True) + os = Column(String(100), nullable=True) + user_agent = Column(String(500), nullable=True) + + # 
Location info + ip_address = Column(String(45), nullable=True) + location = Column(String(200), nullable=True) # City, Country + + # Status + is_active = Column(Boolean, default=True, nullable=False) + is_current = Column(Boolean, default=False, nullable=False) # Marks the current session + + # Timestamps + created_at = Column(DateTime, server_default=func.now(), nullable=False, index=True) + last_active_at = Column(DateTime, server_default=func.now(), nullable=False) + expires_at = Column(DateTime, nullable=True) + revoked_at = Column(DateTime, nullable=True) + + def __repr__(self): + return f"" diff --git a/backend/app/models/user.py b/backend/app/models/user.py index a6535d8..dd35174 100644 --- a/backend/app/models/user.py +++ b/backend/app/models/user.py @@ -27,6 +27,11 @@ class User(Base): # null means inherit from global settings (all enabled by default) _permissions = Column("permissions", Text, nullable=True) + # 2FA fields + totp_secret = Column(String(32), nullable=True) # Base32 encoded TOTP secret + totp_enabled = Column(Boolean, default=False, nullable=False) + totp_backup_codes = Column(Text, nullable=True) # JSON array of backup codes + created_at = Column(DateTime, server_default=func.now(), nullable=False) updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now(), nullable=False) last_login = Column(DateTime, nullable=True) @@ -49,5 +54,23 @@ class User(Base): else: self._permissions = json.dumps(value) + @property + def backup_codes(self) -> list: + """Get backup codes as a list.""" + if self.totp_backup_codes: + try: + return json.loads(self.totp_backup_codes) + except json.JSONDecodeError: + return [] + return [] + + @backup_codes.setter + def backup_codes(self, value: list): + """Set backup codes from a list.""" + if value is None: + self.totp_backup_codes = None + else: + self.totp_backup_codes = json.dumps(value) + def __repr__(self): return f"" diff --git a/backend/app/models/webhook.py b/backend/app/models/webhook.py new 
file mode 100644 index 0000000..3633f46 --- /dev/null +++ b/backend/app/models/webhook.py @@ -0,0 +1,63 @@ +"""Webhook model for external integrations.""" + +import uuid +from sqlalchemy import Column, String, Boolean, DateTime, Text, Integer, ForeignKey +from sqlalchemy.sql import func + +from app.db.base import Base + + +class Webhook(Base): + """Webhook configuration model.""" + + __tablename__ = "webhooks" + + id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + name = Column(String(100), nullable=False) + url = Column(String(500), nullable=False) + secret = Column(String(64), nullable=True) # For signature verification + + # Events to trigger on (JSON array stored as text) + events = Column(Text, nullable=False, default='["*"]') # ["user.created", "user.updated", etc.] + + # Configuration + is_active = Column(Boolean, default=True, nullable=False) + retry_count = Column(Integer, default=3, nullable=False) + timeout_seconds = Column(Integer, default=30, nullable=False) + + # Metadata + created_by = Column(String(36), ForeignKey("users.id", ondelete="SET NULL"), nullable=True) + created_at = Column(DateTime, server_default=func.now(), nullable=False) + updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now(), nullable=False) + + # Statistics + last_triggered_at = Column(DateTime, nullable=True) + success_count = Column(Integer, default=0, nullable=False) + failure_count = Column(Integer, default=0, nullable=False) + + +class WebhookDelivery(Base): + """Webhook delivery log model.""" + + __tablename__ = "webhook_deliveries" + + id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + webhook_id = Column(String(36), ForeignKey("webhooks.id", ondelete="CASCADE"), nullable=False) + + # Event details + event_type = Column(String(50), nullable=False) + payload = Column(Text, nullable=False) # JSON payload + + # Delivery status + status = Column(String(20), default="pending", nullable=False) # pending, 
success, failed + status_code = Column(Integer, nullable=True) + response_body = Column(Text, nullable=True) + error_message = Column(Text, nullable=True) + + # Retry tracking + attempt_count = Column(Integer, default=0, nullable=False) + next_retry_at = Column(DateTime, nullable=True) + + # Timestamps + created_at = Column(DateTime, server_default=func.now(), nullable=False) + delivered_at = Column(DateTime, nullable=True) diff --git a/backend/app/schemas/__init__.py b/backend/app/schemas/__init__.py index 62bd59d..667ab84 100644 --- a/backend/app/schemas/__init__.py +++ b/backend/app/schemas/__init__.py @@ -1,8 +1,27 @@ """Schemas package - exports all Pydantic schemas.""" from app.schemas.user import User, UserCreate, UserUpdate, UserInDB -from app.schemas.auth import Token, TokenData, LoginRequest, RegisterRequest +from app.schemas.auth import Token, TokenData, LoginRequest, RegisterRequest, TokenWith2FA, Verify2FARequest from app.schemas.settings import Setting, SettingUpdate +from app.schemas.audit_log import AuditLog as AuditLogSchema, AuditLogCreate, AuditLogList, AuditLogStats +from app.schemas.webhook import ( + Webhook as WebhookSchema, + WebhookCreate, + WebhookUpdate, + WebhookWithSecret, + WebhookDelivery as WebhookDeliverySchema, + WebhookTest, + WEBHOOK_EVENTS, +) +from app.schemas.file import ( + StoredFile as StoredFileSchema, + FileCreate, + FileUpdate, + FileUploadResponse, + FileListResponse, + ALLOWED_CONTENT_TYPES, + MAX_FILE_SIZE, +) __all__ = [ "User", @@ -13,6 +32,26 @@ __all__ = [ "TokenData", "LoginRequest", "RegisterRequest", + "TokenWith2FA", + "Verify2FARequest", "Setting", "SettingUpdate", + "AuditLogSchema", + "AuditLogCreate", + "AuditLogList", + "AuditLogStats", + "WebhookSchema", + "WebhookCreate", + "WebhookUpdate", + "WebhookWithSecret", + "WebhookDeliverySchema", + "WebhookTest", + "WEBHOOK_EVENTS", + "StoredFileSchema", + "FileCreate", + "FileUpdate", + "FileUploadResponse", + "FileListResponse", + "ALLOWED_CONTENT_TYPES", + 
"MAX_FILE_SIZE", ] diff --git a/backend/app/schemas/api_key.py b/backend/app/schemas/api_key.py new file mode 100644 index 0000000..732bcf3 --- /dev/null +++ b/backend/app/schemas/api_key.py @@ -0,0 +1,55 @@ +"""Pydantic schemas for API Key requests/responses.""" + +from datetime import datetime +from typing import Optional, List +from pydantic import BaseModel, Field + + +class APIKeyBase(BaseModel): + """Base API key schema.""" + name: str = Field(..., min_length=1, max_length=100) + scopes: Optional[List[str]] = None + expires_at: Optional[datetime] = None + + +class APIKeyCreate(APIKeyBase): + """Schema for creating an API key.""" + pass + + +class APIKeyUpdate(BaseModel): + """Schema for updating an API key.""" + name: Optional[str] = Field(None, min_length=1, max_length=100) + scopes: Optional[List[str]] = None + is_active: Optional[bool] = None + expires_at: Optional[datetime] = None + + +class APIKey(BaseModel): + """Schema for API key response (without the actual key).""" + id: str + user_id: str + name: str + key_prefix: str + scopes: Optional[List[str]] = None + is_active: bool + last_used_at: Optional[datetime] = None + last_used_ip: Optional[str] = None + usage_count: int = 0 + expires_at: Optional[datetime] = None + created_at: datetime + updated_at: datetime + + class Config: + from_attributes = True + + +class APIKeyWithSecret(APIKey): + """Schema for API key response with the actual key (only on creation).""" + key: str # The actual API key - only shown once + + +class APIKeyList(BaseModel): + """Schema for paginated API key list.""" + items: List[APIKey] + total: int diff --git a/backend/app/schemas/audit_log.py b/backend/app/schemas/audit_log.py new file mode 100644 index 0000000..da9c512 --- /dev/null +++ b/backend/app/schemas/audit_log.py @@ -0,0 +1,65 @@ +"""Pydantic schemas for Audit Log API requests/responses.""" + +from datetime import datetime +from typing import Optional, List, Any +from pydantic import BaseModel, Field + + +class 
AuditLogBase(BaseModel): + """Base audit log schema.""" + action: str = Field(..., max_length=50) + resource_type: Optional[str] = Field(None, max_length=50) + resource_id: Optional[str] = Field(None, max_length=255) + details: Optional[str] = None + ip_address: Optional[str] = Field(None, max_length=45) + user_agent: Optional[str] = Field(None, max_length=500) + status: str = Field(default="success", max_length=20) + + +class AuditLogCreate(AuditLogBase): + """Schema for creating an audit log entry.""" + user_id: Optional[str] = None + username: Optional[str] = None + + +class AuditLog(AuditLogBase): + """Schema for audit log response.""" + id: str + user_id: Optional[str] = None + username: Optional[str] = None + created_at: datetime + + class Config: + from_attributes = True + + +class AuditLogList(BaseModel): + """Schema for paginated audit log list.""" + items: List[AuditLog] + total: int + page: int + page_size: int + total_pages: int + + +class AuditLogFilter(BaseModel): + """Schema for filtering audit logs.""" + user_id: Optional[str] = None + username: Optional[str] = None + action: Optional[str] = None + resource_type: Optional[str] = None + resource_id: Optional[str] = None + status: Optional[str] = None + start_date: Optional[datetime] = None + end_date: Optional[datetime] = None + + +class AuditLogStats(BaseModel): + """Schema for audit log statistics.""" + total_entries: int + entries_today: int + entries_this_week: int + entries_this_month: int + actions_breakdown: dict[str, int] + top_users: List[dict[str, Any]] + recent_failures: int diff --git a/backend/app/schemas/auth.py b/backend/app/schemas/auth.py index 9fdceb9..26309fb 100644 --- a/backend/app/schemas/auth.py +++ b/backend/app/schemas/auth.py @@ -11,6 +11,15 @@ class Token(BaseModel): token_type: str = "bearer" +class TokenWith2FA(BaseModel): + """JWT token response with 2FA requirement indicator.""" + + access_token: Optional[str] = None + token_type: str = "bearer" + requires_2fa: bool = 
False + temp_token: Optional[str] = None # Temporary token for 2FA verification + + class TokenData(BaseModel): """Token payload data schema.""" @@ -22,6 +31,14 @@ class LoginRequest(BaseModel): username: str = Field(..., min_length=3, max_length=100) password: str = Field(..., min_length=1) + totp_code: Optional[str] = Field(None, min_length=6, max_length=8) # 6 digits or 8-char backup code + + +class Verify2FARequest(BaseModel): + """2FA verification request schema.""" + + temp_token: str + code: str = Field(..., min_length=6, max_length=8) class RegisterRequest(BaseModel): diff --git a/backend/app/schemas/file.py b/backend/app/schemas/file.py new file mode 100644 index 0000000..ce70e01 --- /dev/null +++ b/backend/app/schemas/file.py @@ -0,0 +1,87 @@ +"""File storage schemas.""" + +from datetime import datetime +from typing import Optional, List +from pydantic import BaseModel, Field + + +class FileBase(BaseModel): + """Base file schema.""" + description: Optional[str] = None + tags: Optional[List[str]] = None + is_public: bool = False + + +class FileCreate(FileBase): + """Schema for file upload metadata.""" + pass + + +class FileUpdate(BaseModel): + """Schema for updating file metadata.""" + description: Optional[str] = None + tags: Optional[List[str]] = None + is_public: Optional[bool] = None + + +class StoredFile(FileBase): + """File response schema.""" + id: str + original_filename: str + content_type: Optional[str] = None + size_bytes: int + storage_type: str + uploaded_by: Optional[str] = None + file_hash: Optional[str] = None + created_at: datetime + updated_at: datetime + + class Config: + from_attributes = True + + +class FileUploadResponse(BaseModel): + """Response after successful file upload.""" + id: str + original_filename: str + content_type: Optional[str] = None + size_bytes: int + download_url: str + + +class FileListResponse(BaseModel): + """Response for file listing.""" + files: List[StoredFile] + total: int + page: int + page_size: int + + +# 
Allowed file types for upload +ALLOWED_CONTENT_TYPES = [ + # Images + "image/jpeg", + "image/png", + "image/gif", + "image/webp", + "image/svg+xml", + # Documents + "application/pdf", + "application/msword", + "application/vnd.openxmlformats-officedocument.wordprocessingml.document", + "application/vnd.ms-excel", + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", + "text/plain", + "text/csv", + # Archives + "application/zip", + "application/x-tar", + "application/gzip", + # JSON/XML + "application/json", + "application/xml", + "text/xml", +] + +# Maximum file size (10 MB by default) +MAX_FILE_SIZE = 10 * 1024 * 1024 diff --git a/backend/app/schemas/notification.py b/backend/app/schemas/notification.py new file mode 100644 index 0000000..57f44d4 --- /dev/null +++ b/backend/app/schemas/notification.py @@ -0,0 +1,75 @@ +"""Pydantic schemas for Notification API requests/responses.""" + +from datetime import datetime +from typing import Optional, List, Any +from pydantic import BaseModel, Field + + +class NotificationBase(BaseModel): + """Base notification schema.""" + title: str = Field(..., min_length=1, max_length=200) + message: Optional[str] = None + type: str = Field(default="info", pattern="^(info|success|warning|error|system)$") + link: Optional[str] = Field(None, max_length=500) + extra_data: Optional[dict] = None + + +class NotificationCreate(NotificationBase): + """Schema for creating a notification.""" + user_id: str # Required for system/admin notifications + + +class NotificationCreateForUser(NotificationBase): + """Schema for creating a notification for a specific user (admin use).""" + pass + + +class Notification(BaseModel): + """Schema for notification response.""" + id: str + user_id: str + title: str + message: Optional[str] = None + type: str + link: Optional[str] = None + extra_data: Optional[dict] = None + is_read: bool + created_at: datetime + read_at: Optional[datetime] = None + + class Config: + from_attributes = True + + 
+class NotificationList(BaseModel): + """Schema for paginated notification list.""" + items: List[Notification] + total: int + unread_count: int + + +class NotificationStats(BaseModel): + """Schema for notification statistics.""" + total: int + unread: int + by_type: dict[str, int] + + +class NotificationBulkAction(BaseModel): + """Schema for bulk notification actions.""" + notification_ids: List[str] + + +class NotificationPreferences(BaseModel): + """Schema for user notification preferences.""" + email_notifications: bool = True + push_notifications: bool = True + notification_types: dict[str, bool] = Field( + default_factory=lambda: { + "info": True, + "success": True, + "warning": True, + "error": True, + "system": True + } + ) diff --git a/backend/app/schemas/session.py b/backend/app/schemas/session.py new file mode 100644 index 0000000..1a7f06b --- /dev/null +++ b/backend/app/schemas/session.py @@ -0,0 +1,55 @@ +"""Pydantic schemas for User Session API requests/responses.""" + +from datetime import datetime +from typing import Optional, List +from pydantic import BaseModel, Field + + +class SessionBase(BaseModel): + """Base session schema.""" + device_name: Optional[str] = Field(None, max_length=200) + device_type: Optional[str] = Field(None, max_length=50) + browser: Optional[str] = Field(None, max_length=100) + os: Optional[str] = Field(None, max_length=100) + ip_address: Optional[str] = Field(None, max_length=45) + location: Optional[str] = Field(None, max_length=200) + + +class SessionCreate(SessionBase): + """Schema for creating a session.""" + user_id: str + token_hash: str + user_agent: Optional[str] = None + expires_at: Optional[datetime] = None + + +class Session(BaseModel): + """Schema for session response.""" + id: str + user_id: str + device_name: Optional[str] = None + device_type: Optional[str] = None + browser: Optional[str] = None + os: Optional[str] = None + ip_address: Optional[str] = None + location: Optional[str] = None + is_active: bool + 
is_current: bool + created_at: datetime + last_active_at: Optional[datetime] = None + expires_at: Optional[datetime] = None + + class Config: + from_attributes = True + + +class SessionList(BaseModel): + """Schema for session list.""" + items: List[Session] + total: int + active_count: int + + +class SessionRevokeRequest(BaseModel): + """Schema for revoking sessions.""" + session_ids: List[str] diff --git a/backend/app/schemas/webhook.py b/backend/app/schemas/webhook.py new file mode 100644 index 0000000..363f9f6 --- /dev/null +++ b/backend/app/schemas/webhook.py @@ -0,0 +1,98 @@ +"""Webhook schemas.""" + +from datetime import datetime +from typing import Optional, List +from pydantic import BaseModel, HttpUrl, Field + + +# Webhook Base +class WebhookBase(BaseModel): + """Base webhook schema.""" + name: str = Field(..., min_length=1, max_length=100) + url: str = Field(..., min_length=1, max_length=500) + events: List[str] = Field(default=["*"]) + is_active: bool = True + retry_count: int = Field(default=3, ge=0, le=10) + timeout_seconds: int = Field(default=30, ge=5, le=120) + + +class WebhookCreate(WebhookBase): + """Schema for creating a webhook.""" + pass + + +class WebhookUpdate(BaseModel): + """Schema for updating a webhook.""" + name: Optional[str] = Field(None, min_length=1, max_length=100) + url: Optional[str] = Field(None, min_length=1, max_length=500) + events: Optional[List[str]] = None + is_active: Optional[bool] = None + retry_count: Optional[int] = Field(None, ge=0, le=10) + timeout_seconds: Optional[int] = Field(None, ge=5, le=120) + + +class Webhook(WebhookBase): + """Webhook response schema.""" + id: str + secret: Optional[str] = None + created_by: Optional[str] = None + created_at: datetime + updated_at: datetime + last_triggered_at: Optional[datetime] = None + success_count: int + failure_count: int + + class Config: + from_attributes = True + + +class WebhookWithSecret(Webhook): + """Webhook response with secret (for creation).""" + secret: str 
+ + +# Webhook Delivery +class WebhookDeliveryBase(BaseModel): + """Base webhook delivery schema.""" + webhook_id: str + event_type: str + payload: str + + +class WebhookDelivery(WebhookDeliveryBase): + """Webhook delivery response schema.""" + id: str + status: str + status_code: Optional[int] = None + response_body: Optional[str] = None + error_message: Optional[str] = None + attempt_count: int + next_retry_at: Optional[datetime] = None + created_at: datetime + delivered_at: Optional[datetime] = None + + class Config: + from_attributes = True + + +# Event types +WEBHOOK_EVENTS = [ + "user.created", + "user.updated", + "user.deleted", + "user.login", + "user.logout", + "user.password_changed", + "user.2fa_enabled", + "user.2fa_disabled", + "settings.updated", + "api_key.created", + "api_key.revoked", + "*", # All events +] + + +class WebhookTest(BaseModel): + """Schema for testing a webhook.""" + event_type: str = "test.ping" + payload: Optional[dict] = None diff --git a/backend/migrate_db.py b/backend/migrate_db.py new file mode 100644 index 0000000..a8b1165 --- /dev/null +++ b/backend/migrate_db.py @@ -0,0 +1,261 @@ +#!/usr/bin/env python3 +""" +Database migration script - Adds missing columns for new features. +Run this script to update an existing database without losing data. 
+ +Usage: + python migrate_db.py [database_path] + +If no path provided, uses the default /config/config.db +""" + +import sqlite3 +import sys +from pathlib import Path + + +def get_existing_columns(cursor, table_name): + """Get list of existing column names for a table.""" + cursor.execute(f"PRAGMA table_info({table_name})") + return {row[1] for row in cursor.fetchall()} + + +def get_existing_tables(cursor): + """Get list of existing tables.""" + cursor.execute("SELECT name FROM sqlite_master WHERE type='table'") + return {row[0] for row in cursor.fetchall()} + + +def migrate(db_path): + """Run migrations.""" + print(f"Migrating database: {db_path}") + + conn = sqlite3.connect(db_path) + cursor = conn.cursor() + + existing_tables = get_existing_tables(cursor) + print(f"Existing tables: {existing_tables}") + + # ==================== + # USERS TABLE MIGRATIONS + # ==================== + if "users" in existing_tables: + existing_cols = get_existing_columns(cursor, "users") + print(f"Users columns: {existing_cols}") + + # Add 2FA columns + if "totp_secret" not in existing_cols: + print(" Adding totp_secret column...") + cursor.execute("ALTER TABLE users ADD COLUMN totp_secret VARCHAR(32)") + + if "totp_enabled" not in existing_cols: + print(" Adding totp_enabled column...") + cursor.execute("ALTER TABLE users ADD COLUMN totp_enabled BOOLEAN DEFAULT 0") + + if "totp_backup_codes" not in existing_cols: + print(" Adding totp_backup_codes column...") + cursor.execute("ALTER TABLE users ADD COLUMN totp_backup_codes TEXT") + + # ==================== + # CREATE NEW TABLES IF MISSING + # ==================== + + # Audit Logs + if "audit_logs" not in existing_tables: + print("Creating audit_logs table...") + cursor.execute(""" + CREATE TABLE audit_logs ( + id VARCHAR(36) PRIMARY KEY, + user_id VARCHAR(36), + username VARCHAR(100), + action VARCHAR(50) NOT NULL, + resource_type VARCHAR(50), + resource_id VARCHAR(255), + details TEXT, + ip_address VARCHAR(45), + user_agent 
VARCHAR(500), + status VARCHAR(20) DEFAULT 'success', + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE SET NULL + ) + """) + cursor.execute("CREATE INDEX ix_audit_logs_action ON audit_logs(action)") + cursor.execute("CREATE INDEX ix_audit_logs_resource_type ON audit_logs(resource_type)") + cursor.execute("CREATE INDEX ix_audit_logs_created_at ON audit_logs(created_at)") + + # API Keys + if "api_keys" not in existing_tables: + print("Creating api_keys table...") + cursor.execute(""" + CREATE TABLE api_keys ( + id VARCHAR(36) PRIMARY KEY, + user_id VARCHAR(36) NOT NULL, + name VARCHAR(100) NOT NULL, + key_hash VARCHAR(64) NOT NULL UNIQUE, + key_prefix VARCHAR(8) NOT NULL, + scopes TEXT, + is_active BOOLEAN DEFAULT 1, + expires_at DATETIME, + last_used_at DATETIME, + last_used_ip VARCHAR(45), + usage_count INTEGER DEFAULT 0, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + updated_at DATETIME DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE + ) + """) + cursor.execute("CREATE INDEX ix_api_keys_key_hash ON api_keys(key_hash)") + + # Notifications + if "notifications" not in existing_tables: + print("Creating notifications table...") + cursor.execute(""" + CREATE TABLE notifications ( + id VARCHAR(36) PRIMARY KEY, + user_id VARCHAR(36) NOT NULL, + title VARCHAR(200) NOT NULL, + message TEXT NOT NULL, + type VARCHAR(20) DEFAULT 'info', + link VARCHAR(500), + extra_data TEXT, + is_read BOOLEAN DEFAULT 0, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + read_at DATETIME, + FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE + ) + """) + cursor.execute("CREATE INDEX ix_notifications_user_id ON notifications(user_id)") + cursor.execute("CREATE INDEX ix_notifications_is_read ON notifications(is_read)") + + # User Sessions + if "user_sessions" not in existing_tables: + print("Creating user_sessions table...") + cursor.execute(""" + CREATE TABLE user_sessions ( + id 
VARCHAR(36) PRIMARY KEY, + user_id VARCHAR(36) NOT NULL, + token_hash VARCHAR(64) NOT NULL UNIQUE, + device_name VARCHAR(200), + device_type VARCHAR(50), + browser VARCHAR(100), + os VARCHAR(100), + user_agent VARCHAR(500), + ip_address VARCHAR(45), + location VARCHAR(200), + is_active BOOLEAN DEFAULT 1, + is_current BOOLEAN DEFAULT 0, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + last_active_at DATETIME DEFAULT CURRENT_TIMESTAMP, + expires_at DATETIME, + revoked_at DATETIME, + FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE + ) + """) + cursor.execute("CREATE INDEX ix_user_sessions_token_hash ON user_sessions(token_hash)") + cursor.execute("CREATE INDEX ix_user_sessions_user_id ON user_sessions(user_id)") + + # Webhooks + if "webhooks" not in existing_tables: + print("Creating webhooks table...") + cursor.execute(""" + CREATE TABLE webhooks ( + id VARCHAR(36) PRIMARY KEY, + name VARCHAR(100) NOT NULL, + url VARCHAR(500) NOT NULL, + secret VARCHAR(64), + events TEXT DEFAULT '["*"]', + is_active BOOLEAN DEFAULT 1, + retry_count INTEGER DEFAULT 3, + timeout_seconds INTEGER DEFAULT 30, + created_by VARCHAR(36), + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + updated_at DATETIME DEFAULT CURRENT_TIMESTAMP, + last_triggered_at DATETIME, + success_count INTEGER DEFAULT 0, + failure_count INTEGER DEFAULT 0, + FOREIGN KEY (created_by) REFERENCES users(id) ON DELETE SET NULL + ) + """) + + # Webhook Deliveries + if "webhook_deliveries" not in existing_tables: + print("Creating webhook_deliveries table...") + cursor.execute(""" + CREATE TABLE webhook_deliveries ( + id VARCHAR(36) PRIMARY KEY, + webhook_id VARCHAR(36) NOT NULL, + event_type VARCHAR(50) NOT NULL, + payload TEXT NOT NULL, + status VARCHAR(20) DEFAULT 'pending', + status_code INTEGER, + response_body TEXT, + error_message TEXT, + attempt_count INTEGER DEFAULT 0, + next_retry_at DATETIME, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + delivered_at DATETIME, + FOREIGN KEY (webhook_id) 
REFERENCES webhooks(id) ON DELETE CASCADE + ) + """) + + # Stored Files + if "stored_files" not in existing_tables: + print("Creating stored_files table...") + cursor.execute(""" + CREATE TABLE stored_files ( + id VARCHAR(36) PRIMARY KEY, + original_filename VARCHAR(255) NOT NULL, + content_type VARCHAR(100), + size_bytes BIGINT NOT NULL, + storage_path VARCHAR(500) NOT NULL, + storage_type VARCHAR(20) DEFAULT 'local', + description TEXT, + tags TEXT, + is_public BOOLEAN DEFAULT 0, + uploaded_by VARCHAR(36), + file_hash VARCHAR(64), + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + updated_at DATETIME DEFAULT CURRENT_TIMESTAMP, + is_deleted BOOLEAN DEFAULT 0, + deleted_at DATETIME, + FOREIGN KEY (uploaded_by) REFERENCES users(id) ON DELETE SET NULL + ) + """) + cursor.execute("CREATE INDEX ix_stored_files_file_hash ON stored_files(file_hash)") + + conn.commit() + conn.close() + print("Migration completed successfully!") + + +if __name__ == "__main__": + # Default path for Docker container + default_path = "/config/config.db" + + if len(sys.argv) > 1: + db_path = sys.argv[1] + else: + # Check common locations + paths_to_try = [ + Path(default_path), + Path("./config.db"), + Path("./data/config.db"), + Path("../config/config.db"), + ] + + db_path = None + for p in paths_to_try: + if p.exists(): + db_path = str(p) + break + + if not db_path: + print("Database not found. 
Please provide path as argument.") + print("Usage: python migrate_db.py /path/to/config.db") + sys.exit(1) + + if not Path(db_path).exists(): + print(f"Error: Database file not found: {db_path}") + sys.exit(1) + + migrate(db_path) diff --git a/backend/requirements.txt b/backend/requirements.txt index f4f8418..81c53ad 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -30,6 +30,14 @@ python-engineio==4.9.0 # Utilities python-dotenv==1.0.0 click==8.1.7 +psutil==5.9.8 + +# 2FA / TOTP +pyotp==2.9.0 +qrcode[pil]==7.4.2 + +# Rate limiting +slowapi==0.1.9 # CORS fastapi-cors==0.0.6 diff --git a/docker-compose.yml b/docker-compose.yml index 3f2d538..a1afa87 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -11,7 +11,7 @@ services: ports: - "5174:8000" healthcheck: - test: [ "CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8000/health')" ] + test: [ "CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8000/api/v1/health')" ] interval: 30s timeout: 10s start_period: 40s diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index 79c76a5..30e9d22 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -1,4 +1,4 @@ -import { BrowserRouter, Routes, Route, Navigate } from 'react-router-dom'; +import { BrowserRouter, Routes, Route, Navigate, useLocation } from 'react-router-dom'; import type { ReactElement } from 'react'; import { SiteConfigProvider } from './contexts/SiteConfigContext'; import { AuthProvider, useAuth } from './contexts/AuthContext'; @@ -7,17 +7,22 @@ import { ThemeProvider } from './contexts/ThemeContext'; import { SidebarProvider } from './contexts/SidebarContext'; import { ViewModeProvider } from './contexts/ViewModeContext'; import { ModulesProvider } from './contexts/ModulesContext'; +import { NotificationsProvider } from './contexts/NotificationsContext'; import MainLayout from './components/MainLayout'; import Login from './pages/Login'; 
import Dashboard from './pages/Dashboard'; import Feature1 from './pages/Feature1'; import Feature2 from './pages/Feature2'; import Feature3 from './pages/Feature3'; +import Notifications from './pages/Notifications'; +import APIKeys from './pages/APIKeys'; import AdminPanel from './pages/AdminPanel'; import Sources from './pages/admin/Sources'; import Features from './pages/admin/Features'; import Settings from './pages/Settings'; import ThemeSettings from './pages/admin/ThemeSettings'; +import Analytics from './pages/admin/Analytics'; +import AuditLogs from './pages/admin/AuditLogs'; import './App.css'; function PrivateRoute({ children }: { children: ReactElement }) { @@ -46,8 +51,13 @@ function AdminRoute({ children }: { children: ReactElement }) { function AppRoutes() { const { user, isLoading } = useAuth(); + const location = useLocation(); - if (isLoading) { + // Don't show loading screen on login page - it would unmount the Login component + // and lose the 2FA temp token state during the login flow + const isLoginPage = location.pathname === '/login'; + + if (isLoading && !isLoginPage) { return
Loading...
; } @@ -60,6 +70,8 @@ function AppRoutes() { } /> } /> } /> + } /> + } /> } /> } /> @@ -67,6 +79,8 @@ function AppRoutes() { } /> } /> } /> + } /> + } /> } /> @@ -84,9 +98,11 @@ function App() { - - - + + + + + diff --git a/frontend/src/api/client.ts b/frontend/src/api/client.ts index e637051..642c863 100644 --- a/frontend/src/api/client.ts +++ b/frontend/src/api/client.ts @@ -31,6 +31,11 @@ export const authAPI = { return response.data; }, + verify2fa: async (data: { temp_token: string; code: string }): Promise => { + const response = await api.post('/auth/verify-2fa', data); + return response.data; + }, + register: async (data: RegisterRequest): Promise => { const response = await api.post('/auth/register', data); return response.data; @@ -40,6 +45,38 @@ export const authAPI = { const response = await api.get('/auth/me'); return response.data; }, + + logout: async (): Promise => { + await api.post('/auth/logout'); + }, +}; + +// 2FA / TOTP endpoints +export const twoFactorAPI = { + getStatus: async (): Promise<{ enabled: boolean; has_backup_codes: boolean }> => { + const response = await api.get<{ enabled: boolean; has_backup_codes: boolean }>('/2fa/status'); + return response.data; + }, + + setup: async (): Promise<{ secret: string; uri: string; qr_code: string }> => { + const response = await api.post<{ secret: string; uri: string; qr_code: string }>('/2fa/setup'); + return response.data; + }, + + verify: async (code: string): Promise<{ message: string; backup_codes: string[] }> => { + const response = await api.post<{ message: string; backup_codes: string[] }>('/2fa/verify', { code }); + return response.data; + }, + + disable: async (data: { password: string; code: string }): Promise<{ message: string }> => { + const response = await api.post<{ message: string }>('/2fa/disable', data); + return response.data; + }, + + regenerateBackupCodes: async (code: string): Promise<{ backup_codes: string[] }> => { + const response = await api.post<{ backup_codes: string[] 
}>('/2fa/regenerate-backup-codes', { code }); + return response.data; + }, }; // Settings endpoints @@ -64,8 +101,8 @@ export const settingsAPI = { return response.data; }, - updateModules: async (data: Record): Promise> => { - const response = await api.put>('/settings/modules', data); + updateModules: async (data: Record): Promise> => { + const response = await api.put>('/settings/modules', data); return response.data; }, @@ -128,4 +165,173 @@ export const usersAPI = { }, }; +// API Keys endpoints +export interface ApiKeyItem { + id: string; + user_id: string; + name: string; + key_prefix: string; + scopes: string[] | null; + is_active: boolean; + last_used_at: string | null; + last_used_ip: string | null; + usage_count: number; + expires_at: string | null; + created_at: string; + updated_at: string; +} + +export const apiKeysAPI = { + create: async (data: { name: string; scopes?: string[]; expires_at?: string | null }): Promise => { + const response = await api.post('/api-keys', data); + return response.data; + }, + + list: async (): Promise<{ items: ApiKeyItem[]; total: number }> => { + const response = await api.get<{ items: ApiKeyItem[]; total: number }>('/api-keys'); + return response.data; + }, + + revoke: async (keyId: string): Promise => { + const response = await api.post(`/api-keys/${keyId}/revoke`); + return response.data; + }, + + delete: async (keyId: string): Promise => { + await api.delete(`/api-keys/${keyId}`); + }, +}; + +// Sessions endpoints +export interface UserSession { + id: string; + user_id: string; + device_name: string | null; + device_type: string | null; + browser: string | null; + os: string | null; + ip_address: string | null; + location: string | null; + is_active: boolean; + is_current: boolean; + created_at: string; + last_active_at: string; + expires_at: string | null; +} + +export const sessionsAPI = { + list: async (): Promise<{ items: UserSession[]; total: number; active_count: number }> => { + const response = await 
api.get<{ items: UserSession[]; total: number; active_count: number }>('/sessions'); + return response.data; + }, + + revoke: async (sessionId: string): Promise => { + const response = await api.post(`/sessions/${sessionId}/revoke`); + return response.data; + }, + + revokeAllOther: async (): Promise<{ revoked: number }> => { + const response = await api.post<{ revoked: number }>('/sessions/revoke-all'); + return response.data; + }, +}; + +// Notifications endpoints +export interface NotificationItem { + id: string; + user_id: string; + title: string; + message: string | null; + type: 'info' | 'success' | 'warning' | 'error' | 'system'; + link: string | null; + metadata: Record | null; + is_read: boolean; + created_at: string; + read_at: string | null; +} + +export const notificationsAPI = { + list: async (params?: { skip?: number; limit?: number; unread_only?: boolean }): Promise<{ items: NotificationItem[]; total: number; unread_count: number }> => { + const response = await api.get<{ items: NotificationItem[]; total: number; unread_count: number }>('/notifications', { params }); + return response.data; + }, + + unreadCount: async (): Promise<{ unread_count: number }> => { + const response = await api.get<{ unread_count: number }>('/notifications/unread-count'); + return response.data; + }, + + markAsRead: async (notificationId: string): Promise => { + const response = await api.post(`/notifications/${notificationId}/read`); + return response.data; + }, + + markAllAsRead: async (): Promise<{ marked_as_read: number }> => { + const response = await api.post<{ marked_as_read: number }>('/notifications/read-all'); + return response.data; + }, + + delete: async (notificationId: string): Promise => { + await api.delete(`/notifications/${notificationId}`); + }, + + deleteAllRead: async (): Promise<{ deleted: number }> => { + const response = await api.delete<{ deleted: number }>('/notifications/read/all'); + return response.data; + }, +}; + +// Analytics endpoints 
(admin) +export type AnalyticsOverview = { + users: { total: number; active: number; new_today: number; new_this_week: number; new_this_month: number }; + sessions: { active: number }; + api_keys: { total: number; active: number }; + security: { logins_24h: number; failed_logins_24h: number }; + notifications: { unread_total: number }; + generated_at: string; +}; + +export const analyticsAPI = { + overview: async (): Promise => { + const response = await api.get('/analytics/overview'); + return response.data; + }, + + userActivity: async (days: number = 7): Promise<{ daily_stats: { date: string; active_users: number; new_users: number }[] }> => { + const response = await api.get<{ daily_stats: { date: string; active_users: number; new_users: number }[] }>('/analytics/users/activity', { + params: { days }, + }); + return response.data; + }, + + actionsBreakdown: async (hours: number = 24): Promise<{ period_hours: number; actions: { action: string; count: number }[] }> => { + const response = await api.get<{ period_hours: number; actions: { action: string; count: number }[] }>('/analytics/actions/breakdown', { + params: { hours }, + }); + return response.data; + }, +}; + +// Audit logs endpoints (admin) +export type AuditLogItem = { + id: string; + user_id: string | null; + username: string | null; + action: string; + resource_type: string | null; + resource_id: string | null; + details: string | null; + ip_address: string | null; + user_agent: string | null; + status: string; + created_at: string; +}; + +export const auditAPI = { + list: async (params?: Record): Promise<{ items: AuditLogItem[]; total: number; page: number; page_size: number; total_pages: number }> => { + const response = await api.get<{ items: AuditLogItem[]; total: number; page: number; page_size: number; total_pages: number }>('/audit', { params }); + return response.data; + }, +}; + export default api; diff --git a/frontend/src/components/Sidebar.tsx b/frontend/src/components/Sidebar.tsx index 
4874d47..47e7b7d 100644 --- a/frontend/src/components/Sidebar.tsx +++ b/frontend/src/components/Sidebar.tsx @@ -7,6 +7,7 @@ import { useAuth } from '../contexts/AuthContext'; import { useModules } from '../contexts/ModulesContext'; import { useSiteConfig } from '../contexts/SiteConfigContext'; import { useTheme } from '../contexts/ThemeContext'; +import { useNotifications } from '../contexts/NotificationsContext'; import { appModules } from '../modules'; import UserMenu from './UserMenu'; import '../styles/Sidebar.css'; @@ -27,7 +28,8 @@ export default function Sidebar() { } = useSidebar(); const { viewMode, toggleViewMode, isUserModeEnabled } = useViewMode(); const { user } = useAuth(); - const { isModuleEnabled, isModuleEnabledForUser, moduleOrder, hasInitialized: modulesInitialized } = useModules(); + const { isModuleEnabled, isModuleEnabledForUser, moduleOrder, moduleStates, hasInitialized: modulesInitialized } = useModules(); + const { unreadCount } = useNotifications(); // When admin is in "user mode", show only user-permitted modules // Otherwise, show all globally enabled modules (admin view) @@ -38,6 +40,8 @@ export default function Sidebar() { .find((cat) => cat.id === 'main') ?.modules.filter((m) => { if (!m.enabled) return false; + // Dashboard is always shown + if (m.id === 'dashboard') return true; if (shouldUseUserPermissions) { return isModuleEnabledForUser(m.id, user?.permissions, user?.is_superuser || false); } @@ -45,7 +49,7 @@ export default function Sidebar() { }) || []); // Sort modules based on moduleOrder (dashboard always first, then ordered features) - const mainModules = [...mainModulesFiltered].sort((a, b) => { + const sortedModules = [...mainModulesFiltered].sort((a, b) => { // Dashboard always comes first if (a.id === 'dashboard') return -1; if (b.id === 'dashboard') return 1; @@ -63,6 +67,19 @@ export default function Sidebar() { return 0; }); + // Split modules by position (top = main nav, bottom = above footer) + const topModules = 
sortedModules.filter(m => { + if (m.id === 'dashboard') return true; // Dashboard always at top + const state = moduleStates[m.id as keyof typeof moduleStates]; + return !state || state.position === 'top'; + }); + + const bottomModules = sortedModules.filter(m => { + if (m.id === 'dashboard') return false; // Dashboard never at bottom + const state = moduleStates[m.id as keyof typeof moduleStates]; + return state && state.position === 'bottom'; + }); + const handleCollapseClick = () => { if (isMobileOpen) { closeMobileMenu(); @@ -222,7 +239,7 @@ export default function Sidebar() {
+ {bottomModules.map((module) => ( + `nav-item ${isActive ? 'active' : ''}`} + onClick={(e) => handleNavClick(e, module.path)} + onMouseEnter={(e) => handleItemMouseEnter(t.sidebar[module.id as keyof typeof t.sidebar], e)} + onMouseLeave={handleItemMouseLeave} + > + {module.icon} + {t.sidebar[module.id as keyof typeof t.sidebar]} + {module.id === 'notifications' && unreadCount > 0 && ( + + {unreadCount > 99 ? '99+' : unreadCount} + + )} + + ))} + {user?.is_superuser && isUserModeEnabled && ( +
+ vpn_key + {t.apiKeysPage.title} +
+
+ + +
+ {error &&
{error}
} + +
+

{t.apiKeysPage.createTitle}

+

{t.apiKeysPage.createDesc}

+ +
+ setName(e.target.value)} + placeholder={t.apiKeysPage.namePlaceholder} + disabled={busy} + /> + +
+ + {createdKey && ( +
+
+ {t.apiKeysPage.showOnce} + +
+ {createdKey} +
+ )} +
+ +
+

{t.apiKeysPage.listTitle}

+ + {loading ? ( +
{t.common.loading}
+ ) : items.length === 0 ? ( +
{t.apiKeysPage.empty}
+ ) : ( +
+ + + + + + + + + + + + + {items.map((k) => ( + + + + + + + + + ))} + +
{t.apiKeysPage.name}{t.apiKeysPage.prefix}{t.apiKeysPage.status}{t.apiKeysPage.lastUsed}{t.apiKeysPage.usage}{t.apiKeysPage.actions}
{k.name}{k.key_prefix} + + {k.is_active ? t.settings.enabled : t.settings.disabled} + + {k.last_used_at ? new Date(k.last_used_at).toLocaleString() : 'β€”'}{k.usage_count} + {k.is_active ? ( + + ) : ( + {t.apiKeysPage.revoked} + )} + +
+
+ )} +
+
+ + ); +} + diff --git a/frontend/src/pages/Login.tsx b/frontend/src/pages/Login.tsx index ddde431..1c3f0f8 100644 --- a/frontend/src/pages/Login.tsx +++ b/frontend/src/pages/Login.tsx @@ -12,14 +12,21 @@ export default function Login() { const [password, setPassword] = useState(''); const [email, setEmail] = useState(''); const [isRegister, setIsRegister] = useState(false); + const [tempToken, setTempToken] = useState(null); + const [twoFactorCode, setTwoFactorCode] = useState(''); const [error, setError] = useState(''); const [registrationEnabled, setRegistrationEnabled] = useState(null); - const { login, register } = useAuth(); + const { login, verify2fa, register } = useAuth(); const { t, language, setLanguage } = useTranslation(); const { theme, toggleTheme, showDarkModeLogin, showLanguageLogin, showDarkModeToggle, showLanguageToggle } = useTheme(); const { config } = useSiteConfig(); const navigate = useNavigate(); + // Debug: track tempToken state changes + useEffect(() => { + console.log('[Login] tempToken changed:', tempToken ? 
'SET' : 'NULL'); + }, [tempToken]); + // Check if registration is enabled useEffect(() => { let isMounted = true; @@ -56,24 +63,47 @@ export default function Login() { try { if (isRegister) { await register(username, email, password); + navigate('/dashboard'); + return; + } + + if (tempToken) { + console.log('[Login] Verifying 2FA with tempToken'); + await verify2fa(tempToken, twoFactorCode); } else { - await login(username, password); + console.log('[Login] Calling login()'); + const result = await login(username, password); + console.log('[Login] Login result:', result); + if (result.requires_2fa) { + console.log('[Login] 2FA required, temp_token:', result.temp_token); + if (!result.temp_token) { + throw new Error('missing-temp-token'); + } + setTempToken(result.temp_token); + setTwoFactorCode(''); + console.log('[Login] tempToken state set, returning'); + return; + } } navigate('/dashboard'); } catch (err: any) { + if (err?.message === 'missing-temp-token') { + setError(t.auth.authenticationFailed); + return; + } + const detail = err.response?.data?.detail; if (Array.isArray(detail)) { - // Handle Pydantic validation errors - const messages = detail.map((e: any) => { - const field = e.loc[e.loc.length - 1]; - return `${field}: ${e.msg}`; - }).join('\n'); + const messages = detail + .map((e: any) => { + const field = e.loc[e.loc.length - 1]; + return `${field}: ${e.msg}`; + }) + .join('\n'); setError(messages); } else if (typeof detail === 'string') { - // Handle standard HTTP exceptions setError(detail); } else { - // Fallback setError(t.auth.authenticationFailed); } } @@ -101,6 +131,7 @@ export default function Login() { onChange={(e) => setUsername(e.target.value)} required minLength={3} + disabled={tempToken !== null} /> @@ -126,22 +157,55 @@ export default function Login() { onChange={(e) => setPassword(e.target.value)} required minLength={8} + disabled={tempToken !== null} /> + {tempToken && ( +
+ + setTwoFactorCode(e.target.value)} + required + minLength={6} + maxLength={8} + autoComplete="one-time-code" + /> +

{t.auth.twoFactorPrompt}

+
+ )} + {error &&
{error}
}
- {registrationEnabled === true && ( + {tempToken && ( + + )} + + {!tempToken && registrationEnabled === true && ( +
+ notifications + {t.notificationsPage.title} +
+
+ + +
+
+
+ {t.notificationsPage.unreadOnly} + +
+ +
+ + +
+
+ + {error &&
{error}
} + + {loading ? ( +
{t.common.loading}
+ ) : items.length === 0 ? ( +
{t.notificationsPage.empty}
+ ) : ( +
+ {items.map((n) => ( +
+
+
+
+ {n.type} + {n.title} +
+
{new Date(n.created_at).toLocaleString()}
+
+ {n.message &&
{n.message}
} +
+
+ {!n.is_read && ( + + )} + +
+
+ ))} +
+ )} +
+ + ); +} diff --git a/frontend/src/pages/Settings.tsx b/frontend/src/pages/Settings.tsx index 9cd3f36..7edc54b 100644 --- a/frontend/src/pages/Settings.tsx +++ b/frontend/src/pages/Settings.tsx @@ -1,11 +1,154 @@ +import { useEffect, useState } from 'react'; import { useTranslation } from '../contexts/LanguageContext'; import { useSidebar } from '../contexts/SidebarContext'; +import { sessionsAPI, twoFactorAPI } from '../api/client'; +import type { UserSession } from '../api/client'; import '../styles/SettingsPage.css'; export default function Settings() { const { t, language, setLanguage } = useTranslation(); const { toggleMobileMenu } = useSidebar(); + const [twoFactorStatus, setTwoFactorStatus] = useState<{ enabled: boolean; has_backup_codes: boolean } | null>(null); + const [twoFactorLoading, setTwoFactorLoading] = useState(true); + const [twoFactorBusy, setTwoFactorBusy] = useState(false); + const [twoFactorError, setTwoFactorError] = useState(''); + + const [setupData, setSetupData] = useState<{ secret: string; uri: string; qr_code: string } | null>(null); + const [verifyCode, setVerifyCode] = useState(''); + const [backupCodes, setBackupCodes] = useState(null); + + const [regenerateCode, setRegenerateCode] = useState(''); + const [disablePassword, setDisablePassword] = useState(''); + const [disableCode, setDisableCode] = useState(''); + + const [sessions, setSessions] = useState([]); + const [sessionsLoading, setSessionsLoading] = useState(true); + const [sessionsBusy, setSessionsBusy] = useState(false); + const [sessionsError, setSessionsError] = useState(''); + + const loadTwoFactorStatus = async () => { + try { + const status = await twoFactorAPI.getStatus(); + setTwoFactorStatus(status); + } catch (err: any) { + setTwoFactorError(err?.response?.data?.detail || t.common.error); + } finally { + setTwoFactorLoading(false); + } + }; + + useEffect(() => { + loadTwoFactorStatus(); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + + const 
loadSessions = async () => { + setSessionsError(''); + setSessionsLoading(true); + try { + const data = await sessionsAPI.list(); + setSessions(data.items || []); + } catch (err: any) { + setSessionsError(err?.response?.data?.detail || t.common.error); + } finally { + setSessionsLoading(false); + } + }; + + useEffect(() => { + loadSessions(); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + + const revokeSession = async (sessionId: string) => { + setSessionsError(''); + setSessionsBusy(true); + try { + await sessionsAPI.revoke(sessionId); + await loadSessions(); + } catch (err: any) { + setSessionsError(err?.response?.data?.detail || t.common.error); + } finally { + setSessionsBusy(false); + } + }; + + const revokeAllOtherSessions = async () => { + setSessionsError(''); + setSessionsBusy(true); + try { + await sessionsAPI.revokeAllOther(); + await loadSessions(); + } catch (err: any) { + setSessionsError(err?.response?.data?.detail || t.common.error); + } finally { + setSessionsBusy(false); + } + }; + + const startTwoFactorSetup = async () => { + setTwoFactorError(''); + setBackupCodes(null); + setTwoFactorBusy(true); + try { + const data = await twoFactorAPI.setup(); + setSetupData(data); + } catch (err: any) { + setTwoFactorError(err?.response?.data?.detail || t.common.error); + } finally { + setTwoFactorBusy(false); + } + }; + + const verifyAndEnableTwoFactor = async () => { + setTwoFactorError(''); + setTwoFactorBusy(true); + try { + const result = await twoFactorAPI.verify(verifyCode); + setBackupCodes(result.backup_codes || []); + setSetupData(null); + setVerifyCode(''); + await loadTwoFactorStatus(); + } catch (err: any) { + setTwoFactorError(err?.response?.data?.detail || t.common.error); + } finally { + setTwoFactorBusy(false); + } + }; + + const regenerateTwoFactorBackupCodes = async () => { + setTwoFactorError(''); + setTwoFactorBusy(true); + try { + const result = await twoFactorAPI.regenerateBackupCodes(regenerateCode); + 
setBackupCodes(result.backup_codes || []); + setRegenerateCode(''); + await loadTwoFactorStatus(); + } catch (err: any) { + setTwoFactorError(err?.response?.data?.detail || t.common.error); + } finally { + setTwoFactorBusy(false); + } + }; + + const disableTwoFactor = async () => { + setTwoFactorError(''); + setTwoFactorBusy(true); + try { + await twoFactorAPI.disable({ password: disablePassword, code: disableCode }); + setDisablePassword(''); + setDisableCode(''); + setBackupCodes(null); + setSetupData(null); + await loadTwoFactorStatus(); + } catch (err: any) { + setTwoFactorError(err?.response?.data?.detail || t.common.error); + } finally { + setTwoFactorBusy(false); + } + }; + return (
@@ -21,32 +164,240 @@ export default function Settings() {
-
- -
-
-
-
- language -
-
-

{t.settings.language}

-

{t.settings.languageDesc}

-
+
+

{t.settings.preferences}

+
+
+
+ language
-
- +
+

{t.settings.language}

+

{t.settings.languageDesc}

+
+ +
+ +
+

{t.settings.security}

+ +
+
+
+ shield +
+
+

{t.settings.twoFactorTitle}

+

{t.settings.twoFactorDesc}

+
+
+
+ {twoFactorLoading ? ( + {t.common.loading} + ) : ( + + {twoFactorStatus?.enabled ? t.settings.enabled : t.settings.disabled} + + )} +
+
+ + {twoFactorError &&
{twoFactorError}
} + + {!twoFactorLoading && twoFactorStatus && ( +
+ {!twoFactorStatus.enabled && ( + <> + {!setupData ? ( + + ) : ( +
+
+ {t.settings.qrCodeAlt} +
+
+
+
{t.settings.secret}
+ {setupData.secret} +
+
+ + setVerifyCode(e.target.value)} + minLength={6} + maxLength={8} + autoComplete="one-time-code" + /> +
+
+ + +
+
+
+ )} + + )} + + {twoFactorStatus.enabled && ( +
+
+

{t.settings.backupCodes}

+

{t.settings.backupCodesDesc}

+
+ + setRegenerateCode(e.target.value)} + minLength={6} + maxLength={8} + autoComplete="one-time-code" + /> +
+ +
+ +
+

{t.settings.disable2fa}

+

{t.settings.disable2faDesc}

+
+ + setDisablePassword(e.target.value)} + autoComplete="current-password" + /> +
+
+ + setDisableCode(e.target.value)} + minLength={6} + maxLength={8} + autoComplete="one-time-code" + /> +
+ +
+
+ )} + + {backupCodes && backupCodes.length > 0 && ( +
+

{t.settings.backupCodes}

+

{t.settings.backupCodesSaveHint}

+
+ {backupCodes.map((code) => ( + {code} + ))} +
+
+ )} +
+ )} +
+ +
+

{t.settings.sessionsTitle}

+

{t.settings.sessionsDesc}

+ + {sessionsError &&
{sessionsError}
} + +
+ +
+ + {sessionsLoading ? ( +
{t.common.loading}
+ ) : sessions.length === 0 ? ( +
{t.settings.sessionsEmpty}
+ ) : ( +
+ {sessions.map((s) => ( +
+
+
+ {s.device_name || t.settings.unknownDevice} + {s.is_current && {t.settings.currentSession}} + {!s.is_active && {t.settings.inactiveSession}} +
+
+ {s.browser || t.settings.unknownBrowser} β€’ {s.os || t.settings.unknownOs} + {s.ip_address && β€’ {s.ip_address}} +
+
+ {t.settings.lastActive}: {new Date(s.last_active_at).toLocaleString()} +
+
+
+ {!s.is_current && s.is_active && ( + + )} +
+
+ ))} +
+ )} +
); diff --git a/frontend/src/pages/admin/Analytics.tsx b/frontend/src/pages/admin/Analytics.tsx new file mode 100644 index 0000000..5ed8d54 --- /dev/null +++ b/frontend/src/pages/admin/Analytics.tsx @@ -0,0 +1,155 @@ +import { useEffect, useMemo, useState } from 'react'; +import { useTranslation } from '../../contexts/LanguageContext'; +import { useSidebar } from '../../contexts/SidebarContext'; +import { analyticsAPI } from '../../api/client'; +import type { AnalyticsOverview } from '../../api/client'; +import '../../styles/AdminAnalytics.css'; + +export default function Analytics() { + const { t } = useTranslation(); + const { toggleMobileMenu } = useSidebar(); + + const [overview, setOverview] = useState(null); + const [dailyStats, setDailyStats] = useState<{ date: string; active_users: number; new_users: number }[]>([]); + const [actions, setActions] = useState<{ action: string; count: number }[]>([]); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(''); + + useEffect(() => { + const load = async () => { + setLoading(true); + setError(''); + try { + const [o, usersActivity, breakdown] = await Promise.all([ + analyticsAPI.overview(), + analyticsAPI.userActivity(7), + analyticsAPI.actionsBreakdown(24), + ]); + setOverview(o); + setDailyStats(usersActivity.daily_stats || []); + setActions(breakdown.actions || []); + } catch (err: any) { + setError(err?.response?.data?.detail || t.common.error); + } finally { + setLoading(false); + } + }; + + load(); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + + const maxActiveUsers = useMemo(() => Math.max(1, ...dailyStats.map((d) => d.active_users)), [dailyStats]); + const maxNewUsers = useMemo(() => Math.max(1, ...dailyStats.map((d) => d.new_users)), [dailyStats]); + const maxActionCount = useMemo(() => Math.max(1, ...actions.map((a) => a.count)), [actions]); + + return ( +
+
+
+ +
+ analytics + {t.analyticsPage.title} +
+
+
+ +
+ {error &&
{error}
} + + {loading || !overview ? ( +
{t.common.loading}
+ ) : ( + <> +
+
+
{t.analyticsPage.usersTotal}
+
{overview.users.total}
+
+ {t.analyticsPage.usersActive}: {overview.users.active} +
+
+
+
{t.analyticsPage.sessionsActive}
+
{overview.sessions.active}
+
+
+
{t.analyticsPage.logins24h}
+
{overview.security.logins_24h}
+
+ {t.analyticsPage.failedLogins24h}: {overview.security.failed_logins_24h} +
+
+
+
{t.analyticsPage.notificationsUnread}
+
{overview.notifications.unread_total}
+
+
+ +
+
+

{t.analyticsPage.userActivity7d}

+
+ {dailyStats.map((d) => ( +
+
{d.date}
+
+
+
+
+
+ {d.active_users} + {d.new_users} +
+
+ ))} +
+
+ {t.analyticsPage.usersActive} + {t.analyticsPage.usersNew} +
+
+ +
+

{t.analyticsPage.actions24h}

+
+ {actions.slice(0, 12).map((a) => ( +
+
{a.action}
+
+
+
+
+ {a.count} +
+
+ ))} +
+
+
+ +
+ {t.analyticsPage.generatedAt}: {new Date(overview.generated_at).toLocaleString()} +
+ + )} +
+
+ ); +} + diff --git a/frontend/src/pages/admin/AuditLogs.tsx b/frontend/src/pages/admin/AuditLogs.tsx new file mode 100644 index 0000000..b90cf12 --- /dev/null +++ b/frontend/src/pages/admin/AuditLogs.tsx @@ -0,0 +1,167 @@ +import { useEffect, useMemo, useState } from 'react'; +import { useTranslation } from '../../contexts/LanguageContext'; +import { useSidebar } from '../../contexts/SidebarContext'; +import { auditAPI } from '../../api/client'; +import type { AuditLogItem } from '../../api/client'; +import '../../styles/AdminAudit.css'; + +export default function AuditLogs() { + const { t } = useTranslation(); + const { toggleMobileMenu } = useSidebar(); + + const [items, setItems] = useState([]); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(''); + + const [page, setPage] = useState(1); + const pageSize = 50; + + const [username, setUsername] = useState(''); + const [action, setAction] = useState(''); + const [resourceType, setResourceType] = useState(''); + const [status, setStatus] = useState(''); + + const params = useMemo(() => { + const p: Record = { page, page_size: pageSize }; + if (username.trim()) p.username = username.trim(); + if (action.trim()) p.action = action.trim(); + if (resourceType.trim()) p.resource_type = resourceType.trim(); + if (status.trim()) p.status = status.trim(); + return p; + }, [page, pageSize, username, action, resourceType, status]); + + const load = async () => { + setLoading(true); + setError(''); + try { + const data = await auditAPI.list(params); + setItems(data.items || []); + } catch (err: any) { + setError(err?.response?.data?.detail || t.common.error); + } finally { + setLoading(false); + } + }; + + useEffect(() => { + load(); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [params]); + + const resetFilters = () => { + setUsername(''); + setAction(''); + setResourceType(''); + setStatus(''); + setPage(1); + }; + + return ( +
+
+
+ +
+ history + {t.auditPage.title} +
+
+
+ +
+
+ { setUsername(e.target.value); setPage(1); }} + /> + { setAction(e.target.value); setPage(1); }} + /> + { setResourceType(e.target.value); setPage(1); }} + /> + + +
+ + {error &&
{error}
} + + {loading ? ( +
{t.common.loading}
+ ) : items.length === 0 ? ( +
{t.auditPage.empty}
+ ) : ( +
+ + + + + + + + + + + + + {items.map((log) => ( + + + + + + + + + ))} + +
{t.auditPage.time}{t.auditPage.user}{t.auditPage.action}{t.auditPage.resource}{t.auditPage.status}{t.auditPage.ip}
{new Date(log.created_at).toLocaleString()}{log.username || 'β€”'}{log.action}{log.resource_type || 'β€”'}{log.resource_id ? `:${log.resource_id}` : ''} + {log.status} + {log.ip_address || 'β€”'}
+
+ )} + +
+ + + {t.auditPage.page} {page} + + +
+
+
+ ); +} + diff --git a/frontend/src/pages/admin/Features.tsx b/frontend/src/pages/admin/Features.tsx index f0fc0ec..6e193c4 100644 --- a/frontend/src/pages/admin/Features.tsx +++ b/frontend/src/pages/admin/Features.tsx @@ -7,13 +7,13 @@ import type { ModuleId } from '../../contexts/ModulesContext'; import Feature1Tab from '../../components/admin/Feature1Tab'; import '../../styles/AdminPanel.css'; -type TabId = 'config' | 'feature1' | 'feature2' | 'feature3'; +type TabId = 'config' | 'feature1' | 'feature2' | 'feature3' | 'search' | 'notifications'; export default function Features() { const { user: currentUser } = useAuth(); const { t } = useTranslation(); const { toggleMobileMenu } = useSidebar(); - const { moduleStates, moduleOrder, setModuleEnabled, setModuleOrder, saveModulesToBackend, saveModuleOrder, hasInitialized, isLoading } = useModules(); + const { moduleStates, moduleOrder, setModuleEnabled, setModulePosition, setModuleOrder, saveModulesToBackend, saveModuleOrder, hasInitialized, isLoading } = useModules(); const [activeTab, setActiveTab] = useState('config'); const hasUserMadeChanges = useRef(false); const saveRef = useRef(saveModulesToBackend); @@ -63,47 +63,44 @@ export default function Features() { }; }, []); - const getModuleDescription = (moduleId: string): string => { - const key = `${moduleId}Desc` as keyof typeof t.admin; - return t.admin[key] || t.admin.moduleDefaultDesc; - }; - const renderModuleToggle = (moduleId: ModuleId) => { const state = moduleStates[moduleId]; const adminEnabled = state?.admin ?? true; const userEnabled = state?.user ?? true; return ( -
-
-

{getModuleDescription(moduleId)}

-
-
-
- {adminEnabled ? t.admin.active : t.admin.inactive} +
+
+
+

{t.featuresPage?.visibility || 'VisibilitΓ '}

-
- {t.admin.adminRole} - -
-
- {t.admin.userRole} - +
+
+ {adminEnabled ? t.admin.active : t.admin.inactive} +
+
+ {t.admin.adminRole} + +
+
+ {t.admin.userRole} + +
@@ -131,17 +128,35 @@ export default function Features() { e.dataTransfer.dropEffect = 'move'; }; - const handleDrop = (e: React.DragEvent, targetModuleId: string) => { + const handleDrop = (e: React.DragEvent, targetModuleId: string, targetSection: 'top' | 'bottom') => { e.preventDefault(); - if (!draggedItem || draggedItem === targetModuleId) return; + e.stopPropagation(); + if (!draggedItem) return; + const draggedPosition = moduleStates[draggedItem as ModuleId]?.position || 'top'; + + // If dropping on same item, just change section if different + if (draggedItem === targetModuleId) { + if (draggedPosition !== targetSection) { + hasUserMadeChanges.current = true; + setModulePosition(draggedItem as ModuleId, targetSection); + } + return; + } + + // Change position if moving to different section + if (draggedPosition !== targetSection) { + hasUserMadeChanges.current = true; + setModulePosition(draggedItem as ModuleId, targetSection); + } + + // Reorder within the list const newOrder = [...localOrder]; const draggedIndex = newOrder.indexOf(draggedItem); const targetIndex = newOrder.indexOf(targetModuleId); if (draggedIndex === -1 || targetIndex === -1) return; - // Remove dragged item and insert at target position newOrder.splice(draggedIndex, 1); newOrder.splice(targetIndex, 0, draggedItem); @@ -149,6 +164,19 @@ export default function Features() { setHasOrderChanges(true); }; + const handleSectionDrop = (e: React.DragEvent, section: 'top' | 'bottom') => { + e.preventDefault(); + if (!draggedItem) return; + + const draggedPosition = moduleStates[draggedItem as ModuleId]?.position || 'top'; + + // Change position if moving to different section + if (draggedPosition !== section) { + hasUserMadeChanges.current = true; + setModulePosition(draggedItem as ModuleId, section); + } + }; + const handleApplyOrder = async () => { try { setModuleOrder(localOrder); @@ -164,15 +192,30 @@ export default function Features() { return module || { id: moduleId, icon: 'extension', 
defaultEnabled: true }; }; + // Split modules by position for the config tab + const topOrderModules = localOrder.filter(id => { + const state = moduleStates[id as ModuleId]; + return !state || state.position === 'top'; + }); + + const bottomOrderModules = localOrder.filter(id => { + const state = moduleStates[id as ModuleId]; + return state && state.position === 'bottom'; + }); + const renderConfigTab = () => { return (
-

{t.featuresPage?.orderSection || 'Ordine nella Sidebar'}

+

{t.featuresPage?.topSection || 'Sezione Principale'}

-
- {localOrder.map((moduleId, index) => { +
handleSectionDrop(e, 'top')} + > + {topOrderModules.map((moduleId) => { const moduleInfo = getModuleInfo(moduleId); const moduleName = t.sidebar[moduleId as keyof typeof t.sidebar] || moduleId; return ( @@ -183,34 +226,76 @@ export default function Features() { onDragStart={(e) => handleDragStart(e, moduleId)} onDragEnd={handleDragEnd} onDragOver={handleDragOver} - onDrop={(e) => handleDrop(e, moduleId)} + onDrop={(e) => handleDrop(e, moduleId, 'top')} >
- {index + 1} + {moduleInfo.icon}
{moduleName} {t.featuresPage?.orderDesc || 'Trascina per riordinare'}
-
- {moduleInfo.icon} -
drag_indicator
); })} + {topOrderModules.length === 0 && ( +
{t.featuresPage?.noModulesTop || 'Nessun modulo in questa sezione'}
+ )}
- {hasOrderChanges && ( -
- -
- )}
+ +
+
+

{t.featuresPage?.bottomSection || 'Sezione Inferiore'}

+
+
handleSectionDrop(e, 'bottom')} + > + {bottomOrderModules.map((moduleId) => { + const moduleInfo = getModuleInfo(moduleId); + const moduleName = t.sidebar[moduleId as keyof typeof t.sidebar] || moduleId; + return ( +
handleDragStart(e, moduleId)} + onDragEnd={handleDragEnd} + onDragOver={handleDragOver} + onDrop={(e) => handleDrop(e, moduleId, 'bottom')} + > +
+ {moduleInfo.icon} +
+
+ {moduleName} + {t.featuresPage?.orderDesc || 'Trascina per riordinare'} +
+
+ drag_indicator +
+
+ ); + })} + {bottomOrderModules.length === 0 && ( +
{t.featuresPage?.noModulesBottom || 'Nessun modulo in questa sezione'}
+ )} +
+
+ + {hasOrderChanges && ( +
+ +
+ )}
); }; @@ -255,6 +340,32 @@ export default function Features() {
); + case 'search': + return ( + <> + {renderModuleToggle('search')} +
+
+ search +
+

{t.sidebar.search}

+

{t.features.comingSoon}

+
+ + ); + case 'notifications': + return ( + <> + {renderModuleToggle('notifications')} +
+
+ notifications +
+

{t.sidebar.notifications}

+

{t.features.comingSoon}

+
+ + ); default: return null; } @@ -279,16 +390,15 @@ export default function Features() { tune {t.featuresPage?.configTab || 'Configurazione'} - {(localOrder.length > 0 ? localOrder : ['feature1', 'feature2', 'feature3']).map((moduleId) => { - const moduleInfo = getModuleInfo(moduleId); - const moduleName = t.sidebar[moduleId as keyof typeof t.sidebar] || moduleId; + {TOGGLEABLE_MODULES.map((module) => { + const moduleName = t.sidebar[module.id as keyof typeof t.sidebar] || module.id; return ( ); diff --git a/frontend/src/styles/APIKeys.css b/frontend/src/styles/APIKeys.css new file mode 100644 index 0000000..da38654 --- /dev/null +++ b/frontend/src/styles/APIKeys.css @@ -0,0 +1,154 @@ +.api-keys-root .page-content { + max-width: var(--container-lg); +} + +/* Section Layout - matches theme-section spacing */ +.api-keys-section { + margin-bottom: 3rem; +} + +.api-keys-section:last-child { + margin-bottom: 0; +} + +.api-keys-desc { + margin: 0.25rem 0 1rem; + color: var(--color-text-secondary); + font-size: 0.9rem; + line-height: 1.5; +} + +.api-keys-create-row { + display: flex; + gap: 0.75rem; + align-items: center; + flex-wrap: wrap; +} + +.api-keys-input { + flex: 1; + min-width: 240px; + height: var(--height-input); + padding: 0 0.875rem; + border: 1px solid var(--color-card-outline); + border-radius: var(--radius-md); + background: var(--color-bg-elevated); + color: var(--color-text-primary); + font-size: var(--input-font-size); + transition: border-color var(--transition-base), box-shadow var(--transition-base), background-color var(--transition-base); +} + +.api-keys-input:focus { + outline: none; + border-color: rgba(var(--color-accent-rgb), 0.45); + box-shadow: var(--shadow-ring); +} + +.api-keys-created { + margin-top: 1rem; + padding: 0.9rem; + border: 1px solid rgba(var(--color-accent-rgb), 0.25); + border-radius: var(--radius-lg); + background: rgba(var(--color-accent-rgb), 0.06); +} + +.api-keys-created-header { + display: flex; + align-items: center; 
+ justify-content: space-between; + gap: 1rem; + margin-bottom: 0.75rem; +} + +.api-keys-created-key { + display: block; + padding: 0.75rem; + border: 1px solid var(--color-card-outline); + border-radius: var(--radius-md); + background: var(--color-bg-elevated); + overflow-x: auto; + font-family: var(--font-mono); + font-size: 0.9rem; + line-height: 1.4; +} + +.api-keys-empty { + padding: 1.25rem; + border: 1px dashed var(--color-card-outline); + border-radius: var(--radius-lg); + color: var(--color-text-secondary); + text-align: center; + font-size: 0.9rem; + margin-top: 1rem; +} + +.api-keys-table-card { + margin-top: 1rem; + background: var(--color-bg-card); + border: 1px solid var(--color-card-outline); + border-radius: var(--radius-lg); + box-shadow: var(--shadow-sm); + overflow: hidden; +} + +@supports (color: color-mix(in srgb, black, transparent)) { + .api-keys-table-card { + background: color-mix(in srgb, var(--color-bg-card) 88%, transparent); + backdrop-filter: blur(14px) saturate(1.1); + } +} + +.api-keys-table { + width: 100%; + border-collapse: collapse; +} + +.api-keys-table th, +.api-keys-table td { + padding: 0.75rem 1rem; + border-bottom: 1px solid var(--color-card-outline); + text-align: left; + vertical-align: middle; + font-size: 0.9rem; +} + +.api-keys-table th { + color: var(--color-text-secondary); + font-size: 0.82rem; + font-weight: 700; + letter-spacing: 0.02em; + text-transform: uppercase; + background: var(--color-bg-elevated); +} + +.api-keys-table tbody tr:hover { + background: rgba(var(--color-accent-rgb), 0.05); +} + +.api-keys-table tr:last-child td { + border-bottom: none; +} + +.api-keys-actions { + display: inline-flex; + gap: 0.75rem; + align-items: center; +} + +.api-keys-muted { + color: var(--color-text-secondary); + font-size: 0.85rem; +} + +/* ========== DARK THEME + AUTO ACCENT OVERRIDES ========== */ + +/* Input focus */ +[data-theme='dark'][data-accent='auto'] .api-keys-input:focus { + border-color: rgba(229, 231, 235, 
0.45); +} + +/* Created key box */ +[data-theme='dark'][data-accent='auto'] .api-keys-created { + border-color: rgba(229, 231, 235, 0.25); + background: rgba(229, 231, 235, 0.06); +} diff --git a/frontend/src/styles/AdminAnalytics.css b/frontend/src/styles/AdminAnalytics.css new file mode 100644 index 0000000..bdba368 --- /dev/null +++ b/frontend/src/styles/AdminAnalytics.css @@ -0,0 +1,196 @@ +.admin-analytics-root .page-content { + max-width: 1100px; +} + +.analytics-cards { + display: grid; + grid-template-columns: repeat(4, minmax(0, 1fr)); + gap: var(--section-gap-sm); + margin-bottom: var(--section-gap); +} + +.analytics-card { + border: 1px solid var(--color-card-outline); + border-radius: var(--radius-lg); + padding: var(--space-5); + background: var(--color-bg-card); + box-shadow: var(--shadow-sm); + transition: box-shadow var(--transition-base), border-color var(--transition-base); +} + +@supports (color: color-mix(in srgb, black, transparent)) { + .analytics-card { + background: color-mix(in srgb, var(--color-bg-card) 88%, transparent); + backdrop-filter: blur(14px) saturate(1.1); + } +} + +.analytics-card:hover { + box-shadow: var(--shadow-md); + border-color: rgba(var(--color-accent-rgb), 0.22); +} + +.analytics-card-title { + color: var(--color-text-secondary); + font-size: var(--badge-font-size); + font-weight: 600; + text-transform: uppercase; + letter-spacing: 0.05em; +} + +.analytics-card-value { + font-size: var(--text-2xl); + font-weight: var(--weight-bold); + color: var(--color-text-primary); + margin-top: var(--space-1); + font-variant-numeric: tabular-nums; + line-height: var(--leading-tight); +} + +.analytics-card-sub { + margin-top: var(--space-2); + color: var(--color-text-secondary); + font-size: var(--text-base); +} + +.analytics-grid { + display: grid; + grid-template-columns: repeat(2, minmax(0, 1fr)); + gap: var(--section-gap-sm); +} + +.analytics-panel { + border: 1px solid var(--color-card-outline); + border-radius: 
var(--radius-lg); + padding: var(--space-5); + background: var(--color-bg-card); + box-shadow: var(--shadow-sm); + transition: box-shadow var(--transition-base), border-color var(--transition-base); +} + +@supports (color: color-mix(in srgb, black, transparent)) { + .analytics-panel { + background: color-mix(in srgb, var(--color-bg-card) 88%, transparent); + backdrop-filter: blur(14px) saturate(1.1); + } +} + +.analytics-panel:hover { + box-shadow: var(--shadow-md); + border-color: rgba(var(--color-accent-rgb), 0.18); +} + +.analytics-panel .section-title { + margin: 0 0 var(--space-3) 0; +} + +.mini-chart { + display: flex; + flex-direction: column; + gap: 0.5rem; +} + +.mini-chart-row { + display: grid; + grid-template-columns: 95px 1fr 64px; + gap: 0.75rem; + align-items: center; +} + +.mini-chart-label { + color: var(--color-text-secondary); + font-size: 0.82rem; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +} + +.mini-chart-bars { + display: flex; + flex-direction: column; + gap: 4px; +} + +.mini-bar { + height: 10px; + border-radius: 999px; +} + +.mini-bar.bar-accent { + background: rgba(var(--color-accent-rgb), 0.75); +} + +.mini-bar.bar-muted { + background: rgba(156, 163, 175, 0.6); +} + +.mini-chart-values { + display: flex; + justify-content: flex-end; + gap: 0.5rem; + color: var(--color-text-secondary); + font-size: 0.82rem; + font-variant-numeric: tabular-nums; +} + +.mini-chart-legend { + display: flex; + gap: 1rem; + margin-top: 0.75rem; + color: var(--color-text-secondary); + font-size: 0.85rem; +} + +.legend-item { + display: inline-flex; + align-items: center; + gap: 0.5rem; +} + +.legend-dot { + width: 10px; + height: 10px; + border-radius: 999px; +} + +.legend-dot.accent { + background: rgba(var(--color-accent-rgb), 0.75); +} + +.legend-dot.muted { + background: rgba(156, 163, 175, 0.6); +} + +.analytics-footnote { + margin-top: 1rem; + color: var(--color-text-secondary); + font-size: 0.85rem; +} + +@media (max-width: 
1000px) { + .analytics-cards { + grid-template-columns: repeat(2, minmax(0, 1fr)); + } + + .analytics-grid { + grid-template-columns: 1fr; + } +} + +@media (max-width: 520px) { + .mini-chart-row { + grid-template-columns: 80px 1fr 56px; + } +} + +/* ========== DARK THEME + AUTO ACCENT OVERRIDES ========== */ + +/* Mini bar accent color */ +[data-theme='dark'][data-accent='auto'] .mini-bar.bar-accent { + background: rgba(229, 231, 235, 0.75); +} + +/* Legend dot accent */ +[data-theme='dark'][data-accent='auto'] .legend-dot.accent { + background: rgba(229, 231, 235, 0.75); +} diff --git a/frontend/src/styles/AdminAudit.css b/frontend/src/styles/AdminAudit.css new file mode 100644 index 0000000..1d4609b --- /dev/null +++ b/frontend/src/styles/AdminAudit.css @@ -0,0 +1,149 @@ +.admin-audit-root .page-content { + max-width: 1200px; +} + +.audit-filters { + display: flex; + gap: 0.75rem; + flex-wrap: wrap; + align-items: center; + margin-bottom: var(--section-gap); +} + +.audit-reset-btn { + margin-left: auto; + height: var(--height-input); + padding: 0 1rem; + background: var(--color-bg-card); + color: var(--color-text-primary); + border: 1px solid var(--color-border); + border-radius: var(--radius-md); + font-size: 0.85rem; + font-weight: 500; + cursor: pointer; + transition: background-color var(--transition-base), border-color var(--transition-base); +} + +.audit-reset-btn:hover:not(:disabled) { + background: var(--color-bg-hover); + border-color: var(--color-text-secondary); +} + +.audit-reset-btn:disabled { + opacity: 0.5; + cursor: not-allowed; +} + +.audit-input { + height: var(--height-input); + padding: 0 0.875rem; + border: 1px solid var(--color-card-outline); + border-radius: var(--radius-md); + background: var(--color-bg-elevated); + color: var(--color-text-primary); + font-size: 0.85rem; + min-width: 160px; + transition: border-color var(--transition-base), box-shadow var(--transition-base), background-color var(--transition-base); +} + +.audit-input:focus 
{ + outline: none; + border-color: rgba(var(--color-accent-rgb), 0.45); + box-shadow: var(--shadow-ring); +} + +.audit-table-card { + background: var(--color-bg-card); + border: 1px solid var(--color-card-outline); + border-radius: var(--radius-lg); + box-shadow: var(--shadow-sm); + overflow: hidden; +} + +@supports (color: color-mix(in srgb, black, transparent)) { + .audit-table-card { + background: color-mix(in srgb, var(--color-bg-card) 88%, transparent); + backdrop-filter: blur(14px) saturate(1.1); + } +} + +.audit-table { + width: 100%; + border-collapse: collapse; +} + +.audit-table th, +.audit-table td { + padding: 0.75rem 1rem; + border-bottom: 1px solid var(--color-card-outline); + text-align: left; + vertical-align: middle; + font-size: 0.9rem; +} + +.audit-table th { + color: var(--color-text-secondary); + font-size: 0.82rem; + font-weight: 700; + letter-spacing: 0.02em; + text-transform: uppercase; + background: var(--color-bg-elevated); +} + +.audit-table tbody tr:hover { + background: rgba(var(--color-accent-rgb), 0.05); +} + +.audit-table tr:last-child td { + border-bottom: none; +} + +.audit-table .mono { + font-size: 0.85rem; + font-variant-numeric: tabular-nums; + color: var(--color-text-secondary); +} + +.audit-empty { + padding: 2rem 1.25rem; + border: 1px dashed var(--color-card-outline); + border-radius: var(--radius-lg); + color: var(--color-text-secondary); + text-align: center; + font-size: 0.9rem; +} + +.audit-pagination { + display: flex; + align-items: center; + justify-content: center; + gap: 1.25rem; + margin-top: 1.25rem; + padding-top: 1rem; + border-top: 1px solid var(--color-card-outline); +} + +.audit-page-indicator { + color: var(--color-text-secondary); + font-size: 0.85rem; + font-weight: 500; +} + +/* ========== DARK THEME OVERRIDES ========== */ + +/* Reset button - light in dark mode */ +[data-theme='dark'] .audit-reset-btn { + background: #e2e8f0; + color: #1e293b; + border-color: #cbd5e1; +} + +[data-theme='dark'] 
.audit-reset-btn:hover:not(:disabled) { + background: #f1f5f9; + border-color: #94a3b8; +} + +/* Input focus - auto accent */ +[data-theme='dark'][data-accent='auto'] .audit-input:focus { + border-color: rgba(229, 231, 235, 0.45); +} diff --git a/frontend/src/styles/AdminPanel.css b/frontend/src/styles/AdminPanel.css index bbba2b5..18bd1cf 100644 --- a/frontend/src/styles/AdminPanel.css +++ b/frontend/src/styles/AdminPanel.css @@ -52,6 +52,18 @@ border: 1px solid rgba(var(--color-accent-rgb), 0.2); } +.badge-error { + background: rgba(239, 68, 68, 0.1); + color: #dc2626; + border: 1px solid rgba(239, 68, 68, 0.2); +} + +.badge-warning { + background: rgba(245, 158, 11, 0.1); + color: #d97706; + border: 1px solid rgba(245, 158, 11, 0.2); +} + /* Toolbar - single row with search left, badges+button right */ .admin-panel-root .users-toolbar, .users-root .users-toolbar { @@ -101,6 +113,123 @@ font-size: var(--icon-md); } +/* Link-style Button */ +.btn-link { + background: none; + border: none; + color: var(--color-accent); + cursor: pointer; + text-decoration: none; + font-size: 0.9rem; + font-weight: 600; + padding: 0.25rem 0.5rem; + border-radius: var(--radius-sm); + transition: all 0.2s ease; +} + +.btn-link:hover { + color: var(--color-accent-hover); + background: rgba(var(--color-accent-rgb), 0.08); +} + +.btn-link.danger { + color: var(--color-error); +} + +.btn-link.danger:hover { + color: #b91c1c; + background: rgba(239, 68, 68, 0.08); +} + +.btn-link:disabled { + opacity: 0.5; + cursor: not-allowed; +} + +/* Danger Button */ +.btn-danger { + display: inline-flex; + align-items: center; + gap: 0.5rem; + padding: var(--btn-padding-md); + background: linear-gradient(135deg, #ef4444 0%, #dc2626 100%); + color: white; + border: none; + border-radius: var(--radius-md); + font-weight: 600; + font-size: var(--btn-font-size); + cursor: pointer; + transition: transform var(--transition-base), box-shadow var(--transition-base), filter var(--transition-base); + 
box-shadow: var(--shadow-md); +} + +.btn-danger:hover { + filter: brightness(1.05); + transform: translateY(-1px); + box-shadow: var(--shadow-lg); +} + +.btn-danger:disabled { + opacity: 0.6; + cursor: not-allowed; + transform: none; +} + +/* Ghost Button */ +.btn-ghost { + display: inline-flex; + align-items: center; + gap: 0.5rem; + padding: var(--btn-padding-md); + background: transparent; + color: var(--color-text-primary); + border: 1px solid var(--color-border); + border-radius: var(--radius-md); + font-weight: 500; + font-size: var(--btn-font-size); + cursor: pointer; + transition: all 0.2s ease; +} + +.btn-ghost:hover { + background: var(--color-bg-elevated); + border-color: var(--color-accent); +} + +.btn-ghost.danger { + color: var(--color-error); + border-color: rgba(239, 68, 68, 0.3); +} + +.btn-ghost.danger:hover { + background: rgba(239, 68, 68, 0.08); + border-color: var(--color-error); +} + +.btn-ghost:disabled { + opacity: 0.5; + cursor: not-allowed; +} + +/* Error Message */ +.error-message { + background: rgba(239, 68, 68, 0.1); + color: var(--color-error); + padding: 0.75rem 1rem; + border-radius: var(--radius-md); + margin-bottom: 1rem; + border: 1px solid rgba(239, 68, 68, 0.25); + font-weight: 500; +} + +/* Loading State */ +.loading { + padding: 2rem 1.25rem; + color: var(--color-text-secondary); + text-align: center; + font-size: 0.95rem; +} + /* Small button variant */ .btn-sm { padding: var(--btn-padding-sm) !important; @@ -389,21 +518,26 @@ .admin-panel-root .users-table td, .users-root .users-table th, .users-root .users-table td { - padding: var(--table-cell-padding); + padding: 0.75rem 1rem; + border-bottom: 1px solid var(--color-card-outline); text-align: left; vertical-align: middle; + font-size: 0.9rem; } -.admin-panel-root .users-table tbody tr:not(:last-child), -.users-root .users-table tbody tr:not(:last-child) { - border-bottom: 1px solid var(--color-border); +.admin-panel-root .users-table tr:last-child td, +.users-root 
.users-table tr:last-child td { + border-bottom: none; } .admin-panel-root .users-table th, .users-root .users-table th { font-weight: 700; + font-size: 0.82rem; color: var(--color-text-secondary); background: var(--color-bg-elevated); + text-transform: uppercase; + letter-spacing: 0.02em; white-space: nowrap; } @@ -436,7 +570,7 @@ .admin-panel-root .users-table tbody tr:hover, .users-root .users-table tbody tr:hover { - background: var(--color-bg-elevated); + background: rgba(var(--color-accent-rgb), 0.05); } .admin-panel-root .user-cell, @@ -1112,16 +1246,22 @@ border-collapse: collapse; } -.modern-table th { - padding: 1rem 1.5rem; +.modern-table th, +.modern-table td { + padding: 0.75rem 1rem; + border-bottom: 1px solid var(--color-card-outline); text-align: left; - font-weight: 600; - font-size: 0.85rem; + vertical-align: middle; + font-size: 0.9rem; +} + +.modern-table th { + font-weight: 700; + font-size: 0.82rem; text-transform: uppercase; - letter-spacing: 0.5px; + letter-spacing: 0.02em; color: var(--color-text-secondary); background: var(--color-bg-elevated); - border-bottom: 2px solid var(--color-border); } .modern-table th.actions-col { @@ -1129,18 +1269,12 @@ text-align: center; } -.modern-table td { - padding: 1.25rem 1.5rem; - border-bottom: 1px solid var(--color-border); - vertical-align: middle; -} - .modern-table tbody tr:last-child td { border-bottom: none; } .modern-table tbody tr:hover { - background: var(--color-bg-elevated); + background: rgba(var(--color-accent-rgb), 0.05); } .modern-table .user-info { @@ -2134,43 +2268,12 @@ } } -/* Feature Header - Clean style for feature toggles */ -.feature-header { - display: flex; - align-items: flex-start; - justify-content: space-between; - gap: 2rem; - margin-bottom: 2rem; - padding-bottom: 1.5rem; - border-bottom: 1px solid var(--color-border); -} - -.feature-header-info { - display: flex; - flex-direction: column; - gap: 0.5rem; -} - -.feature-header-info h2 { - margin: 0; - font-size: 1.25rem; - 
font-weight: 600; - color: var(--color-text-primary); -} - -.feature-header-info p { - margin: 0; - color: var(--color-text-secondary); - font-size: 0.95rem; - line-height: 1.5; -} - -.feature-header-actions { +/* Feature Config Options */ +.feature-config-options { display: flex; align-items: center; gap: 1rem; - padding-top: 0.25rem; - /* Align with text top */ + flex-wrap: wrap; } /* Status badge in header */ @@ -2188,9 +2291,15 @@ } .feature-status-badge.active { - background: rgba(var(--color-accent-rgb), 0.1); - color: var(--color-accent); - border-color: rgba(var(--color-accent-rgb), 0.2); + background: rgba(5, 150, 105, 0.15); + color: #047857; + border-color: rgba(5, 150, 105, 0.3); +} + +[data-theme='dark'] .feature-status-badge.active { + background: rgba(16, 185, 129, 0.2); + color: #34d399; + border-color: rgba(16, 185, 129, 0.35); } .feature-status-badge::before { @@ -2227,38 +2336,10 @@ background: var(--color-bg-elevated); } -/* Feature Header Mobile - Stack description above toggles */ +/* Mobile styles */ @media (max-width: 768px) { - .feature-header { - flex-direction: column; - align-items: stretch; - gap: 1.25rem; - padding-bottom: 1.25rem; - margin-bottom: 1.5rem; - } - - .feature-header-info { - width: 100%; - text-align: center; - } - - .feature-header-info h2 { - font-size: 1.1rem; - } - - .feature-header-info p { - font-size: 0.9rem; - } - - .feature-header-actions { - width: 100%; - justify-content: center; - border-top: 1px solid var(--color-border); - padding-top: 1rem; - } - - .feature-header-actions .toggle-group { - justify-content: center; + .feature-config-options { + justify-content: flex-start; } /* Admin Tab Tooltip - Mobile Only */ @@ -2342,11 +2423,9 @@ color: #e5e7eb; } -/* Feature status badge active */ -[data-theme='dark'][data-accent='auto'] .feature-status-badge.active { - background: rgba(229, 231, 235, 0.1); - color: #e5e7eb; - border-color: rgba(229, 231, 235, 0.2); +/* Ghost button hover */ 
+[data-theme='dark'][data-accent='auto'] .btn-ghost:hover { + border-color: #e5e7eb; } /* Focus states */ @@ -2379,6 +2458,22 @@ background: #111827; } +/* Link button with auto accent */ +[data-theme='dark'][data-accent='auto'] .btn-link { + color: #e5e7eb; +} + +[data-theme='dark'][data-accent='auto'] .btn-link:hover { + color: #f3f4f6; + background: rgba(229, 231, 235, 0.12); +} + +/* Order card icon with auto accent */ +[data-theme='dark'][data-accent='auto'] .order-card-preview .material-symbols-outlined { + color: #e5e7eb; +} + + /* =========================================== ORDER CARDS - Feature Ordering (Theme Editor Style) =========================================== */ @@ -2436,9 +2531,8 @@ border: 1px solid var(--color-border); } -.order-card-number { - font-size: 1.1rem; - font-weight: 700; +.order-card-preview .material-symbols-outlined { + font-size: 20px; color: var(--color-accent); } @@ -2462,23 +2556,6 @@ color: var(--color-text-secondary); } -/* Order Card Icon */ -.order-card-icon { - flex-shrink: 0; - display: flex; - align-items: center; - justify-content: center; - width: 36px; - height: 36px; - background: rgba(var(--color-accent-rgb), 0.1); - border-radius: var(--radius-md); -} - -.order-card-icon .material-symbols-outlined { - font-size: 20px; - color: var(--color-accent); -} - /* Order Card Handle */ .order-card-handle { flex-shrink: 0; @@ -2503,6 +2580,48 @@ color: var(--color-text-secondary); } +/* Order Card Position Button */ +.order-card-position-btn { + flex-shrink: 0; + display: flex; + align-items: center; + justify-content: center; + width: 32px; + height: 32px; + padding: 0; + background: var(--color-bg-elevated); + border: 1px solid var(--color-card-outline); + border-radius: var(--radius-md); + cursor: pointer; + transition: all 0.2s ease; +} + +.order-card-position-btn:hover { + background: rgba(var(--color-accent-rgb), 0.1); + border-color: rgba(var(--color-accent-rgb), 0.3); +} + +.order-card-position-btn 
.material-symbols-outlined { + font-size: 18px; + color: var(--color-text-secondary); + transition: color 0.2s ease; +} + +.order-card-position-btn:hover .material-symbols-outlined { + color: var(--color-accent); +} + +/* Order Empty State */ +.order-empty { + padding: 1.5rem; + text-align: center; + color: var(--color-text-muted); + font-size: 0.9rem; + border: 1px dashed var(--color-card-outline); + border-radius: var(--radius-lg); + background: rgba(var(--color-accent-rgb), 0.02); +} + /* Order Actions */ .order-actions { margin-top: 1.5rem; diff --git a/frontend/src/styles/Layout.css b/frontend/src/styles/Layout.css index 6941426..523912a 100644 --- a/frontend/src/styles/Layout.css +++ b/frontend/src/styles/Layout.css @@ -160,7 +160,7 @@ /* Standard Section Title */ .section-title { - margin: 0; + margin: 0 0 1rem 0; font-size: 0.8rem; font-weight: 600; text-transform: uppercase; @@ -203,7 +203,7 @@ } .mobile-menu-btn:hover { - background: rgba(var(--color-accent-rgb), 0.1); + background-color: rgba(var(--color-accent-rgb), 0.1); color: var(--color-accent); } @@ -211,6 +211,7 @@ font-size: var(--icon-lg); } + /* ========== ACTION BUTTONS IN SLIDER ========== */ /* Action buttons that appear in the slider (like Add User) */ @@ -286,9 +287,30 @@ margin-left: 0; } - /* Show mobile menu button */ + /* Show mobile menu button with logo */ .mobile-menu-btn { display: flex; + position: absolute; + left: 4px; + top: 50%; + transform: translateY(-50%); + z-index: 1; + background-image: url('/logo_black.svg'); + background-size: 28px 28px; + background-repeat: no-repeat; + background-position: center; + } + + .mobile-menu-btn .material-symbols-outlined { + display: none; + } + + [data-theme='dark'] .mobile-menu-btn { + background-image: url('/logo_white.svg'); + } + + .mobile-menu-btn:hover { + background-color: transparent; } .page-tabs-container, @@ -302,6 +324,7 @@ flex-wrap: wrap; justify-content: flex-start; gap: 8px; + position: relative; } .page-title-section, @@ 
-309,6 +332,7 @@ flex: 1; justify-content: flex-start; padding: 0.5rem 0.75rem; + padding-left: 48px; font-size: 1rem; } @@ -323,6 +347,35 @@ display: none; } + /* Hide title section when tabs are present on mobile */ + .page-tabs-slider:has(.page-tab-btn) .page-title-section, + .admin-tabs-slider:has(.admin-tab-btn) .admin-title-section { + display: none; + } + + /* Center title section absolutely when no tabs are present on mobile */ + .page-tabs-slider:not(:has(.page-tab-btn)), + .admin-tabs-slider:not(:has(.admin-tab-btn)) { + justify-content: center; + min-height: 48px; + } + + .page-tabs-slider:not(:has(.page-tab-btn)) .page-title-section, + .admin-tabs-slider:not(:has(.admin-tab-btn)) .admin-title-section { + position: absolute; + left: 50%; + top: 50%; + transform: translate(-50%, -50%); + padding: 0.5rem 0.75rem; + flex: none; + } + + /* Lighter icon color in dark theme when only title is shown */ + .page-tabs-slider:not(:has(.page-tab-btn)) .page-title-section .material-symbols-outlined, + .admin-tabs-slider:not(:has(.admin-tab-btn)) .admin-title-section .material-symbols-outlined { + color: var(--color-text-secondary); + } + /* Tabs on second row - full width */ .page-tab-btn, .admin-tab-btn { diff --git a/frontend/src/styles/Login.css b/frontend/src/styles/Login.css index 8e52f9d..b5e722a 100644 --- a/frontend/src/styles/Login.css +++ b/frontend/src/styles/Login.css @@ -198,7 +198,7 @@ transform: translateY(-1px); } -.error-message { +.login-container .error-message { background: rgba(245, 101, 101, 0.1); color: var(--color-error); padding: 0.75rem; diff --git a/frontend/src/styles/Notifications.css b/frontend/src/styles/Notifications.css new file mode 100644 index 0000000..4263c62 --- /dev/null +++ b/frontend/src/styles/Notifications.css @@ -0,0 +1,202 @@ +.notifications-root .page-content { + max-width: 900px; +} + +.notifications-toolbar { + display: flex; + align-items: center; + justify-content: space-between; + gap: var(--toolbar-gap); + 
margin-bottom: var(--section-gap); + flex-wrap: wrap; +} + +.notifications-toggle { + display: inline-flex; + align-items: center; + gap: var(--element-gap-lg); +} + +.notifications-toggle-label { + color: var(--color-text-secondary); + font-size: 0.85rem; + font-weight: 600; + text-transform: uppercase; + letter-spacing: 0.05em; +} + +.notifications-actions { + display: inline-flex; + align-items: center; + gap: var(--element-gap-lg); +} + +.notifications-empty { + padding: 2rem 1.25rem; + border: 1px dashed var(--color-card-outline); + border-radius: var(--radius-lg); + color: var(--color-text-secondary); + text-align: center; + font-size: 0.9rem; + background: var(--color-bg-card); + box-shadow: var(--shadow-sm); +} + +@supports (color: color-mix(in srgb, black, transparent)) { + .notifications-empty { + background: color-mix(in srgb, var(--color-bg-card) 88%, transparent); + backdrop-filter: blur(14px) saturate(1.1); + } +} + +.notifications-list { + display: flex; + flex-direction: column; + gap: var(--space-3); +} + +.notification-item { + display: flex; + gap: 1rem; + align-items: flex-start; + justify-content: space-between; + border: 1px solid var(--color-card-outline); + border-radius: var(--radius-lg); + padding: var(--space-5); + background: var(--color-bg-card); + box-shadow: var(--shadow-sm); + transition: box-shadow var(--transition-base), border-color var(--transition-base); +} + +@supports (color: color-mix(in srgb, black, transparent)) { + .notification-item { + background: color-mix(in srgb, var(--color-bg-card) 88%, transparent); + backdrop-filter: blur(14px) saturate(1.1); + } +} + +.notification-item:hover { + box-shadow: var(--shadow-md); + border-color: rgba(var(--color-accent-rgb), 0.18); +} + +.notification-item.unread { + border-color: rgba(var(--color-accent-rgb), 0.35); + box-shadow: var(--shadow-sm), 0 0 0 3px rgba(var(--color-accent-rgb), 0.08); +} + +.notification-main { + min-width: 0; + flex: 1; +} + +.notification-header { + 
display: flex; + align-items: center; + justify-content: space-between; + gap: 1rem; +} + +.notification-title { + display: inline-flex; + align-items: center; + gap: 0.5rem; + font-weight: 600; + font-size: 0.9rem; + color: var(--color-text-primary); + min-width: 0; +} + +.notification-title span:last-child { + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +} + +.notification-date { + color: var(--color-text-secondary); + font-size: 0.85rem; + flex: 0 0 auto; +} + +.notification-message { + margin-top: 0.5rem; + color: var(--color-text-secondary); + font-size: 0.9rem; + white-space: pre-wrap; + line-height: 1.5; +} + +.notification-actions { + display: inline-flex; + flex-direction: column; + gap: 0.5rem; + align-items: flex-end; + flex: 0 0 auto; +} + +.notification-type { + display: inline-flex; + align-items: center; + justify-content: center; + padding: 0.15rem 0.5rem; + font-size: 0.75rem; + font-weight: 700; + border-radius: 999px; + border: 1px solid var(--color-card-outline); + background: var(--color-bg-elevated); + color: var(--color-text-secondary); + text-transform: uppercase; +} + +.notification-type.type-success { + background: rgba(34, 197, 94, 0.1); + border-color: rgba(34, 197, 94, 0.25); + color: #16a34a; +} + +.notification-type.type-warning { + background: rgba(245, 158, 11, 0.12); + border-color: rgba(245, 158, 11, 0.3); + color: #b45309; +} + +.notification-type.type-error { + background: rgba(239, 68, 68, 0.1); + border-color: rgba(239, 68, 68, 0.25); + color: #dc2626; +} + +.notification-type.type-system { + background: rgba(var(--color-accent-rgb), 0.1); + border-color: rgba(var(--color-accent-rgb), 0.2); + color: var(--color-accent); +} + +@media (max-width: 768px) { + .notification-item { + flex-direction: column; + gap: 0.75rem; + } + + .notification-actions { + flex-direction: row; + width: 100%; + justify-content: flex-end; + } +} + +/* ========== DARK THEME + AUTO ACCENT OVERRIDES ========== */ + +/* Unread 
notification border */ +[data-theme='dark'][data-accent='auto'] .notification-item.unread { + border-color: rgba(229, 231, 235, 0.35); + box-shadow: var(--shadow-sm), 0 0 0 3px rgba(229, 231, 235, 0.08); +} + +/* System type badge */ +[data-theme='dark'][data-accent='auto'] .notification-type.type-system { + background: rgba(229, 231, 235, 0.1); + border-color: rgba(229, 231, 235, 0.2); + color: #e5e7eb; +} diff --git a/frontend/src/styles/SettingsPage.css b/frontend/src/styles/SettingsPage.css index 6d270fb..26a4f5b 100644 --- a/frontend/src/styles/SettingsPage.css +++ b/frontend/src/styles/SettingsPage.css @@ -17,27 +17,22 @@ max-width: 800px; } -/* Settings Sections */ -.settings-section-modern { - background: var(--color-bg-card); - border: 1px solid var(--color-card-outline); - border-radius: var(--radius-lg); - padding: 2rem; - box-shadow: var(--shadow-sm); - margin-bottom: 1.5rem; +/* Settings Sections - no card, just spacing */ +.settings-section { + margin-bottom: 3rem; } -@supports (color: color-mix(in srgb, black, transparent)) { - .settings-section-modern { - background: color-mix(in srgb, var(--color-bg-card) 88%, transparent); - backdrop-filter: blur(14px) saturate(1.1); - } -} - -.settings-section-modern:last-child { +.settings-section:last-child { margin-bottom: 0; } +.settings-section-desc { + margin: 0 0 1rem; + color: var(--color-text-secondary); + font-size: 0.9rem; + line-height: 1.5; +} + /* Section title uses standard .section-title from Layout.css */ .setting-item-modern { @@ -45,17 +40,14 @@ justify-content: space-between; align-items: center; gap: 2rem; - padding: 1.5rem 0; - border-bottom: 1px solid var(--color-card-outline); + padding: 1rem; + border: 1px solid var(--color-card-outline); + border-radius: var(--radius-lg); + background: var(--color-bg-card); } -.setting-item-modern:last-child { - border-bottom: none; - padding-bottom: 0; -} - -.setting-item-modern:first-child { - padding-top: 0; +.setting-item-modern + 
.setting-item-modern { + margin-top: 0.75rem; } .setting-info-modern { @@ -181,3 +173,213 @@ gap: 1rem; } } + +/* Security section helpers */ +.settings-security-details { + margin-top: 1.25rem; + display: flex; + flex-direction: column; + gap: 1rem; +} + +.settings-security-details > .btn-primary { + align-self: flex-start; + justify-content: center; + min-width: 140px; +} + +.settings-backup-codes-section { + margin-top: 1rem; + padding: 1rem; + border: 1px solid var(--color-card-outline); + border-radius: var(--radius-lg); + background: var(--color-bg-card); +} + +.settings-backup-codes-section h4 { + margin: 0 0 0.25rem; +} + +.settings-backup-codes-section p { + margin: 0 0 0.75rem; + color: var(--color-text-secondary); + font-size: 0.9rem; +} + +.settings-twofa-setup { + display: flex; + gap: 1.25rem; + align-items: flex-start; +} + +.settings-twofa-qr { + flex: 0 0 auto; + padding: 0.75rem; + border: 1px solid var(--color-card-outline); + border-radius: var(--radius-lg); + background: white; +} + +.settings-twofa-qr img { + width: 160px; + height: 160px; + display: block; +} + +.settings-twofa-meta { + flex: 1; + min-width: 0; +} + +.settings-twofa-secret { + display: flex; + flex-direction: column; + gap: 0.35rem; +} + +.settings-twofa-secret-label { + font-size: 0.85rem; + color: var(--color-text-secondary); +} + +.settings-twofa-secret-value { + display: inline-block; + padding: 0.5rem 0.75rem; + border: 1px solid var(--color-card-outline); + border-radius: var(--radius-md); + background: var(--color-bg-card); + overflow-x: auto; +} + +.settings-twofa-actions { + display: flex; + gap: 0.75rem; + align-items: center; + margin-top: 0.75rem; + flex-wrap: wrap; +} + +.settings-twofa-actions-grid { + display: grid; + grid-template-columns: repeat(2, minmax(0, 1fr)); + gap: 1rem; +} + +.settings-twofa-action h4 { + margin: 0 0 0.25rem; +} + +.settings-twofa-action p { + margin: 0 0 0.75rem; + color: var(--color-text-secondary); + font-size: 0.9rem; + 
line-height: 1.5; +} + +.settings-twofa-action.danger { + border: 1px solid rgba(239, 68, 68, 0.25); + border-radius: var(--radius-lg); + padding: 1rem; + background: rgba(239, 68, 68, 0.04); +} + +.settings-backup-codes { + display: grid; + grid-template-columns: repeat(auto-fill, minmax(140px, 1fr)); + gap: 0.5rem; + margin-top: 0.75rem; +} + +.settings-backup-code { + display: inline-block; + padding: 0.5rem 0.75rem; + border: 1px solid var(--color-card-outline); + border-radius: var(--radius-md); + background: var(--color-bg-card); + text-align: center; +} + +.settings-sessions-header { + display: flex; + align-items: center; + justify-content: flex-start; + gap: 1rem; + margin-bottom: 1rem; +} + +.settings-sessions-list { + display: flex; + flex-direction: column; + gap: 0.75rem; + margin-top: 0.75rem; +} + +.settings-session-row { + display: flex; + align-items: flex-start; + justify-content: space-between; + gap: 1rem; + padding: 0.85rem; + border: 1px solid var(--color-card-outline); + border-radius: var(--radius-lg); + background: var(--color-bg-card); + box-shadow: var(--shadow-sm); +} + +.settings-session-meta { + min-width: 0; + flex: 1; +} + +.settings-session-title { + display: flex; + align-items: center; + gap: 0.5rem; + flex-wrap: wrap; +} + +.settings-session-device { + font-weight: 650; + color: var(--color-text-primary); +} + +.settings-session-details { + margin-top: 0.25rem; + color: var(--color-text-secondary); + font-size: 0.85rem; + line-height: 1.4; +} + +.settings-session-actions { + flex: 0 0 auto; +} + +.settings-empty { + margin-top: 0.75rem; + padding: 0.85rem; + border: 1px dashed var(--color-card-outline); + border-radius: var(--radius-lg); + color: var(--color-text-secondary); + text-align: center; +} + +@media (max-width: 768px) { + .settings-twofa-setup { + flex-direction: column; + align-items: stretch; + } + + .settings-twofa-actions-grid { + grid-template-columns: 1fr; + } + + .settings-session-row { + flex-direction: column; 
+ } + + .settings-session-actions { + width: 100%; + display: flex; + justify-content: flex-end; + } +} diff --git a/frontend/src/styles/Sidebar.css b/frontend/src/styles/Sidebar.css index e9529ee..9fedca2 100644 --- a/frontend/src/styles/Sidebar.css +++ b/frontend/src/styles/Sidebar.css @@ -139,7 +139,7 @@ .sidebar.dynamic.collapsed.expanded-force .view-mode-toggle { justify-content: flex-start; - padding: 0.75rem; + padding: 0.75rem 1rem; gap: 0.75rem; } @@ -191,7 +191,7 @@ } .sidebar.dynamic.collapsed .view-mode-toggle { - padding: 0.75rem; + padding: 0.75rem 0.5rem; } /* Re-enable transitions ONLY on hover or when forced expanded */ @@ -356,6 +356,15 @@ /* Taller touch target */ } +button.nav-item { + width: 100%; + background: none; + border: none; + cursor: pointer; + text-align: left; + font: inherit; +} + .sidebar.collapsed .nav-item { justify-content: center; padding: 0.75rem 0.5rem; @@ -422,21 +431,50 @@ transition: opacity 0.3s ease, width 0.3s ease; } +.nav-badge { + margin-left: auto; + display: inline-flex; + align-items: center; + justify-content: center; + min-width: 22px; + height: 22px; + padding: 0 6px; + border-radius: 999px; + font-size: 0.75rem; + font-weight: 700; + background: rgba(239, 68, 68, 0.18); + color: #ef4444; + border: 1px solid rgba(239, 68, 68, 0.28); +} + +.sidebar.collapsed .nav-badge { + position: absolute; + top: 6px; + right: 10px; + margin-left: 0; + min-width: 18px; + height: 18px; + padding: 0 4px; + font-size: 0.7rem; +} + +.sidebar-bottom-actions { + padding: 0 0.75rem; +} + .sidebar-footer { - padding: 0.75rem; + padding: 0.5rem 0.75rem 0.75rem; border-top: 1px solid var(--color-sidebar-border); flex-shrink: 0; display: flex; flex-direction: column; gap: 0; - margin-top: auto; - /* Push to bottom */ margin-left: 0; margin-right: 0; margin-bottom: 0; } -.sidebar-footer>*:not(:last-child) { +.sidebar-footer>*:not(:last-child):not(.nav-item) { margin-bottom: 0.15rem; } @@ -445,7 +483,8 @@ display: flex; align-items: 
center; gap: 0.75rem; - padding: 0.75rem; + padding: 0.75rem 1rem; + margin: 0.25rem 0; background: transparent; border: 1px solid transparent; border-radius: var(--radius-md); @@ -486,7 +525,7 @@ .sidebar.collapsed .view-mode-toggle { justify-content: center; - padding: 0.75rem; + padding: 0.75rem 0.5rem; gap: 0; /* Ensure no gap affects centering */ } @@ -667,6 +706,8 @@ .sidebar.collapsed .view-mode-toggle { justify-content: flex-start; + padding: 0.75rem 1rem; + gap: 0.75rem; } .sidebar.collapsed .user-info-compact { @@ -1050,16 +1091,58 @@ } } -/* ========== DARK THEME + AUTO ACCENT OVERRIDES ========== */ +/* ========== AUTO ACCENT OVERRIDES ========== */ -/* Nav items with auto accent in dark mode: use off-white background with dark text */ -[data-theme='dark'][data-accent='auto'] .nav-item.active { +/* + * Sidebar has a dark background in BOTH light and dark themes. + * With auto accent color, we need high-contrast active states. + * Use off-white background with dark text for visibility. 
+ */ + +/* Nav items with auto accent: use off-white background with dark text */ +[data-accent='auto'] .nav-item.active { + background: #f3f4f6; + color: #111827 !important; + box-shadow: 0 0 16px 2px rgba(243, 244, 246, 0.5); +} + +[data-accent='auto'] .nav-item.active .nav-icon, +[data-accent='auto'] .nav-item.active .nav-label { + color: #111827 !important; +} + +/* Active indicator bar */ +[data-accent='auto'] .nav-item.active::before { + background: #111827; +} + +/* Hover on active item - slightly darker */ +[data-accent='auto'] .nav-item.active:hover { background: #e5e7eb; - color: #111827 !important; - box-shadow: 0 0 20px 2px rgba(229, 231, 235, 0.4); } -[data-theme='dark'][data-accent='auto'] .nav-item.active .nav-icon, -[data-theme='dark'][data-accent='auto'] .nav-item.active .nav-label { - color: #111827 !important; +/* Hover states for non-active items */ +[data-accent='auto'] .nav-item:not(.active):hover { + background: rgba(229, 231, 235, 0.15); +} + +/* View mode toggle with auto accent */ +[data-accent='auto'] .view-mode-toggle:hover { + background: rgba(229, 231, 235, 0.12); + border-color: rgba(229, 231, 235, 0.2); +} + +[data-accent='auto'] .view-mode-toggle.user-mode { + background: rgba(229, 231, 235, 0.18); + border-color: rgba(229, 231, 235, 0.25); +} + +[data-accent='auto'] .view-mode-toggle.user-mode:hover { + background: rgba(229, 231, 235, 0.22); + border-color: rgba(229, 231, 235, 0.3); +} + +/* User initial badge with auto accent */ +[data-accent='auto'] .user-initial { + background: rgba(229, 231, 235, 0.25); } diff --git a/frontend/src/styles/ThemeSettings.css b/frontend/src/styles/ThemeSettings.css index fe2d86c..ffbd927 100644 --- a/frontend/src/styles/ThemeSettings.css +++ b/frontend/src/styles/ThemeSettings.css @@ -544,7 +544,7 @@ } /* Badge Styles */ -.badge { +.theme-settings-root .badge { padding: 0.35rem 0.75rem; border-radius: var(--radius-md); font-size: 0.8rem; @@ -552,12 +552,13 @@ white-space: nowrap; } -.badge-accent { 
+.theme-settings-root .badge-accent { background: var(--color-accent); color: white; + border: none; } -.badge-success { +.theme-settings-root .badge-success { background: rgba(34, 197, 94, 0.15); color: #16a34a; border: 1px solid rgba(34, 197, 94, 0.3); diff --git a/frontend/src/styles/Users.css b/frontend/src/styles/Users.css index 86a2d30..34681d9 100644 --- a/frontend/src/styles/Users.css +++ b/frontend/src/styles/Users.css @@ -91,22 +91,24 @@ .users-root .users-table th, .users-root .users-table td { - padding: var(--table-cell-padding); + padding: 0.75rem 1rem; + border-bottom: 1px solid var(--color-card-outline); text-align: left; vertical-align: middle; + font-size: 0.9rem; } -.users-root .users-table tbody tr:not(:last-child) { - border-bottom: 1px solid var(--color-card-outline); +.users-root .users-table tr:last-child td { + border-bottom: none; } .users-root .users-table th { - font-weight: 600; - font-size: 0.85rem; + font-weight: 700; + font-size: 0.82rem; color: var(--color-text-secondary); background: var(--color-bg-elevated); text-transform: uppercase; - letter-spacing: 0.03em; + letter-spacing: 0.02em; } .users-root .users-table tbody tr:hover { diff --git a/frontend/src/styles/theme/index.css b/frontend/src/styles/theme/index.css index 09e0d0a..efef570 100644 --- a/frontend/src/styles/theme/index.css +++ b/frontend/src/styles/theme/index.css @@ -46,6 +46,12 @@ body { cursor: text; } +/* Utility for monospace text (IDs, keys, timestamps) */ +.mono { + font-family: var(--font-mono); + font-variant-numeric: tabular-nums; +} + #root { min-height: 100vh; width: 100%; diff --git a/frontend/src/types/index.ts b/frontend/src/types/index.ts index cdcc0ae..19fcc9d 100644 --- a/frontend/src/types/index.ts +++ b/frontend/src/types/index.ts @@ -15,6 +15,7 @@ export interface User { export interface LoginRequest { username: string; password: string; + totp_code?: string; } export interface RegisterRequest { @@ -42,14 +43,17 @@ export interface 
UserUpdatePayload {
 }

 export interface Token {
-  access_token: string;
+  access_token: string | null;
   token_type: string;
+  requires_2fa?: boolean;
+  temp_token?: string | null;
 }

 export interface AuthContextType {
   user: User | null;
   token: string | null;
-  login: (username: string, password: string) => Promise<void>;
+  login: (username: string, password: string) => Promise<{ requires_2fa: boolean; temp_token?: string | null }>;
+  verify2fa: (tempToken: string, code: string) => Promise<void>;
   register: (username: string, email: string, password: string) => Promise<void>;
   logout: () => void;
   isLoading: boolean;