commit 09359f30be6be6b4aa666e4d87737ccf39fc7888 Author: basilro Date: Sun Mar 1 07:44:19 2026 +0900 초기 커밋 diff --git a/.claude/settings.local.json b/.claude/settings.local.json new file mode 100644 index 0000000..41b51ec --- /dev/null +++ b/.claude/settings.local.json @@ -0,0 +1,8 @@ +{ + "permissions": { + "allow": [ + "WebSearch", + "Bash(dir:*)" + ] + } +} diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..529b8e7 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,17 @@ +__pycache__ +*.py[cod] +.venv +venv +.env +.git +.idea +.vscode +.mypy_cache +.ruff_cache +.pytest_cache +htmlcov +.coverage +*.log +data/ +tests/ +docs/ diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..2635566 --- /dev/null +++ b/.env.example @@ -0,0 +1,56 @@ +# ── Application ────────────────────────────────────── +APP_NAME=core-api +APP_ENV=development +DEBUG=true +SECRET_KEY=change-me-to-a-random-secret-key +API_V1_PREFIX=/api/v1 + +# ── MariaDB ───────────────────────────────────────── +MARIADB_HOST=127.0.0.1 +MARIADB_PORT=3306 +MARIADB_USER=root +MARIADB_PASSWORD=changeme +MARIADB_DATABASE=core_api + +# ── MongoDB ───────────────────────────────────────── +MONGODB_URL=mongodb://127.0.0.1:27017 +MONGODB_DATABASE=core_api + +# ── Redis ─────────────────────────────────────────── +REDIS_URL=redis://127.0.0.1:6379/0 + +# ── JWT ───────────────────────────────────────────── +JWT_SECRET_KEY=change-me-jwt-secret +JWT_ALGORITHM=HS256 +JWT_ACCESS_TOKEN_EXPIRE_MINUTES=30 +JWT_REFRESH_TOKEN_EXPIRE_DAYS=7 + +# ── MQTT ──────────────────────────────────────────── +MQTT_HOST=127.0.0.1 +MQTT_PORT=1883 +MQTT_USERNAME= +MQTT_PASSWORD= + +# ── Celery ────────────────────────────────────────── +CELERY_BROKER_URL=redis://127.0.0.1:6379/1 +CELERY_RESULT_BACKEND=redis://127.0.0.1:6379/2 + +# ── CORS ──────────────────────────────────────────── +CORS_ORIGINS=["http://localhost:3000","http://localhost:8080"] + +# ── OAuth 
─────────────────────────────────────────── +GOOGLE_CLIENT_ID= +GOOGLE_CLIENT_SECRET= +KAKAO_CLIENT_ID= +KAKAO_CLIENT_SECRET= +NAVER_CLIENT_ID= +NAVER_CLIENT_SECRET= + +# ── SMTP (Email) ──────────────────────────────────── +SMTP_HOST=smtp.gmail.com +SMTP_PORT=587 +SMTP_USERNAME= +SMTP_PASSWORD= + +# ── Logging ───────────────────────────────────────── +LOG_LEVEL=DEBUG diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..5ef182d --- /dev/null +++ b/.gitignore @@ -0,0 +1,52 @@ +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +*.egg-info/ +dist/ +build/ +.eggs/ + +# Virtual environments +.venv/ +venv/ +env/ + +# Environment +.env + +# IDE +.idea/ +.vscode/ +*.swp +*.swo + +# OS +.DS_Store +Thumbs.db + +# Testing +.coverage +htmlcov/ +.pytest_cache/ + +# mypy +.mypy_cache/ + +# Ruff +.ruff_cache/ + +# Logs +*.log +logs/ + +# Docker volumes +data/ + +# Celery +celerybeat-schedule +celerybeat.pid + +# Alembic +alembic/versions/*.pyc diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..a1ddd0f --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,15 @@ +repos: + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.7.0 + hooks: + - id: ruff + args: [--fix] + - id: ruff-format + + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v5.0.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..9d702e0 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,16 @@ +FROM python:3.11-slim + +WORKDIR /app + +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +COPY pyproject.toml . +RUN pip install --no-cache-dir . + +COPY . . 
+ +EXPOSE 8000 + +CMD ["uvicorn", "app.asgi:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/Dockerfile.worker b/Dockerfile.worker new file mode 100644 index 0000000..6e84399 --- /dev/null +++ b/Dockerfile.worker @@ -0,0 +1,14 @@ +FROM python:3.11-slim + +WORKDIR /app + +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +COPY pyproject.toml . +RUN pip install --no-cache-dir . + +COPY . . + +CMD ["celery", "-A", "app.tasks.celery_app", "worker", "--loglevel=info", "--concurrency=4"] diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..f45d8d0 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,37 @@ +[alembic] +script_location = alembic +prepend_sys_path = . + +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000..299cecb --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,54 @@ +from __future__ import annotations + +from logging.config import fileConfig + +from alembic import context +from sqlalchemy import engine_from_config, pool +from sqlmodel import SQLModel + +from app.core.config import settings + +# Alembic Config +config = context.config +config.set_main_option("sqlalchemy.url", settings.MARIADB_DSN_SYNC) + +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# Import all models so they register with SQLModel.metadata +from app.models.mariadb import auth, device, monitoring, system, user # noqa: F401 + +target_metadata = 
SQLModel.metadata + + +def run_migrations_offline() -> None: + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "format"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000..4a3ea2b --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,28 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} +""" +from __future__ import annotations + +from typing import Sequence, Union + +import sqlalchemy as sa +import sqlmodel +from alembic import op +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/admin/__init__.py b/app/admin/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/admin/setup.py b/app/admin/setup.py new file mode 100644 index 0000000..e067c8f --- /dev/null +++ b/app/admin/setup.py @@ -0,0 +1,56 @@ +from __future__ import annotations + +from fastapi import FastAPI +from sqladmin import Admin +from sqladmin.authentication import AuthenticationBackend +from starlette.requests import Request + +from app.core.config import settings +from app.core.constants import Role +from app.core.security import decode_token +from app.db.mariadb import async_engine + + +class AdminAuth(AuthenticationBackend): + async def login(self, request: Request) -> bool: + form = await request.form() + token = str(form.get("token", "")) + payload = decode_token(token) + if payload and payload.get("role") in Role.ADMIN_ROLES: + request.session["token"] = token + return True + return False + + async def logout(self, request: Request) -> bool: + request.session.clear() + return True + + async def authenticate(self, request: Request) -> bool: + token = request.session.get("token") + if not token: + return False + payload = decode_token(token) + return payload is not None and payload.get("role") in Role.ADMIN_ROLES + + +def setup_admin(app: FastAPI) -> Admin: + auth_backend = AdminAuth(secret_key=settings.SECRET_KEY) + admin = Admin( + app, + engine=async_engine, + authentication_backend=auth_backend, + title=f"{settings.APP_NAME} Admin", + ) + + from app.admin.views.device_admin import 
DeviceAdmin, DeviceGroupAdmin + from app.admin.views.system_admin import AuditLogAdmin, SystemConfigAdmin + from app.admin.views.user_admin import UserAdmin, UserProfileAdmin + + admin.add_view(UserAdmin) + admin.add_view(UserProfileAdmin) + admin.add_view(DeviceAdmin) + admin.add_view(DeviceGroupAdmin) + admin.add_view(SystemConfigAdmin) + admin.add_view(AuditLogAdmin) + + return admin diff --git a/app/admin/views/__init__.py b/app/admin/views/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/admin/views/device_admin.py b/app/admin/views/device_admin.py new file mode 100644 index 0000000..200be3f --- /dev/null +++ b/app/admin/views/device_admin.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +from sqladmin import ModelView + +from app.models.mariadb.device import Device, DeviceGroup + + +class DeviceAdmin(ModelView, model=Device): + column_list = [ + Device.id, Device.device_uid, Device.name, Device.device_type, + Device.status, Device.last_seen_at, Device.created_at, + ] + column_searchable_list = [Device.device_uid, Device.name] + column_sortable_list = [Device.id, Device.name, Device.status, Device.created_at] + column_default_sort = ("id", True) + can_create = True + can_edit = True + can_delete = False + name = "Device" + name_plural = "Devices" + icon = "fa-solid fa-microchip" + + +class DeviceGroupAdmin(ModelView, model=DeviceGroup): + column_list = [DeviceGroup.id, DeviceGroup.name, DeviceGroup.description] + column_searchable_list = [DeviceGroup.name] + can_create = True + can_edit = True + can_delete = True + name = "Device Group" + name_plural = "Device Groups" + icon = "fa-solid fa-layer-group" diff --git a/app/admin/views/system_admin.py b/app/admin/views/system_admin.py new file mode 100644 index 0000000..1d8bbe0 --- /dev/null +++ b/app/admin/views/system_admin.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +from sqladmin import ModelView + +from app.models.mariadb.system import AuditLog, SystemConfig + + 
+class SystemConfigAdmin(ModelView, model=SystemConfig): + column_list = [SystemConfig.id, SystemConfig.key, SystemConfig.value, SystemConfig.is_secret] + column_searchable_list = [SystemConfig.key] + can_create = True + can_edit = True + can_delete = True + name = "System Config" + name_plural = "System Configs" + icon = "fa-solid fa-gear" + + +class AuditLogAdmin(ModelView, model=AuditLog): + column_list = [ + AuditLog.id, AuditLog.user_id, AuditLog.action, + AuditLog.resource_type, AuditLog.resource_id, AuditLog.created_at, + ] + column_sortable_list = [AuditLog.id, AuditLog.created_at] + column_default_sort = ("id", True) + can_create = False + can_edit = False + can_delete = False + name = "Audit Log" + name_plural = "Audit Logs" + icon = "fa-solid fa-clipboard-list" diff --git a/app/admin/views/user_admin.py b/app/admin/views/user_admin.py new file mode 100644 index 0000000..5f4cd9e --- /dev/null +++ b/app/admin/views/user_admin.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +from sqladmin import ModelView + +from app.models.mariadb.user import User, UserProfile + + +class UserAdmin(ModelView, model=User): + column_list = [User.id, User.email, User.role, User.is_active, User.is_verified, User.created_at] + column_searchable_list = [User.email] + column_sortable_list = [User.id, User.email, User.created_at] + column_default_sort = ("id", True) + can_create = True + can_edit = True + can_delete = False + name = "User" + name_plural = "Users" + icon = "fa-solid fa-user" + + +class UserProfileAdmin(ModelView, model=UserProfile): + column_list = [UserProfile.id, UserProfile.user_id, UserProfile.full_name, UserProfile.organization] + column_searchable_list = [UserProfile.full_name] + can_create = False + can_delete = False + name = "User Profile" + name_plural = "User Profiles" + icon = "fa-solid fa-address-card" diff --git a/app/api/__init__.py b/app/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/api/deps.py 
b/app/api/deps.py new file mode 100644 index 0000000..a6de8d5 --- /dev/null +++ b/app/api/deps.py @@ -0,0 +1,12 @@ +from __future__ import annotations + +from collections.abc import AsyncGenerator + +from sqlalchemy.ext.asyncio import AsyncSession + +from app.db.mariadb import get_db + + +async def get_session() -> AsyncGenerator[AsyncSession, None]: + async for session in get_db(): + yield session diff --git a/app/api/v1/__init__.py b/app/api/v1/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/api/v1/endpoints/__init__.py b/app/api/v1/endpoints/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/api/v1/endpoints/analytics.py b/app/api/v1/endpoints/analytics.py new file mode 100644 index 0000000..ea85c89 --- /dev/null +++ b/app/api/v1/endpoints/analytics.py @@ -0,0 +1,73 @@ +from __future__ import annotations + +from datetime import datetime + +from fastapi import APIRouter, Depends, Query + +from app.core.constants import Role +from app.core.dependencies import require_role +from app.schemas.analytics import ( + AnalyticsResultRead, + ReportResponse, + TelemetryAggregateResponse, +) +from app.services.analytics_service import AnalyticsService + +router = APIRouter(prefix="/analytics", tags=["analytics"]) + + +@router.get("/telemetry/{device_id}", response_model=TelemetryAggregateResponse) +async def get_telemetry_aggregate( + device_id: str, + start: datetime = Query(...), + end: datetime = Query(...), + interval: str = Query("1h"), + _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)), +) -> TelemetryAggregateResponse: + service = AnalyticsService() + return await service.get_telemetry_aggregate(device_id, start, end, interval) + + +@router.post("/reports/{device_id}", response_model=ReportResponse) +async def generate_report( + device_id: str, + start: datetime = Query(...), + end: datetime = Query(...), + _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)), +) -> ReportResponse: + service = AnalyticsService() 
+ return await service.generate_report(device_id, start, end) + + +@router.get("/status/{device_id}") +async def device_status_analysis( + device_id: str, + start: datetime = Query(...), + end: datetime = Query(...), + _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)), +) -> dict: + service = AnalyticsService() + return await service.get_device_status_analysis(device_id, start, end) + + +@router.get("/trends/{device_id}") +async def trend_analysis( + device_id: str, + start: datetime = Query(...), + end: datetime = Query(...), + _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)), +) -> dict: + service = AnalyticsService() + return await service.get_trend_analysis(device_id, start, end) + + +@router.get("/results", response_model=list[AnalyticsResultRead]) +async def list_analytics_results( + analysis_type: str = Query(...), + device_id: str | None = Query(None), + skip: int = Query(0, ge=0), + limit: int = Query(20, ge=1, le=100), + _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)), +) -> list[AnalyticsResultRead]: + service = AnalyticsService() + return await service.list_results(analysis_type, device_id, skip, limit) diff --git a/app/api/v1/endpoints/auth.py b/app/api/v1/endpoints/auth.py new file mode 100644 index 0000000..7549ef0 --- /dev/null +++ b/app/api/v1/endpoints/auth.py @@ -0,0 +1,53 @@ +from __future__ import annotations + +from fastapi import APIRouter, Depends +from sqlalchemy.ext.asyncio import AsyncSession + +from app.api.deps import get_session +from app.core.dependencies import get_current_user_id +from app.schemas.auth import ( + LoginRequest, + RefreshTokenRequest, + RegisterRequest, + TokenResponse, +) +from app.services.auth_service import AuthService + +router = APIRouter(prefix="/auth", tags=["auth"]) + + +@router.post("/register", response_model=TokenResponse, status_code=201) +async def register( + body: RegisterRequest, + session: AsyncSession = Depends(get_session), +) -> TokenResponse: + service = AuthService(session) 
+ user = await service.register(body.email, body.password, body.full_name) + return await service._create_tokens(user) + + +@router.post("/login", response_model=TokenResponse) +async def login( + body: LoginRequest, + session: AsyncSession = Depends(get_session), +) -> TokenResponse: + service = AuthService(session) + return await service.login(body.email, body.password) + + +@router.post("/refresh", response_model=TokenResponse) +async def refresh_token( + body: RefreshTokenRequest, + session: AsyncSession = Depends(get_session), +) -> TokenResponse: + service = AuthService(session) + return await service.refresh(body.refresh_token) + + +@router.post("/logout", status_code=204) +async def logout( + user_id: int = Depends(get_current_user_id), + session: AsyncSession = Depends(get_session), +) -> None: + service = AuthService(session) + await service.logout(user_id) diff --git a/app/api/v1/endpoints/devices.py b/app/api/v1/endpoints/devices.py new file mode 100644 index 0000000..1a3a52a --- /dev/null +++ b/app/api/v1/endpoints/devices.py @@ -0,0 +1,70 @@ +from __future__ import annotations + +from fastapi import APIRouter, Depends, Query +from sqlalchemy.ext.asyncio import AsyncSession + +from app.api.deps import get_session +from app.core.constants import Role +from app.core.dependencies import get_current_user_payload, require_role +from app.schemas.common import PaginatedResponse +from app.schemas.device import DeviceCreate, DeviceRead, DeviceUpdate +from app.services.device_service import DeviceService + +router = APIRouter(prefix="/devices", tags=["devices"]) + + +@router.get("", response_model=PaginatedResponse[DeviceRead]) +async def list_devices( + page: int = Query(1, ge=1), + size: int = Query(20, ge=1, le=100), + _: dict = Depends(get_current_user_payload), + session: AsyncSession = Depends(get_session), +) -> PaginatedResponse[DeviceRead]: + service = DeviceService(session) + skip = (page - 1) * size + items = await service.list_devices(skip=skip, 
limit=size) + total = await service.count_devices() + return PaginatedResponse( + items=items, total=total, page=page, size=size, pages=(total + size - 1) // size + ) + + +@router.get("/{device_id}", response_model=DeviceRead) +async def get_device( + device_id: int, + _: dict = Depends(get_current_user_payload), + session: AsyncSession = Depends(get_session), +) -> DeviceRead: + service = DeviceService(session) + return await service.get_device(device_id) + + +@router.post("", response_model=DeviceRead, status_code=201) +async def create_device( + body: DeviceCreate, + _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN, Role.MANAGER)), + session: AsyncSession = Depends(get_session), +) -> DeviceRead: + service = DeviceService(session) + return await service.create_device(body) + + +@router.patch("/{device_id}", response_model=DeviceRead) +async def update_device( + device_id: int, + body: DeviceUpdate, + _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN, Role.MANAGER)), + session: AsyncSession = Depends(get_session), +) -> DeviceRead: + service = DeviceService(session) + return await service.update_device(device_id, body) + + +@router.delete("/{device_id}", status_code=204) +async def delete_device( + device_id: int, + _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)), + session: AsyncSession = Depends(get_session), +) -> None: + service = DeviceService(session) + await service.delete_device(device_id) diff --git a/app/api/v1/endpoints/monitoring.py b/app/api/v1/endpoints/monitoring.py new file mode 100644 index 0000000..7813821 --- /dev/null +++ b/app/api/v1/endpoints/monitoring.py @@ -0,0 +1,61 @@ +from __future__ import annotations + +from fastapi import APIRouter, Depends, Query +from sqlalchemy.ext.asyncio import AsyncSession + +from app.api.deps import get_session +from app.core.constants import Role +from app.core.dependencies import get_current_user_id, require_role +from app.schemas.monitoring import AlertRead, AlertRuleCreate, 
AlertRuleRead, SystemHealthResponse +from app.services.monitoring_service import MonitoringService + +router = APIRouter(prefix="/monitoring", tags=["monitoring"]) + + +@router.get("/health", response_model=SystemHealthResponse) +async def system_health( + _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)), + session: AsyncSession = Depends(get_session), +) -> SystemHealthResponse: + service = MonitoringService(session) + return await service.get_system_health() + + +@router.get("/alerts", response_model=list[AlertRead]) +async def list_alerts( + skip: int = Query(0, ge=0), + limit: int = Query(50, ge=1, le=200), + _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)), + session: AsyncSession = Depends(get_session), +) -> list[AlertRead]: + service = MonitoringService(session) + return await service.list_active_alerts(skip=skip, limit=limit) + + +@router.post("/alerts/{alert_id}/acknowledge", response_model=AlertRead) +async def acknowledge_alert( + alert_id: int, + user_id: int = Depends(get_current_user_id), + session: AsyncSession = Depends(get_session), +) -> AlertRead: + service = MonitoringService(session) + return await service.acknowledge_alert(alert_id, user_id) + + +@router.get("/alert-rules", response_model=list[AlertRuleRead]) +async def list_alert_rules( + _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)), + session: AsyncSession = Depends(get_session), +) -> list[AlertRuleRead]: + service = MonitoringService(session) + return await service.list_alert_rules() + + +@router.post("/alert-rules", response_model=AlertRuleRead, status_code=201) +async def create_alert_rule( + body: AlertRuleCreate, + user_id: int = Depends(get_current_user_id), + session: AsyncSession = Depends(get_session), +) -> AlertRuleRead: + service = MonitoringService(session) + return await service.create_alert_rule(body, user_id) diff --git a/app/api/v1/endpoints/system.py b/app/api/v1/endpoints/system.py new file mode 100644 index 0000000..bb6538a --- /dev/null +++ 
b/app/api/v1/endpoints/system.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +from fastapi import APIRouter, Depends +from sqlalchemy.ext.asyncio import AsyncSession + +from app.api.deps import get_session +from app.core.config import settings +from app.core.constants import Role +from app.core.dependencies import require_role + +router = APIRouter(prefix="/system", tags=["system"]) + + +@router.get("/health") +async def health_check() -> dict: + return { + "status": "ok", + "service": settings.APP_NAME, + "version": "0.1.0", + } + + +@router.get("/info") +async def system_info( + _: dict = Depends(require_role(Role.SUPERADMIN)), +) -> dict: + return { + "app_name": settings.APP_NAME, + "environment": settings.APP_ENV, + "debug": settings.DEBUG, + "api_prefix": settings.API_V1_PREFIX, + } diff --git a/app/api/v1/endpoints/users.py b/app/api/v1/endpoints/users.py new file mode 100644 index 0000000..efe5b94 --- /dev/null +++ b/app/api/v1/endpoints/users.py @@ -0,0 +1,91 @@ +from __future__ import annotations + +from fastapi import APIRouter, Depends, Query +from sqlalchemy.ext.asyncio import AsyncSession + +from app.api.deps import get_session +from app.core.constants import Role +from app.core.dependencies import get_current_user_id, require_role +from app.schemas.common import PaginatedResponse +from app.schemas.user import UserCreate, UserRead, UserUpdate +from app.services.user_service import UserService + +router = APIRouter(prefix="/users", tags=["users"]) + + +@router.get("/me", response_model=UserRead) +async def get_me( + user_id: int = Depends(get_current_user_id), + session: AsyncSession = Depends(get_session), +) -> UserRead: + service = UserService(session) + return await service.get_user(user_id) + + +@router.patch("/me", response_model=UserRead) +async def update_me( + body: UserUpdate, + user_id: int = Depends(get_current_user_id), + session: AsyncSession = Depends(get_session), +) -> UserRead: + body.role = None + body.is_active = None + 
service = UserService(session) + return await service.update_user(user_id, body) + + +@router.get("", response_model=PaginatedResponse[UserRead]) +async def list_users( + page: int = Query(1, ge=1), + size: int = Query(20, ge=1, le=100), + _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)), + session: AsyncSession = Depends(get_session), +) -> PaginatedResponse[UserRead]: + service = UserService(session) + skip = (page - 1) * size + items = await service.list_users(skip=skip, limit=size) + total = await service.count_users() + return PaginatedResponse( + items=items, total=total, page=page, size=size, pages=(total + size - 1) // size + ) + + +@router.get("/{user_id}", response_model=UserRead) +async def get_user( + user_id: int, + _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)), + session: AsyncSession = Depends(get_session), +) -> UserRead: + service = UserService(session) + return await service.get_user(user_id) + + +@router.post("", response_model=UserRead, status_code=201) +async def create_user( + body: UserCreate, + _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)), + session: AsyncSession = Depends(get_session), +) -> UserRead: + service = UserService(session) + return await service.create_user(body) + + +@router.patch("/{user_id}", response_model=UserRead) +async def update_user( + user_id: int, + body: UserUpdate, + _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)), + session: AsyncSession = Depends(get_session), +) -> UserRead: + service = UserService(session) + return await service.update_user(user_id, body) + + +@router.delete("/{user_id}", status_code=204) +async def delete_user( + user_id: int, + _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)), + session: AsyncSession = Depends(get_session), +) -> None: + service = UserService(session) + await service.delete_user(user_id) diff --git a/app/api/v1/router.py b/app/api/v1/router.py new file mode 100644 index 0000000..caf3492 --- /dev/null +++ 
b/app/api/v1/router.py @@ -0,0 +1,13 @@ +from __future__ import annotations + +from fastapi import APIRouter + +from app.api.v1.endpoints import analytics, auth, devices, monitoring, system, users + +v1_router = APIRouter() +v1_router.include_router(system.router) +v1_router.include_router(auth.router) +v1_router.include_router(users.router) +v1_router.include_router(devices.router) +v1_router.include_router(monitoring.router) +v1_router.include_router(analytics.router) diff --git a/app/asgi.py b/app/asgi.py new file mode 100644 index 0000000..c7ca8a5 --- /dev/null +++ b/app/asgi.py @@ -0,0 +1,15 @@ +import socketio + +from app.communication.socketio.server import sio +from app.main import create_app + +# Import namespace handlers to register them +import app.communication.socketio.events # noqa: F401 +import app.communication.socketio.namespaces.device_ns # noqa: F401 +import app.communication.socketio.namespaces.monitoring_ns # noqa: F401 +import app.communication.socketio.namespaces.notification_ns # noqa: F401 + +fastapi_app = create_app() + +# Socket.IO wraps FastAPI as the outermost ASGI app +app = socketio.ASGIApp(sio, other_app=fastapi_app) diff --git a/app/communication/__init__.py b/app/communication/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/communication/external/__init__.py b/app/communication/external/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/communication/external/http_client.py b/app/communication/external/http_client.py new file mode 100644 index 0000000..530ccd1 --- /dev/null +++ b/app/communication/external/http_client.py @@ -0,0 +1,19 @@ +from __future__ import annotations + +import httpx + +_client: httpx.AsyncClient | None = None + + +async def get_http_client() -> httpx.AsyncClient: + global _client + if _client is None or _client.is_closed: + _client = httpx.AsyncClient(timeout=30.0) + return _client + + +async def close_http_client() -> None: + global _client + if _client and not 
_client.is_closed: + await _client.aclose() + _client = None diff --git a/app/communication/external/oauth_providers.py b/app/communication/external/oauth_providers.py new file mode 100644 index 0000000..b5b74b2 --- /dev/null +++ b/app/communication/external/oauth_providers.py @@ -0,0 +1,114 @@ +from __future__ import annotations + +from dataclasses import dataclass + +from app.communication.external.http_client import get_http_client +from app.core.config import settings + + +@dataclass +class OAuthUserInfo: + provider: str + provider_user_id: str + email: str + name: str + + +async def get_google_user_info(code: str, redirect_uri: str) -> OAuthUserInfo: + client = await get_http_client() + + token_resp = await client.post( + "https://oauth2.googleapis.com/token", + data={ + "code": code, + "client_id": settings.GOOGLE_CLIENT_ID, + "client_secret": settings.GOOGLE_CLIENT_SECRET, + "redirect_uri": redirect_uri, + "grant_type": "authorization_code", + }, + ) + token_resp.raise_for_status() + access_token = token_resp.json()["access_token"] + + user_resp = await client.get( + "https://www.googleapis.com/oauth2/v2/userinfo", + headers={"Authorization": f"Bearer {access_token}"}, + ) + user_resp.raise_for_status() + data = user_resp.json() + + return OAuthUserInfo( + provider="google", + provider_user_id=data["id"], + email=data["email"], + name=data.get("name", ""), + ) + + +async def get_kakao_user_info(code: str, redirect_uri: str) -> OAuthUserInfo: + client = await get_http_client() + + token_resp = await client.post( + "https://kauth.kakao.com/oauth/token", + data={ + "grant_type": "authorization_code", + "client_id": settings.KAKAO_CLIENT_ID, + "client_secret": settings.KAKAO_CLIENT_SECRET, + "redirect_uri": redirect_uri, + "code": code, + }, + ) + token_resp.raise_for_status() + access_token = token_resp.json()["access_token"] + + user_resp = await client.get( + "https://kapi.kakao.com/v2/user/me", + headers={"Authorization": f"Bearer {access_token}"}, + ) + 
user_resp.raise_for_status() + data = user_resp.json() + + account = data.get("kakao_account", {}) + return OAuthUserInfo( + provider="kakao", + provider_user_id=str(data["id"]), + email=account.get("email", ""), + name=account.get("profile", {}).get("nickname", ""), + ) + + +async def get_naver_user_info(code: str, redirect_uri: str) -> OAuthUserInfo: + client = await get_http_client() + + token_resp = await client.post( + "https://nid.naver.com/oauth2.0/token", + data={ + "grant_type": "authorization_code", + "client_id": settings.NAVER_CLIENT_ID, + "client_secret": settings.NAVER_CLIENT_SECRET, + "code": code, + }, + ) + token_resp.raise_for_status() + access_token = token_resp.json()["access_token"] + + user_resp = await client.get( + "https://openapi.naver.com/v1/nid/me", + headers={"Authorization": f"Bearer {access_token}"}, + ) + user_resp.raise_for_status() + data = user_resp.json()["response"] + + return OAuthUserInfo( + provider="naver", + provider_user_id=data["id"], + email=data.get("email", ""), + name=data.get("name", ""), + ) + + +OAUTH_PROVIDERS = { + "google": get_google_user_info, + "kakao": get_kakao_user_info, + "naver": get_naver_user_info, +} diff --git a/app/communication/mqtt/__init__.py b/app/communication/mqtt/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/communication/mqtt/client.py b/app/communication/mqtt/client.py new file mode 100644 index 0000000..f0e50ed --- /dev/null +++ b/app/communication/mqtt/client.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +from fastapi_mqtt import FastMQTT, MQTTConfig + +from app.core.config import settings +from app.communication.mqtt.topics import SUBSCRIBE_TOPICS + +mqtt_config = MQTTConfig( + host=settings.MQTT_HOST, + port=settings.MQTT_PORT, + username=settings.MQTT_USERNAME or None, + password=settings.MQTT_PASSWORD or None, + keepalive=60, +) + +mqtt = FastMQTT(config=mqtt_config) + + +async def mqtt_startup() -> None: + await mqtt.mqtt_startup() + for topic 
in SUBSCRIBE_TOPICS: + mqtt.client.subscribe(topic) + + +async def mqtt_shutdown() -> None: + await mqtt.mqtt_shutdown() diff --git a/app/communication/mqtt/handlers.py b/app/communication/mqtt/handlers.py new file mode 100644 index 0000000..30298f0 --- /dev/null +++ b/app/communication/mqtt/handlers.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +import json + +import structlog + +from app.communication.mqtt.client import mqtt +from app.models.mongodb.device_log import DeviceLog +from app.models.mongodb.telemetry import TelemetryData + +logger = structlog.get_logger("mqtt") + + +def _extract_device_uid(topic: str) -> str: + parts = topic.split("/") + return parts[1] if len(parts) >= 3 else "unknown" + + +@mqtt.on_message() +async def on_message(client, topic: str, payload: bytes, qos: int, properties) -> None: # type: ignore[no-untyped-def] + device_uid = _extract_device_uid(topic) + + try: + data = json.loads(payload.decode()) + except (json.JSONDecodeError, UnicodeDecodeError): + logger.warning("invalid_mqtt_payload", topic=topic) + return + + if "/telemetry" in topic: + await _handle_telemetry(device_uid, data) + elif "/status" in topic: + await _handle_status(device_uid, data) + elif "/log" in topic: + await _handle_log(device_uid, data) + elif "/response" in topic: + await _handle_response(device_uid, data) + + +async def _handle_telemetry(device_uid: str, data: dict) -> None: + telemetry = TelemetryData(device_id=device_uid, metrics=data) + await telemetry.insert() + + # Broadcast via Socket.IO + from app.communication.socketio.server import sio + + await sio.emit( + "telemetry", + {"device_uid": device_uid, "data": data}, + namespace="/monitoring", + ) + logger.debug("telemetry_saved", device_uid=device_uid) + + +async def _handle_status(device_uid: str, data: dict) -> None: + log = DeviceLog(device_id=device_uid, event_type="status_change", payload=data) + await log.insert() + + from app.communication.socketio.server import sio + + await 
async def _publish_json(topic_template: str, device_uid: str, payload: dict) -> None:
    """Serialize *payload* to JSON and publish it on the per-device topic."""
    topic = topic_template.format(device_uid=device_uid)
    mqtt.client.publish(topic, json.dumps(payload))


async def publish_command(device_uid: str, command: dict) -> None:
    """Publish a command message to one device (devices/{uid}/command)."""
    await _publish_json(DEVICE_COMMAND, device_uid, command)


async def publish_config(device_uid: str, config: dict) -> None:
    """Publish a configuration update to one device (devices/{uid}/config)."""
    await _publish_json(DEVICE_CONFIG, device_uid, config)


async def publish_ota(device_uid: str, ota_info: dict) -> None:
    """Publish OTA firmware-update info to one device (devices/{uid}/ota)."""
    await _publish_json(DEVICE_OTA, device_uid, ota_info)
@sio.event
async def connect(sid: str, environ: dict) -> None:
    """Default-namespace connect hook: log the new client's session id."""
    logger.info("client_connected", sid=sid)


@sio.event
async def disconnect(sid: str) -> None:
    """Default-namespace disconnect hook: log the departing session id."""
    logger.info("client_disconnected", sid=sid)
logger.info("device_ns_connected", sid=sid) + + +@sio.on("disconnect", namespace="/device") +async def device_disconnect(sid: str) -> None: + logger.info("device_ns_disconnected", sid=sid) + + +@sio.on("send_command", namespace="/device") +async def send_command(sid: str, data: dict) -> None: + device_uid = data.get("device_uid") + command = data.get("command") + if device_uid and command: + from app.communication.mqtt.publisher import publish_command + + await publish_command(device_uid, command) + logger.info("command_sent", device_uid=device_uid) diff --git a/app/communication/socketio/namespaces/monitoring_ns.py b/app/communication/socketio/namespaces/monitoring_ns.py new file mode 100644 index 0000000..94fbf7f --- /dev/null +++ b/app/communication/socketio/namespaces/monitoring_ns.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +import structlog + +from app.communication.socketio.server import sio + +logger = structlog.get_logger("socketio.monitoring") + + +@sio.on("connect", namespace="/monitoring") +async def monitoring_connect(sid: str, environ: dict) -> None: + logger.info("monitoring_connected", sid=sid) + + +@sio.on("disconnect", namespace="/monitoring") +async def monitoring_disconnect(sid: str) -> None: + logger.info("monitoring_disconnected", sid=sid) + + +@sio.on("subscribe_device", namespace="/monitoring") +async def subscribe_device(sid: str, data: dict) -> None: + device_uid = data.get("device_uid") + if device_uid: + await sio.enter_room(sid, f"device:{device_uid}", namespace="/monitoring") + logger.info("subscribed_device", sid=sid, device_uid=device_uid) + + +@sio.on("unsubscribe_device", namespace="/monitoring") +async def unsubscribe_device(sid: str, data: dict) -> None: + device_uid = data.get("device_uid") + if device_uid: + await sio.leave_room(sid, f"device:{device_uid}", namespace="/monitoring") diff --git a/app/communication/socketio/namespaces/notification_ns.py b/app/communication/socketio/namespaces/notification_ns.py new 
@sio.on("join_user_room", namespace="/notification")
async def join_user_room(sid: str, data: dict) -> None:
    """Place the client into its per-user room (``user:{user_id}``).

    Targeted notifications are emitted to that room.  A missing/falsy
    ``user_id`` is logged instead of being silently ignored.
    """
    user_id = data.get("user_id")
    if not user_id:
        logger.warning("join_user_room_missing_user_id", sid=sid)
        return
    await sio.enter_room(sid, f"user:{user_id}", namespace="/notification")
    logger.info("joined_user_room", sid=sid, user_id=user_id)
────────────────────────────────── + APP_NAME: str = "core-api" + APP_ENV: str = "development" + DEBUG: bool = True + SECRET_KEY: str = "change-me-to-a-random-secret-key" + API_V1_PREFIX: str = "/api/v1" + + # ── MariaDB ────────────────────────────────────── + MARIADB_HOST: str = "127.0.0.1" + MARIADB_PORT: int = 3306 + MARIADB_USER: str = "root" + MARIADB_PASSWORD: str = "changeme" + MARIADB_DATABASE: str = "core_api" + + @property + def MARIADB_DSN(self) -> str: + return ( + f"mysql+aiomysql://{self.MARIADB_USER}:{self.MARIADB_PASSWORD}" + f"@{self.MARIADB_HOST}:{self.MARIADB_PORT}/{self.MARIADB_DATABASE}" + ) + + @property + def MARIADB_DSN_SYNC(self) -> str: + return ( + f"mysql+pymysql://{self.MARIADB_USER}:{self.MARIADB_PASSWORD}" + f"@{self.MARIADB_HOST}:{self.MARIADB_PORT}/{self.MARIADB_DATABASE}" + ) + + # ── MongoDB ────────────────────────────────────── + MONGODB_URL: str = "mongodb://127.0.0.1:27017" + MONGODB_DATABASE: str = "core_api" + + # ── Redis ──────────────────────────────────────── + REDIS_URL: str = "redis://127.0.0.1:6379/0" + + # ── JWT ────────────────────────────────────────── + JWT_SECRET_KEY: str = "change-me-jwt-secret" + JWT_ALGORITHM: str = "HS256" + JWT_ACCESS_TOKEN_EXPIRE_MINUTES: int = 30 + JWT_REFRESH_TOKEN_EXPIRE_DAYS: int = 7 + + # ── MQTT ───────────────────────────────────────── + MQTT_HOST: str = "127.0.0.1" + MQTT_PORT: int = 1883 + MQTT_USERNAME: str = "" + MQTT_PASSWORD: str = "" + + # ── Celery ─────────────────────────────────────── + CELERY_BROKER_URL: str = "redis://127.0.0.1:6379/1" + CELERY_RESULT_BACKEND: str = "redis://127.0.0.1:6379/2" + + # ── CORS ───────────────────────────────────────── + CORS_ORIGINS: list[str] = ["http://localhost:3000", "http://localhost:8080"] + + @field_validator("CORS_ORIGINS", mode="before") + @classmethod + def assemble_cors_origins(cls, v: str | list[str]) -> list[str]: + if isinstance(v, str): + return [i.strip() for i in v.strip("[]").split(",") if i.strip()] + return v + + # ── 
class Role:
    """String identifiers for user roles, plus convenience groupings."""

    SUPERADMIN = "superadmin"
    ADMIN = "admin"
    MANAGER = "manager"
    USER = "user"
    DEVICE = "device"

    # Ordered from most to least privileged; the groupings below are
    # prefixes of this list.
    ALL = [SUPERADMIN, ADMIN, MANAGER, USER, DEVICE]
    ADMIN_ROLES = ALL[:2]
    MANAGEMENT_ROLES = ALL[:3]


class DeviceStatus:
    """Lifecycle states a device can report."""

    ONLINE = "online"
    OFFLINE = "offline"
    ERROR = "error"
    MAINTENANCE = "maintenance"


class AlertSeverity:
    """Severity levels attached to alert rules and fired alerts."""

    CRITICAL = "critical"
    WARNING = "warning"
    INFO = "info"


class TokenType:
    """JWT ``type`` claim values distinguishing access from refresh tokens."""

    ACCESS = "access"
    REFRESH = "refresh"
payload.get("type") != TokenType.ACCESS: + raise UnauthorizedException("Invalid token type") + return payload + + +async def get_current_user_id( + payload: dict = Depends(get_current_user_payload), +) -> int: + return int(payload["sub"]) + + +def require_role(*allowed_roles: str): + async def _check(payload: dict = Depends(get_current_user_payload)) -> dict: + if payload.get("role") not in allowed_roles: + raise ForbiddenException("Insufficient permissions") + return payload + + return _check diff --git a/app/core/error_handlers.py b/app/core/error_handlers.py new file mode 100644 index 0000000..a6fceb6 --- /dev/null +++ b/app/core/error_handlers.py @@ -0,0 +1,22 @@ +from __future__ import annotations + +from fastapi import FastAPI, Request +from fastapi.responses import JSONResponse + +from app.core.exceptions import AppException + + +def register_error_handlers(app: FastAPI) -> None: + @app.exception_handler(AppException) + async def app_exception_handler(request: Request, exc: AppException) -> JSONResponse: + return JSONResponse( + status_code=exc.status_code, + content={"detail": exc.detail}, + ) + + @app.exception_handler(Exception) + async def unhandled_exception_handler(request: Request, exc: Exception) -> JSONResponse: + return JSONResponse( + status_code=500, + content={"detail": "Internal server error"}, + ) diff --git a/app/core/exceptions.py b/app/core/exceptions.py new file mode 100644 index 0000000..1e3fe46 --- /dev/null +++ b/app/core/exceptions.py @@ -0,0 +1,32 @@ +from __future__ import annotations + + +class AppException(Exception): + def __init__(self, status_code: int, detail: str): + self.status_code = status_code + self.detail = detail + + +class NotFoundException(AppException): + def __init__(self, detail: str = "Resource not found"): + super().__init__(status_code=404, detail=detail) + + +class UnauthorizedException(AppException): + def __init__(self, detail: str = "Not authenticated"): + super().__init__(status_code=401, detail=detail) + + 
def setup_logging() -> None:
    """Configure structlog on top of stdlib logging.

    Renders human-readable console output when DEBUG is on and JSON
    lines otherwise.  Unknown LOG_LEVEL values fall back to INFO.
    Call once at application startup, before any logging occurs.
    """
    log_level = getattr(logging, settings.LOG_LEVEL.upper(), logging.INFO)

    structlog.configure(
        processors=[
            # Processor order matters: context merge and filtering first,
            # rendering last.
            structlog.contextvars.merge_contextvars,
            structlog.stdlib.filter_by_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            # Dev consoles get colored key=value output; production gets JSON.
            structlog.dev.ConsoleRenderer()
            if settings.DEBUG
            else structlog.processors.JSONRenderer(),
        ],
        wrapper_class=structlog.stdlib.BoundLogger,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )

    # Route stdlib logging to stdout; structlog does the formatting.
    logging.basicConfig(
        format="%(message)s",
        stream=sys.stdout,
        level=log_level,
    )


def get_logger(name: str) -> structlog.stdlib.BoundLogger:
    """Return a named structlog logger (thin wrapper for call sites)."""
    return structlog.get_logger(name)
is_admin(role: str) -> bool: + return role in Role.ADMIN_ROLES + + +def is_management(role: str) -> bool: + return role in Role.MANAGEMENT_ROLES + + +def can_manage_user(actor_role: str, target_role: str) -> bool: + hierarchy = {Role.SUPERADMIN: 4, Role.ADMIN: 3, Role.MANAGER: 2, Role.USER: 1, Role.DEVICE: 0} + return hierarchy.get(actor_role, 0) > hierarchy.get(target_role, 0) diff --git a/app/core/security.py b/app/core/security.py new file mode 100644 index 0000000..d435f5c --- /dev/null +++ b/app/core/security.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +from datetime import datetime, timedelta + +from jose import JWTError, jwt +from passlib.context import CryptContext + +from app.core.config import settings +from app.core.constants import TokenType + +pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") + + +def hash_password(password: str) -> str: + return pwd_context.hash(password) + + +def verify_password(plain_password: str, hashed_password: str) -> bool: + return pwd_context.verify(plain_password, hashed_password) + + +def create_access_token(subject: int | str, role: str) -> str: + expire = datetime.utcnow() + timedelta(minutes=settings.JWT_ACCESS_TOKEN_EXPIRE_MINUTES) + payload = { + "sub": str(subject), + "role": role, + "type": TokenType.ACCESS, + "exp": expire, + } + return jwt.encode(payload, settings.JWT_SECRET_KEY, algorithm=settings.JWT_ALGORITHM) + + +def create_refresh_token(subject: int | str) -> str: + expire = datetime.utcnow() + timedelta(days=settings.JWT_REFRESH_TOKEN_EXPIRE_DAYS) + payload = { + "sub": str(subject), + "type": TokenType.REFRESH, + "exp": expire, + } + return jwt.encode(payload, settings.JWT_SECRET_KEY, algorithm=settings.JWT_ALGORITHM) + + +def decode_token(token: str) -> dict | None: + try: + return jwt.decode(token, settings.JWT_SECRET_KEY, algorithms=[settings.JWT_ALGORITHM]) + except JWTError: + return None diff --git a/app/db/__init__.py b/app/db/__init__.py new file mode 100644 index 
0000000..e69de29 diff --git a/app/db/base.py b/app/db/base.py new file mode 100644 index 0000000..68e3d76 --- /dev/null +++ b/app/db/base.py @@ -0,0 +1,22 @@ +from __future__ import annotations + +from datetime import datetime + +from sqlalchemy import func +from sqlmodel import Field, SQLModel + + +class TimestampMixin(SQLModel): + created_at: datetime = Field( + default_factory=datetime.utcnow, + sa_column_kwargs={"server_default": func.now()}, + ) + updated_at: datetime = Field( + default_factory=datetime.utcnow, + sa_column_kwargs={"server_default": func.now(), "onupdate": func.now()}, + ) + + +class SoftDeleteMixin(SQLModel): + is_deleted: bool = Field(default=False) + deleted_at: datetime | None = Field(default=None) diff --git a/app/db/mariadb.py b/app/db/mariadb.py new file mode 100644 index 0000000..1649d3f --- /dev/null +++ b/app/db/mariadb.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +from collections.abc import AsyncGenerator + +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker +from sqlmodel import SQLModel + +from app.core.config import settings + +async_engine = create_async_engine( + settings.MARIADB_DSN, + echo=settings.DEBUG, + pool_pre_ping=True, + pool_size=10, + max_overflow=20, +) + +AsyncSessionLocal = sessionmaker( + bind=async_engine, + class_=AsyncSession, + expire_on_commit=False, +) + + +async def init_mariadb() -> None: + async with async_engine.begin() as conn: + await conn.run_sync(SQLModel.metadata.create_all) + + +async def close_mariadb() -> None: + await async_engine.dispose() + + +async def get_db() -> AsyncGenerator[AsyncSession, None]: + async with AsyncSessionLocal() as session: + try: + yield session + await session.commit() + except Exception: + await session.rollback() + raise diff --git a/app/db/mongodb.py b/app/db/mongodb.py new file mode 100644 index 0000000..7905332 --- /dev/null +++ b/app/db/mongodb.py @@ -0,0 +1,37 @@ +from __future__ import 
from __future__ import annotations

from redis.asyncio import Redis, from_url

from app.core.config import settings

# Module-level singleton, initialised during application startup.
redis_client: Redis | None = None


async def init_redis() -> None:
    """Create the shared asyncio Redis client from settings.REDIS_URL."""
    global redis_client
    redis_client = from_url(
        settings.REDIS_URL,
        encoding="utf-8",
        decode_responses=True,
    )


async def close_redis() -> None:
    """Close the client and reset the singleton.

    Resetting to None mirrors the http_client teardown and makes
    "used after shutdown" fail loudly instead of on a closed client.
    """
    global redis_client
    if redis_client:
        await redis_client.close()
        redis_client = None


def get_redis() -> Redis:
    """Return the initialised client.

    Raises RuntimeError (not assert, which is stripped under ``-O``)
    when startup has not run.
    """
    if redis_client is None:
        raise RuntimeError("Redis not initialized")
    return redis_client
def create_app() -> FastAPI:
    """Build and return the configured FastAPI application.

    Wires middleware, error handlers, and the v1 router.  Sub-module
    imports are deferred into the factory body — presumably to avoid
    import-time side effects when ``app.main`` is imported (confirm).
    """
    app = FastAPI(
        title=settings.APP_NAME,
        version="0.1.0",
        docs_url="/docs",
        redoc_url="/redoc",
        lifespan=lifespan,
    )

    # ── Middleware (order matters: last added = first executed) ───
    from app.middleware.cors import add_cors_middleware
    from app.middleware.request_id import RequestIDMiddleware
    from app.middleware.request_logging import RequestLoggingMiddleware

    add_cors_middleware(app)
    app.add_middleware(RequestLoggingMiddleware)
    # Added last, so it runs first: the request ID is bound to the log
    # context before the logging middleware emits its record.
    app.add_middleware(RequestIDMiddleware)

    # ── Error handlers ─────────────────────────────
    from app.core.error_handlers import register_error_handlers

    register_error_handlers(app)

    # ── Routers ────────────────────────────────────
    from app.api.v1.router import v1_router

    app.include_router(v1_router, prefix=settings.API_V1_PREFIX)

    return app
class RateLimitMiddleware(BaseHTTPMiddleware):
    """Fixed-window, per-client-IP rate limiting backed by Redis.

    Fails open: when Redis is unavailable, requests pass through.  The
    previous version swallowed the failure silently; now the outage is
    logged so it can be noticed.
    """

    def __init__(self, app, max_requests: int = 100, window_seconds: int = 60):  # type: ignore[no-untyped-def]
        super().__init__(app)
        self.max_requests = max_requests
        self.window_seconds = window_seconds

    async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
        # Never throttle the interactive API docs.
        if request.url.path.startswith(("/docs", "/redoc")):
            return await call_next(request)

        client_ip = request.client.host if request.client else "unknown"
        key = f"rate_limit:{client_ip}"

        try:
            redis = get_redis()
            current = await redis.incr(key)
            if current == 1:
                # First hit of the window starts the TTL.
                await redis.expire(key, self.window_seconds)

            if current > self.max_requests:
                return JSONResponse(
                    status_code=429,
                    content={"detail": "Too many requests"},
                )
        except Exception:
            # Deliberate fail-open, but record the backend failure
            # instead of hiding it.
            import logging

            logging.getLogger("app.rate_limit").warning(
                "rate limiter backend unavailable; allowing request", exc_info=True
            )

        return await call_next(request)
class RequestIDMiddleware(BaseHTTPMiddleware):
    """Bind a per-request ID into the structlog context and echo it back.

    Honors an inbound ``X-Request-ID`` header (cross-service tracing)
    and generates a UUID4 otherwise.
    """

    async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
        # Clear first, per structlog's contextvars guidance, so a recycled
        # task never leaks the previous request's bound context into this
        # request's log lines.
        structlog.contextvars.clear_contextvars()
        request_id = request.headers.get("X-Request-ID", str(uuid.uuid4()))
        structlog.contextvars.bind_contextvars(request_id=request_id)
        response = await call_next(request)
        response.headers["X-Request-ID"] = request_id
        return response
class OAuthAccount(TimestampMixin, SQLModel, table=True):
    """Links a local user to an external OAuth identity."""

    __tablename__ = "oauth_accounts"

    id: int | None = Field(default=None, primary_key=True)
    user_id: int = Field(foreign_key="users.id", index=True)
    # Provider key — matches OAUTH_PROVIDERS ("google", "kakao", "naver").
    provider: str = Field(max_length=50)
    # The user's ID within the provider's namespace.
    provider_user_id: str = Field(max_length=255)
    # Provider-issued tokens.  NOTE(review): stored in plaintext — consider
    # encrypting at rest.
    access_token: str = Field(default="", max_length=500)
    refresh_token: str = Field(default="", max_length=500)
    expires_at: datetime | None = Field(default=None)
class AlertRule(TimestampMixin, SQLModel, table=True):
    """Threshold-based rule; fields suggest it is evaluated against a
    named telemetry metric (evaluation logic lives elsewhere)."""

    __tablename__ = "alert_rules"

    id: int | None = Field(default=None, primary_key=True)
    name: str = Field(max_length=100)
    description: str = Field(default="", max_length=500)
    # Name of the metric this rule watches.
    metric: str = Field(max_length=100)
    # Comparison operator stored as a string; semantics enforced by the
    # evaluator — TODO confirm accepted values.
    condition: str = Field(max_length=50)
    threshold: float
    severity: str = Field(default=AlertSeverity.WARNING, max_length=20)
    is_enabled: bool = Field(default=True)
    # Optional scoping to a device group; presumably NULL means global —
    # verify against the evaluator.
    device_group_id: int | None = Field(default=None, foreign_key="device_groups.id")
    created_by: int | None = Field(default=None, foreign_key="users.id")
is_acknowledged: bool = Field(default=False) + acknowledged_by: int | None = Field(default=None, foreign_key="users.id") + acknowledged_at: datetime | None = Field(default=None) diff --git a/app/models/mariadb/system.py b/app/models/mariadb/system.py new file mode 100644 index 0000000..da225a4 --- /dev/null +++ b/app/models/mariadb/system.py @@ -0,0 +1,27 @@ +from __future__ import annotations + +from sqlmodel import Field, SQLModel + +from app.db.base import TimestampMixin + + +class SystemConfig(TimestampMixin, SQLModel, table=True): + __tablename__ = "system_configs" + + id: int | None = Field(default=None, primary_key=True) + key: str = Field(max_length=100, unique=True, index=True) + value: str = Field(default="", max_length=2000) + description: str = Field(default="", max_length=500) + is_secret: bool = Field(default=False) + + +class AuditLog(TimestampMixin, SQLModel, table=True): + __tablename__ = "audit_logs" + + id: int | None = Field(default=None, primary_key=True) + user_id: int | None = Field(default=None, foreign_key="users.id") + action: str = Field(max_length=100) + resource_type: str = Field(max_length=50) + resource_id: str = Field(default="", max_length=50) + details: str = Field(default="{}", max_length=2000) + ip_address: str = Field(default="", max_length=45) diff --git a/app/models/mariadb/user.py b/app/models/mariadb/user.py new file mode 100644 index 0000000..e9dc1da --- /dev/null +++ b/app/models/mariadb/user.py @@ -0,0 +1,44 @@ +from __future__ import annotations + +from datetime import datetime + +from sqlmodel import Field, Relationship, SQLModel + +from app.core.constants import Role +from app.db.base import SoftDeleteMixin, TimestampMixin + + +class User(TimestampMixin, SoftDeleteMixin, SQLModel, table=True): + __tablename__ = "users" + + id: int | None = Field(default=None, primary_key=True) + email: str = Field(max_length=255, unique=True, index=True) + hashed_password: str = Field(max_length=255) + role: str = 
Field(default=Role.USER, max_length=20) + is_active: bool = Field(default=True) + is_verified: bool = Field(default=False) + last_login_at: datetime | None = Field(default=None) + + # Relationships + profile: UserProfile | None = Relationship(back_populates="user") + refresh_tokens: list[RefreshToken] = Relationship(back_populates="user") + + +class UserProfile(TimestampMixin, SQLModel, table=True): + __tablename__ = "user_profiles" + + id: int | None = Field(default=None, primary_key=True) + user_id: int = Field(foreign_key="users.id", unique=True, index=True) + full_name: str = Field(default="", max_length=100) + phone: str = Field(default="", max_length=20) + organization: str = Field(default="", max_length=100) + avatar_url: str = Field(default="", max_length=500) + + # Relationships + user: User | None = Relationship(back_populates="profile") + + +# Forward reference resolution +from app.models.mariadb.auth import RefreshToken # noqa: E402 + +User.model_rebuild() diff --git a/app/models/mongodb/__init__.py b/app/models/mongodb/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/models/mongodb/analytics_result.py b/app/models/mongodb/analytics_result.py new file mode 100644 index 0000000..c909ef5 --- /dev/null +++ b/app/models/mongodb/analytics_result.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from datetime import datetime + +from beanie import Document +from pydantic import Field + + +class AnalyticsResult(Document): + analysis_type: str + parameters: dict = Field(default_factory=dict) + result: dict = Field(default_factory=dict) + device_id: str | None = None + period_start: datetime | None = None + period_end: datetime | None = None + created_at: datetime = Field(default_factory=datetime.utcnow) + + class Settings: + name = "analytics_results" + indexes = [ + "analysis_type", + "device_id", + [("created_at", -1)], + ] diff --git a/app/models/mongodb/device_log.py b/app/models/mongodb/device_log.py new file mode 100644 
index 0000000..76788c9 --- /dev/null +++ b/app/models/mongodb/device_log.py @@ -0,0 +1,27 @@ +from __future__ import annotations + +from datetime import datetime + +from beanie import Document +from pydantic import Field + + +class DeviceLog(Document): + device_id: str + event_type: str + payload: dict = Field(default_factory=dict) + ip_address: str | None = None + timestamp: datetime = Field(default_factory=datetime.utcnow) + + class Settings: + name = "device_logs" + indexes = [ + "device_id", + "event_type", + [("timestamp", -1)], + ] + # TTL: 90일 후 자동 삭제 + timeseries = { + "timeField": "timestamp", + "expireAfterSeconds": 90 * 24 * 3600, + } diff --git a/app/models/mongodb/notification.py b/app/models/mongodb/notification.py new file mode 100644 index 0000000..0b1d866 --- /dev/null +++ b/app/models/mongodb/notification.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from datetime import datetime + +from beanie import Document +from pydantic import Field + + +class Notification(Document): + user_id: int + title: str + message: str + notification_type: str = "info" + is_read: bool = False + read_at: datetime | None = None + created_at: datetime = Field(default_factory=datetime.utcnow) + + class Settings: + name = "notifications" + indexes = [ + "user_id", + [("user_id", 1), ("is_read", 1)], + [("created_at", -1)], + ] diff --git a/app/models/mongodb/telemetry.py b/app/models/mongodb/telemetry.py new file mode 100644 index 0000000..b629a5d --- /dev/null +++ b/app/models/mongodb/telemetry.py @@ -0,0 +1,20 @@ +from __future__ import annotations + +from datetime import datetime + +from beanie import Document +from pydantic import Field + + +class TelemetryData(Document): + device_id: str + metrics: dict = Field(default_factory=dict) + timestamp: datetime = Field(default_factory=datetime.utcnow) + + class Settings: + name = "telemetry_data" + indexes = [ + "device_id", + [("timestamp", -1)], + [("device_id", 1), ("timestamp", -1)], + ] diff --git 
a/app/processing/__init__.py b/app/processing/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/processing/analyzers/__init__.py b/app/processing/analyzers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/processing/analyzers/device_analyzer.py b/app/processing/analyzers/device_analyzer.py new file mode 100644 index 0000000..a3891f0 --- /dev/null +++ b/app/processing/analyzers/device_analyzer.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +from datetime import datetime + +from app.models.mongodb.device_log import DeviceLog + + +async def analyze_device_status( + device_id: str, start: datetime, end: datetime +) -> dict: + """Analyze device status changes over a period.""" + logs = await ( + DeviceLog.find( + DeviceLog.device_id == device_id, + DeviceLog.event_type == "status_change", + DeviceLog.timestamp >= start, + DeviceLog.timestamp <= end, + ) + .sort("+timestamp") + .to_list() + ) + + status_counts: dict[str, int] = {} + for log in logs: + status = log.payload.get("status", "unknown") + status_counts[status] = status_counts.get(status, 0) + 1 + + total_events = len(logs) + uptime_events = status_counts.get("online", 0) + uptime_ratio = uptime_events / total_events if total_events > 0 else 0.0 + + return { + "total_events": total_events, + "status_counts": status_counts, + "uptime_ratio": round(uptime_ratio, 4), + "period": {"start": start.isoformat(), "end": end.isoformat()}, + } diff --git a/app/processing/analyzers/trend_analyzer.py b/app/processing/analyzers/trend_analyzer.py new file mode 100644 index 0000000..42f2e0c --- /dev/null +++ b/app/processing/analyzers/trend_analyzer.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +from datetime import datetime + +import numpy as np + +from app.models.mongodb.telemetry import TelemetryData + + +async def analyze_trend( + device_id: str, start: datetime, end: datetime +) -> dict: + """Analyze telemetry data trends using linear regression.""" + 
docs = await ( + TelemetryData.find( + TelemetryData.device_id == device_id, + TelemetryData.timestamp >= start, + TelemetryData.timestamp <= end, + ) + .sort("+timestamp") + .to_list() + ) + + if len(docs) < 2: + return {"status": "insufficient_data", "count": len(docs)} + + timestamps = np.array([d.timestamp.timestamp() for d in docs]) + values = np.array([d.metrics.get("value", 0) for d in docs], dtype=float) + + # Normalize timestamps + t_norm = timestamps - timestamps[0] + + # Linear regression + coeffs = np.polyfit(t_norm, values, 1) + slope = float(coeffs[0]) + + return { + "count": len(docs), + "mean": float(np.mean(values)), + "std": float(np.std(values)), + "min": float(np.min(values)), + "max": float(np.max(values)), + "slope": slope, + "trend": "increasing" if slope > 0.001 else "decreasing" if slope < -0.001 else "stable", + } diff --git a/app/processing/pipelines/__init__.py b/app/processing/pipelines/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/processing/pipelines/report_pipeline.py b/app/processing/pipelines/report_pipeline.py new file mode 100644 index 0000000..a0d45a0 --- /dev/null +++ b/app/processing/pipelines/report_pipeline.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +from datetime import datetime + +from app.models.mongodb.analytics_result import AnalyticsResult +from app.processing.analyzers.device_analyzer import analyze_device_status +from app.processing.analyzers.trend_analyzer import analyze_trend + + +async def generate_device_report( + device_id: str, start: datetime, end: datetime +) -> AnalyticsResult: + """Generate a comprehensive device report.""" + status_report = await analyze_device_status(device_id, start, end) + trend_report = await analyze_trend(device_id, start, end) + + result = AnalyticsResult( + analysis_type="device_report", + device_id=device_id, + parameters={"start": start.isoformat(), "end": end.isoformat()}, + result={ + "status": status_report, + "trends": trend_report, + 
}, + period_start=start, + period_end=end, + ) + await result.insert() + return result diff --git a/app/processing/pipelines/telemetry_pipeline.py b/app/processing/pipelines/telemetry_pipeline.py new file mode 100644 index 0000000..8b743ab --- /dev/null +++ b/app/processing/pipelines/telemetry_pipeline.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +from datetime import datetime + +import polars as pl + +from app.models.mongodb.telemetry import TelemetryData + + +async def aggregate_telemetry( + device_id: str, + start: datetime, + end: datetime, + interval: str = "1h", +) -> pl.DataFrame: + """Aggregate telemetry data for a device over a time range.""" + docs = await ( + TelemetryData.find( + TelemetryData.device_id == device_id, + TelemetryData.timestamp >= start, + TelemetryData.timestamp <= end, + ) + .sort("+timestamp") + .to_list() + ) + + if not docs: + return pl.DataFrame() + + records = [ + {"timestamp": d.timestamp, "device_id": d.device_id, **d.metrics} + for d in docs + ] + + df = pl.DataFrame(records) + return df.sort("timestamp").group_by_dynamic("timestamp", every=interval).agg( + pl.all().exclude("timestamp", "device_id").mean() + ) + + +async def get_latest_telemetry(device_id: str, limit: int = 100) -> pl.DataFrame: + """Get latest telemetry records as a Polars DataFrame.""" + docs = await ( + TelemetryData.find(TelemetryData.device_id == device_id) + .sort("-timestamp") + .limit(limit) + .to_list() + ) + + if not docs: + return pl.DataFrame() + + records = [ + {"timestamp": d.timestamp, "device_id": d.device_id, **d.metrics} + for d in docs + ] + return pl.DataFrame(records) diff --git a/app/processing/utils/__init__.py b/app/processing/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/processing/utils/dataframe_utils.py b/app/processing/utils/dataframe_utils.py new file mode 100644 index 0000000..3a5a4db --- /dev/null +++ b/app/processing/utils/dataframe_utils.py @@ -0,0 +1,26 @@ +from __future__ import 
annotations + +from datetime import datetime + +import polars as pl + + +def filter_time_range( + df: pl.DataFrame, column: str, start: datetime, end: datetime +) -> pl.DataFrame: + return df.filter( + (pl.col(column) >= start) & (pl.col(column) <= end) + ) + + +def resample(df: pl.DataFrame, time_column: str, interval: str) -> pl.DataFrame: + numeric_cols = [ + c for c in df.columns if c != time_column and df[c].dtype.is_numeric() + ] + return df.sort(time_column).group_by_dynamic(time_column, every=interval).agg( + [pl.col(c).mean().alias(c) for c in numeric_cols] + ) + + +def to_records(df: pl.DataFrame) -> list[dict]: + return df.to_dicts() diff --git a/app/processing/utils/statistics.py b/app/processing/utils/statistics.py new file mode 100644 index 0000000..0b2e7ad --- /dev/null +++ b/app/processing/utils/statistics.py @@ -0,0 +1,39 @@ +from __future__ import annotations + +import numpy as np + + +def moving_average(values: list[float], window: int = 5) -> list[float]: + if len(values) < window: + return values + arr = np.array(values, dtype=float) + return np.convolve(arr, np.ones(window) / window, mode="valid").tolist() + + +def detect_anomalies( + values: list[float], threshold: float = 2.0 +) -> list[dict]: + """Detect anomalies using Z-score method.""" + arr = np.array(values, dtype=float) + mean = np.mean(arr) + std = np.std(arr) + + if std == 0: + return [] + + z_scores = np.abs((arr - mean) / std) + anomalies = [] + for i, (val, z) in enumerate(zip(values, z_scores)): + if z > threshold: + anomalies.append({"index": i, "value": val, "z_score": float(z)}) + return anomalies + + +def percentile_stats(values: list[float]) -> dict: + arr = np.array(values, dtype=float) + return { + "p50": float(np.percentile(arr, 50)), + "p90": float(np.percentile(arr, 90)), + "p95": float(np.percentile(arr, 95)), + "p99": float(np.percentile(arr, 99)), + } diff --git a/app/repositories/__init__.py b/app/repositories/__init__.py new file mode 100644 index 0000000..e69de29 
diff --git a/app/repositories/analytics_repo.py b/app/repositories/analytics_repo.py new file mode 100644 index 0000000..7cb1de0 --- /dev/null +++ b/app/repositories/analytics_repo.py @@ -0,0 +1,44 @@ +from __future__ import annotations + +from datetime import datetime + +from app.models.mongodb.analytics_result import AnalyticsResult + + +class AnalyticsRepository: + async def create(self, result: AnalyticsResult) -> AnalyticsResult: + return await result.insert() + + async def get_by_type( + self, + analysis_type: str, + device_id: str | None = None, + skip: int = 0, + limit: int = 20, + ) -> list[AnalyticsResult]: + query: dict = {"analysis_type": analysis_type} + if device_id: + query["device_id"] = device_id + return await ( + AnalyticsResult.find(query) + .sort("-created_at") + .skip(skip) + .limit(limit) + .to_list() + ) + + async def get_by_period( + self, + analysis_type: str, + start: datetime, + end: datetime, + device_id: str | None = None, + ) -> list[AnalyticsResult]: + query: dict = { + "analysis_type": analysis_type, + "period_start": {"$gte": start}, + "period_end": {"$lte": end}, + } + if device_id: + query["device_id"] = device_id + return await AnalyticsResult.find(query).sort("-created_at").to_list() diff --git a/app/repositories/auth_repo.py b/app/repositories/auth_repo.py new file mode 100644 index 0000000..083a608 --- /dev/null +++ b/app/repositories/auth_repo.py @@ -0,0 +1,50 @@ +from __future__ import annotations + +from datetime import datetime + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.models.mariadb.auth import OAuthAccount, RefreshToken +from app.repositories.base import BaseRepository + + +class AuthRepository(BaseRepository[RefreshToken]): + def __init__(self, session: AsyncSession): + super().__init__(RefreshToken, session) + + async def get_by_token(self, token: str) -> RefreshToken | None: + stmt = select(RefreshToken).where( + RefreshToken.token == token, + 
RefreshToken.is_revoked == False, # noqa: E712 + RefreshToken.expires_at > datetime.utcnow(), + ) + result = await self.session.execute(stmt) + return result.scalar_one_or_none() + + async def revoke_all_for_user(self, user_id: int) -> None: + stmt = select(RefreshToken).where( + RefreshToken.user_id == user_id, + RefreshToken.is_revoked == False, # noqa: E712 + ) + result = await self.session.execute(stmt) + for token in result.scalars().all(): + token.is_revoked = True + self.session.add(token) + await self.session.flush() + + async def get_oauth_account( + self, provider: str, provider_user_id: str + ) -> OAuthAccount | None: + stmt = select(OAuthAccount).where( + OAuthAccount.provider == provider, + OAuthAccount.provider_user_id == provider_user_id, + ) + result = await self.session.execute(stmt) + return result.scalar_one_or_none() + + async def create_oauth_account(self, account: OAuthAccount) -> OAuthAccount: + self.session.add(account) + await self.session.flush() + await self.session.refresh(account) + return account diff --git a/app/repositories/base.py b/app/repositories/base.py new file mode 100644 index 0000000..ae085c0 --- /dev/null +++ b/app/repositories/base.py @@ -0,0 +1,58 @@ +from __future__ import annotations + +from typing import Generic, Sequence, TypeVar + +from sqlalchemy import func, select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlmodel import SQLModel + +T = TypeVar("T", bound=SQLModel) + + +class BaseRepository(Generic[T]): + def __init__(self, model: type[T], session: AsyncSession): + self.model = model + self.session = session + + async def get_by_id(self, id: int) -> T | None: + return await self.session.get(self.model, id) + + async def get_all( + self, *, skip: int = 0, limit: int = 100, filters: dict | None = None + ) -> Sequence[T]: + stmt = select(self.model) + if filters: + for key, value in filters.items(): + if hasattr(self.model, key): + stmt = stmt.where(getattr(self.model, key) == value) + stmt = 
stmt.offset(skip).limit(limit) + result = await self.session.execute(stmt) + return result.scalars().all() + + async def count(self, filters: dict | None = None) -> int: + stmt = select(func.count()).select_from(self.model) + if filters: + for key, value in filters.items(): + if hasattr(self.model, key): + stmt = stmt.where(getattr(self.model, key) == value) + result = await self.session.execute(stmt) + return result.scalar_one() + + async def create(self, obj: T) -> T: + self.session.add(obj) + await self.session.flush() + await self.session.refresh(obj) + return obj + + async def update(self, obj: T, data: dict) -> T: + for key, value in data.items(): + if value is not None and hasattr(obj, key): + setattr(obj, key, value) + self.session.add(obj) + await self.session.flush() + await self.session.refresh(obj) + return obj + + async def delete(self, obj: T) -> None: + await self.session.delete(obj) + await self.session.flush() diff --git a/app/repositories/device_log_repo.py b/app/repositories/device_log_repo.py new file mode 100644 index 0000000..c1bbc11 --- /dev/null +++ b/app/repositories/device_log_repo.py @@ -0,0 +1,40 @@ +from __future__ import annotations + +from datetime import datetime + +from app.models.mongodb.device_log import DeviceLog + + +class DeviceLogRepository: + async def create(self, log: DeviceLog) -> DeviceLog: + return await log.insert() + + async def get_by_device( + self, + device_id: str, + event_type: str | None = None, + since: datetime | None = None, + skip: int = 0, + limit: int = 100, + ) -> list[DeviceLog]: + query: dict = {"device_id": device_id} + if event_type: + query["event_type"] = event_type + if since: + query["timestamp"] = {"$gte": since} + + return await ( + DeviceLog.find(query) + .sort("-timestamp") + .skip(skip) + .limit(limit) + .to_list() + ) + + async def count_by_device( + self, device_id: str, event_type: str | None = None + ) -> int: + query: dict = {"device_id": device_id} + if event_type: + query["event_type"] 
= event_type + return await DeviceLog.find(query).count() diff --git a/app/repositories/device_repo.py b/app/repositories/device_repo.py new file mode 100644 index 0000000..c49d45a --- /dev/null +++ b/app/repositories/device_repo.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.models.mariadb.device import Device, DeviceGroup +from app.repositories.base import BaseRepository + + +class DeviceRepository(BaseRepository[Device]): + def __init__(self, session: AsyncSession): + super().__init__(Device, session) + + async def get_by_uid(self, device_uid: str) -> Device | None: + stmt = select(Device).where(Device.device_uid == device_uid, Device.is_deleted == False) # noqa: E712 + result = await self.session.execute(stmt) + return result.scalar_one_or_none() + + async def get_by_owner(self, owner_id: int, skip: int = 0, limit: int = 100) -> list[Device]: + stmt = ( + select(Device) + .where(Device.owner_id == owner_id, Device.is_deleted == False) # noqa: E712 + .offset(skip) + .limit(limit) + ) + result = await self.session.execute(stmt) + return list(result.scalars().all()) + + async def get_by_group(self, group_id: int, skip: int = 0, limit: int = 100) -> list[Device]: + stmt = ( + select(Device) + .where(Device.group_id == group_id, Device.is_deleted == False) # noqa: E712 + .offset(skip) + .limit(limit) + ) + result = await self.session.execute(stmt) + return list(result.scalars().all()) + + +class DeviceGroupRepository(BaseRepository[DeviceGroup]): + def __init__(self, session: AsyncSession): + super().__init__(DeviceGroup, session) + + async def get_by_name(self, name: str) -> DeviceGroup | None: + stmt = select(DeviceGroup).where(DeviceGroup.name == name) + result = await self.session.execute(stmt) + return result.scalar_one_or_none() diff --git a/app/repositories/monitoring_repo.py b/app/repositories/monitoring_repo.py new file mode 100644 index 0000000..89cdf4a 
--- /dev/null +++ b/app/repositories/monitoring_repo.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.models.mariadb.monitoring import Alert, AlertRule +from app.repositories.base import BaseRepository + + +class AlertRuleRepository(BaseRepository[AlertRule]): + def __init__(self, session: AsyncSession): + super().__init__(AlertRule, session) + + async def get_enabled_rules(self) -> list[AlertRule]: + stmt = select(AlertRule).where(AlertRule.is_enabled == True) # noqa: E712 + result = await self.session.execute(stmt) + return list(result.scalars().all()) + + +class AlertRepository(BaseRepository[Alert]): + def __init__(self, session: AsyncSession): + super().__init__(Alert, session) + + async def get_unacknowledged(self, skip: int = 0, limit: int = 50) -> list[Alert]: + stmt = ( + select(Alert) + .where(Alert.is_acknowledged == False) # noqa: E712 + .order_by(Alert.created_at.desc()) + .offset(skip) + .limit(limit) + ) + result = await self.session.execute(stmt) + return list(result.scalars().all()) + + async def count_active(self) -> int: + return await self.count(filters={"is_acknowledged": False}) diff --git a/app/repositories/user_repo.py b/app/repositories/user_repo.py new file mode 100644 index 0000000..d6787f7 --- /dev/null +++ b/app/repositories/user_repo.py @@ -0,0 +1,44 @@ +from __future__ import annotations + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from app.models.mariadb.user import User, UserProfile +from app.repositories.base import BaseRepository + + +class UserRepository(BaseRepository[User]): + def __init__(self, session: AsyncSession): + super().__init__(User, session) + + async def get_by_email(self, email: str) -> User | None: + stmt = select(User).where(User.email == email) + result = await self.session.execute(stmt) + return result.scalar_one_or_none() + + async 
def get_with_profile(self, user_id: int) -> User | None: + stmt = ( + select(User) + .options(selectinload(User.profile)) + .where(User.id == user_id) + ) + result = await self.session.execute(stmt) + return result.scalar_one_or_none() + + async def create_with_profile( + self, user: User, full_name: str = "", phone: str = "", organization: str = "" + ) -> User: + self.session.add(user) + await self.session.flush() + + profile = UserProfile( + user_id=user.id, # type: ignore[arg-type] + full_name=full_name, + phone=phone, + organization=organization, + ) + self.session.add(profile) + await self.session.flush() + await self.session.refresh(user) + return user diff --git a/app/schemas/__init__.py b/app/schemas/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/schemas/analytics.py b/app/schemas/analytics.py new file mode 100644 index 0000000..025ecb4 --- /dev/null +++ b/app/schemas/analytics.py @@ -0,0 +1,38 @@ +from __future__ import annotations + +from datetime import datetime + +from pydantic import BaseModel + + +class AnalyticsRequest(BaseModel): + device_id: str | None = None + analysis_type: str = "telemetry_summary" + start: datetime + end: datetime + parameters: dict = {} + + +class AnalyticsResultRead(BaseModel): + id: str + analysis_type: str + device_id: str | None + result: dict + parameters: dict + period_start: datetime | None + period_end: datetime | None + created_at: datetime + + +class ReportResponse(BaseModel): + report_id: str + device_id: str + status: dict + trends: dict + created_at: datetime + + +class TelemetryAggregateResponse(BaseModel): + device_id: str + records: list[dict] + count: int diff --git a/app/schemas/auth.py b/app/schemas/auth.py new file mode 100644 index 0000000..5774ba8 --- /dev/null +++ b/app/schemas/auth.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +from pydantic import BaseModel, EmailStr + + +class LoginRequest(BaseModel): + email: EmailStr + password: str + + +class 
RegisterRequest(BaseModel): + email: EmailStr + password: str + full_name: str = "" + + +class TokenResponse(BaseModel): + access_token: str + refresh_token: str + token_type: str = "bearer" + + +class RefreshTokenRequest(BaseModel): + refresh_token: str + + +class OAuthCallbackRequest(BaseModel): + provider: str + code: str + redirect_uri: str diff --git a/app/schemas/common.py b/app/schemas/common.py new file mode 100644 index 0000000..84808c9 --- /dev/null +++ b/app/schemas/common.py @@ -0,0 +1,19 @@ +from __future__ import annotations + +from typing import Generic, TypeVar + +from pydantic import BaseModel + +T = TypeVar("T") + + +class ErrorResponse(BaseModel): + detail: str + + +class PaginatedResponse(BaseModel, Generic[T]): + items: list[T] + total: int + page: int + size: int + pages: int diff --git a/app/schemas/device.py b/app/schemas/device.py new file mode 100644 index 0000000..3e8afcf --- /dev/null +++ b/app/schemas/device.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +from datetime import datetime + +from pydantic import BaseModel + + +class DeviceGroupRead(BaseModel): + id: int + name: str + description: str + + model_config = {"from_attributes": True} + + +class DeviceGroupCreate(BaseModel): + name: str + description: str = "" + + +class DeviceRead(BaseModel): + id: int + device_uid: str + name: str + device_type: str + status: str + firmware_version: str + ip_address: str + group_id: int | None + owner_id: int | None + last_seen_at: datetime | None + created_at: datetime + + model_config = {"from_attributes": True} + + +class DeviceCreate(BaseModel): + device_uid: str + name: str + device_type: str = "" + group_id: int | None = None + owner_id: int | None = None + firmware_version: str = "" + + +class DeviceUpdate(BaseModel): + name: str | None = None + device_type: str | None = None + status: str | None = None + firmware_version: str | None = None + group_id: int | None = None + owner_id: int | None = None diff --git 
a/app/schemas/monitoring.py b/app/schemas/monitoring.py new file mode 100644 index 0000000..6a3192e --- /dev/null +++ b/app/schemas/monitoring.py @@ -0,0 +1,54 @@ +from __future__ import annotations + +from datetime import datetime + +from pydantic import BaseModel + + +class AlertRuleRead(BaseModel): + id: int + name: str + description: str + metric: str + condition: str + threshold: float + severity: str + is_enabled: bool + device_group_id: int | None + created_at: datetime + + model_config = {"from_attributes": True} + + +class AlertRuleCreate(BaseModel): + name: str + description: str = "" + metric: str + condition: str + threshold: float + severity: str = "warning" + device_group_id: int | None = None + + +class AlertRead(BaseModel): + id: int + rule_id: int | None + device_id: int | None + severity: str + message: str + is_acknowledged: bool + acknowledged_by: int | None + acknowledged_at: datetime | None + created_at: datetime + + model_config = {"from_attributes": True} + + +class SystemHealthResponse(BaseModel): + status: str + mariadb: str + mongodb: str + redis: str + mqtt: str + active_devices: int + active_alerts: int diff --git a/app/schemas/user.py b/app/schemas/user.py new file mode 100644 index 0000000..bd27d9d --- /dev/null +++ b/app/schemas/user.py @@ -0,0 +1,38 @@ +from __future__ import annotations + +from datetime import datetime + +from pydantic import BaseModel, EmailStr + + +class UserRead(BaseModel): + id: int + email: str + role: str + is_active: bool + is_verified: bool + full_name: str = "" + phone: str = "" + organization: str = "" + avatar_url: str = "" + created_at: datetime + + model_config = {"from_attributes": True} + + +class UserCreate(BaseModel): + email: EmailStr + password: str + role: str = "user" + full_name: str = "" + phone: str = "" + organization: str = "" + + +class UserUpdate(BaseModel): + full_name: str | None = None + phone: str | None = None + organization: str | None = None + avatar_url: str | None = None + 
is_active: bool | None = None + role: str | None = None diff --git a/app/services/__init__.py b/app/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/services/analytics_service.py b/app/services/analytics_service.py new file mode 100644 index 0000000..74a84aa --- /dev/null +++ b/app/services/analytics_service.py @@ -0,0 +1,71 @@ +from __future__ import annotations + +from datetime import datetime + +from app.processing.analyzers.device_analyzer import analyze_device_status +from app.processing.analyzers.trend_analyzer import analyze_trend +from app.processing.pipelines.report_pipeline import generate_device_report +from app.processing.pipelines.telemetry_pipeline import aggregate_telemetry +from app.processing.utils.dataframe_utils import to_records +from app.repositories.analytics_repo import AnalyticsRepository +from app.schemas.analytics import ( + AnalyticsResultRead, + ReportResponse, + TelemetryAggregateResponse, +) + + +class AnalyticsService: + def __init__(self) -> None: + self.analytics_repo = AnalyticsRepository() + + async def get_telemetry_aggregate( + self, device_id: str, start: datetime, end: datetime, interval: str = "1h" + ) -> TelemetryAggregateResponse: + df = await aggregate_telemetry(device_id, start, end, interval) + records = to_records(df) if len(df) > 0 else [] + return TelemetryAggregateResponse( + device_id=device_id, records=records, count=len(records) + ) + + async def generate_report( + self, device_id: str, start: datetime, end: datetime + ) -> ReportResponse: + result = await generate_device_report(device_id, start, end) + return ReportResponse( + report_id=str(result.id), + device_id=device_id, + status=result.result.get("status", {}), + trends=result.result.get("trends", {}), + created_at=result.created_at, + ) + + async def get_device_status_analysis( + self, device_id: str, start: datetime, end: datetime + ) -> dict: + return await analyze_device_status(device_id, start, end) + + async def 
get_trend_analysis( + self, device_id: str, start: datetime, end: datetime + ) -> dict: + return await analyze_trend(device_id, start, end) + + async def list_results( + self, analysis_type: str, device_id: str | None = None, skip: int = 0, limit: int = 20 + ) -> list[AnalyticsResultRead]: + results = await self.analytics_repo.get_by_type( + analysis_type, device_id=device_id, skip=skip, limit=limit + ) + return [ + AnalyticsResultRead( + id=str(r.id), + analysis_type=r.analysis_type, + device_id=r.device_id, + result=r.result, + parameters=r.parameters, + period_start=r.period_start, + period_end=r.period_end, + created_at=r.created_at, + ) + for r in results + ] diff --git a/app/services/auth_service.py b/app/services/auth_service.py new file mode 100644 index 0000000..828c8cd --- /dev/null +++ b/app/services/auth_service.py @@ -0,0 +1,84 @@ +from __future__ import annotations + +from datetime import datetime, timedelta + +from sqlalchemy.ext.asyncio import AsyncSession + +from app.core.config import settings +from app.core.exceptions import ConflictException, UnauthorizedException +from app.core.security import ( + create_access_token, + create_refresh_token, + decode_token, + hash_password, + verify_password, +) +from app.models.mariadb.auth import RefreshToken +from app.models.mariadb.user import User +from app.repositories.auth_repo import AuthRepository +from app.repositories.user_repo import UserRepository +from app.schemas.auth import TokenResponse + + +class AuthService: + def __init__(self, session: AsyncSession): + self.user_repo = UserRepository(session) + self.auth_repo = AuthRepository(session) + + async def register( + self, email: str, password: str, full_name: str = "" + ) -> User: + existing = await self.user_repo.get_by_email(email) + if existing: + raise ConflictException("Email already registered") + + user = User( + email=email, + hashed_password=hash_password(password), + ) + return await self.user_repo.create_with_profile(user, 
full_name=full_name) + + async def login(self, email: str, password: str) -> TokenResponse: + user = await self.user_repo.get_by_email(email) + if not user or not verify_password(password, user.hashed_password): + raise UnauthorizedException("Invalid email or password") + if not user.is_active: + raise UnauthorizedException("Account is deactivated") + + user.last_login_at = datetime.utcnow() + + return await self._create_tokens(user) + + async def refresh(self, refresh_token_str: str) -> TokenResponse: + payload = decode_token(refresh_token_str) + if not payload or payload.get("type") != "refresh": + raise UnauthorizedException("Invalid refresh token") + + stored = await self.auth_repo.get_by_token(refresh_token_str) + if not stored: + raise UnauthorizedException("Refresh token not found or expired") + + stored.is_revoked = True + + user = await self.user_repo.get_by_id(stored.user_id) + if not user or not user.is_active: + raise UnauthorizedException("User not found or deactivated") + + return await self._create_tokens(user) + + async def logout(self, user_id: int) -> None: + await self.auth_repo.revoke_all_for_user(user_id) + + async def _create_tokens(self, user: User) -> TokenResponse: + access = create_access_token(user.id, user.role) # type: ignore[arg-type] + refresh = create_refresh_token(user.id) # type: ignore[arg-type] + + token_obj = RefreshToken( + user_id=user.id, # type: ignore[arg-type] + token=refresh, + expires_at=datetime.utcnow() + + timedelta(days=settings.JWT_REFRESH_TOKEN_EXPIRE_DAYS), + ) + await self.auth_repo.create(token_obj) + + return TokenResponse(access_token=access, refresh_token=refresh) diff --git a/app/services/device_service.py b/app/services/device_service.py new file mode 100644 index 0000000..3ba4e42 --- /dev/null +++ b/app/services/device_service.py @@ -0,0 +1,71 @@ +from __future__ import annotations + +from datetime import datetime + +from sqlalchemy.ext.asyncio import AsyncSession + +from app.core.exceptions import 
ConflictException, NotFoundException +from app.models.mariadb.device import Device +from app.repositories.device_repo import DeviceRepository +from app.schemas.device import DeviceCreate, DeviceRead, DeviceUpdate + + +class DeviceService: + def __init__(self, session: AsyncSession): + self.device_repo = DeviceRepository(session) + + async def get_device(self, device_id: int) -> DeviceRead: + device = await self.device_repo.get_by_id(device_id) + if not device or device.is_deleted: + raise NotFoundException("Device not found") + return DeviceRead.model_validate(device) + + async def get_device_by_uid(self, device_uid: str) -> DeviceRead: + device = await self.device_repo.get_by_uid(device_uid) + if not device: + raise NotFoundException("Device not found") + return DeviceRead.model_validate(device) + + async def list_devices(self, skip: int = 0, limit: int = 20) -> list[DeviceRead]: + devices = await self.device_repo.get_all( + skip=skip, limit=limit, filters={"is_deleted": False} + ) + return [DeviceRead.model_validate(d) for d in devices] + + async def count_devices(self) -> int: + return await self.device_repo.count(filters={"is_deleted": False}) + + async def create_device(self, data: DeviceCreate) -> DeviceRead: + existing = await self.device_repo.get_by_uid(data.device_uid) + if existing: + raise ConflictException("Device UID already registered") + + device = Device(**data.model_dump()) + device = await self.device_repo.create(device) + return DeviceRead.model_validate(device) + + async def update_device(self, device_id: int, data: DeviceUpdate) -> DeviceRead: + device = await self.device_repo.get_by_id(device_id) + if not device or device.is_deleted: + raise NotFoundException("Device not found") + + update_data = data.model_dump(exclude_none=True) + device = await self.device_repo.update(device, update_data) + return DeviceRead.model_validate(device) + + async def delete_device(self, device_id: int) -> None: + device = await 
self.device_repo.get_by_id(device_id) + if not device or device.is_deleted: + raise NotFoundException("Device not found") + await self.device_repo.update( + device, {"is_deleted": True, "deleted_at": datetime.utcnow()} + ) + + async def update_device_status(self, device_uid: str, status: str) -> DeviceRead: + device = await self.device_repo.get_by_uid(device_uid) + if not device: + raise NotFoundException("Device not found") + device = await self.device_repo.update( + device, {"status": status, "last_seen_at": datetime.utcnow()} + ) + return DeviceRead.model_validate(device) diff --git a/app/services/monitoring_service.py b/app/services/monitoring_service.py new file mode 100644 index 0000000..1f25653 --- /dev/null +++ b/app/services/monitoring_service.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +from datetime import datetime + +from sqlalchemy.ext.asyncio import AsyncSession + +from app.core.constants import DeviceStatus +from app.core.exceptions import NotFoundException +from app.models.mariadb.monitoring import Alert, AlertRule +from app.repositories.device_repo import DeviceRepository +from app.repositories.monitoring_repo import AlertRepository, AlertRuleRepository +from app.schemas.monitoring import AlertRead, AlertRuleCreate, AlertRuleRead, SystemHealthResponse + + +class MonitoringService: + def __init__(self, session: AsyncSession): + self.alert_rule_repo = AlertRuleRepository(session) + self.alert_repo = AlertRepository(session) + self.device_repo = DeviceRepository(session) + + async def create_alert_rule(self, data: AlertRuleCreate, user_id: int) -> AlertRuleRead: + rule = AlertRule(**data.model_dump(), created_by=user_id) + rule = await self.alert_rule_repo.create(rule) + return AlertRuleRead.model_validate(rule) + + async def list_alert_rules(self) -> list[AlertRuleRead]: + rules = await self.alert_rule_repo.get_all() + return [AlertRuleRead.model_validate(r) for r in rules] + + async def list_active_alerts(self, skip: int = 0, limit: 
int = 50) -> list[AlertRead]: + alerts = await self.alert_repo.get_unacknowledged(skip=skip, limit=limit) + return [AlertRead.model_validate(a) for a in alerts] + + async def acknowledge_alert(self, alert_id: int, user_id: int) -> AlertRead: + alert = await self.alert_repo.get_by_id(alert_id) + if not alert: + raise NotFoundException("Alert not found") + alert = await self.alert_repo.update(alert, { + "is_acknowledged": True, + "acknowledged_by": user_id, + "acknowledged_at": datetime.utcnow(), + }) + return AlertRead.model_validate(alert) + + async def get_system_health(self) -> SystemHealthResponse: + active_devices = await self.device_repo.count(filters={"status": DeviceStatus.ONLINE}) + active_alerts = await self.alert_repo.count_active() + + return SystemHealthResponse( + status="ok", + mariadb="connected", + mongodb="connected", + redis="connected", + mqtt="connected", + active_devices=active_devices, + active_alerts=active_alerts, + ) diff --git a/app/services/notification_service.py b/app/services/notification_service.py new file mode 100644 index 0000000..b41a022 --- /dev/null +++ b/app/services/notification_service.py @@ -0,0 +1,49 @@ +from __future__ import annotations + +from app.communication.socketio.server import sio +from app.models.mongodb.notification import Notification + + +class NotificationService: + async def create_notification( + self, user_id: int, title: str, message: str, notification_type: str = "info" + ) -> Notification: + notification = Notification( + user_id=user_id, + title=title, + message=message, + notification_type=notification_type, + ) + await notification.insert() + + await sio.emit( + "notification", + {"title": title, "message": message, "type": notification_type}, + room=f"user:{user_id}", + namespace="/notification", + ) + + return notification + + async def get_user_notifications( + self, user_id: int, skip: int = 0, limit: int = 20, unread_only: bool = False + ) -> list[Notification]: + query: dict = {"user_id": 
user_id} + if unread_only: + query["is_read"] = False + return await ( + Notification.find(query) + .sort("-created_at") + .skip(skip) + .limit(limit) + .to_list() + ) + + async def mark_as_read(self, notification_id: str, user_id: int) -> None: + from datetime import datetime + + notification = await Notification.get(notification_id) + if notification and notification.user_id == user_id: + notification.is_read = True + notification.read_at = datetime.utcnow() + await notification.save() diff --git a/app/services/user_service.py b/app/services/user_service.py new file mode 100644 index 0000000..998b212 --- /dev/null +++ b/app/services/user_service.py @@ -0,0 +1,94 @@ +from __future__ import annotations + +from sqlalchemy.ext.asyncio import AsyncSession + +from app.core.exceptions import ConflictException, NotFoundException +from app.core.security import hash_password +from app.models.mariadb.user import User +from app.repositories.user_repo import UserRepository +from app.schemas.user import UserCreate, UserRead, UserUpdate + + +class UserService: + def __init__(self, session: AsyncSession): + self.user_repo = UserRepository(session) + + async def get_user(self, user_id: int) -> UserRead: + user = await self.user_repo.get_with_profile(user_id) + if not user: + raise NotFoundException("User not found") + return self._to_read(user) + + async def list_users(self, skip: int = 0, limit: int = 20) -> list[UserRead]: + users = await self.user_repo.get_all(skip=skip, limit=limit) + return [self._to_read(u) for u in users] + + async def count_users(self) -> int: + return await self.user_repo.count() + + async def create_user(self, data: UserCreate) -> UserRead: + existing = await self.user_repo.get_by_email(data.email) + if existing: + raise ConflictException("Email already registered") + + user = User( + email=data.email, + hashed_password=hash_password(data.password), + role=data.role, + ) + user = await self.user_repo.create_with_profile( + user, + 
full_name=data.full_name, + phone=data.phone, + organization=data.organization, + ) + return self._to_read(user) + + async def update_user(self, user_id: int, data: UserUpdate) -> UserRead: + user = await self.user_repo.get_with_profile(user_id) + if not user: + raise NotFoundException("User not found") + + user_fields = {} + profile_fields = {} + + if data.is_active is not None: + user_fields["is_active"] = data.is_active + if data.role is not None: + user_fields["role"] = data.role + + for field in ("full_name", "phone", "organization", "avatar_url"): + val = getattr(data, field, None) + if val is not None: + profile_fields[field] = val + + if user_fields: + await self.user_repo.update(user, user_fields) + if profile_fields and user.profile: + for k, v in profile_fields.items(): + setattr(user.profile, k, v) + + user = await self.user_repo.get_with_profile(user_id) + return self._to_read(user) # type: ignore[arg-type] + + async def delete_user(self, user_id: int) -> None: + user = await self.user_repo.get_by_id(user_id) + if not user: + raise NotFoundException("User not found") + await self.user_repo.update(user, {"is_deleted": True}) + + @staticmethod + def _to_read(user: User) -> UserRead: + profile = user.profile + return UserRead( + id=user.id, # type: ignore[arg-type] + email=user.email, + role=user.role, + is_active=user.is_active, + is_verified=user.is_verified, + full_name=profile.full_name if profile else "", + phone=profile.phone if profile else "", + organization=profile.organization if profile else "", + avatar_url=profile.avatar_url if profile else "", + created_at=user.created_at, + ) diff --git a/app/tasks/__init__.py b/app/tasks/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/tasks/analytics_tasks.py b/app/tasks/analytics_tasks.py new file mode 100644 index 0000000..8abfe8b --- /dev/null +++ b/app/tasks/analytics_tasks.py @@ -0,0 +1,56 @@ +from __future__ import annotations + +import structlog + +from app.tasks.celery_app 
import celery_app + +logger = structlog.get_logger("tasks.analytics") + + +@celery_app.task(name="app.tasks.analytics_tasks.run_daily_analytics") +def run_daily_analytics() -> dict: + """Run daily aggregation analytics on telemetry data.""" + import asyncio + from datetime import datetime, timedelta + + from app.models.mongodb.analytics_result import AnalyticsResult + from app.models.mongodb.telemetry import TelemetryData + + async def _run() -> dict: + yesterday = datetime.utcnow().replace(hour=0, minute=0, second=0) - timedelta(days=1) + today = yesterday + timedelta(days=1) + + pipeline = [ + {"$match": {"timestamp": {"$gte": yesterday, "$lt": today}}}, + {"$group": { + "_id": "$device_id", + "count": {"$sum": 1}, + "avg_metrics": {"$avg": "$metrics.value"}, + }}, + ] + + collection = TelemetryData.get_motor_collection() + results = await collection.aggregate(pipeline).to_list(length=1000) + + for r in results: + await AnalyticsResult( + analysis_type="daily_telemetry", + device_id=r["_id"], + parameters={"date": yesterday.isoformat()}, + result={"count": r["count"], "avg_value": r.get("avg_metrics")}, + period_start=yesterday, + period_end=today, + ).insert() + + return {"devices_analyzed": len(results)} + + result = asyncio.get_event_loop().run_until_complete(_run()) + logger.info("daily_analytics_done", **result) + return result + + +@celery_app.task(name="app.tasks.analytics_tasks.run_device_analysis") +def run_device_analysis(device_id: str, analysis_type: str, params: dict) -> dict: + """Run on-demand analysis for a specific device.""" + logger.info("device_analysis_started", device_id=device_id, type=analysis_type) + return {"status": "completed", "device_id": device_id, "type": analysis_type} diff --git a/app/tasks/auth_tasks.py b/app/tasks/auth_tasks.py new file mode 100644 index 0000000..e2c5927 --- /dev/null +++ b/app/tasks/auth_tasks.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +import structlog + +from app.tasks.celery_app import 
celery_app + +logger = structlog.get_logger("tasks.auth") + + +@celery_app.task(name="app.tasks.auth_tasks.cleanup_expired_tokens") +def cleanup_expired_tokens() -> dict: + """Remove expired and revoked refresh tokens from the database.""" + import asyncio + + from sqlalchemy import delete + from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession + from sqlalchemy.orm import sessionmaker + from datetime import datetime + + from app.core.config import settings + from app.models.mariadb.auth import RefreshToken + + async def _cleanup() -> int: + engine = create_async_engine(settings.MARIADB_DSN) + async_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False) + + async with async_session() as session: + stmt = delete(RefreshToken).where( + (RefreshToken.expires_at < datetime.utcnow()) | (RefreshToken.is_revoked == True) # noqa: E712 + ) + result = await session.execute(stmt) + await session.commit() + count = result.rowcount # type: ignore[assignment] + + await engine.dispose() + return count + + count = asyncio.get_event_loop().run_until_complete(_cleanup()) + logger.info("tokens_cleaned", count=count) + return {"cleaned": count} + + +@celery_app.task(name="app.tasks.auth_tasks.send_verification_email") +def send_verification_email(user_id: int, email: str, token: str) -> dict: + """Send email verification link to user.""" + logger.info("verification_email_sent", user_id=user_id, email=email) + return {"status": "sent", "email": email} diff --git a/app/tasks/celery_app.py b/app/tasks/celery_app.py new file mode 100644 index 0000000..227490c --- /dev/null +++ b/app/tasks/celery_app.py @@ -0,0 +1,49 @@ +from __future__ import annotations + +from celery import Celery +from celery.schedules import crontab + +from app.core.config import settings + +celery_app = Celery( + "core_api", + broker=settings.CELERY_BROKER_URL, + backend=settings.CELERY_RESULT_BACKEND, +) + +celery_app.conf.update( + task_serializer="json", + 
accept_content=["json"], + result_serializer="json", + timezone="Asia/Seoul", + enable_utc=True, + task_track_started=True, + task_routes={ + "app.tasks.analytics_tasks.*": {"queue": "analytics"}, + "app.tasks.notification_tasks.*": {"queue": "notifications"}, + "app.tasks.device_tasks.*": {"queue": "devices"}, + "app.tasks.auth_tasks.*": {"queue": "default"}, + }, + beat_schedule={ + "cleanup-expired-tokens": { + "task": "app.tasks.auth_tasks.cleanup_expired_tokens", + "schedule": crontab(hour=3, minute=0), + }, + "check-device-health": { + "task": "app.tasks.device_tasks.check_device_health", + "schedule": crontab(minute="*/5"), + }, + "daily-analytics": { + "task": "app.tasks.analytics_tasks.run_daily_analytics", + "schedule": crontab(hour=1, minute=0), + }, + }, +) + +celery_app.autodiscover_tasks([ + "app.tasks.auth_tasks", + "app.tasks.device_tasks", + "app.tasks.notification_tasks", + "app.tasks.analytics_tasks", + "app.tasks.scheduled", +]) diff --git a/app/tasks/device_tasks.py b/app/tasks/device_tasks.py new file mode 100644 index 0000000..33debc9 --- /dev/null +++ b/app/tasks/device_tasks.py @@ -0,0 +1,64 @@ +from __future__ import annotations + +import structlog + +from app.tasks.celery_app import celery_app + +logger = structlog.get_logger("tasks.device") + + +@celery_app.task(name="app.tasks.device_tasks.check_device_health") +def check_device_health() -> dict: + """Check all devices for heartbeat timeout and mark offline.""" + import asyncio + from datetime import datetime, timedelta + + from sqlalchemy import select, update + from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine + from sqlalchemy.orm import sessionmaker + + from app.core.config import settings + from app.core.constants import DeviceStatus + from app.models.mariadb.device import Device + + async def _check() -> int: + engine = create_async_engine(settings.MARIADB_DSN) + async_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False) + threshold = 
datetime.utcnow() - timedelta(minutes=10) + + async with async_session() as session: + stmt = ( + update(Device) + .where( + Device.status == DeviceStatus.ONLINE, + Device.last_seen_at < threshold, + Device.is_deleted == False, # noqa: E712 + ) + .values(status=DeviceStatus.OFFLINE) + ) + result = await session.execute(stmt) + await session.commit() + count = result.rowcount # type: ignore[assignment] + + await engine.dispose() + return count + + count = asyncio.get_event_loop().run_until_complete(_check()) + logger.info("device_health_check", offline_count=count) + return {"marked_offline": count} + + +@celery_app.task(name="app.tasks.device_tasks.batch_firmware_update") +def batch_firmware_update(device_uids: list[str], firmware_url: str) -> dict: + """Trigger OTA firmware update for a batch of devices.""" + import asyncio + from app.communication.mqtt.publisher import publish_ota + + async def _update() -> int: + for uid in device_uids: + await publish_ota(uid, {"url": firmware_url, "action": "update"}) + return len(device_uids) + + count = asyncio.get_event_loop().run_until_complete(_update()) + logger.info("batch_firmware_update", count=count) + return {"updated": count} diff --git a/app/tasks/notification_tasks.py b/app/tasks/notification_tasks.py new file mode 100644 index 0000000..c82b9a4 --- /dev/null +++ b/app/tasks/notification_tasks.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +import structlog + +from app.tasks.celery_app import celery_app + +logger = structlog.get_logger("tasks.notification") + + +@celery_app.task(name="app.tasks.notification_tasks.send_push_notification") +def send_push_notification(user_id: int, title: str, message: str) -> dict: + """Send push notification to a user via Socket.IO.""" + import asyncio + + from app.communication.socketio.server import sio + + async def _send() -> None: + await sio.emit( + "notification", + {"title": title, "message": message}, + room=f"user:{user_id}", + namespace="/notification", + ) + 
+ asyncio.get_event_loop().run_until_complete(_send()) + logger.info("push_sent", user_id=user_id) + return {"status": "sent", "user_id": user_id} + + +@celery_app.task(name="app.tasks.notification_tasks.send_bulk_notification") +def send_bulk_notification(user_ids: list[int], title: str, message: str) -> dict: + """Send notification to multiple users and store in MongoDB.""" + import asyncio + + from app.models.mongodb.notification import Notification + + async def _bulk() -> int: + notifications = [ + Notification(user_id=uid, title=title, message=message) + for uid in user_ids + ] + await Notification.insert_many(notifications) + return len(notifications) + + count = asyncio.get_event_loop().run_until_complete(_bulk()) + logger.info("bulk_notification_sent", count=count) + return {"sent": count} diff --git a/app/tasks/scheduled.py b/app/tasks/scheduled.py new file mode 100644 index 0000000..8c603a9 --- /dev/null +++ b/app/tasks/scheduled.py @@ -0,0 +1,14 @@ +from __future__ import annotations + +import structlog + +from app.tasks.celery_app import celery_app + +logger = structlog.get_logger("tasks.scheduled") + + +@celery_app.task(name="app.tasks.scheduled.system_health_report") +def system_health_report() -> dict: + """Generate periodic system health report.""" + logger.info("system_health_report_generated") + return {"status": "ok"} diff --git a/app/utils/__init__.py b/app/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/utils/datetime_utils.py b/app/utils/datetime_utils.py new file mode 100644 index 0000000..db95e6f --- /dev/null +++ b/app/utils/datetime_utils.py @@ -0,0 +1,17 @@ +from __future__ import annotations + +from datetime import datetime, timezone, timedelta + +KST = timezone(timedelta(hours=9)) + + +def now_utc() -> datetime: + return datetime.now(timezone.utc) + + +def now_kst() -> datetime: + return datetime.now(KST) + + +def utc_to_kst(dt: datetime) -> datetime: + return dt.astimezone(KST) diff --git 
a/app/utils/file_utils.py b/app/utils/file_utils.py new file mode 100644 index 0000000..b600bc1 --- /dev/null +++ b/app/utils/file_utils.py @@ -0,0 +1,23 @@ +from __future__ import annotations + +import os +import uuid + +from fastapi import UploadFile + +UPLOAD_DIR = "uploads" + + +async def save_upload(file: UploadFile, subdir: str = "") -> str: + directory = os.path.join(UPLOAD_DIR, subdir) if subdir else UPLOAD_DIR + os.makedirs(directory, exist_ok=True) + + ext = os.path.splitext(file.filename or "")[1] + filename = f"{uuid.uuid4().hex}{ext}" + filepath = os.path.join(directory, filename) + + content = await file.read() + with open(filepath, "wb") as f: + f.write(content) + + return filepath diff --git a/app/utils/pagination.py b/app/utils/pagination.py new file mode 100644 index 0000000..5febb13 --- /dev/null +++ b/app/utils/pagination.py @@ -0,0 +1,12 @@ +from __future__ import annotations + +from typing import TypeVar + +from app.schemas.common import PaginatedResponse + +T = TypeVar("T") + + +def paginate(items: list[T], total: int, page: int, size: int) -> PaginatedResponse[T]: + pages = (total + size - 1) // size if size > 0 else 0 + return PaginatedResponse(items=items, total=total, page=page, size=size, pages=pages) diff --git a/app/utils/validators.py b/app/utils/validators.py new file mode 100644 index 0000000..0bd8261 --- /dev/null +++ b/app/utils/validators.py @@ -0,0 +1,11 @@ +from __future__ import annotations + +import re + + +def is_valid_device_uid(uid: str) -> bool: + return bool(re.match(r"^[a-zA-Z0-9_-]{3,100}$", uid)) + + +def is_valid_email(email: str) -> bool: + return bool(re.match(r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$", email)) diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml new file mode 100644 index 0000000..caae9e5 --- /dev/null +++ b/docker-compose.prod.yml @@ -0,0 +1,69 @@ +services: + app: + build: + context: . 
+ dockerfile: Dockerfile + ports: + - "8000:8000" + env_file: .env + depends_on: + - redis + - mosquitto + restart: always + command: uvicorn app.asgi:app --host 0.0.0.0 --port 8000 --workers 4 + + worker: + build: + context: . + dockerfile: Dockerfile.worker + env_file: .env + depends_on: + - redis + restart: always + command: celery -A app.tasks.celery_app worker --loglevel=warning --concurrency=4 + deploy: + replicas: 2 + + beat: + build: + context: . + dockerfile: Dockerfile.worker + env_file: .env + depends_on: + - redis + restart: always + command: celery -A app.tasks.celery_app beat --loglevel=warning + + flower: + build: + context: . + dockerfile: Dockerfile.worker + ports: + - "5555:5555" + env_file: .env + depends_on: + - redis + - worker + restart: always + command: celery -A app.tasks.celery_app flower --port=5555 --basic_auth=${FLOWER_USER:-admin}:${FLOWER_PASSWORD:-changeme} + + redis: + image: redis:7-alpine + ports: + - "6379:6379" + volumes: + - redis_data:/data + restart: always + + mosquitto: + image: eclipse-mosquitto:2 + ports: + - "1883:1883" + volumes: + - ./docker/mosquitto/mosquitto.conf:/mosquitto/config/mosquitto.conf + - mosquitto_data:/mosquitto/data + restart: always + +volumes: + redis_data: + mosquitto_data: diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..f54d314 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,70 @@ +services: + app: + build: + context: . + dockerfile: Dockerfile + ports: + - "8000:8000" + env_file: .env + depends_on: + - redis + - mosquitto + volumes: + - .:/app + command: uvicorn app.asgi:app --host 0.0.0.0 --port 8000 --reload + + worker: + build: + context: . + dockerfile: Dockerfile.worker + env_file: .env + depends_on: + - redis + volumes: + - .:/app + command: celery -A app.tasks.celery_app worker --loglevel=info --concurrency=2 + + beat: + build: + context: . 
+ dockerfile: Dockerfile.worker + env_file: .env + depends_on: + - redis + volumes: + - .:/app + command: celery -A app.tasks.celery_app beat --loglevel=info + + flower: + build: + context: . + dockerfile: Dockerfile.worker + ports: + - "5555:5555" + env_file: .env + depends_on: + - redis + - worker + command: celery -A app.tasks.celery_app flower --port=5555 + + redis: + image: redis:7-alpine + ports: + - "6379:6379" + volumes: + - redis_data:/data + + mosquitto: + image: eclipse-mosquitto:2 + ports: + - "1883:1883" + - "9001:9001" + volumes: + - ./docker/mosquitto/mosquitto.conf:/mosquitto/config/mosquitto.conf + - mosquitto_data:/mosquitto/data + - mosquitto_log:/mosquitto/log + +volumes: + redis_data: + mosquitto_data: + mosquitto_log: diff --git a/docker/mosquitto/mosquitto.conf b/docker/mosquitto/mosquitto.conf new file mode 100644 index 0000000..6f2b10b --- /dev/null +++ b/docker/mosquitto/mosquitto.conf @@ -0,0 +1,8 @@ +listener 1883 +allow_anonymous true +persistence true +persistence_location /mosquitto/data/ +log_dest file /mosquitto/log/mosquitto.log + +listener 9001 +protocol websockets diff --git a/docs/API_REFERENCE.md b/docs/API_REFERENCE.md new file mode 100644 index 0000000..21f2b25 --- /dev/null +++ b/docs/API_REFERENCE.md @@ -0,0 +1,564 @@ +# API 레퍼런스 + +> Base URL: `http://localhost:8000/api/v1` +> 인증: `Authorization: Bearer ` + +--- + +## 인증 (Auth) + +### POST `/auth/register` + +회원가입 후 토큰을 발급한다. + +**Request Body:** +```json +{ + "email": "user@example.com", + "password": "securepassword", + "full_name": "홍길동" +} +``` + +**Response (201):** +```json +{ + "access_token": "eyJ...", + "refresh_token": "eyJ...", + "token_type": "bearer" +} +``` + +**에러:** +- `409` — 이미 등록된 이메일 + +--- + +### POST `/auth/login` + +이메일/비밀번호로 로그인한다. 
+ +**Request Body:** +```json +{ + "email": "user@example.com", + "password": "securepassword" +} +``` + +**Response (200):** +```json +{ + "access_token": "eyJ...", + "refresh_token": "eyJ...", + "token_type": "bearer" +} +``` + +**에러:** +- `401` — 잘못된 이메일/비밀번호 또는 비활성 계정 + +--- + +### POST `/auth/refresh` + +리프레시 토큰으로 새 토큰 쌍을 발급한다. 기존 리프레시 토큰은 폐기된다. + +**Request Body:** +```json +{ + "refresh_token": "eyJ..." +} +``` + +**Response (200):** +```json +{ + "access_token": "eyJ...(new)", + "refresh_token": "eyJ...(new)", + "token_type": "bearer" +} +``` + +--- + +### POST `/auth/logout` + +현재 사용자의 모든 리프레시 토큰을 폐기한다. + +**Headers:** `Authorization: Bearer ` + +**Response:** `204 No Content` + +--- + +## 사용자 (Users) + +### GET `/users/me` + +현재 로그인한 사용자의 정보를 조회한다. + +**Response (200):** +```json +{ + "id": 1, + "email": "user@example.com", + "role": "user", + "is_active": true, + "is_verified": false, + "full_name": "홍길동", + "phone": "010-1234-5678", + "organization": "ACME Corp", + "avatar_url": "", + "created_at": "2025-01-01T00:00:00" +} +``` + +--- + +### PATCH `/users/me` + +자신의 프로필 정보를 수정한다. `role`과 `is_active`는 변경할 수 없다. + +**Request Body (partial):** +```json +{ + "full_name": "김철수", + "phone": "010-9999-8888" +} +``` + +--- + +### GET `/users` + +사용자 목록을 페이징 조회한다. + +**권한:** SUPERADMIN, ADMIN + +**Query Parameters:** +| 파라미터 | 타입 | 기본값 | 설명 | +|---------|------|--------|------| +| page | int | 1 | 페이지 번호 | +| size | int | 20 | 페이지 크기 (max 100) | + +**Response (200):** +```json +{ + "items": [{ ... }], + "total": 150, + "page": 1, + "size": 20, + "pages": 8 +} +``` + +--- + +### GET `/users/{user_id}` + +특정 사용자를 조회한다. + +**권한:** SUPERADMIN, ADMIN + +--- + +### POST `/users` + +사용자를 생성한다. 
+ +**권한:** SUPERADMIN, ADMIN + +**Request Body:** +```json +{ + "email": "new@example.com", + "password": "password123", + "role": "manager", + "full_name": "박매니저", + "phone": "010-1111-2222", + "organization": "Dev Team" +} +``` + +--- + +### PATCH `/users/{user_id}` + +사용자 정보를 수정한다. 관리자는 역할과 활성 상태도 변경할 수 있다. + +**권한:** SUPERADMIN, ADMIN + +--- + +### DELETE `/users/{user_id}` + +사용자를 소프트 삭제한다. + +**권한:** SUPERADMIN, ADMIN + +**Response:** `204 No Content` + +--- + +## 디바이스 (Devices) + +### GET `/devices` + +디바이스 목록을 페이징 조회한다. + +**Query Parameters:** +| 파라미터 | 타입 | 기본값 | 설명 | +|---------|------|--------|------| +| page | int | 1 | 페이지 번호 | +| size | int | 20 | 페이지 크기 (max 100) | + +**Response (200):** +```json +{ + "items": [ + { + "id": 1, + "device_uid": "sensor-temp-001", + "name": "1층 온도센서", + "device_type": "temperature", + "status": "online", + "firmware_version": "1.2.0", + "ip_address": "192.168.1.100", + "group_id": 1, + "owner_id": 2, + "last_seen_at": "2025-01-15T12:00:00", + "created_at": "2025-01-01T00:00:00" + } + ], + "total": 50, + "page": 1, + "size": 20, + "pages": 3 +} +``` + +--- + +### GET `/devices/{device_id}` + +특정 디바이스 상세를 조회한다. + +--- + +### POST `/devices` + +디바이스를 등록한다. + +**권한:** SUPERADMIN, ADMIN, MANAGER + +**Request Body:** +```json +{ + "device_uid": "sensor-temp-001", + "name": "1층 온도센서", + "device_type": "temperature", + "group_id": 1, + "owner_id": 2, + "firmware_version": "1.0.0" +} +``` + +**에러:** +- `409` — 이미 등록된 device_uid + +--- + +### PATCH `/devices/{device_id}` + +디바이스 정보를 수정한다. + +**권한:** SUPERADMIN, ADMIN, MANAGER + +**Request Body (partial):** +```json +{ + "name": "수정된 센서명", + "status": "maintenance" +} +``` + +--- + +### DELETE `/devices/{device_id}` + +디바이스를 소프트 삭제한다. + +**권한:** SUPERADMIN, ADMIN + +**Response:** `204 No Content` + +--- + +## 모니터링 (Monitoring) + +### GET `/monitoring/health` + +시스템 상태를 상세 조회한다. 
+ +**권한:** SUPERADMIN, ADMIN, MANAGER + +**Response (200):** +```json +{ + "status": "ok", + "mariadb": "connected", + "mongodb": "connected", + "redis": "connected", + "mqtt": "connected", + "active_devices": 42, + "active_alerts": 3 +} +``` + +--- + +### GET `/monitoring/alerts` + +미확인 알림 목록을 조회한다. + +**권한:** SUPERADMIN, ADMIN, MANAGER + +**Query Parameters:** +| 파라미터 | 타입 | 기본값 | 설명 | +|---------|------|--------|------| +| skip | int | 0 | 건너뛸 수 | +| limit | int | 50 | 최대 개수 (max 200) | + +**Response (200):** +```json +[ + { + "id": 1, + "rule_id": 3, + "device_id": 5, + "severity": "warning", + "message": "Temperature exceeded threshold", + "is_acknowledged": false, + "acknowledged_by": null, + "acknowledged_at": null, + "created_at": "2025-01-15T10:30:00" + } +] +``` + +--- + +### POST `/monitoring/alerts/{alert_id}/acknowledge` + +알림을 확인 처리한다. + +--- + +### GET `/monitoring/alert-rules` + +알림 규칙 목록을 조회한다. + +--- + +### POST `/monitoring/alert-rules` + +알림 규칙을 생성한다. + +**Request Body:** +```json +{ + "name": "고온 경고", + "description": "온도가 40도를 초과하면 알림", + "metric": "temperature", + "condition": "gt", + "threshold": 40.0, + "severity": "warning", + "device_group_id": 1 +} +``` + +--- + +## 분석 (Analytics) + +### GET `/analytics/telemetry/{device_id}` + +특정 디바이스의 텔레메트리 데이터를 시간 간격별로 집계한다. + +**권한:** SUPERADMIN, ADMIN, MANAGER + +**Query Parameters:** +| 파라미터 | 타입 | 필수 | 설명 | +|---------|------|------|------| +| start | datetime | O | 시작 시각 (ISO 8601) | +| end | datetime | O | 종료 시각 (ISO 8601) | +| interval | string | X | 집계 간격 (기본값: `1h`) | + +**Response (200):** +```json +{ + "device_id": "sensor-temp-001", + "records": [ + {"timestamp": "2025-01-15T10:00:00", "temperature": 23.5, "humidity": 45.2}, + {"timestamp": "2025-01-15T11:00:00", "temperature": 24.1, "humidity": 44.8} + ], + "count": 2 +} +``` + +--- + +### POST `/analytics/reports/{device_id}` + +디바이스 종합 리포트를 생성한다 (상태 분석 + 추세 분석). 
+ +**Query Parameters:** `start`, `end` (ISO 8601) + +**Response (200):** +```json +{ + "report_id": "65a1b2c3...", + "device_id": "sensor-temp-001", + "status": { + "total_events": 150, + "status_counts": {"online": 140, "offline": 10}, + "uptime_ratio": 0.9333 + }, + "trends": { + "count": 720, + "mean": 23.5, + "std": 1.2, + "min": 20.1, + "max": 28.3, + "slope": 0.0023, + "trend": "stable" + }, + "created_at": "2025-01-15T12:00:00" +} +``` + +--- + +### GET `/analytics/status/{device_id}` + +디바이스 상태 변경 이력을 분석한다. + +--- + +### GET `/analytics/trends/{device_id}` + +텔레메트리 데이터의 추세를 분석한다 (선형 회귀). + +--- + +### GET `/analytics/results` + +저장된 분석 결과를 조회한다. + +**Query Parameters:** +| 파라미터 | 타입 | 필수 | 설명 | +|---------|------|------|------| +| analysis_type | string | O | 분석 유형 | +| device_id | string | X | 디바이스 ID | +| skip | int | X | 건너뛸 수 (기본 0) | +| limit | int | X | 최대 개수 (기본 20) | + +--- + +## 시스템 (System) + +### GET `/system/health` + +서비스 헬스체크. 인증 불필요. + +**Response (200):** +```json +{ + "status": "ok", + "service": "core-api", + "version": "0.1.0" +} +``` + +--- + +### GET `/system/info` + +시스템 설정 정보를 조회한다. 
+ +**권한:** SUPERADMIN + +--- + +## 공통 에러 응답 + +모든 에러는 다음 형식으로 반환된다: + +```json +{ + "detail": "에러 메시지" +} +``` + +| 상태 코드 | 설명 | +|----------|------| +| 401 | 인증 실패 (토큰 없음/만료/유효하지 않음) | +| 403 | 권한 부족 | +| 404 | 리소스를 찾을 수 없음 | +| 409 | 리소스 충돌 (중복) | +| 422 | 요청 데이터 검증 실패 | +| 429 | 요청 속도 제한 초과 | +| 500 | 서버 내부 오류 | + +--- + +## Socket.IO 이벤트 + +> 연결: `io("http://localhost:8000", { path: "/socket.io/" })` + +### 네임스페이스: `/monitoring` + +| 이벤트 | 방향 | 데이터 | 설명 | +|--------|------|--------|------| +| `subscribe_device` | Client → Server | `{ device_uid }` | 디바이스 모니터링 구독 | +| `unsubscribe_device` | Client → Server | `{ device_uid }` | 구독 해제 | +| `telemetry` | Server → Client | `{ device_uid, data }` | 실시간 텔레메트리 | +| `device_status` | Server → Client | `{ device_uid, status }` | 상태 변경 | + +### 네임스페이스: `/device` + +| 이벤트 | 방향 | 데이터 | 설명 | +|--------|------|--------|------| +| `send_command` | Client → Server | `{ device_uid, command }` | 디바이스 명령 전송 | +| `device_response` | Server → Client | `{ device_uid, data }` | 명령 응답 | + +### 네임스페이스: `/notification` + +| 이벤트 | 방향 | 데이터 | 설명 | +|--------|------|--------|------| +| `join_user_room` | Client → Server | `{ user_id }` | 알림 수신 등록 | +| `notification` | Server → Client | `{ title, message, type }` | 알림 푸시 | + +--- + +## MQTT 토픽 + +> 브로커: `mqtt://localhost:1883` + +### Device → Server + +| 토픽 | 페이로드 | 처리 | +|------|---------|------| +| `devices/{uid}/telemetry` | `{ temperature: 23.5, ... }` | MongoDB 저장 + Socket.IO 브로드캐스트 | +| `devices/{uid}/status` | `{ status: "online" }` | MongoDB 로그 + Socket.IO 전송 | +| `devices/{uid}/log` | `{ event_type: "...", ... }` | MongoDB 저장 | +| `devices/{uid}/response` | `{ command_id: "...", ... 
}` | Socket.IO 전송 | + +### Server → Device + +| 토픽 | 페이로드 | 용도 | +|------|---------|------| +| `devices/{uid}/command` | `{ action: "restart" }` | 원격 명령 | +| `devices/{uid}/config` | `{ interval: 30 }` | 설정 변경 | +| `devices/{uid}/ota` | `{ url: "...", action: "update" }` | 펌웨어 업데이트 | diff --git a/docs/ARCHITECTURE.md b/docs/ARCHITECTURE.md new file mode 100644 index 0000000..cdb1dce --- /dev/null +++ b/docs/ARCHITECTURE.md @@ -0,0 +1,595 @@ +# core-api 아키텍처 문서 + +## 1. 개요 + +IoT 임베디드 디바이스 통신, 사용자/관리자 관리, 모니터링, 데이터 분석을 지원하는 FastAPI 기반 통합 백엔드 서버이다. 향후 MSA 전환을 고려한 논리적 계층 분리를 핵심 설계 원칙으로 한다. + +**주요 특징:** +- Flutter 웹/앱 프론트엔드 연동 +- MariaDB + MongoDB 듀얼 DB +- MQTT + Socket.IO 실시간 통신 +- Celery 비동기 태스크 처리 +- RBAC 기반 접근 제어 + +--- + +## 2. 계층 아키텍처 + +``` +┌─────────────────────────────────────────────────────────────┐ +│ ASGI Entry Point │ +│ Socket.IO ASGIApp (최외곽 래퍼) │ +│ app/asgi.py │ +├─────────────────────────────────────────────────────────────┤ +│ Middleware Stack │ +│ RequestIDMiddleware → RequestLoggingMiddleware → CORS │ +├─────────────────────────────────────────────────────────────┤ +│ API Layer │ +│ app/api/v1/endpoints/*.py │ +│ auth │ users │ devices │ monitoring │ analytics │ system│ +├─────────────────────────────────────────────────────────────┤ +│ Service Layer │ +│ app/services/*.py │ +│ AuthService │ UserService │ DeviceService │ ... 
│ +├───────────────────────┬─────────────────────────────────────┤ +│ Repository Layer │ Communication Layer │ +│ app/repositories/*.py │ app/communication/ │ +│ BaseRepository[T] │ mqtt/ │ socketio/ │ external/ │ +├───────────────────────┴─────────────────────────────────────┤ +│ DB Layer │ +│ MariaDB │ MongoDB │ Redis │ +│ app/db/mariadb.py │ mongodb.py │ redis.py │ +└─────────────────────────────────────────────────────────────┘ + │ │ + ┌───────┴───────┐ ┌────────┴────────┐ + │ Celery Tasks │ │ Data Processing │ + │ app/tasks/*.py │ │ app/processing/ │ + └────────────────┘ └─────────────────┘ +``` + +### 의존성 규칙 + +``` +API Layer → Service Layer → Repository Layer → DB Layer + │ + Communication Layer (MQTT, Socket.IO, External API) + │ + Task Layer (Celery) +``` + +- 각 계층은 **바로 아래 계층만** 의존한다. +- 도메인 간 직접 Repository 호출을 금지하고, 반드시 Service 인터페이스를 통해 통신한다. +- 이 규칙을 지키면 향후 MSA 전환 시 Service 인터페이스를 gRPC/HTTP 호출로 교체하는 것만으로 분리가 가능하다. + +--- + +## 3. 디렉토리 구조 + +``` +python-api/ +├── pyproject.toml # 의존성 + 빌드 설정 +├── .env.example # 환경변수 템플릿 +├── alembic.ini # Alembic 마이그레이션 설정 +├── Dockerfile # 앱 이미지 +├── Dockerfile.worker # Celery 워커 이미지 +├── docker-compose.yml # 개발 환경 +├── docker-compose.prod.yml # 프로덕션 오버라이드 +│ +├── alembic/ # DB 마이그레이션 +│ ├── env.py +│ ├── script.py.mako +│ └── versions/ +│ +├── scripts/ # 운영 스크립트 +│ ├── init_db.py # DB 시드 데이터 +│ ├── create_superuser.py # 관리자 계정 생성 +│ ├── run_dev.sh +│ ├── run_worker.sh +│ └── run_beat.sh +│ +├── docker/ +│ └── mosquitto/mosquitto.conf +│ +├── tests/ # 테스트 +│ ├── conftest.py +│ ├── unit/ +│ ├── integration/ +│ └── e2e/ +│ +├── docs/ # 문서 +│ +└── app/ # 애플리케이션 루트 + ├── main.py # FastAPI 앱 팩토리 + 라이프사이클 + ├── asgi.py # ASGI 엔트리포인트 + ├── core/ # 횡단 관심사 + ├── middleware/ # HTTP 미들웨어 + ├── db/ # DB 연결/세션 + ├── models/ # DB 모델 (mariadb/ + mongodb/) + ├── schemas/ # Pydantic DTO + ├── repositories/ # 데이터 접근 계층 + ├── services/ # 비즈니스 로직 + ├── api/ # API 라우터 + ├── admin/ # SQLAdmin 관리자 패널 + ├── communication/ # MQTT, Socket.IO, 
외부 API + ├── tasks/ # Celery 비동기 태스크 + ├── processing/ # 데이터 분석 파이프라인 + └── utils/ # 범용 유틸리티 +``` + +--- + +## 4. 기술 스택 + +| 카테고리 | 기술 | 용도 | +|---------|------|------| +| **Web Framework** | FastAPI + Uvicorn | ASGI 비동기 웹 서버 | +| **MariaDB ORM** | SQLModel + SQLAlchemy(async) + aiomysql | 관계형 DB 비동기 ORM | +| **Migration** | Alembic + pymysql | DB 스키마 마이그레이션 | +| **MongoDB ODM** | Beanie + Motor | 문서 DB 비동기 ODM | +| **Cache/Queue** | Redis (hiredis) | 캐싱 + Celery 브로커/백엔드 | +| **Auth** | python-jose + passlib[bcrypt] | JWT 토큰 + 비밀번호 해싱 | +| **MQTT** | fastapi-mqtt | IoT 디바이스 양방향 통신 | +| **WebSocket** | python-socketio | 실시간 프론트엔드 푸시 | +| **Background** | Celery + Flower | 비동기 태스크 + 모니터링 | +| **Admin** | SQLAdmin | 관리자 대시보드 | +| **Data** | Polars + Pandas + NumPy | 데이터 분석/집계/통계 | +| **Logging** | structlog | 구조화 로깅 (JSON/Console) | +| **Test** | pytest + pytest-asyncio + factory-boy | 테스트 프레임워크 | +| **Lint** | Ruff + mypy + pre-commit | 코드 품질 | + +--- + +## 5. 데이터 모델 + +### 5.1 MariaDB (관계형 — SQLModel) + +``` +┌──────────────┐ 1:1 ┌──────────────┐ +│ User │────────────▶│ UserProfile │ +│──────────────│ │──────────────│ +│ id (PK) │ │ user_id (FK) │ +│ email │ │ full_name │ +│ hashed_pwd │ │ phone │ +│ role │ │ organization │ +│ is_active │ │ avatar_url │ +│ is_verified │ └───────────────┘ +│ last_login_at│ +└──────┬───────┘ + │ 1:N + ▼ +┌──────────────┐ ┌───────────────┐ +│ RefreshToken │ │ OAuthAccount │ +│──────────────│ │───────────────│ +│ user_id (FK) │ │ user_id (FK) │ +│ token │ │ provider │ +│ expires_at │ │ provider_uid │ +│ is_revoked │ │ access_token │ +└──────────────┘ └───────────────┘ + +┌──────────────┐ N:1 ┌──────────────┐ +│ Device │────────────▶│ DeviceGroup │ +│──────────────│ │──────────────│ +│ device_uid │ │ name │ +│ name │ │ description │ +│ device_type │ └──────────────┘ +│ status │ +│ firmware_ver │ +│ ip_address │ +│ owner_id(FK) │ +│ last_seen_at │ +└──────────────┘ + +┌──────────────┐ ┌──────────────┐ +│ AlertRule │ │ Alert │ 
+│──────────────│ │──────────────│ +│ metric │◀────────────│ rule_id (FK) │ +│ condition │ │ device_id(FK)│ +│ threshold │ │ severity │ +│ severity │ │ message │ +│ is_enabled │ │ is_ack │ +└──────────────┘ └──────────────┘ + +┌──────────────┐ ┌──────────────┐ +│ SystemConfig │ │ AuditLog │ +│──────────────│ │──────────────│ +│ key (unique) │ │ user_id (FK) │ +│ value │ │ action │ +│ is_secret │ │ resource_type│ +└──────────────┘ │ details │ + └──────────────┘ +``` + +**공통 Mixin:** +- `TimestampMixin` — `created_at`, `updated_at` (서버 기본값 + 자동 갱신) +- `SoftDeleteMixin` — `is_deleted`, `deleted_at` (논리 삭제) + +### 5.2 MongoDB (문서형 — Beanie) + +| Collection | 주요 필드 | 인덱스 | 비고 | +|-----------|----------|--------|------| +| `device_logs` | device_id, event_type, payload, timestamp | device_id, event_type, timestamp(desc) | TTL: 90일 | +| `telemetry_data` | device_id, metrics(dict), timestamp | device_id, timestamp(desc), 복합(device_id+timestamp) | 시계열 | +| `analytics_results` | analysis_type, parameters, result, device_id, period | analysis_type, device_id, created_at(desc) | 분석 결과 | +| `notifications` | user_id, title, message, type, is_read | user_id, 복합(user_id+is_read), created_at(desc) | 사용자 알림 | + +--- + +## 6. 
API 엔드포인트 + +### 6.1 인증 (`/api/v1/auth`) + +| Method | Path | 설명 | 인증 | +|--------|------|------|------| +| POST | `/register` | 회원가입 → 토큰 반환 | - | +| POST | `/login` | 로그인 → 토큰 반환 | - | +| POST | `/refresh` | 리프레시 토큰으로 갱신 | - | +| POST | `/logout` | 모든 리프레시 토큰 폐기 | Bearer | + +### 6.2 사용자 (`/api/v1/users`) + +| Method | Path | 설명 | 권한 | +|--------|------|------|------| +| GET | `/me` | 내 정보 조회 | 인증됨 | +| PATCH | `/me` | 내 프로필 수정 | 인증됨 | +| GET | `/` | 사용자 목록 (페이징) | SUPERADMIN, ADMIN | +| GET | `/{user_id}` | 사용자 상세 | SUPERADMIN, ADMIN | +| POST | `/` | 사용자 생성 | SUPERADMIN, ADMIN | +| PATCH | `/{user_id}` | 사용자 수정 | SUPERADMIN, ADMIN | +| DELETE | `/{user_id}` | 사용자 삭제 (소프트) | SUPERADMIN, ADMIN | + +### 6.3 디바이스 (`/api/v1/devices`) + +| Method | Path | 설명 | 권한 | +|--------|------|------|------| +| GET | `/` | 디바이스 목록 (페이징) | 인증됨 | +| GET | `/{device_id}` | 디바이스 상세 | 인증됨 | +| POST | `/` | 디바이스 등록 | SUPERADMIN, ADMIN, MANAGER | +| PATCH | `/{device_id}` | 디바이스 수정 | SUPERADMIN, ADMIN, MANAGER | +| DELETE | `/{device_id}` | 디바이스 삭제 (소프트) | SUPERADMIN, ADMIN | + +### 6.4 모니터링 (`/api/v1/monitoring`) + +| Method | Path | 설명 | 권한 | +|--------|------|------|------| +| GET | `/health` | 시스템 상태 상세 | MANAGEMENT | +| GET | `/alerts` | 미확인 알림 목록 | MANAGEMENT | +| POST | `/alerts/{id}/acknowledge` | 알림 확인 처리 | 인증됨 | +| GET | `/alert-rules` | 알림 규칙 목록 | MANAGEMENT | +| POST | `/alert-rules` | 알림 규칙 생성 | 인증됨 | + +### 6.5 분석 (`/api/v1/analytics`) + +| Method | Path | 설명 | 권한 | +|--------|------|------|------| +| GET | `/telemetry/{device_id}` | 텔레메트리 집계 | MANAGEMENT | +| POST | `/reports/{device_id}` | 종합 리포트 생성 | MANAGEMENT | +| GET | `/status/{device_id}` | 디바이스 상태 분석 | MANAGEMENT | +| GET | `/trends/{device_id}` | 추세 분석 | MANAGEMENT | +| GET | `/results` | 분석 결과 조회 | MANAGEMENT | + +### 6.6 시스템 (`/api/v1/system`) + +| Method | Path | 설명 | 권한 | +|--------|------|------|------| +| GET | `/health` | 헬스체크 | - | +| GET | `/info` | 시스템 정보 | SUPERADMIN | + +--- + +## 7. 
인증/인가 체계 + +### 7.1 JWT 토큰 흐름 + +``` +클라이언트 서버 + │ │ + │─── POST /auth/login ──────────▶│ + │ {email, password} │ + │ │── 비밀번호 검증 + │ │── Access Token 생성 (30분) + │ │── Refresh Token 생성 (7일) + │ │── Refresh Token DB 저장 + │◀── {access_token, │ + │ refresh_token} ────────────│ + │ │ + │─── GET /api/v1/users/me ──────▶│ + │ Authorization: Bearer {AT} │── decode_token() + │ │── get_current_user_payload() + │◀── {user data} ───────────────│ + │ │ + │─── POST /auth/refresh ────────▶│ + │ {refresh_token} │── 기존 RT 폐기 + │ │── 새 AT + RT 발급 + │◀── {new tokens} ──────────────│ +``` + +### 7.2 역할 기반 접근 제어 (RBAC) + +``` +SUPERADMIN (4) ─── 전체 시스템 관리, 시스템 설정 + │ + ADMIN (3) ─── 사용자/디바이스 관리, 삭제 권한 + │ + MANAGER (2) ─── 디바이스 등록/수정, 모니터링, 분석 + │ + USER (1) ─── 자기 프로필, 디바이스 조회 + │ + DEVICE (0) ─── 디바이스 전용 (MQTT 인증) +``` + +- `require_role(*roles)` — 허용된 역할만 접근 가능한 Dependency +- `can_manage_user(actor_role, target_role)` — 상위 역할만 하위 역할 관리 가능 + +--- + +## 8. 실시간 통신 + +### 8.1 MQTT (IoT 디바이스 ↔ 서버) + +``` +┌──────────┐ ┌──────────────┐ ┌──────────┐ +│ Device │──MQTT───▶│ Mosquitto │──────────▶│ Server │ +│ │◀──MQTT───│ (Broker) │◀──────────│ │ +└──────────┘ └──────────────┘ └──────────┘ +``` + +**토픽 구조:** + +| 방향 | 토픽 패턴 | 용도 | +|------|----------|------| +| Device → Server | `devices/{uid}/telemetry` | 센서 데이터 전송 | +| Device → Server | `devices/{uid}/status` | 상태 변경 알림 | +| Device → Server | `devices/{uid}/log` | 이벤트 로그 | +| Device → Server | `devices/{uid}/response` | 명령 응답 | +| Server → Device | `devices/{uid}/command` | 원격 명령 | +| Server → Device | `devices/{uid}/config` | 설정 변경 | +| Server → Device | `devices/{uid}/ota` | 펌웨어 업데이트 | + +**데이터 파이프라인:** +``` +MQTT 수신 → JSON 파싱 → MongoDB 저장 → Socket.IO 브로드캐스트 +``` + +### 8.2 Socket.IO (서버 → 프론트엔드) + +| 네임스페이스 | 이벤트 | 설명 | +|------------|--------|------| +| `/monitoring` | `telemetry` | 실시간 텔레메트리 데이터 | +| `/monitoring` | `device_status` | 디바이스 상태 변경 | +| `/device` | `device_response` | 디바이스 명령 응답 | +| `/device` | `send_command` | 
클라이언트→디바이스 명령 전달 | +| `/notification` | `notification` | 사용자 알림 푸시 | + +**Room 기반 구독:** +- `device:{device_uid}` — 특정 디바이스 모니터링 +- `user:{user_id}` — 특정 사용자 알림 + +--- + +## 9. 비동기 태스크 (Celery) + +### 9.1 큐 구성 + +``` +┌─────────────┐ +│ Redis │ ← Broker (redis://...6379/1) +│ (Broker) │ ← Result Backend (redis://...6379/2) +└──────┬──────┘ + │ + ┌────┴────────────────────────────────┐ + │ Celery Workers │ + ├─────────┬───────────┬───────────────┤ + │ default │ analytics │ notifications │ devices │ + │ queue │ queue │ queue │ queue │ + ├─────────┼───────────┼───────────────┤ + │ 토큰정리 │ 일간분석 │ 푸시알림 │ 헬스체크 │ + │ 이메일 │ 디바이스분석 │ 대량알림 │ OTA배치 │ + └─────────┴───────────┴───────────────┘ +``` + +### 9.2 Beat 스케줄 (정기 실행) + +| 태스크 | 스케줄 | 큐 | 설명 | +|--------|--------|-----|------| +| `cleanup_expired_tokens` | 매일 03:00 | default | 만료/폐기된 리프레시 토큰 삭제 | +| `check_device_health` | 5분마다 | devices | 10분 이상 미응답 디바이스를 OFFLINE 처리 | +| `run_daily_analytics` | 매일 01:00 | analytics | 전일 텔레메트리 데이터 집계 및 저장 | + +--- + +## 10. 미들웨어 파이프라인 + +요청은 아래 순서로 미들웨어를 통과한다: + +``` +요청 ──▶ RequestIDMiddleware ──▶ RequestLoggingMiddleware ──▶ CORS ──▶ FastAPI Router + │ │ + │ X-Request-ID 생성/전파 │ method, path, status, elapsed_ms 로깅 + │ structlog 컨텍스트 바인딩 │ + ▼ ▼ +응답 ◀── X-Request-ID 헤더 추가 ◀── 로그 기록 ◀────────────────────── 핸들러 응답 +``` + +**RateLimitMiddleware** (선택 사용): +- Redis 기반 IP별 속도 제한 +- 기본값: 100 요청/60초 +- `/docs`, `/redoc` 경로 제외 + +--- + +## 11. 
데이터 분석 파이프라인 + +``` +┌─────────────────┐ ┌──────────────────┐ ┌────────────────┐ +│ TelemetryData │────▶│ Polars/Pandas │────▶│ AnalyticsResult│ +│ (MongoDB) │ │ Processing │ │ (MongoDB) │ +└─────────────────┘ └──────────────────┘ └────────────────┘ + │ + ┌─────────┴──────────┐ + │ │ + ┌──────┴──────┐ ┌──────┴──────┐ + │ Aggregation │ │ Analysis │ + │ (Polars) │ │ (NumPy) │ + │ │ │ │ + │ - resample │ │ - trend │ + │ - group_by │ │ - anomaly │ + │ - mean/sum │ │ - percentile│ + └─────────────┘ └─────────────┘ +``` + +| 모듈 | 기능 | +|------|------| +| `telemetry_pipeline` | 시간 간격별 텔레메트리 집계 (Polars group_by_dynamic) | +| `report_pipeline` | 디바이스 종합 리포트 생성 (상태 + 추세) | +| `device_analyzer` | 디바이스 상태 변경 분석, 가동률 계산 | +| `trend_analyzer` | 선형 회귀 기반 추세 분석 (NumPy polyfit) | +| `statistics` | 이동평균, Z-score 이상치 탐지, 백분위 통계 | + +--- + +## 12. MSA 전환 전략 + +현재 모놀리식 구조에서 각 도메인을 독립 서비스로 분리할 수 있는 논리적 경계가 이미 설정되어 있다. + +| 도메인 | 현재 패키지 | 향후 MSA 서비스 | 주 DB | +|--------|------------|----------------|-------| +| auth | auth_service + auth_repo | Auth Service | MariaDB + Redis | +| users | user_service + user_repo | User Service | MariaDB | +| devices | device_service + device_repo | Device Service | MariaDB + MongoDB | +| monitoring | monitoring_service + monitoring_repo | Monitoring Service | MongoDB + Redis | +| analytics | analytics_service + analytics_repo | Analytics Service | MongoDB | +| notification | notification_service + socketio | Notification Service | Redis + MongoDB | + +**전환 시 변경점:** +1. Service 간 호출을 HTTP/gRPC 클라이언트로 교체 +2. 각 서비스별 독립 DB 인스턴스 할당 +3. 이벤트 버스(Kafka/RabbitMQ)로 서비스 간 비동기 통신 +4. API Gateway 도입 (인증/라우팅 통합) + +--- + +## 13. 
배포 구성 + +### 13.1 Docker 서비스 + +``` + ┌─────────────┐ + │ Mosquitto │:1883 (MQTT) + └──────┬──────┘ + │ +┌──────────┐ ┌──────┴──────┐ ┌──────────┐ +│ Redis │:6379 ◀─│ App │:8000 ──▶│ MariaDB │ (호스트) +└────┬─────┘ │ (FastAPI) │ └──────────┘ + │ └──────┬──────┘ ┌──────────┐ + │ │ ────▶│ MongoDB │ (호스트) + │ ┌──────┴──────┐ └──────────┘ + ├──────────────│ Worker │ + │ │ (Celery) │ + │ └─────────────┘ + │ ┌─────────────┐ + ├──────────────│ Beat │ + │ │ (Scheduler) │ + │ └─────────────┘ + │ ┌─────────────┐ + └──────────────│ Flower │:5555 (모니터링) + └─────────────┘ +``` + +- MariaDB/MongoDB는 **호스트에 직접 설치** (Docker 외부) +- App, Worker, Beat, Flower, Redis, Mosquitto는 **Docker 컨테이너**로 관리 + +### 13.2 환경별 차이 + +| 항목 | 개발 (docker-compose.yml) | 프로덕션 (docker-compose.prod.yml) | +|------|--------------------------|-----------------------------------| +| App 워커 수 | 1 (reload 모드) | 4 | +| Celery 동시성 | 2 | 4 | +| Worker 레플리카 | 1 | 2 | +| 로그 레벨 | info | warning | +| Flower 인증 | 없음 | basic_auth | +| 볼륨 마운트 | 소스 코드 (.:/app) | 이미지 내장 | + +--- + +## 14. 실행 방법 + +### 개발 환경 + +```bash +# 1. 환경변수 설정 +cp .env.example .env +# .env 파일에서 DB 접속 정보 등 수정 + +# 2. 의존성 설치 +pip install -e ".[dev]" + +# 3. DB 마이그레이션 +alembic upgrade head + +# 4. 시드 데이터 +python -m scripts.init_db + +# 5. 슈퍼관리자 생성 +python -m scripts.create_superuser admin@example.com password123 + +# 6. 인프라 (Redis + Mosquitto) +docker-compose up -d redis mosquitto + +# 7. 앱 서버 실행 +uvicorn app.asgi:app --reload + +# 8. Celery 워커 (별도 터미널) +celery -A app.tasks.celery_app worker --loglevel=info -Q default,analytics,notifications,devices + +# 9. 
Celery 스케줄러 (별도 터미널) +celery -A app.tasks.celery_app beat --loglevel=info +``` + +### Docker 전체 스택 + +```bash +# 개발 +docker-compose up -d + +# 프로덕션 +docker-compose -f docker-compose.yml -f docker-compose.prod.yml up -d +``` + +### 검증 + +```bash +# 헬스체크 +curl http://localhost:8000/api/v1/system/health + +# Swagger UI +open http://localhost:8000/docs + +# Flower (Celery 모니터링) +open http://localhost:5555 + +# SQLAdmin (관리자) +open http://localhost:8000/admin +``` + +--- + +## 15. 테스트 + +```bash +# 전체 테스트 +pytest tests/ -v + +# 커버리지 +pytest tests/ -v --cov=app --cov-report=html + +# 단위 테스트만 +pytest tests/unit/ -v + +# 통합 테스트만 +pytest tests/integration/ -v +``` + +**테스트 구조:** +- `tests/unit/` — 순수 함수 테스트 (보안, 검증, 통계, 권한) +- `tests/integration/` — API 엔드포인트 + DB 연동 테스트 +- `tests/e2e/` — 전체 CRUD 플로우 테스트 diff --git a/docs/DATABASE.md b/docs/DATABASE.md new file mode 100644 index 0000000..e697af0 --- /dev/null +++ b/docs/DATABASE.md @@ -0,0 +1,338 @@ +# 데이터베이스 설계 문서 + +## 1. 개요 + +| DB | 용도 | 연결 방식 | +|----|------|----------| +| **MariaDB** | 사용자, 인증, 디바이스, 알림 규칙, 시스템 설정 | SQLModel + SQLAlchemy (async, aiomysql) | +| **MongoDB** | 디바이스 로그, 텔레메트리, 분석 결과, 알림 | Beanie + Motor (async) | +| **Redis** | 캐싱, 세션, 속도 제한, Celery 브로커 | redis-py (async, hiredis) | + +--- + +## 2. MariaDB 스키마 + +### 2.1 users + +사용자 계정 정보. + +| 컬럼 | 타입 | 제약 | 설명 | +|------|------|------|------| +| id | INT | PK, AUTO_INCREMENT | | +| email | VARCHAR(255) | UNIQUE, INDEX | 로그인 ID | +| hashed_password | VARCHAR(255) | NOT NULL | bcrypt 해시 | +| role | VARCHAR(20) | DEFAULT 'user' | superadmin/admin/manager/user/device | +| is_active | BOOLEAN | DEFAULT TRUE | 계정 활성 여부 | +| is_verified | BOOLEAN | DEFAULT FALSE | 이메일 인증 여부 | +| last_login_at | DATETIME | NULLABLE | 마지막 로그인 | +| is_deleted | BOOLEAN | DEFAULT FALSE | 소프트 삭제 | +| deleted_at | DATETIME | NULLABLE | 삭제 시각 | +| created_at | DATETIME | server_default=NOW() | | +| updated_at | DATETIME | onupdate=NOW() | | + +### 2.2 user_profiles + +사용자 프로필 (1:1). 
+ +| 컬럼 | 타입 | 제약 | 설명 | +|------|------|------|------| +| id | INT | PK | | +| user_id | INT | FK→users.id, UNIQUE | | +| full_name | VARCHAR(100) | | 이름 | +| phone | VARCHAR(20) | | 전화번호 | +| organization | VARCHAR(100) | | 소속 | +| avatar_url | VARCHAR(500) | | 프로필 이미지 | +| created_at | DATETIME | | | +| updated_at | DATETIME | | | + +### 2.3 refresh_tokens + +리프레시 토큰 저장소. 토큰 순환(rotation) 방식 사용. + +| 컬럼 | 타입 | 제약 | 설명 | +|------|------|------|------| +| id | INT | PK | | +| user_id | INT | FK→users.id, INDEX | | +| token | VARCHAR(500) | UNIQUE, INDEX | JWT 리프레시 토큰 | +| expires_at | DATETIME | NOT NULL | 만료 시각 | +| is_revoked | BOOLEAN | DEFAULT FALSE | 폐기 여부 | +| device_info | VARCHAR(255) | | 접속 디바이스 정보 | +| created_at | DATETIME | | | +| updated_at | DATETIME | | | + +### 2.4 oauth_accounts + +소셜 로그인 연동 계정. + +| 컬럼 | 타입 | 제약 | 설명 | +|------|------|------|------| +| id | INT | PK | | +| user_id | INT | FK→users.id, INDEX | | +| provider | VARCHAR(50) | | google/kakao/naver | +| provider_user_id | VARCHAR(255) | | 제공자 사용자 ID | +| access_token | VARCHAR(500) | | OAuth 액세스 토큰 | +| refresh_token | VARCHAR(500) | | OAuth 리프레시 토큰 | +| expires_at | DATETIME | NULLABLE | 토큰 만료 | +| created_at | DATETIME | | | +| updated_at | DATETIME | | | + +### 2.5 devices + +IoT 디바이스 정보. 
+ +| 컬럼 | 타입 | 제약 | 설명 | +|------|------|------|------| +| id | INT | PK | | +| device_uid | VARCHAR(100) | UNIQUE, INDEX | 디바이스 고유 식별자 | +| name | VARCHAR(100) | | 디바이스 이름 | +| device_type | VARCHAR(50) | | 센서 유형 (temperature, humidity 등) | +| status | VARCHAR(20) | DEFAULT 'offline' | online/offline/error/maintenance | +| firmware_version | VARCHAR(50) | | 펌웨어 버전 | +| ip_address | VARCHAR(45) | | IPv4/IPv6 | +| group_id | INT | FK→device_groups.id, NULLABLE | | +| owner_id | INT | FK→users.id, NULLABLE | | +| last_seen_at | DATETIME | NULLABLE | 마지막 통신 시각 | +| metadata_json | VARCHAR(2000) | DEFAULT '{}' | 추가 메타데이터 | +| is_deleted | BOOLEAN | DEFAULT FALSE | | +| deleted_at | DATETIME | NULLABLE | | +| created_at | DATETIME | | | +| updated_at | DATETIME | | | + +### 2.6 device_groups + +디바이스 그룹 (논리적 분류). + +| 컬럼 | 타입 | 제약 | 설명 | +|------|------|------|------| +| id | INT | PK | | +| name | VARCHAR(100) | UNIQUE | 그룹명 | +| description | VARCHAR(500) | | 설명 | +| created_at | DATETIME | | | +| updated_at | DATETIME | | | + +### 2.7 alert_rules + +알림 발생 조건 규칙. + +| 컬럼 | 타입 | 제약 | 설명 | +|------|------|------|------| +| id | INT | PK | | +| name | VARCHAR(100) | | 규칙명 | +| description | VARCHAR(500) | | 설명 | +| metric | VARCHAR(100) | | 감시 메트릭 (temperature, humidity 등) | +| condition | VARCHAR(50) | | 조건 (gt, lt, eq, gte, lte) | +| threshold | FLOAT | | 임계값 | +| severity | VARCHAR(20) | DEFAULT 'warning' | critical/warning/info | +| is_enabled | BOOLEAN | DEFAULT TRUE | | +| device_group_id | INT | FK→device_groups.id, NULLABLE | 대상 그룹 | +| created_by | INT | FK→users.id, NULLABLE | 생성자 | +| created_at | DATETIME | | | +| updated_at | DATETIME | | | + +### 2.8 alerts + +발생한 알림 이력. 
+ +| 컬럼 | 타입 | 제약 | 설명 | +|------|------|------|------| +| id | INT | PK | | +| rule_id | INT | FK→alert_rules.id, NULLABLE | 원인 규칙 | +| device_id | INT | FK→devices.id, NULLABLE | 대상 디바이스 | +| severity | VARCHAR(20) | | critical/warning/info | +| message | VARCHAR(500) | | 알림 메시지 | +| is_acknowledged | BOOLEAN | DEFAULT FALSE | 확인 여부 | +| acknowledged_by | INT | FK→users.id, NULLABLE | 확인한 사용자 | +| acknowledged_at | DATETIME | NULLABLE | 확인 시각 | +| created_at | DATETIME | | | +| updated_at | DATETIME | | | + +### 2.9 system_configs + +시스템 설정 키-값 저장소. + +| 컬럼 | 타입 | 제약 | 설명 | +|------|------|------|------| +| id | INT | PK | | +| key | VARCHAR(100) | UNIQUE, INDEX | 설정 키 | +| value | VARCHAR(2000) | | 설정 값 | +| description | VARCHAR(500) | | 설명 | +| is_secret | BOOLEAN | DEFAULT FALSE | 비밀 값 여부 | +| created_at | DATETIME | | | +| updated_at | DATETIME | | | + +### 2.10 audit_logs + +감사 로그 (변경 이력 추적). + +| 컬럼 | 타입 | 제약 | 설명 | +|------|------|------|------| +| id | INT | PK | | +| user_id | INT | FK→users.id, NULLABLE | 행위자 | +| action | VARCHAR(100) | | 액션 (create, update, delete, login 등) | +| resource_type | VARCHAR(50) | | 대상 리소스 타입 | +| resource_id | VARCHAR(50) | | 대상 리소스 ID | +| details | VARCHAR(2000) | DEFAULT '{}' | 변경 상세 (JSON) | +| ip_address | VARCHAR(45) | | 요청 IP | +| created_at | DATETIME | | | +| updated_at | DATETIME | | | + +--- + +## 3. MariaDB ER 다이어그램 + +``` +users ──────────┬──── 1:1 ──── user_profiles + │ + ├──── 1:N ──── refresh_tokens + │ + ├──── 1:N ──── oauth_accounts + │ + ├──── 1:N ──── devices (owner_id) + │ + ├──── 1:N ──── alert_rules (created_by) + │ + ├──── 1:N ──── alerts (acknowledged_by) + │ + └──── 1:N ──── audit_logs (user_id) + +device_groups ──┬──── 1:N ──── devices (group_id) + │ + └──── 1:N ──── alert_rules (device_group_id) + +alert_rules ────┬──── 1:N ──── alerts (rule_id) + +devices ────────┴──── 1:N ──── alerts (device_id) +``` + +--- + +## 4. MongoDB 컬렉션 + +### 4.1 device_logs + +디바이스 이벤트 로그. TTL 인덱스로 90일 후 자동 삭제. 
+ +```json +{ + "_id": "ObjectId", + "device_id": "sensor-temp-001", + "event_type": "status_change", + "payload": { + "status": "online", + "reason": "boot" + }, + "ip_address": "192.168.1.100", + "timestamp": "2025-01-15T12:00:00Z" +} +``` + +**인덱스:** +- `device_id` (단일) +- `event_type` (단일) +- `timestamp` (내림차순) + +### 4.2 telemetry_data + +디바이스 센서 측정 데이터 (시계열). + +```json +{ + "_id": "ObjectId", + "device_id": "sensor-temp-001", + "metrics": { + "temperature": 23.5, + "humidity": 45.2, + "pressure": 1013.25 + }, + "timestamp": "2025-01-15T12:05:00Z" +} +``` + +**인덱스:** +- `device_id` (단일) +- `timestamp` (내림차순) +- `(device_id, timestamp)` (복합, 범위 쿼리 최적화) + +### 4.3 analytics_results + +분석 수행 결과 저장. + +```json +{ + "_id": "ObjectId", + "analysis_type": "daily_telemetry", + "parameters": { + "date": "2025-01-14" + }, + "result": { + "count": 1440, + "avg_value": 23.8 + }, + "device_id": "sensor-temp-001", + "period_start": "2025-01-14T00:00:00Z", + "period_end": "2025-01-15T00:00:00Z", + "created_at": "2025-01-15T01:05:00Z" +} +``` + +**인덱스:** +- `analysis_type` (단일) +- `device_id` (단일) +- `created_at` (내림차순) + +### 4.4 notifications + +사용자별 알림 메시지. + +```json +{ + "_id": "ObjectId", + "user_id": 1, + "title": "디바이스 오프라인 알림", + "message": "sensor-temp-001이 오프라인 상태입니다.", + "notification_type": "warning", + "is_read": false, + "read_at": null, + "created_at": "2025-01-15T12:10:00Z" +} +``` + +**인덱스:** +- `user_id` (단일) +- `(user_id, is_read)` (복합, 읽지 않은 알림 조회) +- `created_at` (내림차순) + +--- + +## 5. Redis 사용 패턴 + +| 용도 | 키 패턴 | TTL | 설명 | +|------|---------|-----|------| +| 속도 제한 | `rate_limit:{ip}` | 60초 | IP별 요청 카운터 | +| Celery 브로커 | redis://...6379/1 | - | 태스크 큐 | +| Celery 결과 | redis://...6379/2 | - | 태스크 결과 저장 | + +--- + +## 6. 마이그레이션 + +Alembic으로 MariaDB 스키마를 관리한다. 
+ +```bash +# 마이그레이션 생성 +alembic revision --autogenerate -m "description" + +# 마이그레이션 적용 +alembic upgrade head + +# 롤백 +alembic downgrade -1 + +# 현재 리비전 확인 +alembic current + +# 히스토리 확인 +alembic history +``` + +MongoDB는 스키마리스이므로 별도 마이그레이션이 불필요하다. 인덱스는 Beanie 모델의 `Settings.indexes`로 앱 시작 시 자동 생성된다. diff --git a/docs/DEPLOYMENT.md b/docs/DEPLOYMENT.md new file mode 100644 index 0000000..6881f2c --- /dev/null +++ b/docs/DEPLOYMENT.md @@ -0,0 +1,260 @@ +# 배포 가이드 + +## 1. 전제 조건 + +### 호스트에 직접 설치 (Docker 외부) +- **MariaDB** 10.6+ — 관계형 데이터 저장 +- **MongoDB** 7.0+ — 문서 데이터 저장 + +### Docker로 관리 +- **Docker** 24.0+ +- **Docker Compose** v2 + +--- + +## 2. 최초 설정 + +### 2.1 환경변수 + +```bash +cp .env.example .env +``` + +`.env` 파일에서 반드시 변경해야 할 항목: + +```bash +# 보안 키 (반드시 변경) +SECRET_KEY=<랜덤 문자열> +JWT_SECRET_KEY=<랜덤 문자열> + +# MariaDB 접속 정보 +MARIADB_HOST=127.0.0.1 +MARIADB_PORT=3306 +MARIADB_USER=core_api_user +MARIADB_PASSWORD=<강력한 비밀번호> +MARIADB_DATABASE=core_api + +# MongoDB 접속 정보 +MONGODB_URL=mongodb://127.0.0.1:27017 +MONGODB_DATABASE=core_api +``` + +### 2.2 MariaDB 준비 + +```sql +CREATE DATABASE core_api CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; +CREATE USER 'core_api_user'@'%' IDENTIFIED BY 'your_password'; +GRANT ALL PRIVILEGES ON core_api.* TO 'core_api_user'@'%'; +FLUSH PRIVILEGES; +``` + +### 2.3 MongoDB 준비 + +```javascript +use core_api +db.createUser({ + user: "core_api_user", + pwd: "your_password", + roles: [{ role: "readWrite", db: "core_api" }] +}) +``` + +--- + +## 3. 
개발 환경 + +### 3.1 로컬 실행 + +```bash +# 의존성 설치 +pip install -e ".[dev]" + +# DB 마이그레이션 +alembic upgrade head + +# 시드 데이터 +python -m scripts.init_db + +# 관리자 계정 생성 +python -m scripts.create_superuser admin@example.com password123 + +# 인프라 시작 (Redis + Mosquitto) +docker-compose up -d redis mosquitto + +# 앱 서버 +uvicorn app.asgi:app --reload --host 0.0.0.0 --port 8000 + +# Celery 워커 (별도 터미널) +celery -A app.tasks.celery_app worker --loglevel=info \ + -Q default,analytics,notifications,devices + +# Celery 스케줄러 (별도 터미널) +celery -A app.tasks.celery_app beat --loglevel=info +``` + +### 3.2 Docker 전체 스택 + +```bash +docker-compose up -d +``` + +서비스 확인: +- API: http://localhost:8000/docs +- Flower: http://localhost:5555 + +--- + +## 4. 프로덕션 배포 + +### 4.1 Docker Compose + +```bash +docker-compose -f docker-compose.yml -f docker-compose.prod.yml up -d +``` + +프로덕션 차이점: +- Uvicorn 워커 4개 (`--workers 4`) +- Celery 동시성 4, Worker 레플리카 2 +- Flower 기본 인증 활성화 +- 소스 코드 볼륨 마운트 없음 (이미지 내장) +- 로그 레벨 `warning` + +### 4.2 환경변수 (프로덕션 추가) + +```bash +APP_ENV=production +DEBUG=false +LOG_LEVEL=WARNING + +# Flower 인증 +FLOWER_USER=admin +FLOWER_PASSWORD=<강력한 비밀번호> + +# CORS (실제 도메인) +CORS_ORIGINS=["https://your-domain.com"] +``` + +### 4.3 리버스 프록시 (Nginx 예시) + +```nginx +upstream core_api { + server 127.0.0.1:8000; +} + +server { + listen 80; + server_name api.your-domain.com; + + location / { + proxy_pass http://core_api; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + location /socket.io/ { + proxy_pass http://core_api; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + } +} +``` + +--- + +## 5. 
서비스 구성 + +``` +┌────────────────────────────────────────────────────┐ +│ Docker Compose │ +│ │ +│ ┌─────────┐ ┌─────────┐ ┌────────┐ ┌───────┐ │ +│ │ App │ │ Worker │ │ Beat │ │Flower │ │ +│ │ :8000 │ │ (x2) │ │ │ │ :5555 │ │ +│ └────┬────┘ └────┬────┘ └───┬────┘ └───┬───┘ │ +│ │ │ │ │ │ +│ ┌────┴────┐ ┌────┴──────────┴────────────┘ │ +│ │ Redis │ │ │ +│ │ :6379 │ │ │ +│ └─────────┘ │ │ +│ ┌────────────┘ │ +│ │ Mosquitto │ │ +│ │ :1883 │ │ +│ └───────────┘ │ +└────────────────────────────────────────────────────┘ + │ │ + ┌────┴────┐ ┌───┴────┐ + │ MariaDB │ │MongoDB │ + │ (호스트) │ │(호스트) │ + └─────────┘ └────────┘ +``` + +--- + +## 6. 헬스체크 + +```bash +# 기본 헬스체크 +curl http://localhost:8000/api/v1/system/health +# → {"status": "ok", "service": "core-api", "version": "0.1.0"} + +# 상세 시스템 상태 (인증 필요) +curl -H "Authorization: Bearer " \ + http://localhost:8000/api/v1/monitoring/health + +# Docker 서비스 상태 +docker-compose ps + +# Celery 워커 상태 +docker-compose logs worker + +# Celery 활성 태스크 +docker-compose exec worker celery -A app.tasks.celery_app inspect active +``` + +--- + +## 7. 로그 + +### 개발 환경 +- structlog 콘솔 렌더러 (사람이 읽기 좋은 형식) +- 로그 레벨: DEBUG + +### 프로덕션 환경 +- structlog JSON 렌더러 (ELK/Grafana 연동) +- 로그 레벨: WARNING + +```bash +# 실시간 로그 확인 +docker-compose logs -f app +docker-compose logs -f worker +``` + +--- + +## 8. 백업 + +### MariaDB +```bash +mysqldump -u root -p core_api > backup_$(date +%Y%m%d).sql +``` + +### MongoDB +```bash +mongodump --db core_api --out backup_$(date +%Y%m%d) +``` + +--- + +## 9. 
트러블슈팅 + +| 증상 | 원인 | 해결 | +|------|------|------| +| `Cannot connect to MySQL` | MariaDB 미실행 또는 접속 정보 오류 | `.env` 확인, MariaDB 상태 확인 | +| `MongoDB not initialized` | MongoDB 미실행 | MongoDB 서비스 시작 | +| `Redis connection refused` | Redis 미실행 | `docker-compose up -d redis` | +| Celery 태스크 실행 안 됨 | Worker 미실행 또는 큐 불일치 | Worker 로그 확인, 큐 이름 확인 | +| Socket.IO 연결 실패 | CORS 설정 누락 | `CORS_ORIGINS`에 클라이언트 URL 추가 | +| MQTT 연결 실패 | Mosquitto 미실행 | `docker-compose up -d mosquitto` | diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..054e598 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,93 @@ +[project] +name = "core-api" +version = "0.1.0" +description = "FastAPI-based integrated backend for IoT device management" +requires-python = ">=3.11" +dependencies = [ + # Web framework + "fastapi>=0.115.0", + "uvicorn[standard]>=0.30.0", + + # MariaDB ORM + "sqlmodel>=0.0.22", + "sqlalchemy[asyncio]>=2.0.35", + "aiomysql>=0.2.0", + + # Migration + "alembic>=1.14.0", + "pymysql>=1.1.0", + + # MongoDB ODM + "beanie>=1.27.0", + "motor>=3.6.0", + + # Redis + "redis[hiredis]>=5.2.0", + + # Auth + "python-jose[cryptography]>=3.3.0", + "passlib[bcrypt]>=1.7.4", + "httpx>=0.27.0", + + # MQTT + "fastapi-mqtt>=2.1.0", + + # WebSocket (Socket.IO) + "python-socketio>=5.11.0", + + # Background tasks + "celery[redis]>=5.4.0", + "flower>=2.0.0", + + # Admin + "sqladmin>=0.19.0", + + # Data processing + "pandas>=2.2.0", + "numpy>=2.1.0", + "polars>=1.12.0", + + # Logging + "structlog>=24.4.0", + + # Settings + "pydantic-settings>=2.6.0", + + # Email + "aiosmtplib>=3.0.0", + + # Utils + "python-multipart>=0.0.12", +] + +[project.optional-dependencies] +dev = [ + "pytest>=8.3.0", + "pytest-asyncio>=0.24.0", + "pytest-cov>=6.0.0", + "factory-boy>=3.3.0", + "httpx>=0.27.0", + "ruff>=0.7.0", + "mypy>=1.13.0", + "pre-commit>=4.0.0", +] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.ruff] +target-version = "py311" +line-length = 100 + 
+[tool.ruff.lint] +select = ["E", "F", "I", "N", "W", "UP"] + +[tool.mypy] +python_version = "3.11" +strict = true +plugins = ["pydantic.mypy"] + +[tool.pytest.ini_options] +asyncio_mode = "auto" +testpaths = ["tests"] diff --git a/scripts/create_superuser.py b/scripts/create_superuser.py new file mode 100644 index 0000000..7ef93a8 --- /dev/null +++ b/scripts/create_superuser.py @@ -0,0 +1,45 @@ +"""Create a superuser account.""" +from __future__ import annotations + +import asyncio +import sys + +from app.core.constants import Role +from app.core.security import hash_password +from app.db.mariadb import AsyncSessionLocal, async_engine +from app.models.mariadb.user import User, UserProfile + + +async def create_superuser(email: str, password: str, full_name: str = "Super Admin") -> None: + async with AsyncSessionLocal() as session: + user = User( + email=email, + hashed_password=hash_password(password), + role=Role.SUPERADMIN, + is_active=True, + is_verified=True, + ) + session.add(user) + await session.flush() + + profile = UserProfile( + user_id=user.id, + full_name=full_name, + ) + session.add(profile) + await session.commit() + + await async_engine.dispose() + print(f"Superuser '{email}' created successfully!") + + +if __name__ == "__main__": + if len(sys.argv) < 3: + print("Usage: python -m scripts.create_superuser [full_name]") + sys.exit(1) + + email = sys.argv[1] + password = sys.argv[2] + name = sys.argv[3] if len(sys.argv) > 3 else "Super Admin" + + asyncio.run(create_superuser(email, password, name)) diff --git a/scripts/init_db.py b/scripts/init_db.py new file mode 100644 index 0000000..cd9d6cc --- /dev/null +++ b/scripts/init_db.py @@ -0,0 +1,44 @@ +"""Initialize database with seed data.""" +from __future__ import annotations + +import asyncio + +from sqlmodel import SQLModel + +from app.core.config import settings +from app.core.constants import Role +from app.core.security import hash_password +from app.db.mariadb import async_engine, 
AsyncSessionLocal +from app.models.mariadb.auth import OAuthAccount, RefreshToken # noqa: F401 +from app.models.mariadb.device import Device, DeviceGroup # noqa: F401 +from app.models.mariadb.monitoring import Alert, AlertRule # noqa: F401 +from app.models.mariadb.system import AuditLog, SystemConfig # noqa: F401 +from app.models.mariadb.user import User, UserProfile + + +async def init() -> None: + # Create tables + async with async_engine.begin() as conn: + await conn.run_sync(SQLModel.metadata.create_all) + + # Seed default data + async with AsyncSessionLocal() as session: + # Create default device group + group = DeviceGroup(name="default", description="Default device group") + session.add(group) + + # Create system configs + configs = [ + SystemConfig(key="maintenance_mode", value="false", description="Enable maintenance mode"), + SystemConfig(key="max_devices_per_user", value="50", description="Max devices per user"), + ] + for c in configs: + session.add(c) + + await session.commit() + + print("Database initialized successfully!") + + +if __name__ == "__main__": + asyncio.run(init()) diff --git a/scripts/run_beat.sh b/scripts/run_beat.sh new file mode 100644 index 0000000..e3b8349 --- /dev/null +++ b/scripts/run_beat.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -e + +echo "Starting Celery beat scheduler..." +celery -A app.tasks.celery_app beat --loglevel=info diff --git a/scripts/run_dev.sh b/scripts/run_dev.sh new file mode 100644 index 0000000..ba87d8b --- /dev/null +++ b/scripts/run_dev.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -e + +echo "Starting development server..." +uvicorn app.asgi:app --host 0.0.0.0 --port 8000 --reload diff --git a/scripts/run_worker.sh b/scripts/run_worker.sh new file mode 100644 index 0000000..fc3ed55 --- /dev/null +++ b/scripts/run_worker.sh @@ -0,0 +1,8 @@ +#!/bin/bash +set -e + +echo "Starting Celery worker..." 
+celery -A app.tasks.celery_app worker \ + --loglevel=info \ + --concurrency=4 \ + -Q default,analytics,notifications,devices diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..b1c7c1e --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,72 @@ +from __future__ import annotations + +from collections.abc import AsyncGenerator + +import pytest +import pytest_asyncio +from httpx import ASGITransport, AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker +from sqlmodel import SQLModel + +from app.api.deps import get_session +from app.core.config import settings +from app.core.constants import Role +from app.core.security import create_access_token, hash_password +from app.main import create_app + +# Use a test database +TEST_MARIADB_DSN = settings.MARIADB_DSN.replace( + settings.MARIADB_DATABASE, f"{settings.MARIADB_DATABASE}_test" +) + +test_engine = create_async_engine(TEST_MARIADB_DSN, echo=False) +TestSessionLocal = sessionmaker(bind=test_engine, class_=AsyncSession, expire_on_commit=False) + + +@pytest_asyncio.fixture +async def db_session() -> AsyncGenerator[AsyncSession, None]: + async with test_engine.begin() as conn: + await conn.run_sync(SQLModel.metadata.create_all) + + async with TestSessionLocal() as session: + yield session + + async with test_engine.begin() as conn: + await conn.run_sync(SQLModel.metadata.drop_all) + + await test_engine.dispose() + + +@pytest_asyncio.fixture +async def client(db_session: AsyncSession) -> AsyncGenerator[AsyncClient, None]: + app = create_app() + + async def override_get_session() -> AsyncGenerator[AsyncSession, None]: + yield db_session + + app.dependency_overrides[get_session] = override_get_session + + transport = ASGITransport(app=app) + async with AsyncClient(transport=transport, base_url="http://test") as ac: + yield ac + + 
+@pytest.fixture +def admin_token() -> str: + return create_access_token(subject=1, role=Role.SUPERADMIN) + + +@pytest.fixture +def user_token() -> str: + return create_access_token(subject=2, role=Role.USER) + + +@pytest.fixture +def auth_headers(admin_token: str) -> dict[str, str]: + return {"Authorization": f"Bearer {admin_token}"} + + +@pytest.fixture +def user_headers(user_token: str) -> dict[str, str]: + return {"Authorization": f"Bearer {user_token}"} diff --git a/tests/e2e/__init__.py b/tests/e2e/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/e2e/test_device_flow.py b/tests/e2e/test_device_flow.py new file mode 100644 index 0000000..939ba46 --- /dev/null +++ b/tests/e2e/test_device_flow.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +import pytest +from httpx import AsyncClient + + +@pytest.mark.asyncio +async def test_device_crud_flow(client: AsyncClient, auth_headers: dict) -> None: + """Test full device CRUD lifecycle.""" + # Create + response = await client.post( + "/api/v1/devices", + json={"device_uid": "test-device-001", "name": "Test Sensor", "device_type": "temperature"}, + headers=auth_headers, + ) + assert response.status_code == 201 + device = response.json() + device_id = device["id"] + assert device["device_uid"] == "test-device-001" + + # Read + response = await client.get(f"/api/v1/devices/{device_id}", headers=auth_headers) + assert response.status_code == 200 + assert response.json()["name"] == "Test Sensor" + + # Update + response = await client.patch( + f"/api/v1/devices/{device_id}", + json={"name": "Updated Sensor"}, + headers=auth_headers, + ) + assert response.status_code == 200 + assert response.json()["name"] == "Updated Sensor" + + # List + response = await client.get("/api/v1/devices", headers=auth_headers) + assert response.status_code == 200 + assert response.json()["total"] >= 1 + + # Delete + response = await client.delete(f"/api/v1/devices/{device_id}", headers=auth_headers) + assert 
response.status_code == 204 diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/integration/test_auth.py b/tests/integration/test_auth.py new file mode 100644 index 0000000..d6ccf07 --- /dev/null +++ b/tests/integration/test_auth.py @@ -0,0 +1,68 @@ +from __future__ import annotations + +import pytest +from httpx import AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession + +from app.core.security import hash_password +from app.models.mariadb.user import User, UserProfile + + +@pytest.mark.asyncio +async def test_register(client: AsyncClient) -> None: + response = await client.post( + "/api/v1/auth/register", + json={"email": "test@example.com", "password": "password123", "full_name": "Test User"}, + ) + assert response.status_code == 201 + data = response.json() + assert "access_token" in data + assert "refresh_token" in data + assert data["token_type"] == "bearer" + + +@pytest.mark.asyncio +async def test_register_duplicate_email(client: AsyncClient) -> None: + await client.post( + "/api/v1/auth/register", + json={"email": "dup@example.com", "password": "pass123"}, + ) + response = await client.post( + "/api/v1/auth/register", + json={"email": "dup@example.com", "password": "pass456"}, + ) + assert response.status_code == 409 + + +@pytest.mark.asyncio +async def test_login(client: AsyncClient, db_session: AsyncSession) -> None: + user = User(email="login@example.com", hashed_password=hash_password("pass123")) + db_session.add(user) + await db_session.flush() + profile = UserProfile(user_id=user.id, full_name="Login User") + db_session.add(profile) + await db_session.commit() + + response = await client.post( + "/api/v1/auth/login", + json={"email": "login@example.com", "password": "pass123"}, + ) + assert response.status_code == 200 + data = response.json() + assert "access_token" in data + + +@pytest.mark.asyncio +async def test_login_wrong_password(client: AsyncClient, 
db_session: AsyncSession) -> None: + user = User(email="wrong@example.com", hashed_password=hash_password("correct")) + db_session.add(user) + await db_session.flush() + profile = UserProfile(user_id=user.id) + db_session.add(profile) + await db_session.commit() + + response = await client.post( + "/api/v1/auth/login", + json={"email": "wrong@example.com", "password": "incorrect"}, + ) + assert response.status_code == 401 diff --git a/tests/integration/test_health.py b/tests/integration/test_health.py new file mode 100644 index 0000000..003e54f --- /dev/null +++ b/tests/integration/test_health.py @@ -0,0 +1,13 @@ +from __future__ import annotations + +import pytest +from httpx import AsyncClient + + +@pytest.mark.asyncio +async def test_health_check(client: AsyncClient) -> None: + response = await client.get("/api/v1/system/health") + assert response.status_code == 200 + data = response.json() + assert data["status"] == "ok" + assert data["service"] == "core-api" diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/test_permissions.py b/tests/unit/test_permissions.py new file mode 100644 index 0000000..854eac3 --- /dev/null +++ b/tests/unit/test_permissions.py @@ -0,0 +1,25 @@ +from __future__ import annotations + +from app.core.constants import Role +from app.core.permissions import can_manage_user, is_admin, is_management + + +def test_is_admin() -> None: + assert is_admin(Role.SUPERADMIN) + assert is_admin(Role.ADMIN) + assert not is_admin(Role.MANAGER) + assert not is_admin(Role.USER) + + +def test_is_management() -> None: + assert is_management(Role.SUPERADMIN) + assert is_management(Role.ADMIN) + assert is_management(Role.MANAGER) + assert not is_management(Role.USER) + + +def test_can_manage_user() -> None: + assert can_manage_user(Role.SUPERADMIN, Role.ADMIN) + assert can_manage_user(Role.ADMIN, Role.USER) + assert not can_manage_user(Role.USER, Role.ADMIN) + assert not 
can_manage_user(Role.ADMIN, Role.ADMIN) diff --git a/tests/unit/test_security.py b/tests/unit/test_security.py new file mode 100644 index 0000000..d198e14 --- /dev/null +++ b/tests/unit/test_security.py @@ -0,0 +1,40 @@ +from __future__ import annotations + +from app.core.constants import TokenType +from app.core.security import ( + create_access_token, + create_refresh_token, + decode_token, + hash_password, + verify_password, +) + + +def test_password_hash_and_verify() -> None: + password = "securepassword123" + hashed = hash_password(password) + assert hashed != password + assert verify_password(password, hashed) + assert not verify_password("wrongpassword", hashed) + + +def test_create_access_token() -> None: + token = create_access_token(subject=1, role="admin") + payload = decode_token(token) + assert payload is not None + assert payload["sub"] == "1" + assert payload["role"] == "admin" + assert payload["type"] == TokenType.ACCESS + + +def test_create_refresh_token() -> None: + token = create_refresh_token(subject=1) + payload = decode_token(token) + assert payload is not None + assert payload["sub"] == "1" + assert payload["type"] == TokenType.REFRESH + + +def test_decode_invalid_token() -> None: + result = decode_token("invalid.token.string") + assert result is None diff --git a/tests/unit/test_statistics.py b/tests/unit/test_statistics.py new file mode 100644 index 0000000..2f6967c --- /dev/null +++ b/tests/unit/test_statistics.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +from app.processing.utils.statistics import detect_anomalies, moving_average, percentile_stats + + +def test_moving_average() -> None: + values = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0] + result = moving_average(values, window=3) + assert len(result) == 5 + assert abs(result[0] - 2.0) < 0.001 + + +def test_moving_average_short_input() -> None: + values = [1.0, 2.0] + result = moving_average(values, window=5) + assert result == values + + +def test_detect_anomalies() -> None: + 
values = [10.0, 10.1, 9.9, 10.0, 50.0, 10.0, 9.8] + anomalies = detect_anomalies(values, threshold=2.0) + assert len(anomalies) >= 1 + assert any(a["value"] == 50.0 for a in anomalies) + + +def test_percentile_stats() -> None: + values = list(range(1, 101)) + stats = percentile_stats([float(v) for v in values]) + assert abs(stats["p50"] - 50.5) < 1.0 + assert stats["p99"] > stats["p95"] diff --git a/tests/unit/test_validators.py b/tests/unit/test_validators.py new file mode 100644 index 0000000..bcefd19 --- /dev/null +++ b/tests/unit/test_validators.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from app.utils.validators import is_valid_device_uid, is_valid_email + + +def test_valid_device_uid() -> None: + assert is_valid_device_uid("device-001") + assert is_valid_device_uid("SENSOR_ABC_123") + assert not is_valid_device_uid("ab") # too short + assert not is_valid_device_uid("device uid") # space + assert not is_valid_device_uid("") + + +def test_valid_email() -> None: + assert is_valid_email("user@example.com") + assert is_valid_email("test.user+tag@domain.co.kr") + assert not is_valid_email("not-an-email") + assert not is_valid_email("@domain.com")