초기 커밋

This commit is contained in:
2026-03-01 07:44:19 +09:00
commit 09359f30be
146 changed files with 6120 additions and 0 deletions

0
app/__init__.py Normal file
View File

0
app/admin/__init__.py Normal file
View File

56
app/admin/setup.py Normal file
View File

@@ -0,0 +1,56 @@
from __future__ import annotations
from fastapi import FastAPI
from sqladmin import Admin
from sqladmin.authentication import AuthenticationBackend
from starlette.requests import Request
from app.core.config import settings
from app.core.constants import Role
from app.core.security import decode_token
from app.db.mariadb import async_engine
class AdminAuth(AuthenticationBackend):
    """Session-backed authentication for the sqladmin UI.

    A JWT is submitted through the login form, stored in the server-side
    session, and re-validated on every admin request.
    """

    @staticmethod
    def _has_admin_access(token: str) -> bool:
        # A token is acceptable only if it decodes AND carries an admin role.
        claims = decode_token(token)
        return claims is not None and claims.get("role") in Role.ADMIN_ROLES

    async def login(self, request: Request) -> bool:
        """Accept the form-submitted token when it belongs to an admin."""
        form = await request.form()
        submitted = str(form.get("token", ""))
        if not self._has_admin_access(submitted):
            return False
        request.session["token"] = submitted
        return True

    async def logout(self, request: Request) -> bool:
        """Drop all session state on logout."""
        request.session.clear()
        return True

    async def authenticate(self, request: Request) -> bool:
        """Re-validate the session token on each request."""
        stored = request.session.get("token")
        if not stored:
            return False
        return self._has_admin_access(stored)
def setup_admin(app: FastAPI) -> Admin:
    """Mount the sqladmin interface on *app* and register every model view."""
    admin = Admin(
        app,
        engine=async_engine,
        authentication_backend=AdminAuth(secret_key=settings.SECRET_KEY),
        title=f"{settings.APP_NAME} Admin",
    )
    # Imported inside the function to avoid circular imports at module load.
    from app.admin.views.device_admin import DeviceAdmin, DeviceGroupAdmin
    from app.admin.views.system_admin import AuditLogAdmin, SystemConfigAdmin
    from app.admin.views.user_admin import UserAdmin, UserProfileAdmin

    # Registration order controls sidebar order in the admin UI.
    for view in (
        UserAdmin,
        UserProfileAdmin,
        DeviceAdmin,
        DeviceGroupAdmin,
        SystemConfigAdmin,
        AuditLogAdmin,
    ):
        admin.add_view(view)
    return admin

View File

View File

@@ -0,0 +1,32 @@
from __future__ import annotations
from sqladmin import ModelView
from app.models.mariadb.device import Device, DeviceGroup
class DeviceAdmin(ModelView, model=Device):
    """sqladmin list/detail configuration for Device rows."""

    column_list = [
        Device.id, Device.device_uid, Device.name, Device.device_type,
        Device.status, Device.last_seen_at, Device.created_at,
    ]
    column_searchable_list = [Device.device_uid, Device.name]
    column_sortable_list = [Device.id, Device.name, Device.status, Device.created_at]
    # ("id", True) = sort by id descending, i.e. newest devices first.
    column_default_sort = ("id", True)
    can_create = True
    can_edit = True
    # NOTE(review): deletion disabled — presumably to keep historical
    # telemetry/log references intact; confirm intent.
    can_delete = False
    name = "Device"
    name_plural = "Devices"
    icon = "fa-solid fa-microchip"


class DeviceGroupAdmin(ModelView, model=DeviceGroup):
    """sqladmin configuration for DeviceGroup rows (full CRUD enabled)."""

    column_list = [DeviceGroup.id, DeviceGroup.name, DeviceGroup.description]
    column_searchable_list = [DeviceGroup.name]
    can_create = True
    can_edit = True
    can_delete = True
    name = "Device Group"
    name_plural = "Device Groups"
    icon = "fa-solid fa-layer-group"

View File

@@ -0,0 +1,31 @@
from __future__ import annotations
from sqladmin import ModelView
from app.models.mariadb.system import AuditLog, SystemConfig
class SystemConfigAdmin(ModelView, model=SystemConfig):
    """sqladmin configuration for key/value system settings (full CRUD)."""

    # NOTE(review): `value` is listed even when `is_secret` is set — secret
    # values are shown in plain text in the admin list; confirm acceptable.
    column_list = [SystemConfig.id, SystemConfig.key, SystemConfig.value, SystemConfig.is_secret]
    column_searchable_list = [SystemConfig.key]
    can_create = True
    can_edit = True
    can_delete = True
    name = "System Config"
    name_plural = "System Configs"
    icon = "fa-solid fa-gear"


class AuditLogAdmin(ModelView, model=AuditLog):
    """Read-only sqladmin view over the audit trail."""

    column_list = [
        AuditLog.id, AuditLog.user_id, AuditLog.action,
        AuditLog.resource_type, AuditLog.resource_id, AuditLog.created_at,
    ]
    column_sortable_list = [AuditLog.id, AuditLog.created_at]
    # ("id", True) = descending: most recent entries first.
    column_default_sort = ("id", True)
    # Audit entries are immutable: no create/edit/delete from the UI.
    can_create = False
    can_edit = False
    can_delete = False
    name = "Audit Log"
    name_plural = "Audit Logs"
    icon = "fa-solid fa-clipboard-list"

View File

@@ -0,0 +1,28 @@
from __future__ import annotations
from sqladmin import ModelView
from app.models.mariadb.user import User, UserProfile
class UserAdmin(ModelView, model=User):
    """sqladmin configuration for User accounts."""

    column_list = [User.id, User.email, User.role, User.is_active, User.is_verified, User.created_at]
    column_searchable_list = [User.email]
    column_sortable_list = [User.id, User.email, User.created_at]
    # ("id", True) = descending: newest accounts first.
    column_default_sort = ("id", True)
    can_create = True
    can_edit = True
    # Deletion disabled; deactivate via is_active instead of hard delete.
    can_delete = False
    name = "User"
    name_plural = "Users"
    icon = "fa-solid fa-user"


class UserProfileAdmin(ModelView, model=UserProfile):
    """sqladmin configuration for per-user profile data.

    Profiles are created by the application, not the admin (can_create=False);
    editing is left at the sqladmin default (enabled).
    """

    column_list = [UserProfile.id, UserProfile.user_id, UserProfile.full_name, UserProfile.organization]
    column_searchable_list = [UserProfile.full_name]
    can_create = False
    can_delete = False
    name = "User Profile"
    name_plural = "User Profiles"
    icon = "fa-solid fa-address-card"

0
app/api/__init__.py Normal file
View File

12
app/api/deps.py Normal file
View File

@@ -0,0 +1,12 @@
from __future__ import annotations
from collections.abc import AsyncGenerator
from sqlalchemy.ext.asyncio import AsyncSession
from app.db.mariadb import get_db
async def get_session() -> AsyncGenerator[AsyncSession, None]:
    """Yield a request-scoped AsyncSession, delegating lifecycle to get_db()."""
    db_generator = get_db()
    async for db_session in db_generator:
        yield db_session

0
app/api/v1/__init__.py Normal file
View File

View File

View File

@@ -0,0 +1,73 @@
from __future__ import annotations
from datetime import datetime
from fastapi import APIRouter, Depends, Query
from app.core.constants import Role
from app.core.dependencies import require_role
from app.schemas.analytics import (
AnalyticsResultRead,
ReportResponse,
TelemetryAggregateResponse,
)
from app.services.analytics_service import AnalyticsService
router = APIRouter(prefix="/analytics", tags=["analytics"])


# NOTE(review): unlike the other endpoint modules, AnalyticsService is
# constructed without a DB session — presumably it queries MongoDB directly;
# confirm against the service implementation.
@router.get("/telemetry/{device_id}", response_model=TelemetryAggregateResponse)
async def get_telemetry_aggregate(
    device_id: str,
    start: datetime = Query(...),
    end: datetime = Query(...),
    interval: str = Query("1h"),
    _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)),
) -> TelemetryAggregateResponse:
    """Aggregate telemetry for one device over [start, end] at *interval* buckets."""
    service = AnalyticsService()
    return await service.get_telemetry_aggregate(device_id, start, end, interval)


@router.post("/reports/{device_id}", response_model=ReportResponse)
async def generate_report(
    device_id: str,
    start: datetime = Query(...),
    end: datetime = Query(...),
    _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)),
) -> ReportResponse:
    """Generate an analytics report for a device over the given time window."""
    service = AnalyticsService()
    return await service.generate_report(device_id, start, end)


@router.get("/status/{device_id}")
async def device_status_analysis(
    device_id: str,
    start: datetime = Query(...),
    end: datetime = Query(...),
    _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)),
) -> dict:
    """Return status/uptime analysis for a device over the given window."""
    service = AnalyticsService()
    return await service.get_device_status_analysis(device_id, start, end)


@router.get("/trends/{device_id}")
async def trend_analysis(
    device_id: str,
    start: datetime = Query(...),
    end: datetime = Query(...),
    _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)),
) -> dict:
    """Return trend analysis for a device over the given window."""
    service = AnalyticsService()
    return await service.get_trend_analysis(device_id, start, end)


@router.get("/results", response_model=list[AnalyticsResultRead])
async def list_analytics_results(
    analysis_type: str = Query(...),
    device_id: str | None = Query(None),
    skip: int = Query(0, ge=0),
    limit: int = Query(20, ge=1, le=100),
    _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)),
) -> list[AnalyticsResultRead]:
    """List stored analytics results, filtered by type and optional device."""
    service = AnalyticsService()
    return await service.list_results(analysis_type, device_id, skip, limit)

View File

@@ -0,0 +1,53 @@
from __future__ import annotations
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.deps import get_session
from app.core.dependencies import get_current_user_id
from app.schemas.auth import (
LoginRequest,
RefreshTokenRequest,
RegisterRequest,
TokenResponse,
)
from app.services.auth_service import AuthService
router = APIRouter(prefix="/auth", tags=["auth"])


@router.post("/register", response_model=TokenResponse, status_code=201)
async def register(
    body: RegisterRequest,
    session: AsyncSession = Depends(get_session),
) -> TokenResponse:
    """Create a new account and immediately issue an access/refresh token pair."""
    service = AuthService(session)
    user = await service.register(body.email, body.password, body.full_name)
    # NOTE(review): reaches into the service's private API; consider exposing
    # a public issue_tokens() on AuthService instead.
    return await service._create_tokens(user)


@router.post("/login", response_model=TokenResponse)
async def login(
    body: LoginRequest,
    session: AsyncSession = Depends(get_session),
) -> TokenResponse:
    """Authenticate with email/password and return a token pair."""
    service = AuthService(session)
    return await service.login(body.email, body.password)


@router.post("/refresh", response_model=TokenResponse)
async def refresh_token(
    body: RefreshTokenRequest,
    session: AsyncSession = Depends(get_session),
) -> TokenResponse:
    """Exchange a valid refresh token for a fresh token pair."""
    service = AuthService(session)
    return await service.refresh(body.refresh_token)


@router.post("/logout", status_code=204)
async def logout(
    user_id: int = Depends(get_current_user_id),
    session: AsyncSession = Depends(get_session),
) -> None:
    """Invalidate the authenticated user's session/refresh state (204 on success)."""
    service = AuthService(session)
    await service.logout(user_id)

View File

@@ -0,0 +1,70 @@
from __future__ import annotations
from fastapi import APIRouter, Depends, Query
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.deps import get_session
from app.core.constants import Role
from app.core.dependencies import get_current_user_payload, require_role
from app.schemas.common import PaginatedResponse
from app.schemas.device import DeviceCreate, DeviceRead, DeviceUpdate
from app.services.device_service import DeviceService
router = APIRouter(prefix="/devices", tags=["devices"])


@router.get("", response_model=PaginatedResponse[DeviceRead])
async def list_devices(
    page: int = Query(1, ge=1),
    size: int = Query(20, ge=1, le=100),
    _: dict = Depends(get_current_user_payload),
    session: AsyncSession = Depends(get_session),
) -> PaginatedResponse[DeviceRead]:
    """Page through devices; any authenticated user may list."""
    service = DeviceService(session)
    skip = (page - 1) * size
    items = await service.list_devices(skip=skip, limit=size)
    total = await service.count_devices()
    # (total + size - 1) // size is ceiling division for the page count.
    return PaginatedResponse(
        items=items, total=total, page=page, size=size, pages=(total + size - 1) // size
    )


@router.get("/{device_id}", response_model=DeviceRead)
async def get_device(
    device_id: int,
    _: dict = Depends(get_current_user_payload),
    session: AsyncSession = Depends(get_session),
) -> DeviceRead:
    """Fetch one device by primary key; any authenticated user may read."""
    service = DeviceService(session)
    return await service.get_device(device_id)


@router.post("", response_model=DeviceRead, status_code=201)
async def create_device(
    body: DeviceCreate,
    _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN, Role.MANAGER)),
    session: AsyncSession = Depends(get_session),
) -> DeviceRead:
    """Register a new device (management roles only)."""
    service = DeviceService(session)
    return await service.create_device(body)


@router.patch("/{device_id}", response_model=DeviceRead)
async def update_device(
    device_id: int,
    body: DeviceUpdate,
    _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN, Role.MANAGER)),
    session: AsyncSession = Depends(get_session),
) -> DeviceRead:
    """Partially update a device (management roles only)."""
    service = DeviceService(session)
    return await service.update_device(device_id, body)


@router.delete("/{device_id}", status_code=204)
async def delete_device(
    device_id: int,
    _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)),
    session: AsyncSession = Depends(get_session),
) -> None:
    """Delete a device — restricted to admin roles, stricter than update."""
    service = DeviceService(session)
    await service.delete_device(device_id)

View File

@@ -0,0 +1,61 @@
from __future__ import annotations
from fastapi import APIRouter, Depends, Query
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.deps import get_session
from app.core.constants import Role
from app.core.dependencies import get_current_user_id, require_role
from app.schemas.monitoring import AlertRead, AlertRuleCreate, AlertRuleRead, SystemHealthResponse
from app.services.monitoring_service import MonitoringService
router = APIRouter(prefix="/monitoring", tags=["monitoring"])


@router.get("/health", response_model=SystemHealthResponse)
async def system_health(
    _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)),
    session: AsyncSession = Depends(get_session),
) -> SystemHealthResponse:
    """Return overall system health (management roles only)."""
    service = MonitoringService(session)
    return await service.get_system_health()


@router.get("/alerts", response_model=list[AlertRead])
async def list_alerts(
    skip: int = Query(0, ge=0),
    limit: int = Query(50, ge=1, le=200),
    _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)),
    session: AsyncSession = Depends(get_session),
) -> list[AlertRead]:
    """List currently active alerts (management roles only)."""
    service = MonitoringService(session)
    return await service.list_active_alerts(skip=skip, limit=limit)


# NOTE(review): acknowledge/create below require only authentication, not a
# management role — inconsistent with the read endpoints above; confirm intent.
@router.post("/alerts/{alert_id}/acknowledge", response_model=AlertRead)
async def acknowledge_alert(
    alert_id: int,
    user_id: int = Depends(get_current_user_id),
    session: AsyncSession = Depends(get_session),
) -> AlertRead:
    """Mark an alert as acknowledged by the current user."""
    service = MonitoringService(session)
    return await service.acknowledge_alert(alert_id, user_id)


@router.get("/alert-rules", response_model=list[AlertRuleRead])
async def list_alert_rules(
    _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)),
    session: AsyncSession = Depends(get_session),
) -> list[AlertRuleRead]:
    """List all configured alert rules (management roles only)."""
    service = MonitoringService(session)
    return await service.list_alert_rules()


@router.post("/alert-rules", response_model=AlertRuleRead, status_code=201)
async def create_alert_rule(
    body: AlertRuleCreate,
    user_id: int = Depends(get_current_user_id),
    session: AsyncSession = Depends(get_session),
) -> AlertRuleRead:
    """Create a new alert rule, recording the creating user."""
    service = MonitoringService(session)
    return await service.create_alert_rule(body, user_id)

View File

@@ -0,0 +1,32 @@
from __future__ import annotations
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.deps import get_session
from app.core.config import settings
from app.core.constants import Role
from app.core.dependencies import require_role
router = APIRouter(prefix="/system", tags=["system"])


@router.get("/health")
async def health_check() -> dict:
    """Unauthenticated liveness probe for load balancers / orchestration."""
    return {
        "status": "ok",
        "service": settings.APP_NAME,
        # NOTE(review): version is hard-coded here; consider sourcing it from
        # package metadata or settings so it cannot drift.
        "version": "0.1.0",
    }


@router.get("/info")
async def system_info(
    _: dict = Depends(require_role(Role.SUPERADMIN)),
) -> dict:
    """Expose deployment details — superadmin only, as it leaks config."""
    return {
        "app_name": settings.APP_NAME,
        "environment": settings.APP_ENV,
        "debug": settings.DEBUG,
        "api_prefix": settings.API_V1_PREFIX,
    }

View File

@@ -0,0 +1,91 @@
from __future__ import annotations
from fastapi import APIRouter, Depends, Query
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.deps import get_session
from app.core.constants import Role
from app.core.dependencies import get_current_user_id, require_role
from app.schemas.common import PaginatedResponse
from app.schemas.user import UserCreate, UserRead, UserUpdate
from app.services.user_service import UserService
router = APIRouter(prefix="/users", tags=["users"])


@router.get("/me", response_model=UserRead)
async def get_me(
    user_id: int = Depends(get_current_user_id),
    session: AsyncSession = Depends(get_session),
) -> UserRead:
    """Return the authenticated user's own record."""
    service = UserService(session)
    return await service.get_user(user_id)


@router.patch("/me", response_model=UserRead)
async def update_me(
    body: UserUpdate,
    user_id: int = Depends(get_current_user_id),
    session: AsyncSession = Depends(get_session),
) -> UserRead:
    """Let a user update their own profile fields.

    Privileged fields are cleared before the update so users cannot change
    their own role or active flag (assumes the service skips None fields —
    TODO confirm against UserService.update_user).
    """
    body.role = None
    body.is_active = None
    service = UserService(session)
    return await service.update_user(user_id, body)


@router.get("", response_model=PaginatedResponse[UserRead])
async def list_users(
    page: int = Query(1, ge=1),
    size: int = Query(20, ge=1, le=100),
    _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)),
    session: AsyncSession = Depends(get_session),
) -> PaginatedResponse[UserRead]:
    """Page through all users (admin roles only)."""
    service = UserService(session)
    skip = (page - 1) * size
    items = await service.list_users(skip=skip, limit=size)
    total = await service.count_users()
    # (total + size - 1) // size is ceiling division for the page count.
    return PaginatedResponse(
        items=items, total=total, page=page, size=size, pages=(total + size - 1) // size
    )


@router.get("/{user_id}", response_model=UserRead)
async def get_user(
    user_id: int,
    _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)),
    session: AsyncSession = Depends(get_session),
) -> UserRead:
    """Fetch any user by id (admin roles only)."""
    service = UserService(session)
    return await service.get_user(user_id)


@router.post("", response_model=UserRead, status_code=201)
async def create_user(
    body: UserCreate,
    _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)),
    session: AsyncSession = Depends(get_session),
) -> UserRead:
    """Create a user account (admin roles only)."""
    service = UserService(session)
    return await service.create_user(body)


@router.patch("/{user_id}", response_model=UserRead)
async def update_user(
    user_id: int,
    body: UserUpdate,
    _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)),
    session: AsyncSession = Depends(get_session),
) -> UserRead:
    """Update any user, including role/is_active (admin roles only)."""
    service = UserService(session)
    return await service.update_user(user_id, body)


@router.delete("/{user_id}", status_code=204)
async def delete_user(
    user_id: int,
    _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)),
    session: AsyncSession = Depends(get_session),
) -> None:
    """Delete a user account (admin roles only)."""
    service = UserService(session)
    await service.delete_user(user_id)

13
app/api/v1/router.py Normal file
View File

@@ -0,0 +1,13 @@
from __future__ import annotations
from fastapi import APIRouter
from app.api.v1.endpoints import analytics, auth, devices, monitoring, system, users
# Aggregate router for all /api/v1 endpoints; inclusion order is also the
# order sections appear in the generated OpenAPI docs.
v1_router = APIRouter()
v1_router.include_router(system.router)
v1_router.include_router(auth.router)
v1_router.include_router(users.router)
v1_router.include_router(devices.router)
v1_router.include_router(monitoring.router)
v1_router.include_router(analytics.router)

15
app/asgi.py Normal file
View File

@@ -0,0 +1,15 @@
import socketio
from app.communication.socketio.server import sio
from app.main import create_app
# Import namespace handlers to register them
import app.communication.socketio.events # noqa: F401
import app.communication.socketio.namespaces.device_ns # noqa: F401
import app.communication.socketio.namespaces.monitoring_ns # noqa: F401
import app.communication.socketio.namespaces.notification_ns # noqa: F401
fastapi_app = create_app()
# Socket.IO wraps FastAPI as the outermost ASGI app: Socket.IO/Engine.IO
# traffic is handled by `sio`, everything else falls through to FastAPI.
app = socketio.ASGIApp(sio, other_app=fastapi_app)

View File

View File

View File

@@ -0,0 +1,19 @@
from __future__ import annotations
import httpx
# Lazily-created, process-wide HTTP client shared by all outbound calls.
_client: httpx.AsyncClient | None = None


async def get_http_client() -> httpx.AsyncClient:
    """Return the shared AsyncClient, (re)creating it if absent or closed."""
    global _client
    needs_new = _client is None or _client.is_closed
    if needs_new:
        _client = httpx.AsyncClient(timeout=30.0)
    return _client


async def close_http_client() -> None:
    """Close the shared client if one is currently open (idempotent)."""
    global _client
    if _client and not _client.is_closed:
        await _client.aclose()
        _client = None

View File

@@ -0,0 +1,114 @@
from __future__ import annotations
from dataclasses import dataclass
from app.communication.external.http_client import get_http_client
from app.core.config import settings
@dataclass
class OAuthUserInfo:
    """Normalized identity returned by every OAuth provider helper."""

    provider: str           # provider key: "google" | "kakao" | "naver"
    provider_user_id: str   # provider's own user id, always stringified
    email: str              # "" when the provider did not supply an email
    name: str               # display name / nickname; "" when absent
async def get_google_user_info(code: str, redirect_uri: str) -> OAuthUserInfo:
    """Exchange a Google authorization code for the user's profile.

    Raises httpx.HTTPStatusError if either the token exchange or the
    userinfo request fails.
    """
    client = await get_http_client()
    # Step 1: exchange the authorization code for an access token.
    token_resp = await client.post(
        "https://oauth2.googleapis.com/token",
        data={
            "code": code,
            "client_id": settings.GOOGLE_CLIENT_ID,
            "client_secret": settings.GOOGLE_CLIENT_SECRET,
            "redirect_uri": redirect_uri,
            "grant_type": "authorization_code",
        },
    )
    token_resp.raise_for_status()
    access_token = token_resp.json()["access_token"]
    # Step 2: fetch the profile with the access token.
    user_resp = await client.get(
        "https://www.googleapis.com/oauth2/v2/userinfo",
        headers={"Authorization": f"Bearer {access_token}"},
    )
    user_resp.raise_for_status()
    data = user_resp.json()
    return OAuthUserInfo(
        provider="google",
        provider_user_id=data["id"],
        email=data["email"],
        name=data.get("name", ""),
    )
async def get_kakao_user_info(code: str, redirect_uri: str) -> OAuthUserInfo:
    """Exchange a Kakao authorization code for the user's profile.

    Raises httpx.HTTPStatusError on token-exchange or profile failure.
    """
    client = await get_http_client()
    token_resp = await client.post(
        "https://kauth.kakao.com/oauth/token",
        data={
            "grant_type": "authorization_code",
            "client_id": settings.KAKAO_CLIENT_ID,
            "client_secret": settings.KAKAO_CLIENT_SECRET,
            "redirect_uri": redirect_uri,
            "code": code,
        },
    )
    token_resp.raise_for_status()
    access_token = token_resp.json()["access_token"]
    user_resp = await client.get(
        "https://kapi.kakao.com/v2/user/me",
        headers={"Authorization": f"Bearer {access_token}"},
    )
    user_resp.raise_for_status()
    data = user_resp.json()
    # Kakao nests profile data under "kakao_account"; email/nickname are
    # optional (user consent) so they fall back to "".
    account = data.get("kakao_account", {})
    return OAuthUserInfo(
        provider="kakao",
        provider_user_id=str(data["id"]),  # Kakao ids are numeric
        email=account.get("email", ""),
        name=account.get("profile", {}).get("nickname", ""),
    )
async def get_naver_user_info(code: str, redirect_uri: str) -> OAuthUserInfo:
    """Exchange a Naver authorization code for the user's profile.

    Note: *redirect_uri* is accepted for signature parity with the other
    providers but is not sent in the token request.
    Raises httpx.HTTPStatusError on token-exchange or profile failure.
    """
    client = await get_http_client()
    token_resp = await client.post(
        "https://nid.naver.com/oauth2.0/token",
        data={
            "grant_type": "authorization_code",
            "client_id": settings.NAVER_CLIENT_ID,
            "client_secret": settings.NAVER_CLIENT_SECRET,
            "code": code,
        },
    )
    token_resp.raise_for_status()
    access_token = token_resp.json()["access_token"]
    user_resp = await client.get(
        "https://openapi.naver.com/v1/nid/me",
        headers={"Authorization": f"Bearer {access_token}"},
    )
    user_resp.raise_for_status()
    # Naver wraps the profile under a "response" key.
    data = user_resp.json()["response"]
    return OAuthUserInfo(
        provider="naver",
        provider_user_id=data["id"],
        email=data.get("email", ""),
        name=data.get("name", ""),
    )


# Dispatch table: provider key -> async fetcher with a uniform signature.
OAUTH_PROVIDERS = {
    "google": get_google_user_info,
    "kakao": get_kakao_user_info,
    "naver": get_naver_user_info,
}

View File

View File

@@ -0,0 +1,26 @@
from __future__ import annotations
from fastapi_mqtt import FastMQTT, MQTTConfig
from app.core.config import settings
from app.communication.mqtt.topics import SUBSCRIBE_TOPICS
# Broker connection settings sourced from application config.
mqtt_config = MQTTConfig(
    host=settings.MQTT_HOST,
    port=settings.MQTT_PORT,
    username=settings.MQTT_USERNAME or None,  # empty string means "no auth"
    password=settings.MQTT_PASSWORD or None,
    keepalive=60,
)

# Shared FastMQTT instance; handlers register on it via decorators.
mqtt = FastMQTT(config=mqtt_config)


async def mqtt_startup() -> None:
    """Connect to the broker, then subscribe to all device wildcard topics."""
    await mqtt.mqtt_startup()
    for topic in SUBSCRIBE_TOPICS:
        mqtt.client.subscribe(topic)


async def mqtt_shutdown() -> None:
    """Disconnect cleanly from the broker."""
    await mqtt.mqtt_shutdown()

View File

@@ -0,0 +1,86 @@
from __future__ import annotations
import json
import structlog
from app.communication.mqtt.client import mqtt
from app.models.mongodb.device_log import DeviceLog
from app.models.mongodb.telemetry import TelemetryData
logger = structlog.get_logger("mqtt")
def _extract_device_uid(topic: str) -> str:
parts = topic.split("/")
return parts[1] if len(parts) >= 3 else "unknown"
@mqtt.on_message()
async def on_message(client, topic: str, payload: bytes, qos: int, properties) -> None:  # type: ignore[no-untyped-def]
    """Decode an incoming MQTT message and dispatch to the matching handler."""
    device_uid = _extract_device_uid(topic)
    try:
        data = json.loads(payload.decode())
    except (json.JSONDecodeError, UnicodeDecodeError):
        # Malformed payloads are logged and dropped, never raised.
        logger.warning("invalid_mqtt_payload", topic=topic)
        return
    # Route by topic suffix; topics matching none of these are ignored.
    if "/telemetry" in topic:
        await _handle_telemetry(device_uid, data)
    elif "/status" in topic:
        await _handle_status(device_uid, data)
    elif "/log" in topic:
        await _handle_log(device_uid, data)
    elif "/response" in topic:
        await _handle_response(device_uid, data)
async def _handle_telemetry(device_uid: str, data: dict) -> None:
    """Persist a telemetry sample to MongoDB and broadcast it over Socket.IO."""
    telemetry = TelemetryData(device_id=device_uid, metrics=data)
    await telemetry.insert()
    # Broadcast via Socket.IO (imported lazily to avoid a circular import).
    from app.communication.socketio.server import sio
    await sio.emit(
        "telemetry",
        {"device_uid": device_uid, "data": data},
        namespace="/monitoring",
    )
    logger.debug("telemetry_saved", device_uid=device_uid)


async def _handle_status(device_uid: str, data: dict) -> None:
    """Record a device status change and notify /device namespace clients."""
    log = DeviceLog(device_id=device_uid, event_type="status_change", payload=data)
    await log.insert()
    from app.communication.socketio.server import sio
    await sio.emit(
        "device_status",
        {"device_uid": device_uid, "status": data},
        namespace="/device",
    )
    logger.debug("status_update", device_uid=device_uid)


async def _handle_log(device_uid: str, data: dict) -> None:
    """Persist a device-originated log entry (no realtime broadcast)."""
    log = DeviceLog(
        device_id=device_uid,
        event_type=data.get("event_type", "log"),
        payload=data,
    )
    await log.insert()
    logger.debug("device_log_saved", device_uid=device_uid)


async def _handle_response(device_uid: str, data: dict) -> None:
    """Forward a device command response to /device clients (not persisted)."""
    from app.communication.socketio.server import sio
    await sio.emit(
        "device_response",
        {"device_uid": device_uid, "data": data},
        namespace="/device",
    )
    logger.debug("device_response", device_uid=device_uid)

View File

@@ -0,0 +1,21 @@
from __future__ import annotations
import json
from app.communication.mqtt.client import mqtt
from app.communication.mqtt.topics import DEVICE_COMMAND, DEVICE_CONFIG, DEVICE_OTA
async def publish_command(device_uid: str, command: dict) -> None:
    """Publish a JSON command to the device's command topic (fire-and-forget)."""
    topic = DEVICE_COMMAND.format(device_uid=device_uid)
    mqtt.client.publish(topic, json.dumps(command))


async def publish_config(device_uid: str, config: dict) -> None:
    """Publish a JSON configuration update to the device's config topic."""
    topic = DEVICE_CONFIG.format(device_uid=device_uid)
    mqtt.client.publish(topic, json.dumps(config))


async def publish_ota(device_uid: str, ota_info: dict) -> None:
    """Publish OTA update metadata to the device's OTA topic."""
    topic = DEVICE_OTA.format(device_uid=device_uid)
    mqtt.client.publish(topic, json.dumps(ota_info))

View File

@@ -0,0 +1,25 @@
from __future__ import annotations
# ── Device → Server ──────────────────────────────────
# Topic templates: fill the {device_uid} placeholder with str.format().

# ── Device → Server ──────────────────────────────────
DEVICE_TELEMETRY = "devices/{device_uid}/telemetry"
DEVICE_STATUS = "devices/{device_uid}/status"
DEVICE_LOG = "devices/{device_uid}/log"
DEVICE_RESPONSE = "devices/{device_uid}/response"

# ── Server → Device ──────────────────────────────────
DEVICE_COMMAND = "devices/{device_uid}/command"
DEVICE_CONFIG = "devices/{device_uid}/config"
DEVICE_OTA = "devices/{device_uid}/ota"

# ── Wildcard subscriptions ───────────────────────────
SUB_ALL_TELEMETRY = "devices/+/telemetry"
SUB_ALL_STATUS = "devices/+/status"
SUB_ALL_LOG = "devices/+/log"
SUB_ALL_RESPONSE = "devices/+/response"

# Everything the server subscribes to at startup (see mqtt_startup).
SUBSCRIBE_TOPICS = [
    SUB_ALL_TELEMETRY,
    SUB_ALL_STATUS,
    SUB_ALL_LOG,
    SUB_ALL_RESPONSE,
]

View File

View File

@@ -0,0 +1,17 @@
from __future__ import annotations
import structlog
from app.communication.socketio.server import sio
logger = structlog.get_logger("socketio")


@sio.event
async def connect(sid: str, environ: dict) -> None:
    """Default-namespace connect hook: logging only, no auth performed here."""
    logger.info("client_connected", sid=sid)


@sio.event
async def disconnect(sid: str) -> None:
    """Default-namespace disconnect hook: logging only."""
    logger.info("client_disconnected", sid=sid)

View File

@@ -0,0 +1,28 @@
from __future__ import annotations
import structlog
from app.communication.socketio.server import sio
logger = structlog.get_logger("socketio.device")


@sio.on("connect", namespace="/device")
async def device_connect(sid: str, environ: dict) -> None:
    """Log a /device namespace connection."""
    logger.info("device_ns_connected", sid=sid)


@sio.on("disconnect", namespace="/device")
async def device_disconnect(sid: str) -> None:
    """Log a /device namespace disconnection."""
    logger.info("device_ns_disconnected", sid=sid)


@sio.on("send_command", namespace="/device")
async def send_command(sid: str, data: dict) -> None:
    """Relay a client-issued command to the device via MQTT.

    NOTE(review): no authorization check on sid here — any connected /device
    client can command any device_uid; confirm this is gated elsewhere.
    """
    device_uid = data.get("device_uid")
    command = data.get("command")
    if device_uid and command:
        # Imported lazily to avoid a circular import with the MQTT module.
        from app.communication.mqtt.publisher import publish_command
        await publish_command(device_uid, command)
        logger.info("command_sent", device_uid=device_uid)

View File

@@ -0,0 +1,32 @@
from __future__ import annotations
import structlog
from app.communication.socketio.server import sio
logger = structlog.get_logger("socketio.monitoring")


@sio.on("connect", namespace="/monitoring")
async def monitoring_connect(sid: str, environ: dict) -> None:
    """Log a /monitoring namespace connection."""
    logger.info("monitoring_connected", sid=sid)


@sio.on("disconnect", namespace="/monitoring")
async def monitoring_disconnect(sid: str) -> None:
    """Log a /monitoring namespace disconnection."""
    logger.info("monitoring_disconnected", sid=sid)


@sio.on("subscribe_device", namespace="/monitoring")
async def subscribe_device(sid: str, data: dict) -> None:
    """Join the per-device room so the client receives that device's events."""
    device_uid = data.get("device_uid")
    if device_uid:
        await sio.enter_room(sid, f"device:{device_uid}", namespace="/monitoring")
        logger.info("subscribed_device", sid=sid, device_uid=device_uid)


@sio.on("unsubscribe_device", namespace="/monitoring")
async def unsubscribe_device(sid: str, data: dict) -> None:
    """Leave the per-device room (no-op if the client never joined it)."""
    device_uid = data.get("device_uid")
    if device_uid:
        await sio.leave_room(sid, f"device:{device_uid}", namespace="/monitoring")

View File

@@ -0,0 +1,25 @@
from __future__ import annotations
import structlog
from app.communication.socketio.server import sio
logger = structlog.get_logger("socketio.notification")


@sio.on("connect", namespace="/notification")
async def notification_connect(sid: str, environ: dict) -> None:
    """Log a /notification namespace connection."""
    logger.info("notification_connected", sid=sid)


@sio.on("disconnect", namespace="/notification")
async def notification_disconnect(sid: str) -> None:
    """Log a /notification namespace disconnection."""
    logger.info("notification_disconnected", sid=sid)


@sio.on("join_user_room", namespace="/notification")
async def join_user_room(sid: str, data: dict) -> None:
    """Join the per-user room for targeted notifications.

    NOTE(review): user_id is taken from client-supplied data with no auth
    check — a client could join another user's room; confirm this is gated.
    """
    user_id = data.get("user_id")
    if user_id:
        await sio.enter_room(sid, f"user:{user_id}", namespace="/notification")
        logger.info("joined_user_room", sid=sid, user_id=user_id)

View File

@@ -0,0 +1,14 @@
from __future__ import annotations
import socketio
from app.core.config import settings
# Central Socket.IO server shared by all namespaces and the MQTT handlers.
sio = socketio.AsyncServer(
    async_mode="asgi",
    cors_allowed_origins=settings.CORS_ORIGINS,
    logger=settings.DEBUG,  # verbose Socket.IO logging only in debug mode
    engineio_logger=False,
)

# Standalone ASGI wrapper. NOTE(review): asgi.py builds its own ASGIApp
# around `sio` directly, so this wrapper appears unused — confirm.
sio_app = socketio.ASGIApp(sio)

0
app/core/__init__.py Normal file
View File

93
app/core/config.py Normal file
View File

@@ -0,0 +1,93 @@
from __future__ import annotations
from pydantic import field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
    """Application configuration loaded from the environment and .env.

    All defaults are development values. Every secret below (SECRET_KEY,
    JWT_SECRET_KEY, DB/SMTP/OAuth credentials) MUST be overridden in
    production.
    """

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        case_sensitive=False,
    )

    # ── Application ──────────────────────────────────
    APP_NAME: str = "core-api"
    APP_ENV: str = "development"
    DEBUG: bool = True
    # Used by sqladmin session signing; insecure placeholder default.
    SECRET_KEY: str = "change-me-to-a-random-secret-key"
    API_V1_PREFIX: str = "/api/v1"

    # ── MariaDB ──────────────────────────────────────
    MARIADB_HOST: str = "127.0.0.1"
    MARIADB_PORT: int = 3306
    MARIADB_USER: str = "root"
    MARIADB_PASSWORD: str = "changeme"
    MARIADB_DATABASE: str = "core_api"

    @property
    def MARIADB_DSN(self) -> str:
        """Async SQLAlchemy DSN (aiomysql driver)."""
        return (
            f"mysql+aiomysql://{self.MARIADB_USER}:{self.MARIADB_PASSWORD}"
            f"@{self.MARIADB_HOST}:{self.MARIADB_PORT}/{self.MARIADB_DATABASE}"
        )

    @property
    def MARIADB_DSN_SYNC(self) -> str:
        """Sync SQLAlchemy DSN (pymysql driver), e.g. for Alembic."""
        return (
            f"mysql+pymysql://{self.MARIADB_USER}:{self.MARIADB_PASSWORD}"
            f"@{self.MARIADB_HOST}:{self.MARIADB_PORT}/{self.MARIADB_DATABASE}"
        )

    # ── MongoDB ──────────────────────────────────────
    MONGODB_URL: str = "mongodb://127.0.0.1:27017"
    MONGODB_DATABASE: str = "core_api"

    # ── Redis ────────────────────────────────────────
    REDIS_URL: str = "redis://127.0.0.1:6379/0"

    # ── JWT ──────────────────────────────────────────
    # Separate from SECRET_KEY; used to sign access/refresh tokens.
    JWT_SECRET_KEY: str = "change-me-jwt-secret"
    JWT_ALGORITHM: str = "HS256"
    JWT_ACCESS_TOKEN_EXPIRE_MINUTES: int = 30
    JWT_REFRESH_TOKEN_EXPIRE_DAYS: int = 7

    # ── MQTT ─────────────────────────────────────────
    MQTT_HOST: str = "127.0.0.1"
    MQTT_PORT: int = 1883
    # Empty string means "no broker auth" (coerced to None in the MQTT client).
    MQTT_USERNAME: str = ""
    MQTT_PASSWORD: str = ""

    # ── Celery ───────────────────────────────────────
    CELERY_BROKER_URL: str = "redis://127.0.0.1:6379/1"
    CELERY_RESULT_BACKEND: str = "redis://127.0.0.1:6379/2"

    # ── CORS ─────────────────────────────────────────
    CORS_ORIGINS: list[str] = ["http://localhost:3000", "http://localhost:8080"]

    @field_validator("CORS_ORIGINS", mode="before")
    @classmethod
    def assemble_cors_origins(cls, v: str | list[str]) -> list[str]:
        """Accept either a real list or a comma-separated (optionally
        bracketed) env string like "[http://a, http://b]"."""
        if isinstance(v, str):
            return [i.strip() for i in v.strip("[]").split(",") if i.strip()]
        return v

    # ── OAuth ────────────────────────────────────────
    GOOGLE_CLIENT_ID: str = ""
    GOOGLE_CLIENT_SECRET: str = ""
    KAKAO_CLIENT_ID: str = ""
    KAKAO_CLIENT_SECRET: str = ""
    NAVER_CLIENT_ID: str = ""
    NAVER_CLIENT_SECRET: str = ""

    # ── SMTP ─────────────────────────────────────────
    SMTP_HOST: str = "smtp.gmail.com"
    SMTP_PORT: int = 587
    SMTP_USERNAME: str = ""
    SMTP_PASSWORD: str = ""

    # ── Logging ──────────────────────────────────────
    LOG_LEVEL: str = "DEBUG"


# Singleton imported throughout the application.
settings = Settings()

31
app/core/constants.py Normal file
View File

@@ -0,0 +1,31 @@
from __future__ import annotations
class Role:
    """User role identifiers plus convenience groupings for authorization."""

    SUPERADMIN = "superadmin"
    ADMIN = "admin"
    MANAGER = "manager"
    USER = "user"
    DEVICE = "device"

    # Groupings build on one another; list contents/order match the
    # individual constants above.
    ADMIN_ROLES = [SUPERADMIN, ADMIN]
    MANAGEMENT_ROLES = ADMIN_ROLES + [MANAGER]
    ALL = MANAGEMENT_ROLES + [USER, DEVICE]


class DeviceStatus:
    """Lifecycle states reported for a device."""

    ONLINE = "online"
    OFFLINE = "offline"
    ERROR = "error"
    MAINTENANCE = "maintenance"


class AlertSeverity:
    """Severity levels for monitoring alerts."""

    CRITICAL = "critical"
    WARNING = "warning"
    INFO = "info"


class TokenType:
    """JWT token-type discriminator stored in the 'type' claim."""

    ACCESS = "access"
    REFRESH = "refresh"

36
app/core/dependencies.py Normal file
View File

@@ -0,0 +1,36 @@
from __future__ import annotations
from fastapi import Depends
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from app.core.constants import TokenType
from app.core.exceptions import ForbiddenException, UnauthorizedException
from app.core.security import decode_token
# Shared HTTP Bearer credential extractor used by the dependencies below.
bearer_scheme = HTTPBearer()
async def get_current_user_payload(
    credentials: HTTPAuthorizationCredentials = Depends(bearer_scheme),
) -> dict:
    """Decode the bearer token and return its claims, rejecting bad tokens.

    Raises UnauthorizedException when the token is missing/invalid/expired
    or is not an access token.
    """
    decoded = decode_token(credentials.credentials)
    if decoded is None:
        raise UnauthorizedException("Invalid or expired token")
    if decoded.get("type") != TokenType.ACCESS:
        raise UnauthorizedException("Invalid token type")
    return decoded
async def get_current_user_id(
    payload: dict = Depends(get_current_user_payload),
) -> int:
    """Return the authenticated user's id from the token's ``sub`` claim."""
    subject = payload["sub"]
    return int(subject)
def require_role(*allowed_roles: str):
    """Build a dependency that rejects callers whose role is not allowed."""
    allowed = set(allowed_roles)

    async def _check(payload: dict = Depends(get_current_user_payload)) -> dict:
        if payload.get("role") not in allowed:
            raise ForbiddenException("Insufficient permissions")
        return payload

    return _check

View File

@@ -0,0 +1,22 @@
from __future__ import annotations
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from app.core.exceptions import AppException
def register_error_handlers(app: FastAPI) -> None:
    """Register JSON error handlers for domain and unexpected exceptions.

    AppException subclasses map to their own status code and detail;
    anything else becomes an opaque 500 so internals are not leaked,
    but the traceback is logged server-side for diagnosis.
    """
    import logging

    logger = logging.getLogger(__name__)

    @app.exception_handler(AppException)
    async def app_exception_handler(request: Request, exc: AppException) -> JSONResponse:
        return JSONResponse(
            status_code=exc.status_code,
            content={"detail": exc.detail},
        )

    @app.exception_handler(Exception)
    async def unhandled_exception_handler(request: Request, exc: Exception) -> JSONResponse:
        # Previously the exception was swallowed silently; log it so bugs
        # are visible in server logs while the client sees a generic 500.
        logger.error(
            "Unhandled exception on %s %s", request.method, request.url.path, exc_info=exc
        )
        return JSONResponse(
            status_code=500,
            content={"detail": "Internal server error"},
        )

32
app/core/exceptions.py Normal file
View File

@@ -0,0 +1,32 @@
from __future__ import annotations
class AppException(Exception):
    """Base class for domain errors carrying an HTTP status code and detail.

    Forwarding ``detail`` to ``Exception.__init__`` makes ``str(exc)``
    meaningful in logs and tracebacks instead of an empty string.
    """

    def __init__(self, status_code: int, detail: str):
        super().__init__(detail)
        self.status_code = status_code
        self.detail = detail
class NotFoundException(AppException):
    """404 — the requested resource does not exist."""

    def __init__(self, detail: str = "Resource not found"):
        super().__init__(404, detail)
class UnauthorizedException(AppException):
    """401 — the caller is not authenticated."""

    def __init__(self, detail: str = "Not authenticated"):
        super().__init__(401, detail)
class ForbiddenException(AppException):
    """403 — the caller is authenticated but lacks permission."""

    def __init__(self, detail: str = "Permission denied"):
        super().__init__(403, detail)
class ConflictException(AppException):
    """409 — the resource already exists or conflicts with current state."""

    def __init__(self, detail: str = "Resource already exists"):
        super().__init__(409, detail)
class ValidationException(AppException):
    """422 — the request payload failed validation."""

    def __init__(self, detail: str = "Validation error"):
        super().__init__(422, detail)

View File

@@ -0,0 +1,43 @@
from __future__ import annotations
import logging
import sys
import structlog
from app.core.config import settings
def setup_logging() -> None:
    """Configure structlog and stdlib logging for the whole process.

    Renders human-friendly console output when DEBUG is enabled and JSON
    lines otherwise; stdlib log records flow through the same pipeline.
    """
    # Fall back to INFO when LOG_LEVEL is not a valid level name.
    log_level = getattr(logging, settings.LOG_LEVEL.upper(), logging.INFO)
    structlog.configure(
        processors=[
            # Merge contextvars (e.g. request_id bound by middleware) into
            # every event before further processing.
            structlog.contextvars.merge_contextvars,
            structlog.stdlib.filter_by_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            # The final renderer must come last in the chain.
            structlog.dev.ConsoleRenderer()
            if settings.DEBUG
            else structlog.processors.JSONRenderer(),
        ],
        wrapper_class=structlog.stdlib.BoundLogger,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )
    # Route stdlib logging to stdout; structlog handles the formatting.
    logging.basicConfig(
        format="%(message)s",
        stream=sys.stdout,
        level=log_level,
    )
def get_logger(name: str) -> structlog.stdlib.BoundLogger:
    """Return a named structlog logger bound to the configured pipeline."""
    logger = structlog.get_logger(name)
    return logger

16
app/core/permissions.py Normal file
View File

@@ -0,0 +1,16 @@
from __future__ import annotations
from app.core.constants import Role
def is_admin(role: str) -> bool:
    """True when *role* grants admin backoffice access."""
    return any(role == admin_role for admin_role in Role.ADMIN_ROLES)
def is_management(role: str) -> bool:
    """True when *role* may manage resources (superadmin/admin/manager)."""
    return any(role == mgmt_role for mgmt_role in Role.MANAGEMENT_ROLES)
def can_manage_user(actor_role: str, target_role: str) -> bool:
    """True when the actor's role strictly outranks the target's role.

    Unknown role strings rank lowest (0), the same as DEVICE.
    """
    ranking = {
        Role.SUPERADMIN: 4,
        Role.ADMIN: 3,
        Role.MANAGER: 2,
        Role.USER: 1,
        Role.DEVICE: 0,
    }
    actor_rank = ranking.get(actor_role, 0)
    target_rank = ranking.get(target_role, 0)
    return actor_rank > target_rank

47
app/core/security.py Normal file
View File

@@ -0,0 +1,47 @@
from __future__ import annotations
from datetime import datetime, timedelta
from jose import JWTError, jwt
from passlib.context import CryptContext
from app.core.config import settings
from app.core.constants import TokenType
# Shared password-hashing context: bcrypt, with passlib marking older
# schemes as deprecated automatically.
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
def hash_password(password: str) -> str:
    """Return a bcrypt hash of *password* using the shared CryptContext."""
    hashed = pwd_context.hash(password)
    return hashed
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check *plain_password* against a stored bcrypt hash."""
    matches = pwd_context.verify(plain_password, hashed_password)
    return matches
def create_access_token(subject: int | str, role: str) -> str:
    """Create a signed JWT access token for *subject* with the given role.

    Uses a timezone-aware UTC expiry: ``datetime.utcnow()`` is deprecated
    since Python 3.12 and naive datetimes are easy to misinterpret.
    """
    from datetime import timezone

    expire = datetime.now(timezone.utc) + timedelta(
        minutes=settings.JWT_ACCESS_TOKEN_EXPIRE_MINUTES
    )
    payload = {
        "sub": str(subject),
        "role": role,
        "type": TokenType.ACCESS,
        "exp": expire,
    }
    return jwt.encode(payload, settings.JWT_SECRET_KEY, algorithm=settings.JWT_ALGORITHM)
def create_refresh_token(subject: int | str) -> str:
    """Create a signed JWT refresh token for *subject*.

    Uses a timezone-aware UTC expiry: ``datetime.utcnow()`` is deprecated
    since Python 3.12 and naive datetimes are easy to misinterpret.
    """
    from datetime import timezone

    expire = datetime.now(timezone.utc) + timedelta(
        days=settings.JWT_REFRESH_TOKEN_EXPIRE_DAYS
    )
    payload = {
        "sub": str(subject),
        "type": TokenType.REFRESH,
        "exp": expire,
    }
    return jwt.encode(payload, settings.JWT_SECRET_KEY, algorithm=settings.JWT_ALGORITHM)
def decode_token(token: str) -> dict | None:
    """Decode and verify a JWT; return its claims, or None when invalid/expired."""
    try:
        claims = jwt.decode(
            token, settings.JWT_SECRET_KEY, algorithms=[settings.JWT_ALGORITHM]
        )
    except JWTError:
        return None
    return claims

0
app/db/__init__.py Normal file
View File

22
app/db/base.py Normal file
View File

@@ -0,0 +1,22 @@
from __future__ import annotations
from datetime import datetime
from sqlalchemy import func
from sqlmodel import Field, SQLModel
class TimestampMixin(SQLModel):
    """Adds created_at/updated_at columns maintained on both sides.

    The Python-side default_factory covers objects created before flush;
    server_default/onupdate keep rows written outside the ORM consistent.
    """

    # NOTE(review): datetime.utcnow produces naive timestamps — confirm the
    # database columns are interpreted as UTC.
    created_at: datetime = Field(
        default_factory=datetime.utcnow,
        sa_column_kwargs={"server_default": func.now()},
    )
    updated_at: datetime = Field(
        default_factory=datetime.utcnow,
        sa_column_kwargs={"server_default": func.now(), "onupdate": func.now()},
    )
class SoftDeleteMixin(SQLModel):
    """Adds soft-delete flags; rows are marked deleted rather than removed."""

    is_deleted: bool = Field(default=False)
    deleted_at: datetime | None = Field(default=None)

42
app/db/mariadb.py Normal file
View File

@@ -0,0 +1,42 @@
from __future__ import annotations
from collections.abc import AsyncGenerator
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlmodel import SQLModel
from app.core.config import settings
# Async engine for MariaDB. pool_pre_ping revalidates stale connections
# before use; pool_size + max_overflow bound concurrency at 30 connections.
async_engine = create_async_engine(
    settings.MARIADB_DSN,
    echo=settings.DEBUG,
    pool_pre_ping=True,
    pool_size=10,
    max_overflow=20,
)
# Session factory producing AsyncSession objects. expire_on_commit=False
# keeps loaded attributes usable after commit without a re-fetch.
AsyncSessionLocal = sessionmaker(
    bind=async_engine,
    class_=AsyncSession,
    expire_on_commit=False,
)
async def init_mariadb() -> None:
    """Create all SQLModel tables that do not yet exist (no migrations)."""
    async with async_engine.begin() as conn:
        await conn.run_sync(SQLModel.metadata.create_all)
async def close_mariadb() -> None:
    """Dispose of the engine's connection pool on application shutdown."""
    await async_engine.dispose()
async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """Dependency yielding a session with commit-on-success semantics.

    The request handler runs at the ``yield``; if it returns normally the
    session is committed, otherwise it is rolled back and the exception
    re-raised. The ``async with`` block closes the session either way.
    """
    async with AsyncSessionLocal() as session:
        try:
            yield session
            await session.commit()
        except Exception:
            await session.rollback()
            raise

37
app/db/mongodb.py Normal file
View File

@@ -0,0 +1,37 @@
from __future__ import annotations
from beanie import init_beanie
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase
from app.core.config import settings
# Module-level singletons managed by init_mongodb()/close_mongodb().
mongo_client: AsyncIOMotorClient | None = None
mongo_db: AsyncIOMotorDatabase | None = None
async def init_mongodb() -> None:
    """Connect to MongoDB and initialize Beanie with all document models."""
    global mongo_client, mongo_db
    mongo_client = AsyncIOMotorClient(settings.MONGODB_URL)
    mongo_db = mongo_client[settings.MONGODB_DATABASE]
    # Imported here rather than at module level — presumably to avoid import
    # cycles at startup; confirm before hoisting.
    from app.models.mongodb.analytics_result import AnalyticsResult
    from app.models.mongodb.device_log import DeviceLog
    from app.models.mongodb.notification import Notification
    from app.models.mongodb.telemetry import TelemetryData
    await init_beanie(
        database=mongo_db,
        document_models=[DeviceLog, TelemetryData, AnalyticsResult, Notification],
    )
async def close_mongodb() -> None:
    """Close the MongoDB client and reset the module-level handles.

    Resetting mongo_client/mongo_db to None makes get_mongo_db() fail fast
    if it is called after shutdown, instead of returning a closed client.
    """
    global mongo_client, mongo_db
    if mongo_client:
        mongo_client.close()
    mongo_client = None
    mongo_db = None
def get_mongo_db() -> AsyncIOMotorDatabase:
    """Return the initialized database handle; asserts init_mongodb() ran."""
    database = mongo_db
    assert database is not None, "MongoDB not initialized"
    return database

27
app/db/redis.py Normal file
View File

@@ -0,0 +1,27 @@
from __future__ import annotations
from redis.asyncio import Redis, from_url
from app.core.config import settings
# Module-level singleton managed by init_redis()/close_redis().
redis_client: Redis | None = None
async def init_redis() -> None:
    """Create the shared Redis client, decoding responses as UTF-8 strings."""
    global redis_client
    redis_client = from_url(
        settings.REDIS_URL,
        encoding="utf-8",
        decode_responses=True,
    )
async def close_redis() -> None:
    """Close the Redis client and reset the module-level handle.

    Resetting redis_client to None makes get_redis() fail fast after
    shutdown instead of handing out a closed connection.
    """
    global redis_client
    if redis_client:
        await redis_client.close()
    redis_client = None
def get_redis() -> Redis:
    """Return the initialized Redis client; asserts init_redis() ran."""
    client = redis_client
    assert client is not None, "Redis not initialized"
    return client

67
app/main.py Normal file
View File

@@ -0,0 +1,67 @@
from __future__ import annotations
from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager
from fastapi import FastAPI
from app.core.config import settings
from app.core.logging_config import setup_logging
@asynccontextmanager
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
    """Application lifespan: initialize backends on startup, tear down on exit."""
    # ── Startup ──────────────────────────────────────
    setup_logging()
    # Imported lazily — presumably to avoid import cycles / heavy imports at
    # module load; confirm before hoisting to the top of the file.
    from app.db.mariadb import init_mariadb
    from app.db.mongodb import init_mongodb
    from app.db.redis import init_redis
    await init_mariadb()
    await init_mongodb()
    await init_redis()
    yield
    # ── Shutdown ─────────────────────────────────────
    from app.communication.external.http_client import close_http_client
    from app.db.mariadb import close_mariadb
    from app.db.mongodb import close_mongodb
    from app.db.redis import close_redis
    await close_http_client()
    await close_redis()
    await close_mongodb()
    await close_mariadb()
def create_app() -> FastAPI:
    """Build and configure the FastAPI application (app-factory pattern)."""
    app = FastAPI(
        title=settings.APP_NAME,
        version="0.1.0",
        docs_url="/docs",
        redoc_url="/redoc",
        lifespan=lifespan,
    )
    # ── Middleware (order matters: last added = first executed) ───
    from app.middleware.cors import add_cors_middleware
    from app.middleware.request_id import RequestIDMiddleware
    from app.middleware.request_logging import RequestLoggingMiddleware
    add_cors_middleware(app)
    app.add_middleware(RequestLoggingMiddleware)
    # Added last, so RequestIDMiddleware runs first and the request id is
    # already bound when the logging middleware executes.
    app.add_middleware(RequestIDMiddleware)
    # ── Error handlers ───────────────────────────────
    from app.core.error_handlers import register_error_handlers
    register_error_handlers(app)
    # ── Routers ──────────────────────────────────────
    from app.api.v1.router import v1_router
    app.include_router(v1_router, prefix=settings.API_V1_PREFIX)
    return app

View File

16
app/middleware/cors.py Normal file
View File

@@ -0,0 +1,16 @@
from __future__ import annotations
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from app.core.config import settings
def add_cors_middleware(app: FastAPI) -> None:
    """Attach the CORS middleware configured from application settings."""
    cors_config = {
        "allow_origins": settings.CORS_ORIGINS,
        "allow_credentials": True,
        "allow_methods": ["*"],
        "allow_headers": ["*"],
    }
    app.add_middleware(CORSMiddleware, **cors_config)

View File

@@ -0,0 +1,37 @@
from __future__ import annotations
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from starlette.requests import Request
from starlette.responses import JSONResponse, Response
from app.db.redis import get_redis
class RateLimitMiddleware(BaseHTTPMiddleware):
    """Fixed-window, per-client-IP rate limiting backed by Redis.

    Each client IP gets a counter that expires after ``window_seconds``;
    once it exceeds ``max_requests`` within the window, requests get 429.
    """

    def __init__(self, app, max_requests: int = 100, window_seconds: int = 60): # type: ignore[no-untyped-def]
        super().__init__(app)
        self.max_requests = max_requests
        self.window_seconds = window_seconds

    async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
        # Documentation endpoints are exempt from rate limiting.
        if request.url.path.startswith("/docs") or request.url.path.startswith("/redoc"):
            return await call_next(request)
        # NOTE(review): behind a reverse proxy, request.client.host is the
        # proxy's address and all clients share one bucket — confirm
        # X-Forwarded-For is handled upstream.
        client_ip = request.client.host if request.client else "unknown"
        key = f"rate_limit:{client_ip}"
        try:
            redis = get_redis()
            current = await redis.incr(key)
            if current == 1:
                # First hit of a new window: start its TTL.
                await redis.expire(key, self.window_seconds)
            if current > self.max_requests:
                return JSONResponse(
                    status_code=429,
                    content={"detail": "Too many requests"},
                )
        except Exception:
            # Fail open: if Redis is unavailable, let the request through
            # rather than taking the whole API down with it.
            pass
        return await call_next(request)

View File

@@ -0,0 +1,17 @@
from __future__ import annotations
import uuid
import structlog
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from starlette.requests import Request
from starlette.responses import Response
class RequestIDMiddleware(BaseHTTPMiddleware):
    """Propagate an X-Request-ID header and bind it to the log context."""

    async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
        header_value = request.headers.get("X-Request-ID")
        request_id = header_value if header_value is not None else str(uuid.uuid4())
        # Make the id available to every log line emitted for this request.
        structlog.contextvars.bind_contextvars(request_id=request_id)
        response = await call_next(request)
        response.headers["X-Request-ID"] = request_id
        return response

View File

@@ -0,0 +1,26 @@
from __future__ import annotations
import time
import structlog
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from starlette.requests import Request
from starlette.responses import Response
# Module-level logger dedicated to access logging.
logger = structlog.get_logger("request")
class RequestLoggingMiddleware(BaseHTTPMiddleware):
    """Log one structured line per request: method, path, status, timing."""

    async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
        started_at = time.perf_counter()
        response = await call_next(request)
        duration_ms = (time.perf_counter() - started_at) * 1000
        logger.info(
            "request",
            method=request.method,
            path=request.url.path,
            status=response.status_code,
            elapsed_ms=round(duration_ms, 2),
        )
        return response

0
app/models/__init__.py Normal file
View File

View File

View File

@@ -0,0 +1,39 @@
from __future__ import annotations
from datetime import datetime
from sqlmodel import Field, Relationship, SQLModel
from app.db.base import TimestampMixin
class RefreshToken(TimestampMixin, SQLModel, table=True):
    """Issued refresh tokens; revocation and expiry are checked on use."""

    __tablename__ = "refresh_tokens"
    id: int | None = Field(default=None, primary_key=True)
    user_id: int = Field(foreign_key="users.id", index=True)
    # The token string itself; unique so one token maps to one row.
    token: str = Field(max_length=500, unique=True, index=True)
    expires_at: datetime
    is_revoked: bool = Field(default=False)
    # Free-form client description — presumably user agent; confirm format.
    device_info: str = Field(default="", max_length=255)
    # Relationships
    user: User | None = Relationship(back_populates="refresh_tokens")
class OAuthAccount(TimestampMixin, SQLModel, table=True):
    """Link between a local user and an external OAuth provider identity."""

    __tablename__ = "oauth_accounts"
    id: int | None = Field(default=None, primary_key=True)
    user_id: int = Field(foreign_key="users.id", index=True)
    # Provider key — presumably "google" / "kakao" / "naver" given the
    # configured credentials; confirm against the OAuth service code.
    provider: str = Field(max_length=50)
    provider_user_id: str = Field(max_length=255)
    access_token: str = Field(default="", max_length=500)
    refresh_token: str = Field(default="", max_length=500)
    expires_at: datetime | None = Field(default=None)
# Imported at the bottom to avoid a circular import with the user module;
# model_rebuild resolves the forward reference to User above.
from app.models.mariadb.user import User # noqa: E402, F811
RefreshToken.model_rebuild()

View File

@@ -0,0 +1,36 @@
from __future__ import annotations
from datetime import datetime
from sqlmodel import Field, Relationship, SQLModel
from app.core.constants import DeviceStatus
from app.db.base import SoftDeleteMixin, TimestampMixin
class DeviceGroup(TimestampMixin, SQLModel, table=True):
    """Named grouping of devices (referenced by devices and alert rules)."""

    __tablename__ = "device_groups"
    id: int | None = Field(default=None, primary_key=True)
    name: str = Field(max_length=100, unique=True)
    description: str = Field(default="", max_length=500)
    devices: list[Device] = Relationship(back_populates="group")
class Device(TimestampMixin, SoftDeleteMixin, SQLModel, table=True):
    """Registered IoT device; soft-deleted rather than removed."""

    __tablename__ = "devices"
    id: int | None = Field(default=None, primary_key=True)
    # External device identifier, unique across the fleet.
    device_uid: str = Field(max_length=100, unique=True, index=True)
    name: str = Field(max_length=100)
    device_type: str = Field(default="", max_length=50)
    # One of the DeviceStatus constants.
    status: str = Field(default=DeviceStatus.OFFLINE, max_length=20)
    firmware_version: str = Field(default="", max_length=50)
    # 45 characters fits a full IPv6 address.
    ip_address: str = Field(default="", max_length=45)
    group_id: int | None = Field(default=None, foreign_key="device_groups.id")
    owner_id: int | None = Field(default=None, foreign_key="users.id")
    last_seen_at: datetime | None = Field(default=None)
    # Arbitrary extra attributes serialized as a JSON string.
    metadata_json: str = Field(default="{}", max_length=2000)
    group: DeviceGroup | None = Relationship(back_populates="devices")

View File

@@ -0,0 +1,36 @@
from __future__ import annotations
from datetime import datetime
from sqlmodel import Field, SQLModel
from app.core.constants import AlertSeverity
from app.db.base import TimestampMixin
class AlertRule(TimestampMixin, SQLModel, table=True):
    """Threshold rule evaluated against a metric to raise alerts."""

    __tablename__ = "alert_rules"
    id: int | None = Field(default=None, primary_key=True)
    name: str = Field(max_length=100)
    description: str = Field(default="", max_length=500)
    # Metric name the rule watches; the comparison encoded in `condition`
    # is interpreted elsewhere — confirm its vocabulary (e.g. "gt"/"lt").
    metric: str = Field(max_length=100)
    condition: str = Field(max_length=50)
    threshold: float
    # One of the AlertSeverity constants.
    severity: str = Field(default=AlertSeverity.WARNING, max_length=20)
    is_enabled: bool = Field(default=True)
    # Optional scoping to a device group; semantics of NULL not visible here.
    device_group_id: int | None = Field(default=None, foreign_key="device_groups.id")
    created_by: int | None = Field(default=None, foreign_key="users.id")
class Alert(TimestampMixin, SQLModel, table=True):
    """A fired alert instance, optionally acknowledged by a user."""

    __tablename__ = "alerts"
    id: int | None = Field(default=None, primary_key=True)
    rule_id: int | None = Field(default=None, foreign_key="alert_rules.id")
    device_id: int | None = Field(default=None, foreign_key="devices.id")
    severity: str = Field(default=AlertSeverity.WARNING, max_length=20)
    message: str = Field(max_length=500)
    # Acknowledgement bookkeeping: who confirmed the alert and when.
    is_acknowledged: bool = Field(default=False)
    acknowledged_by: int | None = Field(default=None, foreign_key="users.id")
    acknowledged_at: datetime | None = Field(default=None)

View File

@@ -0,0 +1,27 @@
from __future__ import annotations
from sqlmodel import Field, SQLModel
from app.db.base import TimestampMixin
class SystemConfig(TimestampMixin, SQLModel, table=True):
    """Key/value application configuration stored in the database."""

    __tablename__ = "system_configs"
    id: int | None = Field(default=None, primary_key=True)
    key: str = Field(max_length=100, unique=True, index=True)
    value: str = Field(default="", max_length=2000)
    description: str = Field(default="", max_length=500)
    # When True the value should be masked in UIs/logs — confirm enforcement.
    is_secret: bool = Field(default=False)
class AuditLog(TimestampMixin, SQLModel, table=True):
    """Record of a user action kept for auditing."""

    __tablename__ = "audit_logs"
    id: int | None = Field(default=None, primary_key=True)
    # NULL when the action is not attributable to a logged-in user.
    user_id: int | None = Field(default=None, foreign_key="users.id")
    action: str = Field(max_length=100)
    resource_type: str = Field(max_length=50)
    resource_id: str = Field(default="", max_length=50)
    # Extra context serialized as a JSON string.
    details: str = Field(default="{}", max_length=2000)
    ip_address: str = Field(default="", max_length=45)

View File

@@ -0,0 +1,44 @@
from __future__ import annotations
from datetime import datetime
from sqlmodel import Field, Relationship, SQLModel
from app.core.constants import Role
from app.db.base import SoftDeleteMixin, TimestampMixin
class User(TimestampMixin, SoftDeleteMixin, SQLModel, table=True):
    """Account record; authentication and authorization pivot on this row."""

    __tablename__ = "users"
    id: int | None = Field(default=None, primary_key=True)
    email: str = Field(max_length=255, unique=True, index=True)
    # bcrypt hash produced by app.core.security; never store plaintext.
    hashed_password: str = Field(max_length=255)
    # One of the Role constants.
    role: str = Field(default=Role.USER, max_length=20)
    is_active: bool = Field(default=True)
    # Presumably email verification — confirm the verification flow.
    is_verified: bool = Field(default=False)
    last_login_at: datetime | None = Field(default=None)
    # Relationships
    profile: UserProfile | None = Relationship(back_populates="user")
    refresh_tokens: list[RefreshToken] = Relationship(back_populates="user")
class UserProfile(TimestampMixin, SQLModel, table=True):
    """One-to-one extension of User holding display/contact details."""

    __tablename__ = "user_profiles"
    id: int | None = Field(default=None, primary_key=True)
    # unique=True enforces the one-to-one relationship with users.
    user_id: int = Field(foreign_key="users.id", unique=True, index=True)
    full_name: str = Field(default="", max_length=100)
    phone: str = Field(default="", max_length=20)
    organization: str = Field(default="", max_length=100)
    avatar_url: str = Field(default="", max_length=500)
    # Relationships
    user: User | None = Relationship(back_populates="profile")
# Imported at the bottom to break the circular import with the auth module;
# model_rebuild resolves the RefreshToken forward reference above.
from app.models.mariadb.auth import RefreshToken # noqa: E402
User.model_rebuild()

View File

View File

@@ -0,0 +1,24 @@
from __future__ import annotations
from datetime import datetime
from beanie import Document
from pydantic import Field
class AnalyticsResult(Document):
    """Stored output of an analytics run (see app.processing)."""

    analysis_type: str
    # Input parameters the analysis was run with (for reproducibility).
    parameters: dict = Field(default_factory=dict)
    result: dict = Field(default_factory=dict)
    device_id: str | None = None
    period_start: datetime | None = None
    period_end: datetime | None = None
    created_at: datetime = Field(default_factory=datetime.utcnow)

    class Settings:
        name = "analytics_results"
        indexes = [
            "analysis_type",
            "device_id",
            # Descending, for newest-first listings.
            [("created_at", -1)],
        ]

View File

@@ -0,0 +1,27 @@
from __future__ import annotations
from datetime import datetime
from beanie import Document
from pydantic import Field
class DeviceLog(Document):
    """Raw device event log entry stored in MongoDB."""

    device_id: str
    event_type: str
    payload: dict = Field(default_factory=dict)
    ip_address: str | None = None
    timestamp: datetime = Field(default_factory=datetime.utcnow)

    class Settings:
        name = "device_logs"
        indexes = [
            "device_id",
            "event_type",
            [("timestamp", -1)],
        ]
        # TTL: auto-delete documents 90 days after `timestamp`.
        # NOTE(review): Beanie's time-series support normally takes a
        # TimeSeriesConfig object — verify this plain dict actually takes
        # effect and the TTL is applied.
        timeseries = {
            "timeField": "timestamp",
            "expireAfterSeconds": 90 * 24 * 3600,
        }

View File

@@ -0,0 +1,24 @@
from __future__ import annotations
from datetime import datetime
from beanie import Document
from pydantic import Field
class Notification(Document):
    """Per-user in-app notification with read tracking."""

    user_id: int
    title: str
    message: str
    # Category string; "info" is the default — full vocabulary not visible here.
    notification_type: str = "info"
    is_read: bool = False
    read_at: datetime | None = None
    created_at: datetime = Field(default_factory=datetime.utcnow)

    class Settings:
        name = "notifications"
        indexes = [
            "user_id",
            # Compound index serving the "unread for user" query.
            [("user_id", 1), ("is_read", 1)],
            [("created_at", -1)],
        ]

View File

@@ -0,0 +1,20 @@
from __future__ import annotations
from datetime import datetime
from beanie import Document
from pydantic import Field
class TelemetryData(Document):
    """Time-stamped metric sample reported by a device."""

    device_id: str
    # Free-form metric name → value mapping.
    metrics: dict = Field(default_factory=dict)
    timestamp: datetime = Field(default_factory=datetime.utcnow)

    class Settings:
        name = "telemetry_data"
        indexes = [
            "device_id",
            [("timestamp", -1)],
            # Compound index serving per-device time-range queries.
            [("device_id", 1), ("timestamp", -1)],
        ]

View File

View File

View File

@@ -0,0 +1,37 @@
from __future__ import annotations
from datetime import datetime
from app.models.mongodb.device_log import DeviceLog
async def analyze_device_status(
    device_id: str, start: datetime, end: datetime
) -> dict:
    """Analyze device status changes over a period.

    Tallies each observed status value and computes the fraction of
    "online" events among all status-change events in the window.
    """
    events = await (
        DeviceLog.find(
            DeviceLog.device_id == device_id,
            DeviceLog.event_type == "status_change",
            DeviceLog.timestamp >= start,
            DeviceLog.timestamp <= end,
        )
        .sort("+timestamp")
        .to_list()
    )
    tally: dict[str, int] = {}
    for event in events:
        state = event.payload.get("status", "unknown")
        tally[state] = tally.get(state, 0) + 1
    total = len(events)
    online_events = tally.get("online", 0)
    ratio = online_events / total if total > 0 else 0.0
    return {
        "total_events": total,
        "status_counts": tally,
        "uptime_ratio": round(ratio, 4),
        "period": {"start": start.isoformat(), "end": end.isoformat()},
    }

View File

@@ -0,0 +1,45 @@
from __future__ import annotations
from datetime import datetime
import numpy as np
from app.models.mongodb.telemetry import TelemetryData
async def analyze_trend(
    device_id: str, start: datetime, end: datetime
) -> dict:
    """Analyze telemetry data trends using linear regression.

    Fits a first-order polynomial to the "value" metric over time and
    labels the slope as increasing / decreasing / stable.
    """
    samples = await (
        TelemetryData.find(
            TelemetryData.device_id == device_id,
            TelemetryData.timestamp >= start,
            TelemetryData.timestamp <= end,
        )
        .sort("+timestamp")
        .to_list()
    )
    if len(samples) < 2:
        return {"status": "insufficient_data", "count": len(samples)}
    times = np.array([s.timestamp.timestamp() for s in samples])
    series = np.array([s.metrics.get("value", 0) for s in samples], dtype=float)
    # Shift times so they start at zero (numerical stability for the fit).
    elapsed = times - times[0]
    # Coefficient 0 of the degree-1 fit is the slope per second.
    slope = float(np.polyfit(elapsed, series, 1)[0])
    if slope > 0.001:
        direction = "increasing"
    elif slope < -0.001:
        direction = "decreasing"
    else:
        direction = "stable"
    return {
        "count": len(samples),
        "mean": float(np.mean(series)),
        "std": float(np.std(series)),
        "min": float(np.min(series)),
        "max": float(np.max(series)),
        "slope": slope,
        "trend": direction,
    }

View File

View File

@@ -0,0 +1,29 @@
from __future__ import annotations
from datetime import datetime
from app.models.mongodb.analytics_result import AnalyticsResult
from app.processing.analyzers.device_analyzer import analyze_device_status
from app.processing.analyzers.trend_analyzer import analyze_trend
async def generate_device_report(
    device_id: str, start: datetime, end: datetime
) -> AnalyticsResult:
    """Generate a comprehensive device report.

    Combines the status analysis and telemetry trend analysis for the
    period, persists the combined result, and returns the stored document.
    """
    status_report = await analyze_device_status(device_id, start, end)
    trend_report = await analyze_trend(device_id, start, end)
    result = AnalyticsResult(
        analysis_type="device_report",
        device_id=device_id,
        parameters={"start": start.isoformat(), "end": end.isoformat()},
        result={
            "status": status_report,
            "trends": trend_report,
        },
        period_start=start,
        period_end=end,
    )
    await result.insert()
    return result

View File

@@ -0,0 +1,57 @@
from __future__ import annotations
from datetime import datetime
import polars as pl
from app.models.mongodb.telemetry import TelemetryData
async def aggregate_telemetry(
    device_id: str,
    start: datetime,
    end: datetime,
    interval: str = "1h",
) -> pl.DataFrame:
    """Aggregate telemetry data for a device over a time range.

    Returns the per-interval mean of every metric column, or an empty
    DataFrame when no data falls in the range. ``interval`` uses Polars
    duration syntax (e.g. "1h", "15m").
    """
    docs = await (
        TelemetryData.find(
            TelemetryData.device_id == device_id,
            TelemetryData.timestamp >= start,
            TelemetryData.timestamp <= end,
        )
        .sort("+timestamp")
        .to_list()
    )
    if not docs:
        return pl.DataFrame()
    # Flatten each document: one row per sample, metric keys become columns.
    records = [
        {"timestamp": d.timestamp, "device_id": d.device_id, **d.metrics}
        for d in docs
    ]
    df = pl.DataFrame(records)
    # group_by_dynamic requires a sorted time column; average every column
    # except the time and id columns per window.
    return df.sort("timestamp").group_by_dynamic("timestamp", every=interval).agg(
        pl.all().exclude("timestamp", "device_id").mean()
    )
async def get_latest_telemetry(device_id: str, limit: int = 100) -> pl.DataFrame:
    """Get latest telemetry records as a Polars DataFrame.

    Rows are newest-first; returns an empty DataFrame when the device has
    no telemetry.
    """
    docs = await (
        TelemetryData.find(TelemetryData.device_id == device_id)
        .sort("-timestamp")
        .limit(limit)
        .to_list()
    )
    if not docs:
        return pl.DataFrame()
    rows = []
    for doc in docs:
        row = {"timestamp": doc.timestamp, "device_id": doc.device_id}
        row.update(doc.metrics)
        rows.append(row)
    return pl.DataFrame(rows)

View File

View File

@@ -0,0 +1,26 @@
from __future__ import annotations
from datetime import datetime
import polars as pl
def filter_time_range(
    df: pl.DataFrame, column: str, start: datetime, end: datetime
) -> pl.DataFrame:
    """Keep only rows whose *column* value lies within [start, end]."""
    lower_bound = pl.col(column) >= start
    upper_bound = pl.col(column) <= end
    return df.filter(lower_bound & upper_bound)
def resample(df: pl.DataFrame, time_column: str, interval: str) -> pl.DataFrame:
    """Downsample *df* into *interval* windows, averaging numeric columns."""
    numeric_cols = []
    for name in df.columns:
        if name != time_column and df[name].dtype.is_numeric():
            numeric_cols.append(name)
    aggregations = [pl.col(name).mean().alias(name) for name in numeric_cols]
    return (
        df.sort(time_column)
        .group_by_dynamic(time_column, every=interval)
        .agg(aggregations)
    )
def to_records(df: pl.DataFrame) -> list[dict]:
    """Convert a DataFrame to a list of row dictionaries."""
    records = df.to_dicts()
    return records

View File

@@ -0,0 +1,39 @@
from __future__ import annotations
import numpy as np
def moving_average(values: list[float], window: int = 5) -> list[float]:
    """Return the simple moving average of *values* with the given window.

    Inputs shorter than *window* are returned unchanged.
    """
    if len(values) < window:
        return values
    kernel = np.full(window, 1.0 / window)
    smoothed = np.convolve(np.asarray(values, dtype=float), kernel, mode="valid")
    return smoothed.tolist()
def detect_anomalies(
    values: list[float], threshold: float = 2.0
) -> list[dict]:
    """Detect anomalies using the Z-score method.

    Returns one dict per point whose |z| exceeds *threshold*; an all-equal
    input (zero spread) yields no anomalies.
    """
    data = np.asarray(values, dtype=float)
    center = np.mean(data)
    spread = np.std(data)
    if spread == 0:
        return []
    scores = np.abs((data - center) / spread)
    return [
        {"index": idx, "value": values[idx], "z_score": float(scores[idx])}
        for idx in range(len(values))
        if scores[idx] > threshold
    ]
def percentile_stats(values: list[float]) -> dict:
    """Return the 50th/90th/95th/99th percentiles of *values*."""
    data = np.asarray(values, dtype=float)
    return {f"p{q}": float(np.percentile(data, q)) for q in (50, 90, 95, 99)}

View File

View File

@@ -0,0 +1,44 @@
from __future__ import annotations
from datetime import datetime
from app.models.mongodb.analytics_result import AnalyticsResult
class AnalyticsRepository:
    """Read/write access to AnalyticsResult documents in MongoDB."""

    async def create(self, result: AnalyticsResult) -> AnalyticsResult:
        """Insert a new analytics result and return the stored document."""
        return await result.insert()

    async def get_by_type(
        self,
        analysis_type: str,
        device_id: str | None = None,
        skip: int = 0,
        limit: int = 20,
    ) -> list[AnalyticsResult]:
        """Page through results of one analysis type, newest first."""
        query: dict = {"analysis_type": analysis_type}
        if device_id:
            query["device_id"] = device_id
        return await (
            AnalyticsResult.find(query)
            .sort("-created_at")
            .skip(skip)
            .limit(limit)
            .to_list()
        )

    async def get_by_period(
        self,
        analysis_type: str,
        start: datetime,
        end: datetime,
        device_id: str | None = None,
    ) -> list[AnalyticsResult]:
        """Return results whose analysis period lies within [start, end]."""
        query: dict = {
            "analysis_type": analysis_type,
            "period_start": {"$gte": start},
            "period_end": {"$lte": end},
        }
        if device_id:
            query["device_id"] = device_id
        return await AnalyticsResult.find(query).sort("-created_at").to_list()

View File

@@ -0,0 +1,50 @@
from __future__ import annotations
from datetime import datetime
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.mariadb.auth import OAuthAccount, RefreshToken
from app.repositories.base import BaseRepository
class AuthRepository(BaseRepository[RefreshToken]):
    """Persistence for refresh tokens and linked OAuth accounts."""

    def __init__(self, session: AsyncSession):
        super().__init__(RefreshToken, session)

    async def get_by_token(self, token: str) -> RefreshToken | None:
        """Return the token row only if it is unrevoked and unexpired."""
        stmt = select(RefreshToken).where(
            RefreshToken.token == token,
            RefreshToken.is_revoked == False, # noqa: E712
            RefreshToken.expires_at > datetime.utcnow(),
        )
        result = await self.session.execute(stmt)
        return result.scalar_one_or_none()

    async def revoke_all_for_user(self, user_id: int) -> None:
        """Mark every active refresh token of a user as revoked."""
        stmt = select(RefreshToken).where(
            RefreshToken.user_id == user_id,
            RefreshToken.is_revoked == False, # noqa: E712
        )
        result = await self.session.execute(stmt)
        for token in result.scalars().all():
            token.is_revoked = True
            self.session.add(token)
        await self.session.flush()

    async def get_oauth_account(
        self, provider: str, provider_user_id: str
    ) -> OAuthAccount | None:
        """Look up the link row for an external provider identity."""
        stmt = select(OAuthAccount).where(
            OAuthAccount.provider == provider,
            OAuthAccount.provider_user_id == provider_user_id,
        )
        result = await self.session.execute(stmt)
        return result.scalar_one_or_none()

    async def create_oauth_account(self, account: OAuthAccount) -> OAuthAccount:
        """Persist a new OAuth account link; refresh populates its id."""
        self.session.add(account)
        await self.session.flush()
        await self.session.refresh(account)
        return account

58
app/repositories/base.py Normal file
View File

@@ -0,0 +1,58 @@
from __future__ import annotations
from typing import Generic, Sequence, TypeVar
from sqlalchemy import func, select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlmodel import SQLModel
T = TypeVar("T", bound=SQLModel)
class BaseRepository(Generic[T]):
    """Generic async CRUD repository over a SQLModel table.

    Methods flush (not commit); the surrounding unit of work — the
    request-scoped session from get_db() — controls the transaction.
    """

    def __init__(self, model: type[T], session: AsyncSession):
        self.model = model
        self.session = session

    async def get_by_id(self, id: int) -> T | None:
        """Fetch a row by primary key, or None."""
        return await self.session.get(self.model, id)

    async def get_all(
        self, *, skip: int = 0, limit: int = 100, filters: dict | None = None
    ) -> Sequence[T]:
        """List rows with pagination and simple equality filters.

        Filter keys that are not attributes of the model are ignored.
        """
        stmt = select(self.model)
        if filters:
            for key, value in filters.items():
                if hasattr(self.model, key):
                    stmt = stmt.where(getattr(self.model, key) == value)
        stmt = stmt.offset(skip).limit(limit)
        result = await self.session.execute(stmt)
        return result.scalars().all()

    async def count(self, filters: dict | None = None) -> int:
        """Count rows matching the same equality filters as get_all."""
        stmt = select(func.count()).select_from(self.model)
        if filters:
            for key, value in filters.items():
                if hasattr(self.model, key):
                    stmt = stmt.where(getattr(self.model, key) == value)
        result = await self.session.execute(stmt)
        return result.scalar_one()

    async def create(self, obj: T) -> T:
        """Insert *obj*; refresh populates DB-generated fields (id, timestamps)."""
        self.session.add(obj)
        await self.session.flush()
        await self.session.refresh(obj)
        return obj

    async def update(self, obj: T, data: dict) -> T:
        """Apply non-None values from *data* onto *obj* and flush.

        NOTE(review): None values are skipped, so a field cannot be cleared
        through this method — confirm PATCH semantics are intended.
        """
        for key, value in data.items():
            if value is not None and hasattr(obj, key):
                setattr(obj, key, value)
        self.session.add(obj)
        await self.session.flush()
        await self.session.refresh(obj)
        return obj

    async def delete(self, obj: T) -> None:
        """Hard-delete *obj*; soft delete, where used, is handled by callers."""
        await self.session.delete(obj)
        await self.session.flush()

View File

@@ -0,0 +1,40 @@
from __future__ import annotations
from datetime import datetime
from app.models.mongodb.device_log import DeviceLog
class DeviceLogRepository:
    """Read/write access to DeviceLog documents in MongoDB."""

    async def create(self, log: DeviceLog) -> DeviceLog:
        """Insert a new device log entry."""
        return await log.insert()

    async def get_by_device(
        self,
        device_id: str,
        event_type: str | None = None,
        since: datetime | None = None,
        skip: int = 0,
        limit: int = 100,
    ) -> list[DeviceLog]:
        """Page through a device's logs, newest first, with optional filters."""
        query: dict = {"device_id": device_id}
        if event_type:
            query["event_type"] = event_type
        if since:
            query["timestamp"] = {"$gte": since}
        return await (
            DeviceLog.find(query)
            .sort("-timestamp")
            .skip(skip)
            .limit(limit)
            .to_list()
        )

    async def count_by_device(
        self, device_id: str, event_type: str | None = None
    ) -> int:
        """Count a device's logs, optionally restricted to one event type."""
        query: dict = {"device_id": device_id}
        if event_type:
            query["event_type"] = event_type
        return await DeviceLog.find(query).count()

View File

@@ -0,0 +1,47 @@
from __future__ import annotations
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.mariadb.device import Device, DeviceGroup
from app.repositories.base import BaseRepository
class DeviceRepository(BaseRepository[Device]):
    """Device-specific queries layered on the generic CRUD base."""

    def __init__(self, session: AsyncSession):
        super().__init__(Device, session)

    async def get_by_uid(self, device_uid: str) -> Device | None:
        """Return the non-deleted device with this hardware UID, or None."""
        query = select(Device).where(
            Device.device_uid == device_uid,
            Device.is_deleted == False,  # noqa: E712
        )
        row = await self.session.execute(query)
        return row.scalar_one_or_none()

    async def _page(self, condition, skip: int, limit: int) -> list[Device]:
        # Shared pagination for the owner/group listings below;
        # soft-deleted rows are always excluded.
        query = (
            select(Device)
            .where(condition, Device.is_deleted == False)  # noqa: E712
            .offset(skip)
            .limit(limit)
        )
        rows = await self.session.execute(query)
        return list(rows.scalars().all())

    async def get_by_owner(self, owner_id: int, skip: int = 0, limit: int = 100) -> list[Device]:
        """List non-deleted devices owned by one user."""
        return await self._page(Device.owner_id == owner_id, skip, limit)

    async def get_by_group(self, group_id: int, skip: int = 0, limit: int = 100) -> list[Device]:
        """List non-deleted devices belonging to one group."""
        return await self._page(Device.group_id == group_id, skip, limit)
class DeviceGroupRepository(BaseRepository[DeviceGroup]):
    """CRUD plus name lookup for device groups."""

    def __init__(self, session: AsyncSession):
        super().__init__(DeviceGroup, session)

    async def get_by_name(self, name: str) -> DeviceGroup | None:
        """Return the group called ``name``, or None if absent."""
        query = select(DeviceGroup).where(DeviceGroup.name == name)
        return (await self.session.execute(query)).scalar_one_or_none()

View File

@@ -0,0 +1,36 @@
from __future__ import annotations
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.mariadb.monitoring import Alert, AlertRule
from app.repositories.base import BaseRepository
class AlertRuleRepository(BaseRepository[AlertRule]):
    """Repository for alert rule definitions."""

    def __init__(self, session: AsyncSession):
        super().__init__(AlertRule, session)

    async def get_enabled_rules(self) -> list[AlertRule]:
        """Return every rule currently enabled for evaluation."""
        query = select(AlertRule).where(AlertRule.is_enabled == True)  # noqa: E712
        rows = await self.session.execute(query)
        return list(rows.scalars().all())
class AlertRepository(BaseRepository[Alert]):
    """Repository for fired alerts."""

    def __init__(self, session: AsyncSession):
        super().__init__(Alert, session)

    async def get_unacknowledged(self, skip: int = 0, limit: int = 50) -> list[Alert]:
        """Return unacknowledged alerts, newest first, paginated."""
        query = select(Alert).where(Alert.is_acknowledged == False)  # noqa: E712
        query = query.order_by(Alert.created_at.desc()).offset(skip).limit(limit)
        rows = await self.session.execute(query)
        return list(rows.scalars().all())

    async def count_active(self) -> int:
        """Count alerts still awaiting acknowledgement."""
        return await self.count(filters={"is_acknowledged": False})

View File

@@ -0,0 +1,44 @@
from __future__ import annotations
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from app.models.mariadb.user import User, UserProfile
from app.repositories.base import BaseRepository
class UserRepository(BaseRepository[User]):
    """User queries, including eager-loaded profile access."""

    def __init__(self, session: AsyncSession):
        super().__init__(User, session)

    async def get_by_email(self, email: str) -> User | None:
        """Return the user with this email, or None."""
        query = select(User).where(User.email == email)
        return (await self.session.execute(query)).scalar_one_or_none()

    async def get_with_profile(self, user_id: int) -> User | None:
        """Return a user with the profile relationship eagerly loaded."""
        query = (
            select(User)
            .options(selectinload(User.profile))
            .where(User.id == user_id)
        )
        return (await self.session.execute(query)).scalar_one_or_none()

    async def create_with_profile(
        self, user: User, full_name: str = "", phone: str = "", organization: str = ""
    ) -> User:
        """Persist a user together with an attached profile row.

        Flushes twice: first so ``user.id`` exists for the profile FK,
        then to persist the profile itself.
        """
        self.session.add(user)
        await self.session.flush()  # assigns user.id for the FK below
        self.session.add(
            UserProfile(
                user_id=user.id,  # type: ignore[arg-type]
                full_name=full_name,
                phone=phone,
                organization=organization,
            )
        )
        await self.session.flush()
        await self.session.refresh(user)
        return user

0
app/schemas/__init__.py Normal file
View File

38
app/schemas/analytics.py Normal file
View File

@@ -0,0 +1,38 @@
from __future__ import annotations
from datetime import datetime
from pydantic import BaseModel
class AnalyticsRequest(BaseModel):
    """Request body for running an on-demand analytics job."""

    # None means "analyze across all devices".
    device_id: str | None = None
    analysis_type: str = "telemetry_summary"
    start: datetime
    end: datetime
    # Free-form analyzer options; pydantic copies mutable defaults per instance.
    parameters: dict = {}
class AnalyticsResultRead(BaseModel):
    """A stored analytics result as exposed by the API."""

    # Stringified backing-store document id.
    id: str
    analysis_type: str
    device_id: str | None
    result: dict
    parameters: dict
    period_start: datetime | None
    period_end: datetime | None
    created_at: datetime
class ReportResponse(BaseModel):
    """Response shape for a generated device report."""

    report_id: str
    device_id: str
    # Sub-documents extracted from the report pipeline's result dict.
    status: dict
    trends: dict
    created_at: datetime
class TelemetryAggregateResponse(BaseModel):
    """Aggregated telemetry rows for one device over a time window."""

    device_id: str
    records: list[dict]
    # Convenience duplicate of len(records).
    count: int

30
app/schemas/auth.py Normal file
View File

@@ -0,0 +1,30 @@
from __future__ import annotations
from pydantic import BaseModel, EmailStr
class LoginRequest(BaseModel):
    """Credentials for email/password login."""

    email: EmailStr
    password: str
class RegisterRequest(BaseModel):
    """Payload for self-service account registration."""

    email: EmailStr
    password: str
    full_name: str = ""
class TokenResponse(BaseModel):
    """Access/refresh token pair issued on login or refresh."""

    access_token: str
    refresh_token: str
    token_type: str = "bearer"
class RefreshTokenRequest(BaseModel):
    """Payload carrying the refresh token to be rotated."""

    refresh_token: str
class OAuthCallbackRequest(BaseModel):
    """Authorization-code callback data from an OAuth provider."""

    provider: str
    code: str
    redirect_uri: str

19
app/schemas/common.py Normal file
View File

@@ -0,0 +1,19 @@
from __future__ import annotations
from typing import Generic, TypeVar
from pydantic import BaseModel
T = TypeVar("T")
class ErrorResponse(BaseModel):
    """Standard error envelope with a human-readable detail message."""

    detail: str
class PaginatedResponse(BaseModel, Generic[T]):
    """Generic page envelope for list endpoints."""

    items: list[T]
    total: int
    page: int
    size: int
    pages: int

52
app/schemas/device.py Normal file
View File

@@ -0,0 +1,52 @@
from __future__ import annotations
from datetime import datetime
from pydantic import BaseModel
class DeviceGroupRead(BaseModel):
    """API representation of a device group row."""

    id: int
    name: str
    description: str
    # Allow validation straight from ORM objects.
    model_config = {"from_attributes": True}
class DeviceGroupCreate(BaseModel):
    """Payload for creating a device group."""

    name: str
    description: str = ""
class DeviceRead(BaseModel):
    """API representation of a device row."""

    id: int
    device_uid: str
    name: str
    device_type: str
    status: str
    firmware_version: str
    ip_address: str
    group_id: int | None
    owner_id: int | None
    last_seen_at: datetime | None
    created_at: datetime
    # Allow validation straight from ORM objects.
    model_config = {"from_attributes": True}
class DeviceCreate(BaseModel):
    """Payload for registering a new device."""

    device_uid: str
    name: str
    device_type: str = ""
    group_id: int | None = None
    owner_id: int | None = None
    firmware_version: str = ""
class DeviceUpdate(BaseModel):
    """Partial-update payload; None fields are left unchanged."""

    name: str | None = None
    device_type: str | None = None
    status: str | None = None
    firmware_version: str | None = None
    group_id: int | None = None
    owner_id: int | None = None

54
app/schemas/monitoring.py Normal file
View File

@@ -0,0 +1,54 @@
from __future__ import annotations
from datetime import datetime
from pydantic import BaseModel
class AlertRuleRead(BaseModel):
    """API representation of an alert rule row."""

    id: int
    name: str
    description: str
    metric: str
    condition: str
    threshold: float
    severity: str
    is_enabled: bool
    device_group_id: int | None
    created_at: datetime
    # Allow validation straight from ORM objects.
    model_config = {"from_attributes": True}
class AlertRuleCreate(BaseModel):
    """Payload for creating an alert rule."""

    name: str
    description: str = ""
    metric: str
    condition: str
    threshold: float
    severity: str = "warning"
    # None means the rule applies regardless of device group.
    device_group_id: int | None = None
class AlertRead(BaseModel):
    """API representation of a fired alert row."""

    id: int
    rule_id: int | None
    device_id: int | None
    severity: str
    message: str
    is_acknowledged: bool
    acknowledged_by: int | None
    acknowledged_at: datetime | None
    created_at: datetime
    # Allow validation straight from ORM objects.
    model_config = {"from_attributes": True}
class SystemHealthResponse(BaseModel):
    """Coarse health snapshot: backend connectivity plus activity counts."""

    status: str
    mariadb: str
    mongodb: str
    redis: str
    mqtt: str
    active_devices: int
    active_alerts: int

38
app/schemas/user.py Normal file
View File

@@ -0,0 +1,38 @@
from __future__ import annotations
from datetime import datetime
from pydantic import BaseModel, EmailStr
class UserRead(BaseModel):
    """User plus flattened profile fields, as exposed by the API."""

    id: int
    email: str
    role: str
    is_active: bool
    is_verified: bool
    # Profile fields default to "" when the user has no profile row.
    full_name: str = ""
    phone: str = ""
    organization: str = ""
    avatar_url: str = ""
    created_at: datetime
    # Allow validation straight from ORM objects.
    model_config = {"from_attributes": True}
class UserCreate(BaseModel):
    """Admin payload for creating a user with an initial profile."""

    email: EmailStr
    password: str
    role: str = "user"
    full_name: str = ""
    phone: str = ""
    organization: str = ""
class UserUpdate(BaseModel):
    """Partial-update payload; None fields are left unchanged."""

    full_name: str | None = None
    phone: str | None = None
    organization: str | None = None
    avatar_url: str | None = None
    is_active: bool | None = None
    role: str | None = None

0
app/services/__init__.py Normal file
View File

View File

@@ -0,0 +1,71 @@
from __future__ import annotations
from datetime import datetime
from app.processing.analyzers.device_analyzer import analyze_device_status
from app.processing.analyzers.trend_analyzer import analyze_trend
from app.processing.pipelines.report_pipeline import generate_device_report
from app.processing.pipelines.telemetry_pipeline import aggregate_telemetry
from app.processing.utils.dataframe_utils import to_records
from app.repositories.analytics_repo import AnalyticsRepository
from app.schemas.analytics import (
AnalyticsResultRead,
ReportResponse,
TelemetryAggregateResponse,
)
class AnalyticsService:
    """Facade over the analytics pipelines, analyzers, and result store."""

    def __init__(self) -> None:
        self.analytics_repo = AnalyticsRepository()

    async def get_telemetry_aggregate(
        self, device_id: str, start: datetime, end: datetime, interval: str = "1h"
    ) -> TelemetryAggregateResponse:
        """Aggregate raw telemetry into ``interval`` buckets for one device."""
        frame = await aggregate_telemetry(device_id, start, end, interval)
        rows = [] if len(frame) == 0 else to_records(frame)
        return TelemetryAggregateResponse(
            device_id=device_id, records=rows, count=len(rows)
        )

    async def generate_report(
        self, device_id: str, start: datetime, end: datetime
    ) -> ReportResponse:
        """Run the report pipeline and shape its stored result."""
        report = await generate_device_report(device_id, start, end)
        return ReportResponse(
            report_id=str(report.id),
            device_id=device_id,
            status=report.result.get("status", {}),
            trends=report.result.get("trends", {}),
            created_at=report.created_at,
        )

    async def get_device_status_analysis(
        self, device_id: str, start: datetime, end: datetime
    ) -> dict:
        """Delegate to the device-status analyzer for the given window."""
        return await analyze_device_status(device_id, start, end)

    async def get_trend_analysis(
        self, device_id: str, start: datetime, end: datetime
    ) -> dict:
        """Delegate to the trend analyzer for the given window."""
        return await analyze_trend(device_id, start, end)

    async def list_results(
        self, analysis_type: str, device_id: str | None = None, skip: int = 0, limit: int = 20
    ) -> list[AnalyticsResultRead]:
        """List stored results of one analysis type, optionally per device."""
        stored = await self.analytics_repo.get_by_type(
            analysis_type, device_id=device_id, skip=skip, limit=limit
        )
        out: list[AnalyticsResultRead] = []
        for item in stored:
            out.append(
                AnalyticsResultRead(
                    id=str(item.id),
                    analysis_type=item.analysis_type,
                    device_id=item.device_id,
                    result=item.result,
                    parameters=item.parameters,
                    period_start=item.period_start,
                    period_end=item.period_end,
                    created_at=item.created_at,
                )
            )
        return out

View File

@@ -0,0 +1,84 @@
from __future__ import annotations
from datetime import datetime, timedelta
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.config import settings
from app.core.exceptions import ConflictException, UnauthorizedException
from app.core.security import (
create_access_token,
create_refresh_token,
decode_token,
hash_password,
verify_password,
)
from app.models.mariadb.auth import RefreshToken
from app.models.mariadb.user import User
from app.repositories.auth_repo import AuthRepository
from app.repositories.user_repo import UserRepository
from app.schemas.auth import TokenResponse
class AuthService:
    """Authentication workflows: register, login, token refresh, logout.

    Runs inside the caller's AsyncSession; repository calls flush rather
    than commit, so the surrounding request scope owns the transaction.
    """

    def __init__(self, session: AsyncSession):
        self.user_repo = UserRepository(session)
        self.auth_repo = AuthRepository(session)

    async def register(
        self, email: str, password: str, full_name: str = ""
    ) -> User:
        """Create a user (and profile) after checking email uniqueness.

        Raises:
            ConflictException: if the email is already registered.
        """
        existing = await self.user_repo.get_by_email(email)
        if existing:
            raise ConflictException("Email already registered")
        user = User(
            email=email,
            hashed_password=hash_password(password),
        )
        return await self.user_repo.create_with_profile(user, full_name=full_name)

    async def login(self, email: str, password: str) -> TokenResponse:
        """Verify credentials and issue a fresh access/refresh token pair.

        Raises:
            UnauthorizedException: on bad credentials or a deactivated account.
        """
        user = await self.user_repo.get_by_email(email)
        if not user or not verify_password(password, user.hashed_password):
            raise UnauthorizedException("Invalid email or password")
        if not user.is_active:
            raise UnauthorizedException("Account is deactivated")
        # Mutates the tracked instance; presumably persisted by the session
        # flush in _create_tokens — confirm autoflush covers this.
        user.last_login_at = datetime.utcnow()
        return await self._create_tokens(user)

    async def refresh(self, refresh_token_str: str) -> TokenResponse:
        """Rotate a refresh token: revoke the presented one, issue a new pair.

        Raises:
            UnauthorizedException: for invalid/unknown tokens or inactive users.
        """
        payload = decode_token(refresh_token_str)
        if not payload or payload.get("type") != "refresh":
            raise UnauthorizedException("Invalid refresh token")
        stored = await self.auth_repo.get_by_token(refresh_token_str)
        if not stored:
            raise UnauthorizedException("Refresh token not found or expired")
        # Single-use rotation: revoke the presented token in-session.
        stored.is_revoked = True
        user = await self.user_repo.get_by_id(stored.user_id)
        if not user or not user.is_active:
            raise UnauthorizedException("User not found or deactivated")
        return await self._create_tokens(user)

    async def logout(self, user_id: int) -> None:
        """Revoke every refresh token belonging to the user."""
        await self.auth_repo.revoke_all_for_user(user_id)

    async def _create_tokens(self, user: User) -> TokenResponse:
        """Mint JWTs and persist the refresh token with its expiry."""
        access = create_access_token(user.id, user.role)  # type: ignore[arg-type]
        refresh = create_refresh_token(user.id)  # type: ignore[arg-type]
        token_obj = RefreshToken(
            user_id=user.id,  # type: ignore[arg-type]
            token=refresh,
            expires_at=datetime.utcnow()
            + timedelta(days=settings.JWT_REFRESH_TOKEN_EXPIRE_DAYS),
        )
        await self.auth_repo.create(token_obj)
        return TokenResponse(access_token=access, refresh_token=refresh)

View File

@@ -0,0 +1,71 @@
from __future__ import annotations
from datetime import datetime
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.exceptions import ConflictException, NotFoundException
from app.models.mariadb.device import Device
from app.repositories.device_repo import DeviceRepository
from app.schemas.device import DeviceCreate, DeviceRead, DeviceUpdate
class DeviceService:
    """Application-level device operations built on DeviceRepository."""

    def __init__(self, session: AsyncSession):
        self.device_repo = DeviceRepository(session)

    async def _require_device(self, device_id: int) -> Device:
        # Fetch by PK, treating soft-deleted rows as missing.
        device = await self.device_repo.get_by_id(device_id)
        if not device or device.is_deleted:
            raise NotFoundException("Device not found")
        return device

    async def get_device(self, device_id: int) -> DeviceRead:
        """Return one device by primary key; 404 if missing or soft-deleted."""
        return DeviceRead.model_validate(await self._require_device(device_id))

    async def get_device_by_uid(self, device_uid: str) -> DeviceRead:
        """Return one device by hardware UID; 404 if missing."""
        device = await self.device_repo.get_by_uid(device_uid)
        if device is None:
            raise NotFoundException("Device not found")
        return DeviceRead.model_validate(device)

    async def list_devices(self, skip: int = 0, limit: int = 20) -> list[DeviceRead]:
        """Page through non-deleted devices."""
        rows = await self.device_repo.get_all(
            skip=skip, limit=limit, filters={"is_deleted": False}
        )
        return [DeviceRead.model_validate(row) for row in rows]

    async def count_devices(self) -> int:
        """Count non-deleted devices."""
        return await self.device_repo.count(filters={"is_deleted": False})

    async def create_device(self, data: DeviceCreate) -> DeviceRead:
        """Register a new device, rejecting duplicate UIDs."""
        if await self.device_repo.get_by_uid(data.device_uid):
            raise ConflictException("Device UID already registered")
        created = await self.device_repo.create(Device(**data.model_dump()))
        return DeviceRead.model_validate(created)

    async def update_device(self, device_id: int, data: DeviceUpdate) -> DeviceRead:
        """Apply the non-None fields of ``data`` to an existing device."""
        device = await self._require_device(device_id)
        changes = data.model_dump(exclude_none=True)
        updated = await self.device_repo.update(device, changes)
        return DeviceRead.model_validate(updated)

    async def delete_device(self, device_id: int) -> None:
        """Soft-delete: flag the row and stamp the deletion time."""
        device = await self._require_device(device_id)
        await self.device_repo.update(
            device, {"is_deleted": True, "deleted_at": datetime.utcnow()}
        )

    async def update_device_status(self, device_uid: str, status: str) -> DeviceRead:
        """Record a status heartbeat for a device identified by UID."""
        device = await self.device_repo.get_by_uid(device_uid)
        if device is None:
            raise NotFoundException("Device not found")
        refreshed = await self.device_repo.update(
            device, {"status": status, "last_seen_at": datetime.utcnow()}
        )
        return DeviceRead.model_validate(refreshed)

View File

@@ -0,0 +1,57 @@
from __future__ import annotations
from datetime import datetime
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.constants import DeviceStatus
from app.core.exceptions import NotFoundException
from app.models.mariadb.monitoring import Alert, AlertRule
from app.repositories.device_repo import DeviceRepository
from app.repositories.monitoring_repo import AlertRepository, AlertRuleRepository
from app.schemas.monitoring import AlertRead, AlertRuleCreate, AlertRuleRead, SystemHealthResponse
class MonitoringService:
    """Alert rules, active alerts, and a coarse system health snapshot."""

    def __init__(self, session: AsyncSession):
        self.alert_rule_repo = AlertRuleRepository(session)
        self.alert_repo = AlertRepository(session)
        self.device_repo = DeviceRepository(session)

    async def create_alert_rule(self, data: AlertRuleCreate, user_id: int) -> AlertRuleRead:
        """Persist a new rule, recording ``user_id`` as its author."""
        new_rule = AlertRule(**data.model_dump(), created_by=user_id)
        saved = await self.alert_rule_repo.create(new_rule)
        return AlertRuleRead.model_validate(saved)

    async def list_alert_rules(self) -> list[AlertRuleRead]:
        """Return alert rules (default repository pagination applies)."""
        return [
            AlertRuleRead.model_validate(rule)
            for rule in await self.alert_rule_repo.get_all()
        ]

    async def list_active_alerts(self, skip: int = 0, limit: int = 50) -> list[AlertRead]:
        """Return unacknowledged alerts, newest first."""
        return [
            AlertRead.model_validate(alert)
            for alert in await self.alert_repo.get_unacknowledged(skip=skip, limit=limit)
        ]

    async def acknowledge_alert(self, alert_id: int, user_id: int) -> AlertRead:
        """Mark an alert acknowledged by ``user_id`` with a UTC timestamp."""
        alert = await self.alert_repo.get_by_id(alert_id)
        if alert is None:
            raise NotFoundException("Alert not found")
        changes = {
            "is_acknowledged": True,
            "acknowledged_by": user_id,
            "acknowledged_at": datetime.utcnow(),
        }
        return AlertRead.model_validate(await self.alert_repo.update(alert, changes))

    async def get_system_health(self) -> SystemHealthResponse:
        """Snapshot device/alert counts; backend statuses are reported
        statically as "connected" here, not probed."""
        online = await self.device_repo.count(filters={"status": DeviceStatus.ONLINE})
        pending = await self.alert_repo.count_active()
        return SystemHealthResponse(
            status="ok",
            mariadb="connected",
            mongodb="connected",
            redis="connected",
            mqtt="connected",
            active_devices=online,
            active_alerts=pending,
        )

View File

@@ -0,0 +1,49 @@
from __future__ import annotations
from app.communication.socketio.server import sio
from app.models.mongodb.notification import Notification
class NotificationService:
    """Persist notifications in MongoDB and push them over Socket.IO."""

    async def create_notification(
        self, user_id: int, title: str, message: str, notification_type: str = "info"
    ) -> Notification:
        """Store a notification, then emit it to the user's socket room."""
        doc = Notification(
            user_id=user_id,
            title=title,
            message=message,
            notification_type=notification_type,
        )
        await doc.insert()
        payload = {"title": title, "message": message, "type": notification_type}
        await sio.emit(
            "notification",
            payload,
            room=f"user:{user_id}",
            namespace="/notification",
        )
        return doc

    async def get_user_notifications(
        self, user_id: int, skip: int = 0, limit: int = 20, unread_only: bool = False
    ) -> list[Notification]:
        """Page through a user's notifications, newest first."""
        criteria: dict = {"user_id": user_id}
        if unread_only:
            criteria["is_read"] = False
        cursor = Notification.find(criteria).sort("-created_at").skip(skip).limit(limit)
        return await cursor.to_list()

    async def mark_as_read(self, notification_id: str, user_id: int) -> None:
        """Mark one notification read, only if it belongs to ``user_id``."""
        from datetime import datetime

        doc = await Notification.get(notification_id)
        # Silently ignore unknown ids and other users' notifications.
        if doc and doc.user_id == user_id:
            doc.is_read = True
            doc.read_at = datetime.utcnow()
            await doc.save()

View File

@@ -0,0 +1,94 @@
from __future__ import annotations
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.exceptions import ConflictException, NotFoundException
from app.core.security import hash_password
from app.models.mariadb.user import User
from app.repositories.user_repo import UserRepository
from app.schemas.user import UserCreate, UserRead, UserUpdate
class UserService:
    """User CRUD with profile handling, layered over UserRepository."""

    def __init__(self, session: AsyncSession):
        self.user_repo = UserRepository(session)

    async def get_user(self, user_id: int) -> UserRead:
        """Return one user with profile fields flattened in.

        Raises:
            NotFoundException: if no such user.
        """
        user = await self.user_repo.get_with_profile(user_id)
        if not user:
            raise NotFoundException("User not found")
        return self._to_read(user)

    async def list_users(self, skip: int = 0, limit: int = 20) -> list[UserRead]:
        """Page through users.

        NOTE(review): get_all does not eager-load profiles; _to_read reading
        ``u.profile`` presumably relies on lazy loading — confirm this works
        under the async session.
        """
        users = await self.user_repo.get_all(skip=skip, limit=limit)
        return [self._to_read(u) for u in users]

    async def count_users(self) -> int:
        """Return the total number of user rows."""
        return await self.user_repo.count()

    async def create_user(self, data: UserCreate) -> UserRead:
        """Create a user plus profile, rejecting duplicate emails.

        Raises:
            ConflictException: if the email is already registered.
        """
        existing = await self.user_repo.get_by_email(data.email)
        if existing:
            raise ConflictException("Email already registered")
        user = User(
            email=data.email,
            hashed_password=hash_password(data.password),
            role=data.role,
        )
        user = await self.user_repo.create_with_profile(
            user,
            full_name=data.full_name,
            phone=data.phone,
            organization=data.organization,
        )
        return self._to_read(user)

    async def update_user(self, user_id: int, data: UserUpdate) -> UserRead:
        """Split the patch into user-table vs profile-table fields and apply.

        None fields are left unchanged. Profile fields are mutated on the
        tracked instance; presumably persisted by autoflush before the
        re-fetch below — confirm.
        """
        user = await self.user_repo.get_with_profile(user_id)
        if not user:
            raise NotFoundException("User not found")
        user_fields = {}
        profile_fields = {}
        if data.is_active is not None:
            user_fields["is_active"] = data.is_active
        if data.role is not None:
            user_fields["role"] = data.role
        for field in ("full_name", "phone", "organization", "avatar_url"):
            val = getattr(data, field, None)
            if val is not None:
                profile_fields[field] = val
        if user_fields:
            await self.user_repo.update(user, user_fields)
        if profile_fields and user.profile:
            for k, v in profile_fields.items():
                setattr(user.profile, k, v)
        # Re-read so the returned view reflects all applied changes.
        user = await self.user_repo.get_with_profile(user_id)
        return self._to_read(user)  # type: ignore[arg-type]

    async def delete_user(self, user_id: int) -> None:
        """Soft-delete: flag the user row as deleted.

        Raises:
            NotFoundException: if no such user.
        """
        user = await self.user_repo.get_by_id(user_id)
        if not user:
            raise NotFoundException("User not found")
        await self.user_repo.update(user, {"is_deleted": True})

    @staticmethod
    def _to_read(user: User) -> UserRead:
        """Flatten a User plus optional profile into the API schema."""
        profile = user.profile
        return UserRead(
            id=user.id,  # type: ignore[arg-type]
            email=user.email,
            role=user.role,
            is_active=user.is_active,
            is_verified=user.is_verified,
            full_name=profile.full_name if profile else "",
            phone=profile.phone if profile else "",
            organization=profile.organization if profile else "",
            avatar_url=profile.avatar_url if profile else "",
            created_at=user.created_at,
        )

0
app/tasks/__init__.py Normal file
View File

Some files were not shown because too many files have changed in this diff Show More