초기 커밋
This commit is contained in:
8
.claude/settings.local.json
Normal file
8
.claude/settings.local.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"WebSearch",
|
||||
"Bash(dir:*)"
|
||||
]
|
||||
}
|
||||
}
|
||||
17
.dockerignore
Normal file
17
.dockerignore
Normal file
@@ -0,0 +1,17 @@
|
||||
__pycache__
|
||||
*.py[cod]
|
||||
.venv
|
||||
venv
|
||||
.env
|
||||
.git
|
||||
.idea
|
||||
.vscode
|
||||
.mypy_cache
|
||||
.ruff_cache
|
||||
.pytest_cache
|
||||
htmlcov
|
||||
.coverage
|
||||
*.log
|
||||
data/
|
||||
tests/
|
||||
docs/
|
||||
56
.env.example
Normal file
56
.env.example
Normal file
@@ -0,0 +1,56 @@
|
||||
# ── Application ──────────────────────────────────────
|
||||
APP_NAME=core-api
|
||||
APP_ENV=development
|
||||
DEBUG=true
|
||||
SECRET_KEY=change-me-to-a-random-secret-key
|
||||
API_V1_PREFIX=/api/v1
|
||||
|
||||
# ── MariaDB ─────────────────────────────────────────
|
||||
MARIADB_HOST=127.0.0.1
|
||||
MARIADB_PORT=3306
|
||||
MARIADB_USER=root
|
||||
MARIADB_PASSWORD=changeme
|
||||
MARIADB_DATABASE=core_api
|
||||
|
||||
# ── MongoDB ─────────────────────────────────────────
|
||||
MONGODB_URL=mongodb://127.0.0.1:27017
|
||||
MONGODB_DATABASE=core_api
|
||||
|
||||
# ── Redis ───────────────────────────────────────────
|
||||
REDIS_URL=redis://127.0.0.1:6379/0
|
||||
|
||||
# ── JWT ─────────────────────────────────────────────
|
||||
JWT_SECRET_KEY=change-me-jwt-secret
|
||||
JWT_ALGORITHM=HS256
|
||||
JWT_ACCESS_TOKEN_EXPIRE_MINUTES=30
|
||||
JWT_REFRESH_TOKEN_EXPIRE_DAYS=7
|
||||
|
||||
# ── MQTT ────────────────────────────────────────────
|
||||
MQTT_HOST=127.0.0.1
|
||||
MQTT_PORT=1883
|
||||
MQTT_USERNAME=
|
||||
MQTT_PASSWORD=
|
||||
|
||||
# ── Celery ──────────────────────────────────────────
|
||||
CELERY_BROKER_URL=redis://127.0.0.1:6379/1
|
||||
CELERY_RESULT_BACKEND=redis://127.0.0.1:6379/2
|
||||
|
||||
# ── CORS ────────────────────────────────────────────
|
||||
CORS_ORIGINS=["http://localhost:3000","http://localhost:8080"]
|
||||
|
||||
# ── OAuth ───────────────────────────────────────────
|
||||
GOOGLE_CLIENT_ID=
|
||||
GOOGLE_CLIENT_SECRET=
|
||||
KAKAO_CLIENT_ID=
|
||||
KAKAO_CLIENT_SECRET=
|
||||
NAVER_CLIENT_ID=
|
||||
NAVER_CLIENT_SECRET=
|
||||
|
||||
# ── SMTP (Email) ────────────────────────────────────
|
||||
SMTP_HOST=smtp.gmail.com
|
||||
SMTP_PORT=587
|
||||
SMTP_USERNAME=
|
||||
SMTP_PASSWORD=
|
||||
|
||||
# ── Logging ─────────────────────────────────────────
|
||||
LOG_LEVEL=DEBUG
|
||||
52
.gitignore
vendored
Normal file
52
.gitignore
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
# Python
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
*.so
|
||||
*.egg-info/
|
||||
dist/
|
||||
build/
|
||||
.eggs/
|
||||
|
||||
# Virtual environments
|
||||
.venv/
|
||||
venv/
|
||||
env/
|
||||
|
||||
# Environment
|
||||
.env
|
||||
|
||||
# IDE
|
||||
.idea/
|
||||
.vscode/
|
||||
*.swp
|
||||
*.swo
|
||||
|
||||
# OS
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
# Testing
|
||||
.coverage
|
||||
htmlcov/
|
||||
.pytest_cache/
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
|
||||
# Ruff
|
||||
.ruff_cache/
|
||||
|
||||
# Logs
|
||||
*.log
|
||||
logs/
|
||||
|
||||
# Docker volumes
|
||||
data/
|
||||
|
||||
# Celery
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# Alembic
|
||||
alembic/versions/*.pyc
|
||||
15
.pre-commit-config.yaml
Normal file
15
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,15 @@
|
||||
repos:
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.7.0
|
||||
hooks:
|
||||
- id: ruff
|
||||
args: [--fix]
|
||||
- id: ruff-format
|
||||
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v5.0.0
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
- id: end-of-file-fixer
|
||||
- id: check-yaml
|
||||
- id: check-added-large-files
|
||||
16
Dockerfile
Normal file
16
Dockerfile
Normal file
@@ -0,0 +1,16 @@
|
||||
FROM python:3.11-slim
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
gcc \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY pyproject.toml .
|
||||
RUN pip install --no-cache-dir .
|
||||
|
||||
COPY . .
|
||||
|
||||
EXPOSE 8000
|
||||
|
||||
CMD ["uvicorn", "app.asgi:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||
14
Dockerfile.worker
Normal file
14
Dockerfile.worker
Normal file
@@ -0,0 +1,14 @@
|
||||
FROM python:3.11-slim
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
gcc \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY pyproject.toml .
|
||||
RUN pip install --no-cache-dir .
|
||||
|
||||
COPY . .
|
||||
|
||||
CMD ["celery", "-A", "app.tasks.celery_app", "worker", "--loglevel=info", "--concurrency=4"]
|
||||
37
alembic.ini
Normal file
37
alembic.ini
Normal file
@@ -0,0 +1,37 @@
|
||||
[alembic]
|
||||
script_location = alembic
|
||||
prepend_sys_path = .
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
54
alembic/env.py
Normal file
54
alembic/env.py
Normal file
@@ -0,0 +1,54 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from logging.config import fileConfig
|
||||
|
||||
from alembic import context
|
||||
from sqlalchemy import engine_from_config, pool
|
||||
from sqlmodel import SQLModel
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
# Alembic Config
|
||||
config = context.config
|
||||
config.set_main_option("sqlalchemy.url", settings.MARIADB_DSN_SYNC)
|
||||
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# Import all models so they register with SQLModel.metadata
|
||||
from app.models.mariadb import auth, device, monitoring, system, user # noqa: F401
|
||||
|
||||
target_metadata = SQLModel.metadata
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode: render SQL without a live DB connection."""
    context.configure(
        # DSN was injected into the config from app settings at module import.
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "format"},
    )
    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """Run migrations in 'online' mode against a real database connection."""
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,  # migrations are one-shot; no pooling needed
    )
    with engine.connect() as conn:
        context.configure(connection=conn, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
|
||||
|
||||
|
||||
# Alembic invokes this module directly; choose the strategy from the CLI mode.
_run = run_migrations_offline if context.is_offline_mode() else run_migrations_online
_run()
|
||||
28
alembic/script.py.mako
Normal file
28
alembic/script.py.mako
Normal file
@@ -0,0 +1,28 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
import sqlmodel
|
||||
from alembic import op
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
0
app/__init__.py
Normal file
0
app/__init__.py
Normal file
0
app/admin/__init__.py
Normal file
0
app/admin/__init__.py
Normal file
56
app/admin/setup.py
Normal file
56
app/admin/setup.py
Normal file
@@ -0,0 +1,56 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import FastAPI
|
||||
from sqladmin import Admin
|
||||
from sqladmin.authentication import AuthenticationBackend
|
||||
from starlette.requests import Request
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.constants import Role
|
||||
from app.core.security import decode_token
|
||||
from app.db.mariadb import async_engine
|
||||
|
||||
|
||||
class AdminAuth(AuthenticationBackend):
    """Session-based authentication for the sqladmin UI.

    The login form must supply an already-issued JWT in a ``token`` field; it
    is accepted only when its ``role`` claim is one of ``Role.ADMIN_ROLES``.
    The accepted token is kept in the server-side session and re-validated on
    every request.
    """

    async def login(self, request: Request) -> bool:
        form = await request.form()
        candidate = str(form.get("token", ""))
        claims = decode_token(candidate)
        if not claims or claims.get("role") not in Role.ADMIN_ROLES:
            return False
        request.session["token"] = candidate
        return True

    async def logout(self, request: Request) -> bool:
        # Dropping the whole session is enough; nothing else is stored in it here.
        request.session.clear()
        return True

    async def authenticate(self, request: Request) -> bool:
        stored = request.session.get("token")
        if not stored:
            return False
        claims = decode_token(stored)
        return claims is not None and claims.get("role") in Role.ADMIN_ROLES
|
||||
|
||||
|
||||
def setup_admin(app: FastAPI) -> Admin:
    """Mount the sqladmin interface on *app* and register all model views.

    Returns the configured :class:`Admin` instance.
    """
    admin = Admin(
        app,
        engine=async_engine,
        authentication_backend=AdminAuth(secret_key=settings.SECRET_KEY),
        title=f"{settings.APP_NAME} Admin",
    )

    # Imported inside the function (mirroring the original) — presumably to
    # avoid import cycles with the model modules; confirm before hoisting.
    from app.admin.views.device_admin import DeviceAdmin, DeviceGroupAdmin
    from app.admin.views.system_admin import AuditLogAdmin, SystemConfigAdmin
    from app.admin.views.user_admin import UserAdmin, UserProfileAdmin

    for view in (
        UserAdmin,
        UserProfileAdmin,
        DeviceAdmin,
        DeviceGroupAdmin,
        SystemConfigAdmin,
        AuditLogAdmin,
    ):
        admin.add_view(view)

    return admin
|
||||
0
app/admin/views/__init__.py
Normal file
0
app/admin/views/__init__.py
Normal file
32
app/admin/views/device_admin.py
Normal file
32
app/admin/views/device_admin.py
Normal file
@@ -0,0 +1,32 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from sqladmin import ModelView
|
||||
|
||||
from app.models.mariadb.device import Device, DeviceGroup
|
||||
|
||||
|
||||
class DeviceAdmin(ModelView, model=Device):
    """Admin view for devices; deletion is disabled to preserve history."""

    name = "Device"
    name_plural = "Devices"
    icon = "fa-solid fa-microchip"

    column_list = [
        Device.id,
        Device.device_uid,
        Device.name,
        Device.device_type,
        Device.status,
        Device.last_seen_at,
        Device.created_at,
    ]
    column_searchable_list = [Device.device_uid, Device.name]
    column_sortable_list = [Device.id, Device.name, Device.status, Device.created_at]
    column_default_sort = ("id", True)  # descending id, i.e. newest rows first

    can_create = True
    can_edit = True
    can_delete = False
|
||||
|
||||
|
||||
class DeviceGroupAdmin(ModelView, model=DeviceGroup):
    """Admin view for device groups; full CRUD is allowed."""

    name = "Device Group"
    name_plural = "Device Groups"
    icon = "fa-solid fa-layer-group"

    column_list = [
        DeviceGroup.id,
        DeviceGroup.name,
        DeviceGroup.description,
    ]
    column_searchable_list = [DeviceGroup.name]

    can_create = True
    can_edit = True
    can_delete = True
|
||||
31
app/admin/views/system_admin.py
Normal file
31
app/admin/views/system_admin.py
Normal file
@@ -0,0 +1,31 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from sqladmin import ModelView
|
||||
|
||||
from app.models.mariadb.system import AuditLog, SystemConfig
|
||||
|
||||
|
||||
class SystemConfigAdmin(ModelView, model=SystemConfig):
    """Admin view for key/value system configuration entries."""

    name = "System Config"
    name_plural = "System Configs"
    icon = "fa-solid fa-gear"

    # NOTE(review): `value` is listed even when `is_secret` is set — secret
    # values render in the admin list; confirm that is acceptable.
    column_list = [
        SystemConfig.id,
        SystemConfig.key,
        SystemConfig.value,
        SystemConfig.is_secret,
    ]
    column_searchable_list = [SystemConfig.key]

    can_create = True
    can_edit = True
    can_delete = True
|
||||
|
||||
|
||||
class AuditLogAdmin(ModelView, model=AuditLog):
    """Strictly read-only admin view over the audit trail."""

    name = "Audit Log"
    name_plural = "Audit Logs"
    icon = "fa-solid fa-clipboard-list"

    column_list = [
        AuditLog.id,
        AuditLog.user_id,
        AuditLog.action,
        AuditLog.resource_type,
        AuditLog.resource_id,
        AuditLog.created_at,
    ]
    column_sortable_list = [AuditLog.id, AuditLog.created_at]
    column_default_sort = ("id", True)  # newest entries first

    # Audit records must never be written or altered from the admin UI.
    can_create = False
    can_edit = False
    can_delete = False
|
||||
28
app/admin/views/user_admin.py
Normal file
28
app/admin/views/user_admin.py
Normal file
@@ -0,0 +1,28 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from sqladmin import ModelView
|
||||
|
||||
from app.models.mariadb.user import User, UserProfile
|
||||
|
||||
|
||||
class UserAdmin(ModelView, model=User):
    """Admin view for user accounts; deletion disabled to keep referential history."""

    name = "User"
    name_plural = "Users"
    icon = "fa-solid fa-user"

    column_list = [
        User.id,
        User.email,
        User.role,
        User.is_active,
        User.is_verified,
        User.created_at,
    ]
    column_searchable_list = [User.email]
    column_sortable_list = [User.id, User.email, User.created_at]
    column_default_sort = ("id", True)  # newest accounts first

    can_create = True
    can_edit = True
    can_delete = False
|
||||
|
||||
|
||||
class UserProfileAdmin(ModelView, model=UserProfile):
    """Admin view for user profiles: edit-only (no create, no delete)."""

    name = "User Profile"
    name_plural = "User Profiles"
    icon = "fa-solid fa-address-card"

    column_list = [
        UserProfile.id,
        UserProfile.user_id,
        UserProfile.full_name,
        UserProfile.organization,
    ]
    column_searchable_list = [UserProfile.full_name]

    can_create = False
    can_delete = False
|
||||
0
app/api/__init__.py
Normal file
0
app/api/__init__.py
Normal file
12
app/api/deps.py
Normal file
12
app/api/deps.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import AsyncGenerator
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.db.mariadb import get_db
|
||||
|
||||
|
||||
async def get_session() -> AsyncGenerator[AsyncSession, None]:
    """FastAPI dependency: yield a request-scoped DB session from the shared factory."""
    async for db_session in get_db():
        yield db_session
|
||||
0
app/api/v1/__init__.py
Normal file
0
app/api/v1/__init__.py
Normal file
0
app/api/v1/endpoints/__init__.py
Normal file
0
app/api/v1/endpoints/__init__.py
Normal file
73
app/api/v1/endpoints/analytics.py
Normal file
73
app/api/v1/endpoints/analytics.py
Normal file
@@ -0,0 +1,73 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
|
||||
from app.core.constants import Role
|
||||
from app.core.dependencies import require_role
|
||||
from app.schemas.analytics import (
|
||||
AnalyticsResultRead,
|
||||
ReportResponse,
|
||||
TelemetryAggregateResponse,
|
||||
)
|
||||
from app.services.analytics_service import AnalyticsService
|
||||
|
||||
router = APIRouter(prefix="/analytics", tags=["analytics"])
|
||||
|
||||
|
||||
@router.get("/telemetry/{device_id}", response_model=TelemetryAggregateResponse)
async def get_telemetry_aggregate(
    device_id: str,
    start: datetime = Query(...),
    end: datetime = Query(...),
    interval: str = Query("1h"),  # bucket size; semantics defined by AnalyticsService
    _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)),
) -> TelemetryAggregateResponse:
    """Return aggregated telemetry for one device over [start, end]; management roles only."""
    svc = AnalyticsService()
    return await svc.get_telemetry_aggregate(device_id, start, end, interval)
|
||||
|
||||
|
||||
@router.post("/reports/{device_id}", response_model=ReportResponse)
async def generate_report(
    device_id: str,
    start: datetime = Query(...),
    end: datetime = Query(...),
    _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)),
) -> ReportResponse:
    """Generate a report for one device over [start, end]; management roles only."""
    svc = AnalyticsService()
    return await svc.generate_report(device_id, start, end)
|
||||
|
||||
|
||||
@router.get("/status/{device_id}")
async def device_status_analysis(
    device_id: str,
    start: datetime = Query(...),
    end: datetime = Query(...),
    _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)),
) -> dict:
    """Delegate status analysis for one device to AnalyticsService; management roles only."""
    svc = AnalyticsService()
    return await svc.get_device_status_analysis(device_id, start, end)
|
||||
|
||||
|
||||
@router.get("/trends/{device_id}")
async def trend_analysis(
    device_id: str,
    start: datetime = Query(...),
    end: datetime = Query(...),
    _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)),
) -> dict:
    """Delegate trend analysis for one device to AnalyticsService; management roles only."""
    svc = AnalyticsService()
    return await svc.get_trend_analysis(device_id, start, end)
|
||||
|
||||
|
||||
@router.get("/results", response_model=list[AnalyticsResultRead])
async def list_analytics_results(
    analysis_type: str = Query(...),
    device_id: str | None = Query(None),  # optional filter; None means all devices
    skip: int = Query(0, ge=0),
    limit: int = Query(20, ge=1, le=100),
    _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)),
) -> list[AnalyticsResultRead]:
    """Page through stored analytics results of a given type; management roles only."""
    svc = AnalyticsService()
    return await svc.list_results(analysis_type, device_id, skip, limit)
|
||||
53
app/api/v1/endpoints/auth.py
Normal file
53
app/api/v1/endpoints/auth.py
Normal file
@@ -0,0 +1,53 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.api.deps import get_session
|
||||
from app.core.dependencies import get_current_user_id
|
||||
from app.schemas.auth import (
|
||||
LoginRequest,
|
||||
RefreshTokenRequest,
|
||||
RegisterRequest,
|
||||
TokenResponse,
|
||||
)
|
||||
from app.services.auth_service import AuthService
|
||||
|
||||
router = APIRouter(prefix="/auth", tags=["auth"])
|
||||
|
||||
|
||||
@router.post("/register", response_model=TokenResponse, status_code=201)
async def register(
    body: RegisterRequest,
    session: AsyncSession = Depends(get_session),
) -> TokenResponse:
    """Create a new account and immediately issue a token pair for it."""
    svc = AuthService(session)
    new_user = await svc.register(body.email, body.password, body.full_name)
    # NOTE(review): reaches into AuthService's private _create_tokens();
    # consider exposing a public token-issuing method on the service instead.
    return await svc._create_tokens(new_user)
|
||||
|
||||
|
||||
@router.post("/login", response_model=TokenResponse)
async def login(
    body: LoginRequest,
    session: AsyncSession = Depends(get_session),
) -> TokenResponse:
    """Delegate email/password login to AuthService and return its token response."""
    return await AuthService(session).login(body.email, body.password)
|
||||
|
||||
|
||||
@router.post("/refresh", response_model=TokenResponse)
async def refresh_token(
    body: RefreshTokenRequest,
    session: AsyncSession = Depends(get_session),
) -> TokenResponse:
    """Exchange a refresh token for a new token pair via AuthService.refresh."""
    return await AuthService(session).refresh(body.refresh_token)
|
||||
|
||||
|
||||
@router.post("/logout", status_code=204)
async def logout(
    user_id: int = Depends(get_current_user_id),
    session: AsyncSession = Depends(get_session),
) -> None:
    """Log out the authenticated caller; responds 204 No Content."""
    await AuthService(session).logout(user_id)
|
||||
70
app/api/v1/endpoints/devices.py
Normal file
70
app/api/v1/endpoints/devices.py
Normal file
@@ -0,0 +1,70 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.api.deps import get_session
|
||||
from app.core.constants import Role
|
||||
from app.core.dependencies import get_current_user_payload, require_role
|
||||
from app.schemas.common import PaginatedResponse
|
||||
from app.schemas.device import DeviceCreate, DeviceRead, DeviceUpdate
|
||||
from app.services.device_service import DeviceService
|
||||
|
||||
router = APIRouter(prefix="/devices", tags=["devices"])
|
||||
|
||||
|
||||
@router.get("", response_model=PaginatedResponse[DeviceRead])
async def list_devices(
    page: int = Query(1, ge=1),
    size: int = Query(20, ge=1, le=100),
    _: dict = Depends(get_current_user_payload),  # any authenticated user
    session: AsyncSession = Depends(get_session),
) -> PaginatedResponse[DeviceRead]:
    """Return one page of devices with total/page metadata."""
    svc = DeviceService(session)
    offset = (page - 1) * size
    rows = await svc.list_devices(skip=offset, limit=size)
    total = await svc.count_devices()
    return PaginatedResponse(
        items=rows,
        total=total,
        page=page,
        size=size,
        pages=(total + size - 1) // size,  # ceiling division
    )
|
||||
|
||||
|
||||
@router.get("/{device_id}", response_model=DeviceRead)
async def get_device(
    device_id: int,
    _: dict = Depends(get_current_user_payload),  # any authenticated user
    session: AsyncSession = Depends(get_session),
) -> DeviceRead:
    """Fetch a single device by primary key."""
    return await DeviceService(session).get_device(device_id)
|
||||
|
||||
|
||||
@router.post("", response_model=DeviceRead, status_code=201)
async def create_device(
    body: DeviceCreate,
    _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN, Role.MANAGER)),
    session: AsyncSession = Depends(get_session),
) -> DeviceRead:
    """Register a new device; superadmin/admin/manager only."""
    return await DeviceService(session).create_device(body)
|
||||
|
||||
|
||||
@router.patch("/{device_id}", response_model=DeviceRead)
async def update_device(
    device_id: int,
    body: DeviceUpdate,
    _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN, Role.MANAGER)),
    session: AsyncSession = Depends(get_session),
) -> DeviceRead:
    """Apply a partial update to one device; superadmin/admin/manager only."""
    return await DeviceService(session).update_device(device_id, body)
|
||||
|
||||
|
||||
@router.delete("/{device_id}", status_code=204)
async def delete_device(
    device_id: int,
    # Stricter than create/update: managers may not delete.
    _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)),
    session: AsyncSession = Depends(get_session),
) -> None:
    """Delete a device; responds 204 No Content."""
    await DeviceService(session).delete_device(device_id)
|
||||
61
app/api/v1/endpoints/monitoring.py
Normal file
61
app/api/v1/endpoints/monitoring.py
Normal file
@@ -0,0 +1,61 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.api.deps import get_session
|
||||
from app.core.constants import Role
|
||||
from app.core.dependencies import get_current_user_id, require_role
|
||||
from app.schemas.monitoring import AlertRead, AlertRuleCreate, AlertRuleRead, SystemHealthResponse
|
||||
from app.services.monitoring_service import MonitoringService
|
||||
|
||||
router = APIRouter(prefix="/monitoring", tags=["monitoring"])
|
||||
|
||||
|
||||
@router.get("/health", response_model=SystemHealthResponse)
async def system_health(
    _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)),
    session: AsyncSession = Depends(get_session),
) -> SystemHealthResponse:
    """Return aggregated system health (delegated to MonitoringService); management roles only."""
    return await MonitoringService(session).get_system_health()
|
||||
|
||||
|
||||
@router.get("/alerts", response_model=list[AlertRead])
async def list_alerts(
    skip: int = Query(0, ge=0),
    limit: int = Query(50, ge=1, le=200),
    _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)),
    session: AsyncSession = Depends(get_session),
) -> list[AlertRead]:
    """Page through active alerts (offset/limit); management roles only."""
    return await MonitoringService(session).list_active_alerts(skip=skip, limit=limit)
|
||||
|
||||
|
||||
@router.post("/alerts/{alert_id}/acknowledge", response_model=AlertRead)
async def acknowledge_alert(
    alert_id: int,
    user_id: int = Depends(get_current_user_id),
    session: AsyncSession = Depends(get_session),
) -> AlertRead:
    """Mark an alert as acknowledged by the calling user.

    NOTE(review): unlike the other monitoring routes this has no role guard —
    any authenticated user may acknowledge; confirm that is intended.
    """
    return await MonitoringService(session).acknowledge_alert(alert_id, user_id)
|
||||
|
||||
|
||||
@router.get("/alert-rules", response_model=list[AlertRuleRead])
async def list_alert_rules(
    _: dict = Depends(require_role(*Role.MANAGEMENT_ROLES)),
    session: AsyncSession = Depends(get_session),
) -> list[AlertRuleRead]:
    """Return every configured alert rule; management roles only."""
    return await MonitoringService(session).list_alert_rules()
|
||||
|
||||
|
||||
@router.post("/alert-rules", response_model=AlertRuleRead, status_code=201)
async def create_alert_rule(
    body: AlertRuleCreate,
    user_id: int = Depends(get_current_user_id),
    session: AsyncSession = Depends(get_session),
) -> AlertRuleRead:
    """Create an alert rule attributed to the calling user.

    NOTE(review): listing rules requires a management role but creating one
    does not — verify this asymmetry is deliberate.
    """
    return await MonitoringService(session).create_alert_rule(body, user_id)
|
||||
32
app/api/v1/endpoints/system.py
Normal file
32
app/api/v1/endpoints/system.py
Normal file
@@ -0,0 +1,32 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.api.deps import get_session
|
||||
from app.core.config import settings
|
||||
from app.core.constants import Role
|
||||
from app.core.dependencies import require_role
|
||||
|
||||
router = APIRouter(prefix="/system", tags=["system"])
|
||||
|
||||
|
||||
@router.get("/health")
async def health_check() -> dict:
    """Unauthenticated liveness probe: static payload, no DB access."""
    return {
        "status": "ok",
        "service": settings.APP_NAME,
        "version": "0.1.0",  # NOTE(review): hard-coded; keep in sync with pyproject
    }
|
||||
|
||||
|
||||
@router.get("/info")
async def system_info(
    _: dict = Depends(require_role(Role.SUPERADMIN)),  # superadmin only
) -> dict:
    """Expose deployment configuration details to superadmins."""
    return {
        "app_name": settings.APP_NAME,
        "environment": settings.APP_ENV,
        "debug": settings.DEBUG,
        "api_prefix": settings.API_V1_PREFIX,
    }
|
||||
91
app/api/v1/endpoints/users.py
Normal file
91
app/api/v1/endpoints/users.py
Normal file
@@ -0,0 +1,91 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.api.deps import get_session
|
||||
from app.core.constants import Role
|
||||
from app.core.dependencies import get_current_user_id, require_role
|
||||
from app.schemas.common import PaginatedResponse
|
||||
from app.schemas.user import UserCreate, UserRead, UserUpdate
|
||||
from app.services.user_service import UserService
|
||||
|
||||
router = APIRouter(prefix="/users", tags=["users"])
|
||||
|
||||
|
||||
@router.get("/me", response_model=UserRead)
async def get_me(
    user_id: int = Depends(get_current_user_id),
    session: AsyncSession = Depends(get_session),
) -> UserRead:
    """Return the authenticated caller's own user record."""
    return await UserService(session).get_user(user_id)
|
||||
|
||||
|
||||
@router.patch("/me", response_model=UserRead)
async def update_me(
    body: UserUpdate,
    user_id: int = Depends(get_current_user_id),
    session: AsyncSession = Depends(get_session),
) -> UserRead:
    """Let the caller edit their own record, with privileged fields stripped."""
    # Self-service updates must never change role or active status.
    body.role = None
    body.is_active = None
    return await UserService(session).update_user(user_id, body)
|
||||
|
||||
|
||||
@router.get("", response_model=PaginatedResponse[UserRead])
async def list_users(
    page: int = Query(1, ge=1),
    size: int = Query(20, ge=1, le=100),
    _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)),
    session: AsyncSession = Depends(get_session),
) -> PaginatedResponse[UserRead]:
    """Return one page of users with total/page metadata; admin-level roles only."""
    svc = UserService(session)
    offset = (page - 1) * size
    rows = await svc.list_users(skip=offset, limit=size)
    total = await svc.count_users()
    return PaginatedResponse(
        items=rows,
        total=total,
        page=page,
        size=size,
        pages=(total + size - 1) // size,  # ceiling division
    )
|
||||
|
||||
|
||||
@router.get("/{user_id}", response_model=UserRead)
async def get_user(
    user_id: int,
    _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)),
    session: AsyncSession = Depends(get_session),
) -> UserRead:
    """Fetch any user by id; admin-level roles only."""
    return await UserService(session).get_user(user_id)
|
||||
|
||||
|
||||
@router.post("", response_model=UserRead, status_code=201)
async def create_user(
    body: UserCreate,
    _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)),
    session: AsyncSession = Depends(get_session),
) -> UserRead:
    """Create a user on behalf of an admin."""
    return await UserService(session).create_user(body)
|
||||
|
||||
|
||||
@router.patch("/{user_id}", response_model=UserRead)
async def update_user(
    user_id: int,
    body: UserUpdate,
    _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)),
    session: AsyncSession = Depends(get_session),
) -> UserRead:
    """Apply a partial update to any user; admin-level roles only."""
    return await UserService(session).update_user(user_id, body)
|
||||
|
||||
|
||||
@router.delete("/{user_id}", status_code=204)
async def delete_user(
    user_id: int,
    _: dict = Depends(require_role(Role.SUPERADMIN, Role.ADMIN)),
    session: AsyncSession = Depends(get_session),
) -> None:
    """Delete a user; admin-level roles only; responds 204 No Content."""
    await UserService(session).delete_user(user_id)
|
||||
13
app/api/v1/router.py
Normal file
13
app/api/v1/router.py
Normal file
@@ -0,0 +1,13 @@
|
||||
from __future__ import annotations

from fastapi import APIRouter

from app.api.v1.endpoints import analytics, auth, devices, monitoring, system, users

v1_router = APIRouter()

# Sub-routers are mounted in the same order as before.
for _module in (system, auth, users, devices, monitoring, analytics):
    v1_router.include_router(_module.router)
|
||||
15
app/asgi.py
Normal file
15
app/asgi.py
Normal file
@@ -0,0 +1,15 @@
|
||||
import socketio

from app.communication.socketio.server import sio
from app.main import create_app

# Importing these modules registers their Socket.IO handlers on `sio`.
import app.communication.socketio.events  # noqa: F401
import app.communication.socketio.namespaces.device_ns  # noqa: F401
import app.communication.socketio.namespaces.monitoring_ns  # noqa: F401
import app.communication.socketio.namespaces.notification_ns  # noqa: F401

fastapi_app = create_app()

# Socket.IO is the outermost ASGI app; non-socket traffic is handed to FastAPI.
app = socketio.ASGIApp(sio, other_app=fastapi_app)
|
||||
0
app/communication/__init__.py
Normal file
0
app/communication/__init__.py
Normal file
0
app/communication/external/__init__.py
vendored
Normal file
0
app/communication/external/__init__.py
vendored
Normal file
19
app/communication/external/http_client.py
vendored
Normal file
19
app/communication/external/http_client.py
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import httpx
|
||||
|
||||
_client: httpx.AsyncClient | None = None
|
||||
|
||||
|
||||
async def get_http_client() -> httpx.AsyncClient:
    """Return the shared module-level AsyncClient, (re)creating it when absent or closed."""
    global _client
    needs_new = _client is None or _client.is_closed
    if needs_new:
        _client = httpx.AsyncClient(timeout=30.0)  # one pooled client per process
    return _client
|
||||
|
||||
|
||||
async def close_http_client() -> None:
    """Close the shared client if it is currently open, then drop the cached reference."""
    global _client
    is_open = _client is not None and not _client.is_closed
    if is_open:
        await _client.aclose()
        _client = None
|
||||
114
app/communication/external/oauth_providers.py
vendored
Normal file
114
app/communication/external/oauth_providers.py
vendored
Normal file
@@ -0,0 +1,114 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from app.communication.external.http_client import get_http_client
|
||||
from app.core.config import settings
|
||||
|
||||
|
||||
@dataclass
class OAuthUserInfo:
    """Normalized user profile returned by every OAuth provider helper."""

    provider: str  # provider key: "google" | "kakao" | "naver"
    provider_user_id: str  # id issued by the provider (always a str here)
    email: str  # may be "" when the provider does not share an email
    name: str  # display name / nickname; may be ""
|
||||
|
||||
|
||||
async def get_google_user_info(code: str, redirect_uri: str) -> OAuthUserInfo:
    """Exchange a Google authorization code for the user's profile.

    Args:
        code: one-time authorization code from Google's OAuth redirect.
        redirect_uri: must match the redirect URI used in the auth request.

    Raises:
        httpx.HTTPStatusError: if the token or userinfo request fails.
        KeyError: if an expected field is missing from a response.
    """
    client = await get_http_client()

    # Authorization-code grant: swap the one-time code for an access token.
    token_resp = await client.post(
        "https://oauth2.googleapis.com/token",
        data={
            "code": code,
            "client_id": settings.GOOGLE_CLIENT_ID,
            "client_secret": settings.GOOGLE_CLIENT_SECRET,
            "redirect_uri": redirect_uri,
            "grant_type": "authorization_code",
        },
    )
    token_resp.raise_for_status()
    access_token = token_resp.json()["access_token"]

    # Fetch the profile with the freshly issued access token.
    user_resp = await client.get(
        "https://www.googleapis.com/oauth2/v2/userinfo",
        headers={"Authorization": f"Bearer {access_token}"},
    )
    user_resp.raise_for_status()
    data = user_resp.json()

    return OAuthUserInfo(
        provider="google",
        provider_user_id=data["id"],
        email=data["email"],
        name=data.get("name", ""),
    )
|
||||
|
||||
|
||||
async def get_kakao_user_info(code: str, redirect_uri: str) -> OAuthUserInfo:
    """Exchange a Kakao authorization code for the user's profile.

    Email and nickname live under "kakao_account" and are consent-dependent,
    so both default to "" when absent.

    Raises:
        httpx.HTTPStatusError: if the token or profile request fails.
    """
    client = await get_http_client()

    # Authorization-code grant against Kakao's token endpoint.
    token_resp = await client.post(
        "https://kauth.kakao.com/oauth/token",
        data={
            "grant_type": "authorization_code",
            "client_id": settings.KAKAO_CLIENT_ID,
            "client_secret": settings.KAKAO_CLIENT_SECRET,
            "redirect_uri": redirect_uri,
            "code": code,
        },
    )
    token_resp.raise_for_status()
    access_token = token_resp.json()["access_token"]

    user_resp = await client.get(
        "https://kapi.kakao.com/v2/user/me",
        headers={"Authorization": f"Bearer {access_token}"},
    )
    user_resp.raise_for_status()
    data = user_resp.json()

    account = data.get("kakao_account", {})
    return OAuthUserInfo(
        provider="kakao",
        provider_user_id=str(data["id"]),  # Kakao ids are numeric; stringified
        email=account.get("email", ""),
        name=account.get("profile", {}).get("nickname", ""),
    )
|
||||
|
||||
|
||||
async def get_naver_user_info(code: str, redirect_uri: str) -> OAuthUserInfo:
    """Exchange a Naver authorization code for the user's profile.

    Args:
        code: authorization code from Naver's OAuth redirect.
        redirect_uri: unused — kept only for signature parity with the other
            providers in OAUTH_PROVIDERS (this token request does not send it).

    NOTE(review): Naver's token endpoint also documents a `state` parameter
    which is not sent here — confirm this is intended.

    Raises:
        httpx.HTTPStatusError: if the token or profile request fails.
    """
    client = await get_http_client()

    token_resp = await client.post(
        "https://nid.naver.com/oauth2.0/token",
        data={
            "grant_type": "authorization_code",
            "client_id": settings.NAVER_CLIENT_ID,
            "client_secret": settings.NAVER_CLIENT_SECRET,
            "code": code,
        },
    )
    token_resp.raise_for_status()
    access_token = token_resp.json()["access_token"]

    user_resp = await client.get(
        "https://openapi.naver.com/v1/nid/me",
        headers={"Authorization": f"Bearer {access_token}"},
    )
    user_resp.raise_for_status()
    # Naver nests the actual profile under "response".
    data = user_resp.json()["response"]

    return OAuthUserInfo(
        provider="naver",
        provider_user_id=data["id"],
        email=data.get("email", ""),
        name=data.get("name", ""),
    )
|
||||
|
||||
|
||||
# Dispatch table: provider key -> async helper(code, redirect_uri) -> OAuthUserInfo.
OAUTH_PROVIDERS = {
    "google": get_google_user_info,
    "kakao": get_kakao_user_info,
    "naver": get_naver_user_info,
}
|
||||
0
app/communication/mqtt/__init__.py
Normal file
0
app/communication/mqtt/__init__.py
Normal file
26
app/communication/mqtt/client.py
Normal file
26
app/communication/mqtt/client.py
Normal file
@@ -0,0 +1,26 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi_mqtt import FastMQTT, MQTTConfig
|
||||
|
||||
from app.core.config import settings
|
||||
from app.communication.mqtt.topics import SUBSCRIBE_TOPICS
|
||||
|
||||
# Broker connection settings; empty username/password are normalized to None
# so anonymous connections carry no credentials at all.
mqtt_config = MQTTConfig(
    host=settings.MQTT_HOST,
    port=settings.MQTT_PORT,
    username=settings.MQTT_USERNAME or None,
    password=settings.MQTT_PASSWORD or None,
    keepalive=60,
)

# Shared FastMQTT instance; message handlers register on it via decorators.
mqtt = FastMQTT(config=mqtt_config)


async def mqtt_startup() -> None:
    """Connect to the broker and subscribe to all wildcard device topics."""
    await mqtt.mqtt_startup()
    for topic in SUBSCRIBE_TOPICS:
        mqtt.client.subscribe(topic)


async def mqtt_shutdown() -> None:
    """Disconnect from the MQTT broker."""
    await mqtt.mqtt_shutdown()
|
||||
86
app/communication/mqtt/handlers.py
Normal file
86
app/communication/mqtt/handlers.py
Normal file
@@ -0,0 +1,86 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
|
||||
import structlog
|
||||
|
||||
from app.communication.mqtt.client import mqtt
|
||||
from app.models.mongodb.device_log import DeviceLog
|
||||
from app.models.mongodb.telemetry import TelemetryData
|
||||
|
||||
logger = structlog.get_logger("mqtt")
|
||||
|
||||
|
||||
def _extract_device_uid(topic: str) -> str:
|
||||
parts = topic.split("/")
|
||||
return parts[1] if len(parts) >= 3 else "unknown"
|
||||
|
||||
|
||||
@mqtt.on_message()
async def on_message(client, topic: str, payload: bytes, qos: int, properties) -> None:  # type: ignore[no-untyped-def]
    """Route every inbound MQTT message to the matching handler.

    Payloads are expected to be UTF-8 JSON; anything else is logged and
    dropped. Routing is by topic suffix: telemetry / status / log / response.
    Unmatched topics are silently ignored.
    """
    device_uid = _extract_device_uid(topic)

    try:
        data = json.loads(payload.decode())
    except (json.JSONDecodeError, UnicodeDecodeError):
        # Drop malformed payloads instead of crashing the MQTT loop.
        logger.warning("invalid_mqtt_payload", topic=topic)
        return

    if "/telemetry" in topic:
        await _handle_telemetry(device_uid, data)
    elif "/status" in topic:
        await _handle_status(device_uid, data)
    elif "/log" in topic:
        await _handle_log(device_uid, data)
    elif "/response" in topic:
        await _handle_response(device_uid, data)
|
||||
|
||||
|
||||
async def _handle_telemetry(device_uid: str, data: dict) -> None:
    """Persist a telemetry reading and broadcast it to /monitoring clients."""
    telemetry = TelemetryData(device_id=device_uid, metrics=data)
    await telemetry.insert()

    # Broadcast via Socket.IO. Deferred import — presumably to avoid an
    # import cycle with the Socket.IO server module; confirm.
    from app.communication.socketio.server import sio

    await sio.emit(
        "telemetry",
        {"device_uid": device_uid, "data": data},
        namespace="/monitoring",
    )
    logger.debug("telemetry_saved", device_uid=device_uid)
|
||||
|
||||
|
||||
async def _handle_status(device_uid: str, data: dict) -> None:
    """Record a status change as a DeviceLog and push it to /device clients."""
    log = DeviceLog(device_id=device_uid, event_type="status_change", payload=data)
    await log.insert()

    # Deferred import — presumably to avoid an import cycle; confirm.
    from app.communication.socketio.server import sio

    await sio.emit(
        "device_status",
        {"device_uid": device_uid, "status": data},
        namespace="/device",
    )
    logger.debug("status_update", device_uid=device_uid)
|
||||
|
||||
|
||||
async def _handle_log(device_uid: str, data: dict) -> None:
    """Persist a device log entry; event_type falls back to "log"."""
    log = DeviceLog(
        device_id=device_uid,
        event_type=data.get("event_type", "log"),
        payload=data,
    )
    await log.insert()
    logger.debug("device_log_saved", device_uid=device_uid)
|
||||
|
||||
|
||||
async def _handle_response(device_uid: str, data: dict) -> None:
    """Relay a device's command response to /device clients (not persisted)."""
    # Deferred import — presumably to avoid an import cycle; confirm.
    from app.communication.socketio.server import sio

    await sio.emit(
        "device_response",
        {"device_uid": device_uid, "data": data},
        namespace="/device",
    )
    logger.debug("device_response", device_uid=device_uid)
|
||||
21
app/communication/mqtt/publisher.py
Normal file
21
app/communication/mqtt/publisher.py
Normal file
@@ -0,0 +1,21 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
|
||||
from app.communication.mqtt.client import mqtt
|
||||
from app.communication.mqtt.topics import DEVICE_COMMAND, DEVICE_CONFIG, DEVICE_OTA
|
||||
|
||||
|
||||
async def publish_command(device_uid: str, command: dict) -> None:
    """Publish a JSON-encoded command to the device's command topic."""
    topic = DEVICE_COMMAND.format(device_uid=device_uid)
    mqtt.client.publish(topic, json.dumps(command))


async def publish_config(device_uid: str, config: dict) -> None:
    """Publish a JSON-encoded configuration to the device's config topic."""
    topic = DEVICE_CONFIG.format(device_uid=device_uid)
    mqtt.client.publish(topic, json.dumps(config))


async def publish_ota(device_uid: str, ota_info: dict) -> None:
    """Publish JSON-encoded OTA update metadata to the device's OTA topic."""
    topic = DEVICE_OTA.format(device_uid=device_uid)
    mqtt.client.publish(topic, json.dumps(ota_info))
|
||||
25
app/communication/mqtt/topics.py
Normal file
25
app/communication/mqtt/topics.py
Normal file
@@ -0,0 +1,25 @@
|
||||
from __future__ import annotations
|
||||
|
||||
# MQTT topic templates; call .format(device_uid=...) before use.

# ── Device → Server ──────────────────────────────────
DEVICE_TELEMETRY = "devices/{device_uid}/telemetry"
DEVICE_STATUS = "devices/{device_uid}/status"
DEVICE_LOG = "devices/{device_uid}/log"
DEVICE_RESPONSE = "devices/{device_uid}/response"

# ── Server → Device ──────────────────────────────────
DEVICE_COMMAND = "devices/{device_uid}/command"
DEVICE_CONFIG = "devices/{device_uid}/config"
DEVICE_OTA = "devices/{device_uid}/ota"

# ── Wildcard subscriptions ───────────────────────────
# "+" matches exactly one topic level, i.e. any single device uid.
SUB_ALL_TELEMETRY = "devices/+/telemetry"
SUB_ALL_STATUS = "devices/+/status"
SUB_ALL_LOG = "devices/+/log"
SUB_ALL_RESPONSE = "devices/+/response"

# Topics the server subscribes to on MQTT startup (see mqtt_startup()).
SUBSCRIBE_TOPICS = [
    SUB_ALL_TELEMETRY,
    SUB_ALL_STATUS,
    SUB_ALL_LOG,
    SUB_ALL_RESPONSE,
]
|
||||
0
app/communication/socketio/__init__.py
Normal file
0
app/communication/socketio/__init__.py
Normal file
17
app/communication/socketio/events.py
Normal file
17
app/communication/socketio/events.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import structlog
|
||||
|
||||
from app.communication.socketio.server import sio
|
||||
|
||||
logger = structlog.get_logger("socketio")


@sio.event
async def connect(sid: str, environ: dict) -> None:
    """Default-namespace connect hook: log the new session id."""
    logger.info("client_connected", sid=sid)


@sio.event
async def disconnect(sid: str) -> None:
    """Default-namespace disconnect hook: log the departing session id."""
    logger.info("client_disconnected", sid=sid)
|
||||
0
app/communication/socketio/namespaces/__init__.py
Normal file
0
app/communication/socketio/namespaces/__init__.py
Normal file
28
app/communication/socketio/namespaces/device_ns.py
Normal file
28
app/communication/socketio/namespaces/device_ns.py
Normal file
@@ -0,0 +1,28 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import structlog
|
||||
|
||||
from app.communication.socketio.server import sio
|
||||
|
||||
logger = structlog.get_logger("socketio.device")


@sio.on("connect", namespace="/device")
async def device_connect(sid: str, environ: dict) -> None:
    """Log a new /device namespace connection."""
    logger.info("device_ns_connected", sid=sid)


@sio.on("disconnect", namespace="/device")
async def device_disconnect(sid: str) -> None:
    """Log a /device namespace disconnect."""
    logger.info("device_ns_disconnected", sid=sid)


@sio.on("send_command", namespace="/device")
async def send_command(sid: str, data: dict) -> None:
    """Forward a client's command payload to the device over MQTT.

    Expects {"device_uid": ..., "command": {...}}; incomplete payloads
    are silently ignored.
    """
    device_uid = data.get("device_uid")
    command = data.get("command")
    if device_uid and command:
        # Deferred import keeps this module free of a hard mqtt dependency
        # at import time.
        from app.communication.mqtt.publisher import publish_command

        await publish_command(device_uid, command)
        logger.info("command_sent", device_uid=device_uid)
|
||||
32
app/communication/socketio/namespaces/monitoring_ns.py
Normal file
32
app/communication/socketio/namespaces/monitoring_ns.py
Normal file
@@ -0,0 +1,32 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import structlog
|
||||
|
||||
from app.communication.socketio.server import sio
|
||||
|
||||
logger = structlog.get_logger("socketio.monitoring")


@sio.on("connect", namespace="/monitoring")
async def monitoring_connect(sid: str, environ: dict) -> None:
    """Log a new /monitoring namespace connection."""
    logger.info("monitoring_connected", sid=sid)


@sio.on("disconnect", namespace="/monitoring")
async def monitoring_disconnect(sid: str) -> None:
    """Log a /monitoring namespace disconnect."""
    logger.info("monitoring_disconnected", sid=sid)


@sio.on("subscribe_device", namespace="/monitoring")
async def subscribe_device(sid: str, data: dict) -> None:
    """Join the client to the per-device room ``device:<uid>``.

    Payloads without a device_uid are silently ignored.
    NOTE(review): confirm AsyncServer.enter_room is awaitable in the pinned
    python-socketio version.
    """
    device_uid = data.get("device_uid")
    if device_uid:
        await sio.enter_room(sid, f"device:{device_uid}", namespace="/monitoring")
        logger.info("subscribed_device", sid=sid, device_uid=device_uid)


@sio.on("unsubscribe_device", namespace="/monitoring")
async def unsubscribe_device(sid: str, data: dict) -> None:
    """Remove the client from the per-device room, if a uid was given."""
    device_uid = data.get("device_uid")
    if device_uid:
        await sio.leave_room(sid, f"device:{device_uid}", namespace="/monitoring")
||||
25
app/communication/socketio/namespaces/notification_ns.py
Normal file
25
app/communication/socketio/namespaces/notification_ns.py
Normal file
@@ -0,0 +1,25 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import structlog
|
||||
|
||||
from app.communication.socketio.server import sio
|
||||
|
||||
logger = structlog.get_logger("socketio.notification")


@sio.on("connect", namespace="/notification")
async def notification_connect(sid: str, environ: dict) -> None:
    """Log a new /notification namespace connection."""
    logger.info("notification_connected", sid=sid)


@sio.on("disconnect", namespace="/notification")
async def notification_disconnect(sid: str) -> None:
    """Log a /notification namespace disconnect."""
    logger.info("notification_disconnected", sid=sid)


@sio.on("join_user_room", namespace="/notification")
async def join_user_room(sid: str, data: dict) -> None:
    """Join the client to its per-user room ``user:<id>``.

    Payloads without a user_id are silently ignored.
    NOTE(review): user_id comes straight from the client, unauthenticated —
    confirm this is acceptable or add token verification.
    """
    user_id = data.get("user_id")
    if user_id:
        await sio.enter_room(sid, f"user:{user_id}", namespace="/notification")
        logger.info("joined_user_room", sid=sid, user_id=user_id)
|
||||
14
app/communication/socketio/server.py
Normal file
14
app/communication/socketio/server.py
Normal file
@@ -0,0 +1,14 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import socketio
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
# Shared Socket.IO server. CORS mirrors the REST configuration; verbose
# engine.io logging stays off, Socket.IO logging follows DEBUG.
sio = socketio.AsyncServer(
    async_mode="asgi",
    cors_allowed_origins=settings.CORS_ORIGINS,
    logger=settings.DEBUG,
    engineio_logger=False,
)

# Standalone ASGI wrapper around `sio` (app/asgi.py builds its own wrapper
# that also mounts the FastAPI app).
sio_app = socketio.ASGIApp(sio)
|
||||
0
app/core/__init__.py
Normal file
0
app/core/__init__.py
Normal file
93
app/core/config.py
Normal file
93
app/core/config.py
Normal file
@@ -0,0 +1,93 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pydantic import field_validator
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
|
||||
class Settings(BaseSettings):
    """Central application configuration, loaded from the environment / .env."""

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        case_sensitive=False,
    )

    # ── Application ──────────────────────────────────
    APP_NAME: str = "core-api"
    APP_ENV: str = "development"
    DEBUG: bool = True
    SECRET_KEY: str = "change-me-to-a-random-secret-key"  # override in production
    API_V1_PREFIX: str = "/api/v1"

    # ── MariaDB ──────────────────────────────────────
    MARIADB_HOST: str = "127.0.0.1"
    MARIADB_PORT: int = 3306
    MARIADB_USER: str = "root"
    MARIADB_PASSWORD: str = "changeme"
    MARIADB_DATABASE: str = "core_api"

    @property
    def MARIADB_DSN(self) -> str:
        """Async SQLAlchemy DSN (aiomysql driver)."""
        return (
            f"mysql+aiomysql://{self.MARIADB_USER}:{self.MARIADB_PASSWORD}"
            f"@{self.MARIADB_HOST}:{self.MARIADB_PORT}/{self.MARIADB_DATABASE}"
        )

    @property
    def MARIADB_DSN_SYNC(self) -> str:
        """Sync SQLAlchemy DSN (pymysql driver), e.g. for migrations/workers."""
        return (
            f"mysql+pymysql://{self.MARIADB_USER}:{self.MARIADB_PASSWORD}"
            f"@{self.MARIADB_HOST}:{self.MARIADB_PORT}/{self.MARIADB_DATABASE}"
        )

    # ── MongoDB ──────────────────────────────────────
    MONGODB_URL: str = "mongodb://127.0.0.1:27017"
    MONGODB_DATABASE: str = "core_api"

    # ── Redis ────────────────────────────────────────
    REDIS_URL: str = "redis://127.0.0.1:6379/0"

    # ── JWT ──────────────────────────────────────────
    JWT_SECRET_KEY: str = "change-me-jwt-secret"  # override in production
    JWT_ALGORITHM: str = "HS256"
    JWT_ACCESS_TOKEN_EXPIRE_MINUTES: int = 30
    JWT_REFRESH_TOKEN_EXPIRE_DAYS: int = 7

    # ── MQTT ─────────────────────────────────────────
    MQTT_HOST: str = "127.0.0.1"
    MQTT_PORT: int = 1883
    MQTT_USERNAME: str = ""  # empty means connect anonymously
    MQTT_PASSWORD: str = ""

    # ── Celery ───────────────────────────────────────
    CELERY_BROKER_URL: str = "redis://127.0.0.1:6379/1"
    CELERY_RESULT_BACKEND: str = "redis://127.0.0.1:6379/2"

    # ── CORS ─────────────────────────────────────────
    CORS_ORIGINS: list[str] = ["http://localhost:3000", "http://localhost:8080"]

    @field_validator("CORS_ORIGINS", mode="before")
    @classmethod
    def assemble_cors_origins(cls, v: str | list[str]) -> list[str]:
        """Normalize CORS_ORIGINS from a string into a clean list of origins.

        Accepts either a plain comma-separated string (``http://a,http://b``)
        or the bracketed JSON-like form used in .env.example
        (``["http://a","http://b"]``).

        Fix: surrounding quotes are now stripped from each entry — the
        previous version removed only the brackets, leaving quoted origins
        that could never match a request's Origin header.
        """
        if isinstance(v, str):
            items = (item.strip().strip("\"'") for item in v.strip("[]").split(","))
            return [item for item in items if item]
        return v

    # ── OAuth ────────────────────────────────────────
    GOOGLE_CLIENT_ID: str = ""
    GOOGLE_CLIENT_SECRET: str = ""
    KAKAO_CLIENT_ID: str = ""
    KAKAO_CLIENT_SECRET: str = ""
    NAVER_CLIENT_ID: str = ""
    NAVER_CLIENT_SECRET: str = ""

    # ── SMTP ─────────────────────────────────────────
    SMTP_HOST: str = "smtp.gmail.com"
    SMTP_PORT: int = 587
    SMTP_USERNAME: str = ""
    SMTP_PASSWORD: str = ""

    # ── Logging ──────────────────────────────────────
    LOG_LEVEL: str = "DEBUG"


# Singleton settings instance imported throughout the application.
settings = Settings()
|
||||
31
app/core/constants.py
Normal file
31
app/core/constants.py
Normal file
@@ -0,0 +1,31 @@
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
class Role:
    """String constants for user roles, highest to lowest privilege."""

    SUPERADMIN = "superadmin"
    ADMIN = "admin"
    MANAGER = "manager"
    USER = "user"
    DEVICE = "device"

    # Convenience groupings used by permission checks.
    ALL = [SUPERADMIN, ADMIN, MANAGER, USER, DEVICE]
    ADMIN_ROLES = [SUPERADMIN, ADMIN]
    MANAGEMENT_ROLES = [SUPERADMIN, ADMIN, MANAGER]
|
||||
|
||||
|
||||
class DeviceStatus:
    """String constants for the lifecycle states a device can be in."""

    ONLINE = "online"
    OFFLINE = "offline"
    ERROR = "error"
    MAINTENANCE = "maintenance"
|
||||
|
||||
|
||||
class AlertSeverity:
    """String constants for alert severity levels, most to least severe."""

    CRITICAL = "critical"
    WARNING = "warning"
    INFO = "info"
|
||||
|
||||
|
||||
class TokenType:
    """JWT "type" claim values distinguishing access and refresh tokens."""

    ACCESS = "access"
    REFRESH = "refresh"
|
||||
36
app/core/dependencies.py
Normal file
36
app/core/dependencies.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import Depends
|
||||
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
|
||||
|
||||
from app.core.constants import TokenType
|
||||
from app.core.exceptions import ForbiddenException, UnauthorizedException
|
||||
from app.core.security import decode_token
|
||||
|
||||
# HTTP Bearer extractor; rejects requests without an Authorization header.
bearer_scheme = HTTPBearer()


async def get_current_user_payload(
    credentials: HTTPAuthorizationCredentials = Depends(bearer_scheme),
) -> dict:
    """Validate the bearer token and return its decoded JWT claims.

    Raises:
        UnauthorizedException: when the token is invalid/expired, or when it
            is not an access token (e.g. a refresh token was supplied).
    """
    payload = decode_token(credentials.credentials)
    if payload is None:
        raise UnauthorizedException("Invalid or expired token")
    if payload.get("type") != TokenType.ACCESS:
        raise UnauthorizedException("Invalid token type")
    return payload
|
||||
|
||||
|
||||
async def get_current_user_id(
    payload: dict = Depends(get_current_user_payload),
) -> int:
    """Return the authenticated user's id from the token's "sub" claim.

    NOTE(review): raises KeyError/ValueError (-> 500) if "sub" is missing or
    non-numeric; tokens minted by this app stringify an id — confirm no other
    token sources exist.
    """
    return int(payload["sub"])
|
||||
|
||||
|
||||
def require_role(*allowed_roles: str):
    """Build a FastAPI dependency that only admits the given roles.

    Usage: ``Depends(require_role("admin", "superadmin"))``.

    Raises (from the returned dependency):
        ForbiddenException: when the token's "role" claim is not allowed.
    """
    async def _check(payload: dict = Depends(get_current_user_payload)) -> dict:
        if payload.get("role") not in allowed_roles:
            raise ForbiddenException("Insufficient permissions")
        return payload

    return _check
|
||||
22
app/core/error_handlers.py
Normal file
22
app/core/error_handlers.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from app.core.exceptions import AppException
|
||||
|
||||
|
||||
def register_error_handlers(app: FastAPI) -> None:
    """Install JSON error handlers on the app.

    AppException subclasses map to their own status code and detail; any
    other exception becomes a generic 500 without leaking internals.
    """
    @app.exception_handler(AppException)
    async def app_exception_handler(request: Request, exc: AppException) -> JSONResponse:
        # Domain errors carry their own status code and message.
        return JSONResponse(
            status_code=exc.status_code,
            content={"detail": exc.detail},
        )

    @app.exception_handler(Exception)
    async def unhandled_exception_handler(request: Request, exc: Exception) -> JSONResponse:
        # Catch-all: never expose internal error details to the client.
        return JSONResponse(
            status_code=500,
            content={"detail": "Internal server error"},
        )
|
||||
32
app/core/exceptions.py
Normal file
32
app/core/exceptions.py
Normal file
@@ -0,0 +1,32 @@
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
class AppException(Exception):
    """Base for all domain errors rendered as HTTP JSON responses.

    Attributes:
        status_code: HTTP status the error handler should emit.
        detail: human-readable message placed in the response body.
    """

    def __init__(self, status_code: int, detail: str):
        # Fix: forward detail to Exception so str(exc) and tracebacks carry
        # the message (previously args stayed empty and str(exc) == "").
        super().__init__(detail)
        self.status_code = status_code
        self.detail = detail


class NotFoundException(AppException):
    """404 Not Found — the requested resource does not exist."""

    def __init__(self, detail: str = "Resource not found"):
        super().__init__(status_code=404, detail=detail)


class UnauthorizedException(AppException):
    """401 Unauthorized — missing or invalid credentials."""

    def __init__(self, detail: str = "Not authenticated"):
        super().__init__(status_code=401, detail=detail)


class ForbiddenException(AppException):
    """403 Forbidden — authenticated but not allowed to do this."""

    def __init__(self, detail: str = "Permission denied"):
        super().__init__(status_code=403, detail=detail)


class ConflictException(AppException):
    """409 Conflict — e.g. the resource already exists."""

    def __init__(self, detail: str = "Resource already exists"):
        super().__init__(status_code=409, detail=detail)


class ValidationException(AppException):
    """422 Unprocessable Entity — semantic validation failure."""

    def __init__(self, detail: str = "Validation error"):
        super().__init__(status_code=422, detail=detail)
|
||||
43
app/core/logging_config.py
Normal file
43
app/core/logging_config.py
Normal file
@@ -0,0 +1,43 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
import structlog
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
|
||||
def setup_logging() -> None:
    """Configure structlog on top of stdlib logging.

    Console-friendly rendering in DEBUG, JSON lines otherwise; the level
    comes from settings.LOG_LEVEL and falls back to INFO on unknown names.
    """
    log_level = getattr(logging, settings.LOG_LEVEL.upper(), logging.INFO)

    structlog.configure(
        processors=[
            structlog.contextvars.merge_contextvars,
            structlog.stdlib.filter_by_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            # Human-readable console output in DEBUG, JSON in production.
            structlog.dev.ConsoleRenderer()
            if settings.DEBUG
            else structlog.processors.JSONRenderer(),
        ],
        wrapper_class=structlog.stdlib.BoundLogger,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )

    # stdlib logging emits the already-rendered structlog message to stdout.
    logging.basicConfig(
        format="%(message)s",
        stream=sys.stdout,
        level=log_level,
    )


def get_logger(name: str) -> structlog.stdlib.BoundLogger:
    """Return a named structlog logger."""
    return structlog.get_logger(name)
|
||||
16
app/core/permissions.py
Normal file
16
app/core/permissions.py
Normal file
@@ -0,0 +1,16 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from app.core.constants import Role
|
||||
|
||||
|
||||
def is_admin(role: str) -> bool:
    """True for superadmin/admin roles (see Role.ADMIN_ROLES)."""
    return role in Role.ADMIN_ROLES


def is_management(role: str) -> bool:
    """True for superadmin/admin/manager roles (see Role.MANAGEMENT_ROLES)."""
    return role in Role.MANAGEMENT_ROLES
|
||||
|
||||
|
||||
def can_manage_user(actor_role: str, target_role: str) -> bool:
    """Return True when the actor strictly outranks the target.

    Unknown roles rank 0, so they can never manage anyone, and equal
    ranks never manage each other.
    """
    ranks = {
        Role.SUPERADMIN: 4,
        Role.ADMIN: 3,
        Role.MANAGER: 2,
        Role.USER: 1,
        Role.DEVICE: 0,
    }
    actor_rank = ranks.get(actor_role, 0)
    target_rank = ranks.get(target_role, 0)
    return actor_rank > target_rank
|
||||
47
app/core/security.py
Normal file
47
app/core/security.py
Normal file
@@ -0,0 +1,47 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from jose import JWTError, jwt
|
||||
from passlib.context import CryptContext
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.constants import TokenType
|
||||
|
||||
# bcrypt-backed password hashing; "deprecated=auto" marks hashes made with
# any other configured scheme as needing re-hash.
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")


def hash_password(password: str) -> str:
    """Hash a plaintext password with bcrypt (fresh salt per call)."""
    return pwd_context.hash(password)


def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check a plaintext password against a stored bcrypt hash."""
    return pwd_context.verify(plain_password, hashed_password)
|
||||
|
||||
|
||||
def create_access_token(subject: int | str, role: str) -> str:
    """Create a signed JWT access token.

    Args:
        subject: user id; stored stringified in the "sub" claim.
        role: role name stored in the "role" claim.

    NOTE(review): datetime.utcnow() is deprecated since Python 3.12 —
    consider datetime.now(timezone.utc); confirm python-jose encodes aware
    datetimes identically before changing.
    """
    expire = datetime.utcnow() + timedelta(minutes=settings.JWT_ACCESS_TOKEN_EXPIRE_MINUTES)
    payload = {
        "sub": str(subject),
        "role": role,
        "type": TokenType.ACCESS,
        "exp": expire,
    }
    return jwt.encode(payload, settings.JWT_SECRET_KEY, algorithm=settings.JWT_ALGORITHM)
|
||||
|
||||
|
||||
def create_refresh_token(subject: int | str) -> str:
    """Create a signed JWT refresh token (no role claim, longer expiry).

    Args:
        subject: user id; stored stringified in the "sub" claim.
    """
    expire = datetime.utcnow() + timedelta(days=settings.JWT_REFRESH_TOKEN_EXPIRE_DAYS)
    payload = {
        "sub": str(subject),
        "type": TokenType.REFRESH,
        "exp": expire,
    }
    return jwt.encode(payload, settings.JWT_SECRET_KEY, algorithm=settings.JWT_ALGORITHM)
|
||||
|
||||
|
||||
def decode_token(token: str) -> dict | None:
    """Decode and validate a JWT; return its claims, or None when invalid.

    Signature failures and expiry both raise JWTError inside jose and are
    mapped to None here — callers cannot distinguish the two.
    """
    try:
        return jwt.decode(token, settings.JWT_SECRET_KEY, algorithms=[settings.JWT_ALGORITHM])
    except JWTError:
        return None
|
||||
0
app/db/__init__.py
Normal file
0
app/db/__init__.py
Normal file
22
app/db/base.py
Normal file
22
app/db/base.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import func
|
||||
from sqlmodel import Field, SQLModel
|
||||
|
||||
|
||||
class TimestampMixin(SQLModel):
    """Adds created_at/updated_at columns with both Python and DB defaults."""

    # Python-side default is naive UTC; the DB also applies NOW() on insert.
    # NOTE(review): utcnow is naive while server NOW() follows the DB
    # timezone — confirm both are UTC in deployment.
    created_at: datetime = Field(
        default_factory=datetime.utcnow,
        sa_column_kwargs={"server_default": func.now()},
    )
    # onupdate keeps updated_at current on every UPDATE statement.
    updated_at: datetime = Field(
        default_factory=datetime.utcnow,
        sa_column_kwargs={"server_default": func.now(), "onupdate": func.now()},
    )
|
||||
|
||||
|
||||
class SoftDeleteMixin(SQLModel):
    """Adds soft-delete columns; rows are flagged deleted, never removed."""

    is_deleted: bool = Field(default=False)
    deleted_at: datetime | None = Field(default=None)
|
||||
42
app/db/mariadb.py
Normal file
42
app/db/mariadb.py
Normal file
@@ -0,0 +1,42 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import AsyncGenerator
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from sqlmodel import SQLModel
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
# Async engine: pre-ping drops dead pooled connections, pool of 10 with up
# to 20 overflow; SQL echo follows DEBUG.
async_engine = create_async_engine(
    settings.MARIADB_DSN,
    echo=settings.DEBUG,
    pool_pre_ping=True,
    pool_size=10,
    max_overflow=20,
)

# Session factory; expire_on_commit=False keeps attribute values readable
# after commit without re-fetching.
AsyncSessionLocal = sessionmaker(
    bind=async_engine,
    class_=AsyncSession,
    expire_on_commit=False,
)


async def init_mariadb() -> None:
    """Create all SQLModel tables that do not exist yet."""
    async with async_engine.begin() as conn:
        await conn.run_sync(SQLModel.metadata.create_all)


async def close_mariadb() -> None:
    """Dispose the engine, closing all pooled connections."""
    await async_engine.dispose()


async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """FastAPI dependency yielding a session with commit-on-success.

    Commits after the request handler finishes; rolls back and re-raises on
    any exception. The context manager closes the session either way.
    """
    async with AsyncSessionLocal() as session:
        try:
            yield session
            await session.commit()
        except Exception:
            await session.rollback()
            raise
|
||||
37
app/db/mongodb.py
Normal file
37
app/db/mongodb.py
Normal file
@@ -0,0 +1,37 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from beanie import init_beanie
|
||||
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
# Module-level handles populated by init_mongodb() at application startup.
mongo_client: AsyncIOMotorClient | None = None
mongo_db: AsyncIOMotorDatabase | None = None


async def init_mongodb() -> None:
    """Connect to MongoDB and initialize the Beanie document models."""
    global mongo_client, mongo_db

    mongo_client = AsyncIOMotorClient(settings.MONGODB_URL)
    mongo_db = mongo_client[settings.MONGODB_DATABASE]

    # Deferred imports — presumably to avoid import cycles at module load;
    # confirm.
    from app.models.mongodb.analytics_result import AnalyticsResult
    from app.models.mongodb.device_log import DeviceLog
    from app.models.mongodb.notification import Notification
    from app.models.mongodb.telemetry import TelemetryData

    await init_beanie(
        database=mongo_db,
        document_models=[DeviceLog, TelemetryData, AnalyticsResult, Notification],
    )


async def close_mongodb() -> None:
    """Close the MongoDB client (no-op if never initialized)."""
    global mongo_client
    if mongo_client:
        mongo_client.close()
|
||||
|
||||
|
||||
def get_mongo_db() -> AsyncIOMotorDatabase:
    """Return the initialized MongoDB database handle.

    Raises:
        RuntimeError: if init_mongodb() has not been called yet. A real
            exception replaces the previous `assert`, which is stripped
            when Python runs with -O.
    """
    if mongo_db is None:
        raise RuntimeError("MongoDB not initialized")
    return mongo_db
|
||||
27
app/db/redis.py
Normal file
27
app/db/redis.py
Normal file
@@ -0,0 +1,27 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from redis.asyncio import Redis, from_url
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
# Shared client populated by init_redis() at application startup.
redis_client: Redis | None = None


async def init_redis() -> None:
    """Create the shared Redis client (UTF-8, responses decoded to str)."""
    global redis_client
    redis_client = from_url(
        settings.REDIS_URL,
        encoding="utf-8",
        decode_responses=True,
    )


async def close_redis() -> None:
    """Close the Redis client if it was initialized.

    NOTE(review): newer redis-py deprecates close() in favor of aclose()
    for async clients — confirm against the pinned redis version.
    """
    global redis_client
    if redis_client:
        await redis_client.close()
|
||||
|
||||
|
||||
def get_redis() -> Redis:
    """Return the initialized Redis client.

    Raises:
        RuntimeError: if init_redis() has not been called yet. A real
            exception replaces the previous `assert`, which is stripped
            when Python runs with -O.
    """
    if redis_client is None:
        raise RuntimeError("Redis not initialized")
    return redis_client
|
||||
67
app/main.py
Normal file
67
app/main.py
Normal file
@@ -0,0 +1,67 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import AsyncGenerator
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from fastapi import FastAPI
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.logging_config import setup_logging
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
    """Application lifespan: bring datastores up on startup, down on exit.

    Imports are deferred into the function body so the connection modules
    are only loaded when the app actually starts.
    """
    # ── Startup ──────────────────────────────────────
    setup_logging()

    from app.db.mariadb import init_mariadb
    from app.db.mongodb import init_mongodb
    from app.db.redis import init_redis

    await init_mariadb()
    await init_mongodb()
    await init_redis()

    yield

    # ── Shutdown ─────────────────────────────────────
    from app.communication.external.http_client import close_http_client
    from app.db.mariadb import close_mariadb
    from app.db.mongodb import close_mongodb
    from app.db.redis import close_redis

    # The HTTP client is created lazily elsewhere, so it may never have
    # been opened; close_http_client tolerates that.
    await close_http_client()
    await close_redis()
    await close_mongodb()
    await close_mariadb()
|
||||
|
||||
|
||||
def create_app() -> FastAPI:
    """Build and wire the FastAPI application (app factory).

    Middleware, error handlers, and routers are imported inside the factory
    so importing app.main stays free of heavy side effects.
    """
    app = FastAPI(
        title=settings.APP_NAME,
        version="0.1.0",
        docs_url="/docs",
        redoc_url="/redoc",
        lifespan=lifespan,
    )

    # ── Middleware (order matters: last added = first executed) ───
    from app.middleware.cors import add_cors_middleware
    from app.middleware.request_id import RequestIDMiddleware
    from app.middleware.request_logging import RequestLoggingMiddleware

    add_cors_middleware(app)
    app.add_middleware(RequestLoggingMiddleware)
    # Added last, so it runs first and the request id exists before logging.
    app.add_middleware(RequestIDMiddleware)

    # ── Error handlers ───────────────────────────────
    from app.core.error_handlers import register_error_handlers

    register_error_handlers(app)

    # ── Routers ──────────────────────────────────────
    from app.api.v1.router import v1_router

    app.include_router(v1_router, prefix=settings.API_V1_PREFIX)

    return app
|
||||
0
app/middleware/__init__.py
Normal file
0
app/middleware/__init__.py
Normal file
16
app/middleware/cors.py
Normal file
16
app/middleware/cors.py
Normal file
@@ -0,0 +1,16 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
|
||||
def add_cors_middleware(app: FastAPI) -> None:
    """Install CORS: origins from settings; credentials, all methods/headers allowed."""
    cors_options = {
        "allow_origins": settings.CORS_ORIGINS,
        "allow_credentials": True,
        "allow_methods": ["*"],
        "allow_headers": ["*"],
    }
    app.add_middleware(CORSMiddleware, **cors_options)
|
||||
37
app/middleware/rate_limit.py
Normal file
37
app/middleware/rate_limit.py
Normal file
@@ -0,0 +1,37 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
|
||||
from starlette.requests import Request
|
||||
from starlette.responses import JSONResponse, Response
|
||||
|
||||
from app.db.redis import get_redis
|
||||
|
||||
|
||||
class RateLimitMiddleware(BaseHTTPMiddleware):
    """Fixed-window, per-client-IP rate limiting backed by Redis.

    Allows at most ``max_requests`` requests per ``window_seconds`` from one
    client IP. Fails open: if Redis is unavailable, requests pass through.
    """

    # Documentation endpoints are exempt. /openapi.json is included because the
    # /docs and /redoc pages fetch it — without the exemption the schema fetch
    # consumed the caller's quota and could break the docs UI under load.
    _EXEMPT_PREFIXES = ("/docs", "/redoc", "/openapi.json")

    def __init__(self, app, max_requests: int = 100, window_seconds: int = 60):  # type: ignore[no-untyped-def]
        super().__init__(app)
        self.max_requests = max_requests
        self.window_seconds = window_seconds

    async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
        if request.url.path.startswith(self._EXEMPT_PREFIXES):
            return await call_next(request)

        client_ip = request.client.host if request.client else "unknown"
        key = f"rate_limit:{client_ip}"

        try:
            redis = get_redis()
            # INCR + first-hit EXPIRE implements a fixed window per IP.
            current = await redis.incr(key)
            if current == 1:
                await redis.expire(key, self.window_seconds)

            if current > self.max_requests:
                return JSONResponse(
                    status_code=429,
                    content={"detail": "Too many requests"},
                )
        except Exception:
            # Fail open by design: a Redis outage must not take the API down.
            pass

        return await call_next(request)
|
||||
17
app/middleware/request_id.py
Normal file
17
app/middleware/request_id.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
|
||||
import structlog
|
||||
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
|
||||
from starlette.requests import Request
|
||||
from starlette.responses import Response
|
||||
|
||||
|
||||
class RequestIDMiddleware(BaseHTTPMiddleware):
    """Attach a correlation ID to every request/response and to structlog context."""

    async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
        # Honour an inbound X-Request-ID (e.g. from a proxy); otherwise mint one.
        request_id = request.headers.get("X-Request-ID", str(uuid.uuid4()))
        # Reset the structlog context first so a request_id bound for a previous
        # request can never leak into this one (per structlog's recommended
        # per-request clear-then-bind pattern).
        structlog.contextvars.clear_contextvars()
        structlog.contextvars.bind_contextvars(request_id=request_id)
        response = await call_next(request)
        # Echo the ID back so clients can correlate responses with logs.
        response.headers["X-Request-ID"] = request_id
        return response
|
||||
26
app/middleware/request_logging.py
Normal file
26
app/middleware/request_logging.py
Normal file
@@ -0,0 +1,26 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import time
|
||||
|
||||
import structlog
|
||||
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
|
||||
from starlette.requests import Request
|
||||
from starlette.responses import Response
|
||||
|
||||
logger = structlog.get_logger("request")


class RequestLoggingMiddleware(BaseHTTPMiddleware):
    """Log one structured line per request: method, path, status and latency."""

    async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
        started_at = time.perf_counter()
        response = await call_next(request)
        duration_ms = round((time.perf_counter() - started_at) * 1000, 2)

        logger.info(
            "request",
            method=request.method,
            path=request.url.path,
            status=response.status_code,
            elapsed_ms=duration_ms,
        )
        return response
|
||||
0
app/models/__init__.py
Normal file
0
app/models/__init__.py
Normal file
0
app/models/mariadb/__init__.py
Normal file
0
app/models/mariadb/__init__.py
Normal file
39
app/models/mariadb/auth.py
Normal file
39
app/models/mariadb/auth.py
Normal file
@@ -0,0 +1,39 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from sqlmodel import Field, Relationship, SQLModel
|
||||
|
||||
from app.db.base import TimestampMixin
|
||||
|
||||
|
||||
class RefreshToken(TimestampMixin, SQLModel, table=True):
    """A long-lived refresh token issued to a user session.

    Tokens are looked up by their opaque ``token`` string; revocation is a
    soft flag so issued tokens remain auditable after invalidation.
    """

    __tablename__ = "refresh_tokens"

    id: int | None = Field(default=None, primary_key=True)
    user_id: int = Field(foreign_key="users.id", index=True)
    # Opaque token string presented by the client; unique so lookups are exact.
    token: str = Field(max_length=500, unique=True, index=True)
    # NOTE(review): stored without timezone info — presumably naive UTC
    # (AuthRepository compares against datetime.utcnow()); confirm at the issuer.
    expires_at: datetime
    is_revoked: bool = Field(default=False)
    # Free-form client/device description, e.g. for session listings.
    device_info: str = Field(default="", max_length=255)

    # Relationships
    user: User | None = Relationship(back_populates="refresh_tokens")
|
||||
|
||||
|
||||
class OAuthAccount(TimestampMixin, SQLModel, table=True):
    """Link between a local user and an external OAuth identity provider."""

    __tablename__ = "oauth_accounts"

    id: int | None = Field(default=None, primary_key=True)
    user_id: int = Field(foreign_key="users.id", index=True)
    # Provider name (e.g. "google") — exact values set by the OAuth flow.
    provider: str = Field(max_length=50)
    # The user's identifier in the provider's namespace.
    provider_user_id: str = Field(max_length=255)
    access_token: str = Field(default="", max_length=500)
    refresh_token: str = Field(default="", max_length=500)
    expires_at: datetime | None = Field(default=None)
|
||||
|
||||
|
||||
# Avoid circular import: User and RefreshToken reference each other, so User is
# imported only after both auth models are fully defined.
from app.models.mariadb.user import User  # noqa: E402, F811

# Resolve the RefreshToken.user forward reference now that User is in scope.
RefreshToken.model_rebuild()
|
||||
36
app/models/mariadb/device.py
Normal file
36
app/models/mariadb/device.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from sqlmodel import Field, Relationship, SQLModel
|
||||
|
||||
from app.core.constants import DeviceStatus
|
||||
from app.db.base import SoftDeleteMixin, TimestampMixin
|
||||
|
||||
|
||||
class DeviceGroup(TimestampMixin, SQLModel, table=True):
    """A named grouping of devices (name is unique across groups)."""

    __tablename__ = "device_groups"

    id: int | None = Field(default=None, primary_key=True)
    name: str = Field(max_length=100, unique=True)
    description: str = Field(default="", max_length=500)

    # Reverse side of Device.group.
    devices: list[Device] = Relationship(back_populates="group")
|
||||
|
||||
|
||||
class Device(TimestampMixin, SoftDeleteMixin, SQLModel, table=True):
    """A managed device; soft-deleted rows are retained (SoftDeleteMixin)."""

    __tablename__ = "devices"

    id: int | None = Field(default=None, primary_key=True)
    # Stable external identifier used by API lookups (see DeviceRepository.get_by_uid).
    device_uid: str = Field(max_length=100, unique=True, index=True)
    name: str = Field(max_length=100)
    device_type: str = Field(default="", max_length=50)
    # Stored as a string; defaults to DeviceStatus.OFFLINE.
    status: str = Field(default=DeviceStatus.OFFLINE, max_length=20)
    firmware_version: str = Field(default="", max_length=50)
    # max_length=45 accommodates the IPv6 textual form.
    ip_address: str = Field(default="", max_length=45)
    group_id: int | None = Field(default=None, foreign_key="device_groups.id")
    owner_id: int | None = Field(default=None, foreign_key="users.id")
    last_seen_at: datetime | None = Field(default=None)
    # Arbitrary metadata serialized as a JSON string (default empty object).
    metadata_json: str = Field(default="{}", max_length=2000)

    group: DeviceGroup | None = Relationship(back_populates="devices")
|
||||
36
app/models/mariadb/monitoring.py
Normal file
36
app/models/mariadb/monitoring.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from sqlmodel import Field, SQLModel
|
||||
|
||||
from app.core.constants import AlertSeverity
|
||||
from app.db.base import TimestampMixin
|
||||
|
||||
|
||||
class AlertRule(TimestampMixin, SQLModel, table=True):
    """A configurable threshold rule evaluated against a metric."""

    __tablename__ = "alert_rules"

    id: int | None = Field(default=None, primary_key=True)
    name: str = Field(max_length=100)
    description: str = Field(default="", max_length=500)
    # Name of the metric this rule evaluates.
    metric: str = Field(max_length=100)
    # NOTE(review): presumably a comparison operator name — confirm the exact
    # vocabulary against the rule evaluator.
    condition: str = Field(max_length=50)
    threshold: float
    # Stored as a string; defaults to AlertSeverity.WARNING.
    severity: str = Field(default=AlertSeverity.WARNING, max_length=20)
    is_enabled: bool = Field(default=True)
    # Optional scoping to a single device group.
    device_group_id: int | None = Field(default=None, foreign_key="device_groups.id")
    created_by: int | None = Field(default=None, foreign_key="users.id")
|
||||
|
||||
|
||||
class Alert(TimestampMixin, SQLModel, table=True):
    """A fired alert, optionally linked to the rule and device that raised it."""

    __tablename__ = "alerts"

    id: int | None = Field(default=None, primary_key=True)
    rule_id: int | None = Field(default=None, foreign_key="alert_rules.id")
    device_id: int | None = Field(default=None, foreign_key="devices.id")
    # Stored as a string; defaults to AlertSeverity.WARNING.
    severity: str = Field(default=AlertSeverity.WARNING, max_length=20)
    message: str = Field(max_length=500)
    # Acknowledgement workflow: who acknowledged, and when.
    is_acknowledged: bool = Field(default=False)
    acknowledged_by: int | None = Field(default=None, foreign_key="users.id")
    acknowledged_at: datetime | None = Field(default=None)
|
||||
27
app/models/mariadb/system.py
Normal file
27
app/models/mariadb/system.py
Normal file
@@ -0,0 +1,27 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from sqlmodel import Field, SQLModel
|
||||
|
||||
from app.db.base import TimestampMixin
|
||||
|
||||
|
||||
class SystemConfig(TimestampMixin, SQLModel, table=True):
    """A key/value application setting stored in the database."""

    __tablename__ = "system_configs"

    id: int | None = Field(default=None, primary_key=True)
    key: str = Field(max_length=100, unique=True, index=True)
    value: str = Field(default="", max_length=2000)
    description: str = Field(default="", max_length=500)
    # NOTE(review): presumably signals the value must be masked when displayed —
    # confirm against the consuming code.
    is_secret: bool = Field(default=False)
|
||||
|
||||
|
||||
class AuditLog(TimestampMixin, SQLModel, table=True):
    """An append-only record of a user action on a resource."""

    __tablename__ = "audit_logs"

    id: int | None = Field(default=None, primary_key=True)
    user_id: int | None = Field(default=None, foreign_key="users.id")
    action: str = Field(max_length=100)
    resource_type: str = Field(max_length=50)
    # Stored as a string so it can reference any resource's key format.
    resource_id: str = Field(default="", max_length=50)
    # Extra context serialized as a JSON string (default empty object).
    details: str = Field(default="{}", max_length=2000)
    # max_length=45 accommodates the IPv6 textual form.
    ip_address: str = Field(default="", max_length=45)
|
||||
44
app/models/mariadb/user.py
Normal file
44
app/models/mariadb/user.py
Normal file
@@ -0,0 +1,44 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from sqlmodel import Field, Relationship, SQLModel
|
||||
|
||||
from app.core.constants import Role
|
||||
from app.db.base import SoftDeleteMixin, TimestampMixin
|
||||
|
||||
|
||||
class User(TimestampMixin, SoftDeleteMixin, SQLModel, table=True):
    """A user account; soft-deleted rows are retained (SoftDeleteMixin)."""

    __tablename__ = "users"

    id: int | None = Field(default=None, primary_key=True)
    email: str = Field(max_length=255, unique=True, index=True)
    hashed_password: str = Field(max_length=255)
    # Stored as a string; defaults to Role.USER.
    role: str = Field(default=Role.USER, max_length=20)
    is_active: bool = Field(default=True)
    is_verified: bool = Field(default=False)
    last_login_at: datetime | None = Field(default=None)

    # Relationships
    profile: UserProfile | None = Relationship(back_populates="user")
    refresh_tokens: list[RefreshToken] = Relationship(back_populates="user")
|
||||
|
||||
|
||||
class UserProfile(TimestampMixin, SQLModel, table=True):
    """One-to-one extension of User with display/contact details."""

    __tablename__ = "user_profiles"

    id: int | None = Field(default=None, primary_key=True)
    # unique=True enforces the one-to-one relationship with users.
    user_id: int = Field(foreign_key="users.id", unique=True, index=True)
    full_name: str = Field(default="", max_length=100)
    phone: str = Field(default="", max_length=20)
    organization: str = Field(default="", max_length=100)
    avatar_url: str = Field(default="", max_length=500)

    # Relationships
    user: User | None = Relationship(back_populates="profile")
|
||||
|
||||
|
||||
# Forward reference resolution: RefreshToken is imported only after the models
# above are defined, breaking the users ↔ auth circular import.
from app.models.mariadb.auth import RefreshToken  # noqa: E402

# Rebuild so User's string annotations resolve now that RefreshToken exists.
User.model_rebuild()
|
||||
0
app/models/mongodb/__init__.py
Normal file
0
app/models/mongodb/__init__.py
Normal file
24
app/models/mongodb/analytics_result.py
Normal file
24
app/models/mongodb/analytics_result.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from beanie import Document
|
||||
from pydantic import Field
|
||||
|
||||
|
||||
class AnalyticsResult(Document):
    """Persisted output of an analytics run (written by app.processing pipelines)."""

    analysis_type: str
    # Input parameters the analysis was run with (for reproducibility).
    parameters: dict = Field(default_factory=dict)
    result: dict = Field(default_factory=dict)
    device_id: str | None = None
    period_start: datetime | None = None
    period_end: datetime | None = None
    # NOTE(review): datetime.utcnow is naive and deprecated in Python 3.12 —
    # consider datetime.now(timezone.utc), but confirm consumers expect naive UTC.
    created_at: datetime = Field(default_factory=datetime.utcnow)

    class Settings:
        name = "analytics_results"
        indexes = [
            "analysis_type",
            "device_id",
            [("created_at", -1)],
        ]
|
||||
27
app/models/mongodb/device_log.py
Normal file
27
app/models/mongodb/device_log.py
Normal file
@@ -0,0 +1,27 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from beanie import Document
|
||||
from pydantic import Field
|
||||
|
||||
|
||||
class DeviceLog(Document):
    """Raw device event log entry stored in MongoDB."""

    device_id: str
    event_type: str
    payload: dict = Field(default_factory=dict)
    ip_address: str | None = None
    # NOTE(review): naive UTC via deprecated datetime.utcnow — verify consumers
    # before migrating to timezone-aware timestamps.
    timestamp: datetime = Field(default_factory=datetime.utcnow)

    class Settings:
        name = "device_logs"
        indexes = [
            "device_id",
            "event_type",
            [("timestamp", -1)],
        ]
        # TTL: auto-delete entries after 90 days.
        # NOTE(review): Beanie documents time-series collections via a
        # TimeSeriesConfig object — confirm a plain dict is honoured here.
        timeseries = {
            "timeField": "timestamp",
            "expireAfterSeconds": 90 * 24 * 3600,
        }
|
||||
24
app/models/mongodb/notification.py
Normal file
24
app/models/mongodb/notification.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from beanie import Document
|
||||
from pydantic import Field
|
||||
|
||||
|
||||
class Notification(Document):
    """An in-app notification addressed to a single user."""

    user_id: int
    title: str
    message: str
    # Free-form category string; defaults to "info".
    notification_type: str = "info"
    is_read: bool = False
    read_at: datetime | None = None
    # NOTE(review): naive UTC via deprecated datetime.utcnow — confirm consumers
    # expect naive timestamps before migrating to datetime.now(timezone.utc).
    created_at: datetime = Field(default_factory=datetime.utcnow)

    class Settings:
        name = "notifications"
        indexes = [
            "user_id",
            [("user_id", 1), ("is_read", 1)],
            [("created_at", -1)],
        ]
|
||||
20
app/models/mongodb/telemetry.py
Normal file
20
app/models/mongodb/telemetry.py
Normal file
@@ -0,0 +1,20 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from beanie import Document
|
||||
from pydantic import Field
|
||||
|
||||
|
||||
class TelemetryData(Document):
    """A single telemetry sample: metric values reported by one device."""

    device_id: str
    # Metric name -> value mapping; trend analysis reads metrics.get("value").
    metrics: dict = Field(default_factory=dict)
    # NOTE(review): naive UTC via deprecated datetime.utcnow — verify consumers.
    timestamp: datetime = Field(default_factory=datetime.utcnow)

    class Settings:
        name = "telemetry_data"
        indexes = [
            "device_id",
            [("timestamp", -1)],
            [("device_id", 1), ("timestamp", -1)],
        ]
|
||||
0
app/processing/__init__.py
Normal file
0
app/processing/__init__.py
Normal file
0
app/processing/analyzers/__init__.py
Normal file
0
app/processing/analyzers/__init__.py
Normal file
37
app/processing/analyzers/device_analyzer.py
Normal file
37
app/processing/analyzers/device_analyzer.py
Normal file
@@ -0,0 +1,37 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from app.models.mongodb.device_log import DeviceLog
|
||||
|
||||
|
||||
async def analyze_device_status(
    device_id: str, start: datetime, end: datetime
) -> dict:
    """Summarize a device's status-change events within [start, end].

    Returns the event total, per-status counts, the fraction of events that
    were "online", and the analyzed period (ISO-8601 strings).
    """
    events = await (
        DeviceLog.find(
            DeviceLog.device_id == device_id,
            DeviceLog.event_type == "status_change",
            DeviceLog.timestamp >= start,
            DeviceLog.timestamp <= end,
        )
        .sort("+timestamp")
        .to_list()
    )

    counts: dict[str, int] = {}
    for event in events:
        name = event.payload.get("status", "unknown")
        counts[name] = counts.get(name, 0) + 1

    total = len(events)
    online = counts.get("online", 0)
    ratio = online / total if total else 0.0

    return {
        "total_events": total,
        "status_counts": counts,
        "uptime_ratio": round(ratio, 4),
        "period": {"start": start.isoformat(), "end": end.isoformat()},
    }
|
||||
45
app/processing/analyzers/trend_analyzer.py
Normal file
45
app/processing/analyzers/trend_analyzer.py
Normal file
@@ -0,0 +1,45 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
import numpy as np
|
||||
|
||||
from app.models.mongodb.telemetry import TelemetryData
|
||||
|
||||
|
||||
async def analyze_trend(
    device_id: str, start: datetime, end: datetime
) -> dict:
    """Summarize telemetry statistics and the linear trend over [start, end].

    Fits value = slope * t + intercept and classifies the trend by slope sign
    with a +/-0.001 dead band.
    """
    samples = await (
        TelemetryData.find(
            TelemetryData.device_id == device_id,
            TelemetryData.timestamp >= start,
            TelemetryData.timestamp <= end,
        )
        .sort("+timestamp")
        .to_list()
    )

    # A slope needs at least two points.
    if len(samples) < 2:
        return {"status": "insufficient_data", "count": len(samples)}

    times = np.array([s.timestamp.timestamp() for s in samples])
    series = np.array([s.metrics.get("value", 0) for s in samples], dtype=float)

    # Regress on timestamps relative to the first sample for numeric stability.
    slope = float(np.polyfit(times - times[0], series, 1)[0])

    if slope > 0.001:
        direction = "increasing"
    elif slope < -0.001:
        direction = "decreasing"
    else:
        direction = "stable"

    return {
        "count": len(samples),
        "mean": float(np.mean(series)),
        "std": float(np.std(series)),
        "min": float(np.min(series)),
        "max": float(np.max(series)),
        "slope": slope,
        "trend": direction,
    }
|
||||
0
app/processing/pipelines/__init__.py
Normal file
0
app/processing/pipelines/__init__.py
Normal file
29
app/processing/pipelines/report_pipeline.py
Normal file
29
app/processing/pipelines/report_pipeline.py
Normal file
@@ -0,0 +1,29 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from app.models.mongodb.analytics_result import AnalyticsResult
|
||||
from app.processing.analyzers.device_analyzer import analyze_device_status
|
||||
from app.processing.analyzers.trend_analyzer import analyze_trend
|
||||
|
||||
|
||||
async def generate_device_report(
    device_id: str, start: datetime, end: datetime
) -> AnalyticsResult:
    """Run the status and trend analyses for one device and persist the combined report."""
    status_part = await analyze_device_status(device_id, start, end)
    trend_part = await analyze_trend(device_id, start, end)

    report = AnalyticsResult(
        analysis_type="device_report",
        device_id=device_id,
        parameters={"start": start.isoformat(), "end": end.isoformat()},
        result={"status": status_part, "trends": trend_part},
        period_start=start,
        period_end=end,
    )
    await report.insert()
    return report
|
||||
57
app/processing/pipelines/telemetry_pipeline.py
Normal file
57
app/processing/pipelines/telemetry_pipeline.py
Normal file
@@ -0,0 +1,57 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
import polars as pl
|
||||
|
||||
from app.models.mongodb.telemetry import TelemetryData
|
||||
|
||||
|
||||
async def aggregate_telemetry(
    device_id: str,
    start: datetime,
    end: datetime,
    interval: str = "1h",
) -> pl.DataFrame:
    """Mean-aggregate a device's telemetry into fixed time buckets of *interval*.

    Returns an empty DataFrame when no samples fall within [start, end].
    """
    samples = await (
        TelemetryData.find(
            TelemetryData.device_id == device_id,
            TelemetryData.timestamp >= start,
            TelemetryData.timestamp <= end,
        )
        .sort("+timestamp")
        .to_list()
    )

    if not samples:
        return pl.DataFrame()

    frame = pl.DataFrame(
        [{"timestamp": s.timestamp, "device_id": s.device_id, **s.metrics} for s in samples]
    )
    # Bucket by timestamp and average every metric column.
    buckets = frame.sort("timestamp").group_by_dynamic("timestamp", every=interval)
    return buckets.agg(pl.all().exclude("timestamp", "device_id").mean())
|
||||
|
||||
|
||||
async def get_latest_telemetry(device_id: str, limit: int = 100) -> pl.DataFrame:
    """Return the most recent *limit* telemetry records, newest first, as a DataFrame."""
    samples = await (
        TelemetryData.find(TelemetryData.device_id == device_id)
        .sort("-timestamp")
        .limit(limit)
        .to_list()
    )

    if not samples:
        return pl.DataFrame()

    rows = [
        {"timestamp": s.timestamp, "device_id": s.device_id, **s.metrics}
        for s in samples
    ]
    return pl.DataFrame(rows)
|
||||
0
app/processing/utils/__init__.py
Normal file
0
app/processing/utils/__init__.py
Normal file
26
app/processing/utils/dataframe_utils.py
Normal file
26
app/processing/utils/dataframe_utils.py
Normal file
@@ -0,0 +1,26 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
import polars as pl
|
||||
|
||||
|
||||
def filter_time_range(
    df: pl.DataFrame, column: str, start: datetime, end: datetime
) -> pl.DataFrame:
    """Keep only rows whose *column* value lies within [start, end], inclusive."""
    lower = pl.col(column) >= start
    upper = pl.col(column) <= end
    return df.filter(lower & upper)
|
||||
|
||||
|
||||
def resample(df: pl.DataFrame, time_column: str, interval: str) -> pl.DataFrame:
    """Bucket *df* by *time_column* at *interval* and mean-aggregate the numeric columns."""
    value_cols = [
        name
        for name in df.columns
        if name != time_column and df[name].dtype.is_numeric()
    ]
    grouped = df.sort(time_column).group_by_dynamic(time_column, every=interval)
    return grouped.agg([pl.col(name).mean().alias(name) for name in value_cols])
|
||||
|
||||
|
||||
def to_records(df: pl.DataFrame) -> list[dict]:
    """Convert a DataFrame to a list of row dicts (column name -> value)."""
    return df.to_dicts()
|
||||
39
app/processing/utils/statistics.py
Normal file
39
app/processing/utils/statistics.py
Normal file
@@ -0,0 +1,39 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import numpy as np
|
||||
|
||||
|
||||
def moving_average(values: list[float], window: int = 5) -> list[float]:
    """Return the simple moving average of *values* with the given window size.

    With fewer than ``window`` samples a shallow copy of the input is returned
    unchanged. (Previously the input list object itself was returned, so a
    caller mutating the result would silently mutate the caller's data.)
    Yields ``len(values) - window + 1`` points otherwise.
    """
    if len(values) < window:
        return list(values)
    arr = np.array(values, dtype=float)
    # 'valid' mode keeps only fully-overlapping windows.
    return np.convolve(arr, np.ones(window) / window, mode="valid").tolist()
|
||||
|
||||
|
||||
def detect_anomalies(
    values: list[float], threshold: float = 2.0
) -> list[dict]:
    """Detect anomalies using the Z-score method.

    Returns one dict per outlier — ``{"index", "value", "z_score"}`` — for
    every sample whose |z| exceeds *threshold*. Empty input or zero variance
    yields no anomalies. (Previously an empty list hit numpy's mean-of-empty
    RuntimeWarning and produced NaNs instead of a clean empty result.)
    """
    if not values:
        return []
    arr = np.array(values, dtype=float)
    mean = np.mean(arr)
    std = np.std(arr)

    # Constant series: every z-score would be 0/0.
    if std == 0:
        return []

    z_scores = np.abs((arr - mean) / std)
    return [
        {"index": i, "value": val, "z_score": float(z)}
        for i, (val, z) in enumerate(zip(values, z_scores))
        if z > threshold
    ]
|
||||
|
||||
|
||||
def percentile_stats(values: list[float]) -> dict:
    """Return the p50/p90/p95/p99 percentiles of *values* (linear interpolation).

    NOTE(review): empty input propagates numpy's error (IndexError) — callers
    should guard; confirm before converting to an explicit ValueError.
    """
    arr = np.array(values, dtype=float)
    # One C-level call computing all four percentiles instead of four passes.
    p50, p90, p95, p99 = np.percentile(arr, [50, 90, 95, 99])
    return {
        "p50": float(p50),
        "p90": float(p90),
        "p95": float(p95),
        "p99": float(p99),
    }
|
||||
0
app/repositories/__init__.py
Normal file
0
app/repositories/__init__.py
Normal file
44
app/repositories/analytics_repo.py
Normal file
44
app/repositories/analytics_repo.py
Normal file
@@ -0,0 +1,44 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from app.models.mongodb.analytics_result import AnalyticsResult
|
||||
|
||||
|
||||
class AnalyticsRepository:
    """Query helpers for persisted AnalyticsResult documents."""

    async def create(self, result: AnalyticsResult) -> AnalyticsResult:
        """Insert a new analytics result and return it."""
        return await result.insert()

    async def get_by_type(
        self,
        analysis_type: str,
        device_id: str | None = None,
        skip: int = 0,
        limit: int = 20,
    ) -> list[AnalyticsResult]:
        """Page through results of one analysis type, newest first."""
        criteria: dict = {"analysis_type": analysis_type}
        if device_id:
            criteria["device_id"] = device_id
        cursor = AnalyticsResult.find(criteria).sort("-created_at")
        return await cursor.skip(skip).limit(limit).to_list()

    async def get_by_period(
        self,
        analysis_type: str,
        start: datetime,
        end: datetime,
        device_id: str | None = None,
    ) -> list[AnalyticsResult]:
        """Results of one type whose analyzed period falls within [start, end]."""
        criteria: dict = {
            "analysis_type": analysis_type,
            "period_start": {"$gte": start},
            "period_end": {"$lte": end},
        }
        if device_id:
            criteria["device_id"] = device_id
        return await AnalyticsResult.find(criteria).sort("-created_at").to_list()
|
||||
50
app/repositories/auth_repo.py
Normal file
50
app/repositories/auth_repo.py
Normal file
@@ -0,0 +1,50 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.mariadb.auth import OAuthAccount, RefreshToken
|
||||
from app.repositories.base import BaseRepository
|
||||
|
||||
|
||||
class AuthRepository(BaseRepository[RefreshToken]):
    """Persistence for refresh tokens and linked OAuth accounts.

    Writes flush (not commit); the caller's session owns the transaction.
    """

    def __init__(self, session: AsyncSession):
        super().__init__(RefreshToken, session)

    async def get_by_token(self, token: str) -> RefreshToken | None:
        """Return the token row only if it is unrevoked and unexpired, else None."""
        # NOTE(review): naive-UTC comparison via deprecated datetime.utcnow() —
        # presumably matches how expires_at is written; migrate both together.
        stmt = select(RefreshToken).where(
            RefreshToken.token == token,
            RefreshToken.is_revoked == False,  # noqa: E712
            RefreshToken.expires_at > datetime.utcnow(),
        )
        result = await self.session.execute(stmt)
        return result.scalar_one_or_none()

    async def revoke_all_for_user(self, user_id: int) -> None:
        """Mark every active refresh token of *user_id* as revoked."""
        stmt = select(RefreshToken).where(
            RefreshToken.user_id == user_id,
            RefreshToken.is_revoked == False,  # noqa: E712
        )
        result = await self.session.execute(stmt)
        # Row-by-row update keeps ORM identity-map state in sync.
        for token in result.scalars().all():
            token.is_revoked = True
            self.session.add(token)
        await self.session.flush()

    async def get_oauth_account(
        self, provider: str, provider_user_id: str
    ) -> OAuthAccount | None:
        """Look up an OAuth link by (provider, provider_user_id), or None."""
        stmt = select(OAuthAccount).where(
            OAuthAccount.provider == provider,
            OAuthAccount.provider_user_id == provider_user_id,
        )
        result = await self.session.execute(stmt)
        return result.scalar_one_or_none()

    async def create_oauth_account(self, account: OAuthAccount) -> OAuthAccount:
        """Persist a new OAuth link and return it refreshed."""
        self.session.add(account)
        await self.session.flush()
        await self.session.refresh(account)
        return account
|
||||
58
app/repositories/base.py
Normal file
58
app/repositories/base.py
Normal file
@@ -0,0 +1,58 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Generic, Sequence, TypeVar
|
||||
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlmodel import SQLModel
|
||||
|
||||
T = TypeVar("T", bound=SQLModel)
|
||||
|
||||
|
||||
class BaseRepository(Generic[T]):
    """Generic async CRUD repository over a single SQLModel table.

    Writes flush (not commit) so the caller's unit-of-work controls the
    transaction boundary.
    """

    def __init__(self, model: type[T], session: AsyncSession):
        self.model = model
        self.session = session

    def _filtered(self, stmt, filters: dict | None):
        """Apply equality filters for attributes that exist on the model; unknown keys are ignored."""
        for attr, expected in (filters or {}).items():
            if hasattr(self.model, attr):
                stmt = stmt.where(getattr(self.model, attr) == expected)
        return stmt

    async def get_by_id(self, id: int) -> T | None:
        """Fetch one row by primary key, or None."""
        return await self.session.get(self.model, id)

    async def get_all(
        self, *, skip: int = 0, limit: int = 100, filters: dict | None = None
    ) -> Sequence[T]:
        """List rows matching optional equality *filters* with offset pagination."""
        stmt = self._filtered(select(self.model), filters).offset(skip).limit(limit)
        result = await self.session.execute(stmt)
        return result.scalars().all()

    async def count(self, filters: dict | None = None) -> int:
        """Count rows matching optional equality *filters*."""
        stmt = self._filtered(select(func.count()).select_from(self.model), filters)
        result = await self.session.execute(stmt)
        return result.scalar_one()

    async def create(self, obj: T) -> T:
        """Persist a new row and return it refreshed (with generated fields)."""
        self.session.add(obj)
        await self.session.flush()
        await self.session.refresh(obj)
        return obj

    async def update(self, obj: T, data: dict) -> T:
        """Set non-None values from *data* onto *obj* and flush; unknown keys are ignored."""
        for attr, value in data.items():
            if value is not None and hasattr(obj, attr):
                setattr(obj, attr, value)
        self.session.add(obj)
        await self.session.flush()
        await self.session.refresh(obj)
        return obj

    async def delete(self, obj: T) -> None:
        """Hard-delete *obj* and flush."""
        await self.session.delete(obj)
        await self.session.flush()
|
||||
40
app/repositories/device_log_repo.py
Normal file
40
app/repositories/device_log_repo.py
Normal file
@@ -0,0 +1,40 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from app.models.mongodb.device_log import DeviceLog
|
||||
|
||||
|
||||
class DeviceLogRepository:
    """Query helpers for DeviceLog documents."""

    async def create(self, log: DeviceLog) -> DeviceLog:
        """Insert a new log entry and return it."""
        return await log.insert()

    async def get_by_device(
        self,
        device_id: str,
        event_type: str | None = None,
        since: datetime | None = None,
        skip: int = 0,
        limit: int = 100,
    ) -> list[DeviceLog]:
        """Page through a device's logs, newest first, with optional type/time filters."""
        criteria = self._criteria(device_id, event_type)
        if since:
            criteria["timestamp"] = {"$gte": since}

        cursor = DeviceLog.find(criteria).sort("-timestamp")
        return await cursor.skip(skip).limit(limit).to_list()

    async def count_by_device(
        self, device_id: str, event_type: str | None = None
    ) -> int:
        """Count a device's logs, optionally restricted to one event type."""
        return await DeviceLog.find(self._criteria(device_id, event_type)).count()

    @staticmethod
    def _criteria(device_id: str, event_type: str | None) -> dict:
        """Build the device/event-type filter shared by the query methods."""
        criteria: dict = {"device_id": device_id}
        if event_type:
            criteria["event_type"] = event_type
        return criteria
|
||||
47
app/repositories/device_repo.py
Normal file
47
app/repositories/device_repo.py
Normal file
@@ -0,0 +1,47 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.mariadb.device import Device, DeviceGroup
|
||||
from app.repositories.base import BaseRepository
|
||||
|
||||
|
||||
class DeviceRepository(BaseRepository[Device]):
    """Device queries; every lookup excludes soft-deleted rows."""

    def __init__(self, session: AsyncSession):
        super().__init__(Device, session)

    async def get_by_uid(self, device_uid: str) -> Device | None:
        """Find a live device by its external UID, or None."""
        query = select(Device).where(
            Device.device_uid == device_uid,
            Device.is_deleted == False,  # noqa: E712
        )
        return (await self.session.execute(query)).scalar_one_or_none()

    async def get_by_owner(self, owner_id: int, skip: int = 0, limit: int = 100) -> list[Device]:
        """Page through live devices owned by *owner_id*."""
        query = (
            select(Device)
            .where(Device.owner_id == owner_id, Device.is_deleted == False)  # noqa: E712
            .offset(skip)
            .limit(limit)
        )
        rows = (await self.session.execute(query)).scalars().all()
        return list(rows)

    async def get_by_group(self, group_id: int, skip: int = 0, limit: int = 100) -> list[Device]:
        """Page through live devices belonging to group *group_id*."""
        query = (
            select(Device)
            .where(Device.group_id == group_id, Device.is_deleted == False)  # noqa: E712
            .offset(skip)
            .limit(limit)
        )
        rows = (await self.session.execute(query)).scalars().all()
        return list(rows)
|
||||
|
||||
|
||||
class DeviceGroupRepository(BaseRepository[DeviceGroup]):
    """MariaDB repository for ``DeviceGroup`` rows."""

    def __init__(self, session: AsyncSession):
        super().__init__(DeviceGroup, session)

    async def get_by_name(self, name: str) -> DeviceGroup | None:
        """Look a group up by name; ``None`` when no row matches."""
        lookup = select(DeviceGroup).where(DeviceGroup.name == name)
        return (await self.session.execute(lookup)).scalar_one_or_none()
|
||||
36
app/repositories/monitoring_repo.py
Normal file
36
app/repositories/monitoring_repo.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.mariadb.monitoring import Alert, AlertRule
|
||||
from app.repositories.base import BaseRepository
|
||||
|
||||
|
||||
class AlertRuleRepository(BaseRepository[AlertRule]):
    """MariaDB repository for ``AlertRule`` rows."""

    def __init__(self, session: AsyncSession):
        super().__init__(AlertRule, session)

    async def get_enabled_rules(self) -> list[AlertRule]:
        """Return every rule whose ``is_enabled`` flag is set."""
        # ``.is_(True)`` is the idiomatic SQLAlchemy boolean test; it matches
        # the same rows as ``== True`` here and removes the E712 noqa.
        stmt = select(AlertRule).where(AlertRule.is_enabled.is_(True))
        result = await self.session.execute(stmt)
        return list(result.scalars().all())
|
||||
|
||||
|
||||
class AlertRepository(BaseRepository[Alert]):
    """MariaDB repository for raised ``Alert`` rows."""

    def __init__(self, session: AsyncSession):
        super().__init__(Alert, session)

    async def get_unacknowledged(self, skip: int = 0, limit: int = 50) -> list[Alert]:
        """Page through unacknowledged alerts, newest first."""
        stmt = (
            select(Alert)
            # Idiomatic boolean test, same rows as "== False", no E712 noqa.
            .where(Alert.is_acknowledged.is_(False))
            .order_by(Alert.created_at.desc())
            .offset(skip)
            .limit(limit)
        )
        result = await self.session.execute(stmt)
        return list(result.scalars().all())

    async def count_active(self) -> int:
        """Number of alerts still awaiting acknowledgement."""
        return await self.count(filters={"is_acknowledged": False})
|
||||
44
app/repositories/user_repo.py
Normal file
44
app/repositories/user_repo.py
Normal file
@@ -0,0 +1,44 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from app.models.mariadb.user import User, UserProfile
|
||||
from app.repositories.base import BaseRepository
|
||||
|
||||
|
||||
class UserRepository(BaseRepository[User]):
    """MariaDB repository for ``User`` rows and their 1:1 ``UserProfile``."""

    def __init__(self, session: AsyncSession):
        super().__init__(User, session)

    async def get_by_email(self, email: str) -> User | None:
        """Find a user by e-mail address; ``None`` when no match."""
        lookup = select(User).where(User.email == email)
        return (await self.session.execute(lookup)).scalar_one_or_none()

    async def get_with_profile(self, user_id: int) -> User | None:
        """Load a user by id with the ``profile`` relation eagerly populated."""
        lookup = (
            select(User)
            .where(User.id == user_id)
            .options(selectinload(User.profile))
        )
        return (await self.session.execute(lookup)).scalar_one_or_none()

    async def create_with_profile(
        self, user: User, full_name: str = "", phone: str = "", organization: str = ""
    ) -> User:
        """Persist a new user together with an attached profile row.

        Returns the refreshed ``User`` instance after both flushes.
        """
        self.session.add(user)
        # First flush assigns user.id so the profile row can reference it.
        await self.session.flush()

        self.session.add(
            UserProfile(
                user_id=user.id,  # type: ignore[arg-type]
                full_name=full_name,
                phone=phone,
                organization=organization,
            )
        )
        await self.session.flush()
        await self.session.refresh(user)
        return user
|
||||
0
app/schemas/__init__.py
Normal file
0
app/schemas/__init__.py
Normal file
38
app/schemas/analytics.py
Normal file
38
app/schemas/analytics.py
Normal file
@@ -0,0 +1,38 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class AnalyticsRequest(BaseModel):
    """Request payload for running an analysis over a time window."""

    device_id: str | None = None  # None => analysis is not scoped to one device
    analysis_type: str = "telemetry_summary"
    start: datetime  # window start (inclusive semantics depend on the analysis)
    end: datetime  # window end
    # Analysis-specific options. NOTE: Pydantic copies mutable field defaults
    # per instance, so the {} default is safe here (unlike a plain function
    # default argument).
    parameters: dict = {}
|
||||
|
||||
|
||||
class AnalyticsResultRead(BaseModel):
    """Serialized analytics result returned to API clients."""

    id: str  # result document id (string — presumably a Mongo ObjectId; verify)
    analysis_type: str
    device_id: str | None  # None when the analysis was not device-scoped
    result: dict  # analysis output payload
    parameters: dict  # parameters the analysis was run with
    period_start: datetime | None
    period_end: datetime | None
    created_at: datetime
|
||||
|
||||
|
||||
class ReportResponse(BaseModel):
    """Generated device report returned to API clients."""

    report_id: str
    device_id: str
    status: dict  # current-status section of the report
    trends: dict  # trend/aggregate section of the report
    created_at: datetime
|
||||
|
||||
|
||||
class TelemetryAggregateResponse(BaseModel):
    """Aggregated telemetry records for one device."""

    device_id: str
    records: list[dict]  # aggregated rows; schema depends on the aggregation
    count: int  # number of records returned
|
||||
30
app/schemas/auth.py
Normal file
30
app/schemas/auth.py
Normal file
@@ -0,0 +1,30 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pydantic import BaseModel, EmailStr
|
||||
|
||||
|
||||
class LoginRequest(BaseModel):
    """Credentials for password login."""

    email: EmailStr
    password: str
|
||||
|
||||
|
||||
class RegisterRequest(BaseModel):
    """Payload for creating a new account."""

    email: EmailStr
    password: str
    full_name: str = ""  # optional display name
|
||||
|
||||
|
||||
class TokenResponse(BaseModel):
    """JWT pair issued after login/refresh (OAuth2 bearer shape)."""

    access_token: str
    refresh_token: str
    token_type: str = "bearer"
|
||||
|
||||
|
||||
class RefreshTokenRequest(BaseModel):
    """Payload for exchanging a refresh token for a new token pair."""

    refresh_token: str
|
||||
|
||||
|
||||
class OAuthCallbackRequest(BaseModel):
    """OAuth authorization-code callback payload."""

    provider: str  # e.g. identity-provider name; valid values not shown here
    code: str  # authorization code returned by the provider
    redirect_uri: str  # must match the URI used in the authorize request
|
||||
19
app/schemas/common.py
Normal file
19
app/schemas/common.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Generic, TypeVar
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
class ErrorResponse(BaseModel):
    """Standard error body (mirrors FastAPI's ``detail`` convention)."""

    detail: str
|
||||
|
||||
|
||||
class PaginatedResponse(BaseModel, Generic[T]):
    """Generic page envelope for list endpoints."""

    items: list[T]  # the page of results
    total: int  # total matching items across all pages
    page: int  # current page number
    size: int  # page size requested
    pages: int  # total number of pages
|
||||
52
app/schemas/device.py
Normal file
52
app/schemas/device.py
Normal file
@@ -0,0 +1,52 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class DeviceGroupRead(BaseModel):
    """Read model for a device group."""

    id: int
    name: str
    description: str

    # Allow construction from ORM objects (attribute access).
    model_config = {"from_attributes": True}
|
||||
|
||||
|
||||
class DeviceGroupCreate(BaseModel):
    """Payload for creating a device group."""

    name: str
    description: str = ""
|
||||
|
||||
|
||||
class DeviceRead(BaseModel):
    """Read model for a device."""

    id: int
    device_uid: str  # externally visible unique identifier
    name: str
    device_type: str
    status: str
    firmware_version: str
    ip_address: str
    group_id: int | None  # None when the device is ungrouped
    owner_id: int | None  # None when unassigned
    last_seen_at: datetime | None  # None until the device first reports in
    created_at: datetime

    # Allow construction from ORM objects (attribute access).
    model_config = {"from_attributes": True}
|
||||
|
||||
|
||||
class DeviceCreate(BaseModel):
    """Payload for registering a device."""

    device_uid: str
    name: str
    device_type: str = ""
    group_id: int | None = None
    owner_id: int | None = None
    firmware_version: str = ""
|
||||
|
||||
|
||||
class DeviceUpdate(BaseModel):
    """Partial-update payload: None means "leave this field unchanged"."""

    name: str | None = None
    device_type: str | None = None
    status: str | None = None
    firmware_version: str | None = None
    group_id: int | None = None
    owner_id: int | None = None
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user