intial project

This commit is contained in:
roai_linux 2026-03-06 15:16:41 +03:30
parent 1bdd251930
commit 1f6250eea0
50 changed files with 2317 additions and 0 deletions

35
Back/.dockerignore Executable file
View File

@ -0,0 +1,35 @@
# Git
.git
.gitignore
# Python cache
__pycache__/
*.pyc
*.pyo
*.pyd
# Virtualenv
env/
venv/
.venv/
# IDE
.vscode
.idea
# Logs
*.log
logs/
# OS
.DS_Store
Thumbs.db
# Secrets
.env
# Media (provided via a docker volume)
media/
# Tests (optional)
tests/

223
Back/.gitignore vendored Executable file
View File

@ -0,0 +1,223 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[codz]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py.cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#uv.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
#poetry.toml
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
#pdm.lock
#pdm.toml
.pdm-python
.pdm-build/
# pixi
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
#pixi.lock
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
# in the .venv directory. It is recommended not to include this directory in version control.
.pixi
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.envrc
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
# Abstra
# Abstra is an AI-powered process automation framework.
# Ignore directories containing user credentials, local state, and settings.
# Learn more at https://abstra.io/docs
.abstra/
# Visual Studio Code
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
# and can be added to the global gitignore or merged into this file. However, if you prefer,
# you could uncomment the following to ignore the entire vscode folder
# .vscode/
# Ruff stuff:
.ruff_cache/
# PyPI configuration file
.pypirc
# Cursor
# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to
# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data
# refer to https://docs.cursor.com/context/ignore-files
.cursorignore
.cursorindexingignore
# Marimo
marimo/_static/
marimo/_lsp/
__marimo__/
# customs
src/users/
./src/test_images/
test_images/
.vscode
./src/test/temp
temp/
./src/test/test_data
test_data/
./src/encoding
encoding/

1
Back/.python-version Normal file
View File

@ -0,0 +1 @@
3.12

71
Back/Dockerfile Executable file
View File

@ -0,0 +1,71 @@
# ---------- BUILD STAGE ----------
FROM python:3.12-slim AS builder
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1
# Use mirror
# RUN rm -rf /etc/apt/sources.list.d/* && \
# echo "deb http://mirror.arvancloud.ir/debian bookworm main contrib main" > /etc/apt/sources.list && \
# echo "deb http://mirror.arvancloud.ir/debian bookworm-updates main contrib main" >> /etc/apt/sources.list && \
# echo "deb http://mirror.arvancloud.ir/debian-security bookworm-security main contrib main" >> /etc/apt/sources.list && \
# echo 'Acquire::Check-Valid-Until "false";' > /etc/apt/apt.conf.d/99no-check-valid-until
# Build deps
RUN apt-get update && apt-get install -y --no-install-recommends \
build-essential \
libpq-dev \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /app
COPY requirements.txt .
# Install dependencies (cached)
# RUN --mount=type=cache,target=/root/.cache/pip \
# pip install -i https://mirror-pypi.runflare.com/simple --upgrade pip && \
# pip install -i https://mirror-pypi.runflare.com/simple --no-cache-dir -r requirements.txt
RUN --mount=type=cache,target=/root/.cache/pip \
pip install --upgrade pip && \
pip install --no-cache-dir -r requirements.txt
# ---------- RUNTIME STAGE ----------
FROM python:3.12-slim
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1
# Use mirror
# RUN rm -rf /etc/apt/sources.list.d/* && \
# echo "deb http://mirror.arvancloud.ir/debian bookworm main contrib main" > /etc/apt/sources.list && \
# echo "deb http://mirror.arvancloud.ir/debian bookworm-updates main contrib main" >> /etc/apt/sources.list && \
# echo "deb http://mirror.arvancloud.ir/debian-security bookworm-security main contrib main" >> /etc/apt/sources.list && \
# echo 'Acquire::Check-Valid-Until "false";' > /etc/apt/apt.conf.d/99no-check-valid-until
# Runtime deps only
RUN apt-get update && apt-get install -y --no-install-recommends \
libpq5 \
netcat-openbsd \
&& rm -rf /var/lib/apt/lists/*
# Non-root user
RUN useradd -m appuser
WORKDIR /app
# Copy python deps from builder
COPY --from=builder /usr/local/lib/python3.12/site-packages \
/usr/local/lib/python3.12/site-packages
COPY --from=builder /usr/local/bin /usr/local/bin
# Copy app source
COPY . .
USER appuser
EXPOSE 8000
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]

245
Back/README.md Normal file
View File

@ -0,0 +1,245 @@
# NEDA Backend
NEDA is a real-time group voice communication backend designed for wearable devices (e.g., smartwatches).
It enables secure, low-latency push-to-talk audio communication within isolated groups.
This repository contains the FastAPI backend, realtime control layer, and database schema.
---
# ✨ Features
- Real-time push-to-talk voice groups
- Single active speaker per group
- Secure group isolation
- Role-based group permissions
- Admin-managed membership
- Redis-based realtime state
- LiveKit media integration
- Async PostgreSQL (SQLAlchemy)
- Alembic migrations
- WebSocket signaling layer
---
# 🧱 Architecture
NEDA follows a **domain-oriented layered modular monolith** architecture.
```
core/ shared infrastructure
db/ database & redis
domains/ business domains
integrations/ external services
alembic/ migrations
```
Domains:
- users
- groups
- realtime
- auth
- admin
This design keeps domain logic isolated and allows future service extraction.
---
# 🎙️ Realtime Model
- Audio media → LiveKit
- Signaling → WebSocket (FastAPI)
- State → Redis
- Persistence → PostgreSQL
Active speaker is stored in Redis:
```
speaker:{group_id} = user_id
```
Presence:
```
presence:{group_id} = set(user_ids)
```
---
# 👥 Roles
System role (User):
- `admin`
- `user`
Group role (GroupMember):
- `group_manager` (exactly one per group)
- `member`
Only admins can:
- create groups
- assign group manager
- add/remove members
Group managers have realtime authority only (speaker control).
---
# 🗄️ Database
Core entities:
- User
- Group
- GroupMember
- Session
- GroupVoiceSession
- SpeakerHistory
Rules:
- soft delete for main entities
- single active group_manager per group
- unique membership (user, group)
---
# 🚀 Running with Docker
```bash
docker compose up --build
```
Services:
* API → [http://localhost:8000](http://localhost:8000)
* Docs → [http://localhost:8000/docs](http://localhost:8000/docs)
* LiveKit → [http://localhost:7880](http://localhost:7880)
* Postgres → 5432
* Redis → 6379
---
# ⚙️ Environment
`.env`
```
APP_NAME=NEDA
SECRET_KEY=change-me
POSTGRES_DB=neda
POSTGRES_USER=neda_user
POSTGRES_PASSWORD=neda_pass
DATABASE_URL=postgresql+asyncpg://neda_user:neda_pass@postgres:5432/neda
REDIS_URL=redis://redis:6379/0
LIVEKIT_API_KEY=neda_key
LIVEKIT_API_SECRET=neda_secret
LIVEKIT_HOST=http://livekit:7880
```
---
# 🧪 Development Setup
Create venv and install:
```bash
pip install -r requirements.txt
```
Run API:
```bash
uvicorn neda.main:app --reload
```
---
# 📜 Migrations (Alembic)
Init (first time):
```bash
alembic init alembic
```
Create migration:
```bash
alembic revision --autogenerate -m "init"
```
Apply:
```bash
alembic upgrade head
```
---
# 🔌 Realtime Flow
Request to speak:
1. Client → WS `REQUEST_TALK`
2. Backend → Redis `SET NX speaker:{group}`
3. If granted → LiveKit publish token
4. Others → subscribers
5. Release → Redis delete
---
# 🧭 Project Structure
```
domains/
users/
groups/
realtime/
auth/
admin/
```
Each domain contains:
* models
* schemas
* repo
* service
* api
---
# 🧠 Design Principles
* realtime state outside DB
* single responsibility domains
* admin control plane
* Redis for locks/presence
* DB for long-term truth
* media separated from signaling
---
# 📡 Future Scaling
The architecture supports:
* realtime service extraction
* horizontal scaling
* sharded groups
* multi-tenant deployments

149
Back/alembic.ini Normal file
View File

@ -0,0 +1,149 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts.
# this is typically a path given in POSIX (e.g. forward slashes)
# format, relative to the token %(here)s which refers to the location of this
# ini file
script_location = %(here)s/alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# Or organize into date-based subdirectories (requires recursive_version_locations = true)
# file_template = %%(year)d/%%(month).2d/%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d_%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory. for multiple paths, the path separator
# is defined by "path_separator" below.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the tzdata library which can be installed by adding
# `alembic[tz]` to the pip requirements.
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to <script_location>/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "path_separator"
# below.
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
# path_separator; This indicates what character is used to split lists of file
# paths, including version_locations and prepend_sys_path within configparser
# files such as alembic.ini.
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
# to provide os-dependent path splitting.
#
# Note that in order to support legacy alembic.ini files, this default does NOT
# take place if path_separator is not present in alembic.ini. If this
# option is omitted entirely, fallback logic is as follows:
#
# 1. Parsing of the version_locations option falls back to using the legacy
# "version_path_separator" key, which if absent then falls back to the legacy
# behavior of splitting on spaces and/or commas.
# 2. Parsing of the prepend_sys_path option falls back to the legacy
# behavior of splitting on spaces, commas, or colons.
#
# Valid values for path_separator are:
#
# path_separator = :
# path_separator = ;
# path_separator = space
# path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# database URL. This is consumed by the user-maintained env.py script only.
# other means of configuring database URLs may be customized within the env.py
# file.
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
# hooks = ruff
# ruff.type = module
# ruff.module = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
# Alternatively, use the exec runner to execute a binary found on your PATH
# hooks = ruff
# ruff.type = exec
# ruff.executable = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
# Logging configuration. This is also consumed by the user-maintained
# env.py script only.
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARNING
handlers = console
qualname =
[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

1
Back/alembic/README Normal file
View File

@ -0,0 +1 @@
Generic single-database configuration.

78
Back/alembic/env.py Normal file
View File

@ -0,0 +1,78 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# NOTE(review): left as None, so `alembic revision --autogenerate` will
# produce empty migrations until the project's Base.metadata is wired in.
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.
    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.
    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.
    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    # NOTE(review): this builds a synchronous engine, while the app uses an
    # async engine (postgresql+asyncpg) — confirm migrations are run with a
    # sync-driver URL in alembic.ini.
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )
        with context.begin_transaction():
            context.run_migrations()
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@ -0,0 +1,28 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
"""Upgrade schema."""
${upgrades if upgrades else "pass"}
def downgrade() -> None:
"""Downgrade schema."""
${downgrades if downgrades else "pass"}

31
Back/core/config.py Executable file
View File

@ -0,0 +1,31 @@
from pydantic_settings import BaseSettings, SettingsConfigDict
from functools import lru_cache


class Settings(BaseSettings):
    """Application configuration loaded from the environment / `.env` file."""

    # Pydantic v2 style: the nested `class Config` is deprecated in favour of
    # `model_config` (pydantic-settings >= 2.0). Behavior is identical.
    model_config = SettingsConfigDict(env_file=".env", case_sensitive=True)

    APP_NAME: str = "NEDA"
    DEBUG: bool = False

    # Auth / JWT
    SECRET_KEY: str
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 30
    ALGORITHM: str = "HS256"
    # Number of random bytes fed to secrets.token_urlsafe for user secrets.
    SECRET_PASS_LENGTH: int = 16

    # Infrastructure
    DATABASE_URL: str
    REDIS_URL: str

    # LiveKit media server credentials
    LIVEKIT_API_KEY: str
    LIVEKIT_API_SECRET: str
    LIVEKIT_HOST: str


@lru_cache
def get_settings() -> Settings:
    """Return a process-wide cached Settings instance (env parsed once)."""
    return Settings()  # type: ignore


settings = get_settings()

46
Back/core/deps.py Normal file
View File

@ -0,0 +1,46 @@
from fastapi import Depends, HTTPException, status
from fastapi.security import OAuth2PasswordBearer
from sqlalchemy.ext.asyncio import AsyncSession
from db.session import get_db
from core.jwt import decode_token
from domains.users.repo import get_user_by_id
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/auth/login")
async def get_current_user(
    token: str = Depends(oauth2_scheme),
    db: AsyncSession = Depends(get_db),
):
    """Resolve the authenticated user from the request's Bearer JWT.

    Raises 401 when the token is invalid/expired, carries no `sub` claim,
    or references a user that no longer exists. Per RFC 6750, 401 responses
    include a `WWW-Authenticate: Bearer` challenge header.
    """
    credentials_error = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Invalid authentication token",
        headers={"WWW-Authenticate": "Bearer"},
    )
    payload = decode_token(token)
    if payload is None:
        raise credentials_error
    user_id = payload.get("sub")
    if user_id is None:
        # Token decoded but has no subject claim -> treat as invalid rather
        # than passing None through to the user lookup.
        raise credentials_error
    user = await get_user_by_id(db, user_id)
    if not user:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User not found",
            headers={"WWW-Authenticate": "Bearer"},
        )
    return user
async def get_current_admin(user=Depends(get_current_user)):
    """Dependency narrowing the current user to an admin account.

    Raises 403 for any authenticated non-admin user.
    """
    if user.role == "admin":
        return user
    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail="Admin privileges required",
    )

42
Back/core/jwt.py Executable file
View File

@ -0,0 +1,42 @@
from datetime import datetime, timedelta, timezone
from jose import jwt, JWTError
from core.config import settings
def create_access_token(
    subject: str,
    expires_delta: timedelta | None = None,
) -> str:
    """Issue a signed JWT whose `sub` is *subject*.

    Expiry is *expires_delta* when given, otherwise the configured
    ACCESS_TOKEN_EXPIRE_MINUTES default.
    """
    ttl = (
        expires_delta
        if expires_delta
        else timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
    )
    claims = {
        "sub": subject,
        "exp": datetime.now(timezone.utc) + ttl,
    }
    return jwt.encode(claims, settings.SECRET_KEY, algorithm=settings.ALGORITHM)
def decode_token(token: str):
    """Verify *token* and return its claim payload, or None when the
    signature is invalid or the token has expired."""
    try:
        return jwt.decode(
            token,
            settings.SECRET_KEY,
            algorithms=[settings.ALGORITHM],
        )
    except JWTError:
        return None

14
Back/core/security.py Executable file
View File

@ -0,0 +1,14 @@
from passlib.context import CryptContext
# One shared passlib context: bcrypt hashes, with "auto" letting verify()
# still accept hashes from schemes passlib later marks deprecated.
pwd_context = CryptContext(
    schemes=["bcrypt"],
    deprecated="auto",
)
def hash_password(password: str) -> str:
    """Return a bcrypt hash of *password* for persistent storage."""
    return pwd_context.hash(password)
def verify_password(password: str, hashed_password: str) -> bool:
    """Check a plaintext *password* against a stored bcrypt hash."""
    return pwd_context.verify(password, hashed_password)

28
Back/core/websocket.py Executable file
View File

@ -0,0 +1,28 @@
from fastapi import WebSocket, WebSocketException, status
from core.jwt import decode_token
from db.session import AsyncSessionLocal
from domains.users.repo import get_user_by_id
async def get_ws_current_user(websocket: WebSocket):
    """Authenticate a WebSocket connection via its `token` query parameter.

    Rejects with close code 1008 (policy violation) when the token is
    missing, invalid, or does not resolve to an existing user.
    """
    reject = WebSocketException(code=status.WS_1008_POLICY_VIOLATION)
    token = websocket.query_params.get("token")
    if not token:
        raise reject
    payload = decode_token(token)
    if payload is None:
        raise reject
    async with AsyncSessionLocal() as db:
        user = await get_user_by_id(db, payload.get("sub"))
    if not user:
        raise reject
    return user

34
Back/db/base.py Executable file
View File

@ -0,0 +1,34 @@
from datetime import datetime
import uuid
from sqlalchemy.ext.asyncio import AsyncAttrs
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, declared_attr
from sqlalchemy import DateTime, func
from sqlalchemy.dialects.postgresql import UUID
class Base(AsyncAttrs, DeclarativeBase):
    """Declarative base supplying UUID pk, timestamps, and table naming."""

    @declared_attr.directive
    def __tablename__(cls) -> str:
        # Naive pluralization of the class name: User -> users.
        return cls.__name__.lower() + "s"

    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        nullable=False,
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        # Fix: server_onupdate alone emits no DDL on PostgreSQL (it only
        # signals a fetched value), so updated_at was never refreshed.
        # The client-side onupdate makes the ORM set it on every UPDATE.
        onupdate=func.now(),
        server_onupdate=func.now(),
        nullable=False,
    )

110
Back/db/redis.py Executable file
View File

@ -0,0 +1,110 @@
import redis.asyncio as redis
from typing import Optional, Any, Awaitable, cast
from core.config import settings
redis_client: redis.Redis = redis.from_url(
settings.REDIS_URL,
decode_responses=True,
)
# =========================
# Redis Keys
# =========================
def speaker_key(group_id: str) -> str:
    """Redis key holding the active speaker for *group_id*."""
    return "speaker:{}".format(group_id)
def presence_key(group_id: str) -> str:
    """Redis key for the presence set of *group_id*."""
    return "presence:{}".format(group_id)
# =========================
# Speaker Lock
# =========================
async def get_active_speaker(group_id: str) -> Optional[str]:
    """Return the user_id currently holding the mic in *group_id*, or None."""
    return await redis_client.get(speaker_key(group_id))
async def acquire_speaker(
    group_id: str,
    user_id: str,
    ttl: int = 30,
) -> bool:
    """
    Try to become the active speaker.
    Uses Redis SET NX.

    *ttl* (seconds) bounds how long the lock survives if the client never
    releases it. Returns True only when the lock was newly acquired; SET NX
    yields None (not False) when the key already exists, hence `is True`.
    """
    result = await redis_client.set(
        speaker_key(group_id),
        user_id,
        nx=True,
        ex=ttl,
    )
    return result is True
async def release_speaker(group_id: str, user_id: str) -> bool:
    """
    Release speaker only if owned by the user.
    Prevents race conditions.

    Returns True when the lock was deleted, False when it was held by
    someone else (or already gone).
    """
    # The compare-and-delete must be atomic, so it runs server-side as Lua:
    # a separate GET + DEL pair could delete a lock that another user
    # re-acquired between the two commands.
    lua_script = """
    if redis.call("GET", KEYS[1]) == ARGV[1]
    then
        return redis.call("DEL", KEYS[1])
    else
        return 0
    end
    """
    # cast() only narrows the redis-py return type for the type checker.
    result = await cast(
        Awaitable[Any],
        redis_client.eval(
            lua_script,
            1,
            speaker_key(group_id),
            user_id,
        ),
    )
    return result == 1
# =========================
# Presence
# =========================
async def add_presence(group_id: str, user_id: str) -> None:
    """Mark *user_id* as present in the group's presence set (idempotent)."""
    await redis_client.sadd(presence_key(group_id), user_id)
async def remove_presence(group_id: str, user_id: str) -> None:
    """Remove *user_id* from the group's presence set (no-op if absent)."""
    await redis_client.srem(presence_key(group_id), user_id)
async def get_presence(group_id: str) -> list[str]:
    """Return user_ids currently present in the group (unordered)."""
    members = await redis_client.smembers(
        presence_key(group_id)
    )
    return list(members)
# =========================
# Cleanup
# =========================
async def clear_group_state(group_id: str) -> None:
    """Drop both the speaker lock and the presence set for a group."""
    await redis_client.delete(
        speaker_key(group_id),
        presence_key(group_id),
    )

35
Back/db/session.py Executable file
View File

@ -0,0 +1,35 @@
from sqlalchemy.ext.asyncio import (
AsyncSession,
async_sessionmaker,
create_async_engine,
)
from core.config import settings
engine = create_async_engine(
settings.DATABASE_URL,
echo=settings.DEBUG,
pool_size=10,
max_overflow=20,
pool_pre_ping=True,
)
AsyncSessionLocal = async_sessionmaker(
bind=engine,
class_=AsyncSession,
expire_on_commit=False,
)
async def get_db():
    """
    FastAPI dependency yielding a request-scoped AsyncSession.

    The `async with` block already closes the session (returning its
    connection to the pool) on normal exit and on error, so the previous
    explicit try/finally close() was redundant and has been removed.
    """
    async with AsyncSessionLocal() as session:
        yield session

63
Back/docker-compose.yml Executable file
View File

@ -0,0 +1,63 @@
services:
api:
build: .
container_name: neda_api
ports:
- "8000:8000"
env_file:
- .env
depends_on:
postgres:
condition: service_healthy
redis:
condition: service_healthy
livekit:
condition: service_started
restart: always
postgres:
image: postgres:17-alpine
container_name: neda_postgres
environment:
POSTGRES_DB: ${POSTGRES_DB}
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
volumes:
- postgres_data:/var/lib/postgresql/data
restart: always
healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"]
interval: 5s
timeout: 5s
retries: 3
redis:
image: redis:7-alpine
container_name: neda_redis
volumes:
- redis_data:/data
restart: always
healthcheck:
test: ["CMD", "redis-cli", "ping"]
interval: 5s
timeout: 3s
retries: 5
livekit:
image: livekit/livekit-server
container_name: neda_livekit
command: --config /etc/livekit.yaml
volumes:
- ./livekit.yaml:/etc/livekit.yaml
ports:
- "7880:7880"
- "7881:7881"
- "7882:7882/udp"
restart: always
volumes:
postgres_data:
redis_data:

View File

71
Back/domains/admin/api.py Normal file
View File

@ -0,0 +1,71 @@
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from db.session import get_db
from core.deps import get_current_admin
from domains.admin.schemas import (
AdminCreateUser,
AdminCreateUserResult,
AdminResetSecretResult
)
from domains.admin.service import (
admin_create_user,
admin_reset_user_secret
)
router = APIRouter(
prefix="/admin",
tags=["admin"]
)
@router.post("/users", response_model=AdminCreateUserResult)
async def create_user(
    payload: AdminCreateUser,
    db: AsyncSession = Depends(get_db),
    admin=Depends(get_current_admin)
):
    """Admin-only: create a user with a generated secret.

    Returns the created user together with the one-time plaintext secret.
    Responds 400 when the username is already taken.
    """
    try:
        user, secret = await admin_create_user(
            db,
            payload.username,
            payload.role
        )
    except ValueError as e:
        # Chain the cause (PEP 3134) so tracebacks keep the original
        # validation error instead of "during handling ... another
        # exception occurred".
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        ) from e
    return {
        "user": user,
        "secret": secret
    }
@router.post("/users/{user_id}/reset-secret",
             response_model=AdminResetSecretResult)
async def reset_secret(
    user_id: str,
    db: AsyncSession = Depends(get_db),
    admin=Depends(get_current_admin)
):
    """Admin-only: rotate a user's secret, returning the new plaintext value."""
    new_secret = await admin_reset_user_secret(db, user_id)
    if new_secret:
        return {"secret": new_secret}
    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail="User not found"
    )

View File

@ -0,0 +1,23 @@
import uuid
from pydantic import BaseModel, ConfigDict
from domains.users.models import UserRole


class AdminCreateUser(BaseModel):
    """Request body for admin user creation."""
    username: str
    role: UserRole


class AdminUserResponse(BaseModel):
    """User view returned to admins; reads attributes off ORM objects."""
    # Pydantic v2: model_config replaces the deprecated nested `class Config`.
    model_config = ConfigDict(from_attributes=True)

    id: uuid.UUID
    username: str
    role: UserRole
    is_active: bool


class AdminCreateUserResult(BaseModel):
    """Created user plus the one-time plaintext secret."""
    user: AdminUserResponse
    secret: str


class AdminResetSecretResult(BaseModel):
    """New plaintext secret after an admin-initiated reset."""
    secret: str

View File

@ -0,0 +1,55 @@
import secrets
from sqlalchemy.ext.asyncio import AsyncSession
from domains.users.models import User
from domains.users.repo import (
get_user_by_id,
get_user_by_username,
create_user,
)
from core.security import hash_password
from core.config import settings
def generate_user_secret() -> str:
    """Generate a URL-safe random secret for a user.

    SECRET_PASS_LENGTH is the number of random *bytes* passed to
    token_urlsafe, so the returned string is ~1.33x that many characters.
    """
    return secrets.token_urlsafe(settings.SECRET_PASS_LENGTH)
async def admin_create_user(
    db: AsyncSession,
    username: str,
    role: str
):
    """Create a user with a freshly generated secret.

    Raises ValueError when the username is taken; returns the persisted
    user together with the plaintext secret (shown to the admin once).
    """
    if await get_user_by_username(db, username):
        raise ValueError("Username already exists")
    secret = generate_user_secret()
    new_user = User(
        username=username,
        role=role,
        secret_hash=hash_password(secret),
    )
    await create_user(db, new_user)
    return new_user, secret
async def admin_reset_user_secret(
    db: AsyncSession,
    user_id
):
    """Rotate the secret of *user_id*.

    Returns the new plaintext secret, or None when no such user exists.
    """
    target = await get_user_by_id(db, user_id)
    if target is None:
        return None
    fresh_secret = generate_user_secret()
    target.secret_hash = hash_password(fresh_secret)
    await db.commit()
    return fresh_secret

View File

38
Back/domains/auth/api.py Normal file
View File

@ -0,0 +1,38 @@
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from db.session import get_db
from domains.auth.schemas import (
LoginRequest,
TokenResponse
)
from domains.auth.service import login_user
router = APIRouter(
prefix="/auth",
tags=["auth"]
)
@router.post("/login", response_model=TokenResponse)
async def login(
    payload: LoginRequest,
    db: AsyncSession = Depends(get_db)
):
    """Exchange username + secret for a bearer access token (401 on failure)."""
    token = await login_user(db, payload.username, payload.secret)
    if token:
        return token
    raise HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Invalid username or secret"
    )

View File

@ -0,0 +1,21 @@
import uuid
from pydantic import BaseModel
class LoginRequest(BaseModel):
username: str
secret: str
class TokenResponse(BaseModel):
access_token: str
token_type: str = "bearer"
class AuthUser(BaseModel):
id: uuid.UUID
username: str
role: str
class Config:
from_attributes = True

View File

@ -0,0 +1,53 @@
from sqlalchemy.ext.asyncio import AsyncSession
from core.security import verify_password
from core.jwt import create_access_token
from domains.users.repo import get_user_by_username
async def authenticate_user(
    db: AsyncSession,
    username: str,
    secret: str
):
    """Return the user when username/secret match an active account, else None."""
    user = await get_user_by_username(db, username)
    if user is None or not user.is_active:
        return None
    return user if verify_password(secret, user.secret_hash) else None
async def login_user(
    db: AsyncSession,
    username: str,
    secret: str
):
    """Authenticate and mint an access token; None on bad credentials."""
    account = await authenticate_user(db, username, secret)
    if account is None:
        return None
    return {
        "access_token": create_access_token(subject=str(account.id)),
        "token_type": "bearer",
    }

View File

View File

@ -0,0 +1,72 @@
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from db.session import get_db
from core.deps import get_current_admin, get_current_user
from domains.groups.schemas import (
GroupCreate,
GroupResponse,
AddMemberRequest
)
from domains.groups.service import (
create_new_group,
add_member_to_group,
list_user_groups
)
router = APIRouter(
prefix="/groups",
tags=["groups"]
)
@router.post(
    "/",
    response_model=GroupResponse
)
async def create_group(
    payload: GroupCreate,
    db: AsyncSession = Depends(get_db),
    admin = Depends(get_current_admin)
):
    """Admin-only: create a new group."""
    return await create_new_group(db, payload.name, payload.description)
@router.post("/{group_id}/members")
async def add_member(
    group_id: str,
    payload: AddMemberRequest,
    db: AsyncSession = Depends(get_db),
    admin = Depends(get_current_admin)
):
    """Admin-only: add a user to a group with the requested role."""
    return await add_member_to_group(
        db, group_id, payload.user_id, payload.role
    )
@router.get("/me", response_model=list[GroupResponse])
async def my_groups(
db: AsyncSession = Depends(get_db),
user = Depends(get_current_user)
):
return await list_user_groups(
db,
user.id
)

View File

@ -0,0 +1,57 @@
import uuid
from enum import Enum
from sqlalchemy import String, Boolean, ForeignKey, Enum as SQLEnum
from sqlalchemy.orm import Mapped, mapped_column, relationship
from db.base import Base
class GroupType(str, Enum):
    # Distinguishes multi-member groups from 1:1 direct conversations.
    GROUP = "group"
    DIRECT = "direct"
class GroupRole(str, Enum):
    # Role of a member within a single group (not the global user role).
    MANAGER = "manager"
    MEMBER = "member"
class Group(Base):
    """A chat/voice group, or a 1:1 direct conversation (see GroupType)."""
    __tablename__ = "groups"  # type: ignore

    name: Mapped[str] = mapped_column(
        String(100),
        nullable=False,
        index=True
    )
    # Bug fix: this column was missing although the service layer constructs
    # Group(name=..., description=...) and GroupResponse exposes
    # `description` — instantiation would raise on the unknown kwarg.
    description: Mapped[str | None] = mapped_column(
        String(255),
        nullable=True
    )
    type: Mapped[GroupType] = mapped_column(
        SQLEnum(GroupType, name="group_type"),
        default=GroupType.GROUP,
        nullable=False
    )
    # Soft-delete / deactivation flag.
    is_active: Mapped[bool] = mapped_column(
        Boolean,
        default=True,
        index=True
    )
class GroupMember(Base):
    """Join table linking a user to a group with a per-group role."""
    __tablename__ = "group_members" # type: ignore
    # Member rows are removed when either side is deleted (CASCADE).
    user_id: Mapped[uuid.UUID] = mapped_column(
        ForeignKey("users.id", ondelete="CASCADE"),
        index=True
    )
    group_id: Mapped[uuid.UUID] = mapped_column(
        ForeignKey("groups.id", ondelete="CASCADE"),
        index=True
    )
    role: Mapped[GroupRole] = mapped_column(
        SQLEnum(GroupRole, name="group_role"),
        default=GroupRole.MEMBER,
        nullable=False
    )

View File

@ -0,0 +1,31 @@
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from domains.groups.models import Group, GroupMember
async def create_group(db: AsyncSession, group: Group):
    """Persist a new Group and return it refreshed (PK/defaults loaded)."""
    db.add(group)
    await db.commit()
    await db.refresh(group)
    return group
async def get_group_by_id(db: AsyncSession, group_id):
    """Fetch a single group by primary key; None when no row matches."""
    stmt = select(Group).where(Group.id == group_id)
    rows = await db.execute(stmt)
    return rows.scalar_one_or_none()
async def add_group_member(db: AsyncSession, membership: GroupMember):
    """Persist a new GroupMember row and return it.

    Refreshes the instance after commit so database-generated values
    (primary key, defaults) are populated — consistent with create_group.
    """
    db.add(membership)
    await db.commit()
    await db.refresh(membership)
    return membership
async def get_user_groups(db: AsyncSession, user_id):
    """Return every group the given user is a member of."""
    stmt = (
        select(Group)
        .join(GroupMember)
        .where(GroupMember.user_id == user_id)
    )
    rows = await db.execute(stmt)
    return rows.scalars().all()

View File

@ -0,0 +1,30 @@
import uuid
from pydantic import BaseModel
from domains.groups.models import GroupRole
class GroupCreate(BaseModel):
    """Payload for creating a group."""
    name: str
    description: str | None = None
class GroupResponse(BaseModel):
    """Group as returned by the API."""
    id: uuid.UUID
    name: str
    description: str | None
    is_active: bool
    class Config:
        # Allow population from ORM objects.
        from_attributes = True
class AddMemberRequest(BaseModel):
    """Payload for adding a user to a group."""
    user_id: uuid.UUID
    role: GroupRole = GroupRole.MEMBER  # defaults to plain membership
class GroupMemberResponse(BaseModel):
    """Membership row as returned by the API."""
    user_id: uuid.UUID
    group_id: uuid.UUID
    role: GroupRole
    class Config:
        # Allow population from ORM objects.
        from_attributes = True

View File

@ -0,0 +1,44 @@
from sqlalchemy.ext.asyncio import AsyncSession
from domains.groups.models import Group, GroupMember
from domains.groups.repo import (
create_group,
get_group_by_id,
add_group_member,
get_user_groups
)
async def create_new_group(
    db: AsyncSession,
    name: str,
    description: str | None
):
    """Build a Group from the given fields and persist it."""
    new_group = Group(name=name, description=description)
    return await create_group(db, new_group)
async def add_member_to_group(
    db: AsyncSession,
    group_id,
    user_id,
    role
):
    """Create the membership row linking user_id to group_id with role."""
    row = GroupMember(
        group_id=group_id,
        user_id=user_id,
        role=role
    )
    return await add_group_member(db, row)
async def list_user_groups(
    db: AsyncSession,
    user_id
):
    """Thin pass-through to the repo query for a user's groups."""
    groups = await get_user_groups(db, user_id)
    return groups

View File

View File

@ -0,0 +1,26 @@
from db.redis import (
add_presence,
remove_presence,
get_presence
)
async def user_join_group(group_id: str, user_id: str):
    """
    Called when websocket connects: records the user's presence in Redis.
    """
    await add_presence(group_id, user_id)
async def user_leave_group(group_id: str, user_id: str):
    """
    Called when websocket disconnects: removes the user's presence entry.
    """
    await remove_presence(group_id, user_id)
async def list_online_users(group_id: str):
    """
    Returns online users in a group (read from the Redis presence store).
    """
    return await get_presence(group_id)

View File

@ -0,0 +1,83 @@
from sqlalchemy.ext.asyncio import AsyncSession
from db.redis import (
acquire_speaker,
release_speaker,
get_active_speaker
)
from domains.groups.repo import get_group_by_id
from domains.groups.models import GroupType
from integrations.livekit.token_service import generate_join_token
async def request_speak(
    db: AsyncSession,
    group_id: str,
    user_id: str
):
    """Try to grant publish rights in a room.

    Direct chats bypass the push-to-talk lock entirely; group chats must
    first win the speaker lock. Returns a LiveKit join token on success,
    None when the group is unknown or the lock is held by someone else.
    """
    group = await get_group_by_id(db, group_id)
    if group is None:
        return None
    # Group chats are push-to-talk: only the lock holder may publish.
    if group.type != GroupType.DIRECT:
        if not await acquire_speaker(group_id, user_id):
            return None
    return generate_join_token(
        user_id=user_id,
        group_id=group_id,
        can_publish=True
    )
async def stop_speaking(
    db: AsyncSession,
    group_id: str,
    user_id: str
):
    """Release the push-to-talk lock; returns a success flag."""
    group = await get_group_by_id(db, group_id)
    if group is None:
        return False
    if group.type == GroupType.DIRECT:
        # Direct chats never hold a lock, so there is nothing to release.
        return True
    released = await release_speaker(group_id, user_id)
    return released
async def current_speaker(
    db: AsyncSession,
    group_id: str
):
    """Return the active speaker, or None (unknown group / direct chat)."""
    group = await get_group_by_id(db, group_id)
    if group is None or group.type == GroupType.DIRECT:
        return None
    return await get_active_speaker(group_id)

111
Back/domains/realtime/ws.py Normal file
View File

@ -0,0 +1,111 @@
from fastapi import APIRouter, WebSocket, WebSocketDisconnect
from core.websocket import get_ws_current_user
from domains.realtime.ws_manager import manager
from domains.realtime.presence_service import (
user_join_group,
user_leave_group,
list_online_users
)
from domains.realtime.speaker_service import (
request_speak,
stop_speaking,
current_speaker
)
from integrations.livekit.token_service import generate_join_token
from db.session import AsyncSessionLocal
router = APIRouter()
@router.websocket("/ws/groups/{group_id}")
async def group_ws(websocket: WebSocket, group_id: str):
user = await get_ws_current_user(websocket)
user_id = str(user.id)
# connect websocket
await manager.connect(group_id, websocket)
# add presence
await user_join_group(group_id, user_id)
# give listener token
listener_token = generate_join_token(
user_id=user_id,
group_id=group_id,
can_publish=False
)
await websocket.send_json({
"type": "livekit_token",
"token": listener_token
})
# broadcast updated presence
await manager.broadcast(
group_id,
{
"type": "presence",
"users": await list_online_users(group_id)
}
)
try:
while True:
data = await websocket.receive_json()
event = data.get("type")
# user wants to speak
if event == "request_speak":
async with AsyncSessionLocal() as db:
token = await request_speak(
db,
group_id,
user_id
)
if token:
await manager.broadcast(
group_id,
{
"type": "speaker",
"user_id": user_id
}
)
await websocket.send_json({
"type": "speaker_granted",
"token": token
})
else:
async with AsyncSessionLocal() as db:
speaker = await current_speaker(db, group_id)
await websocket.send_json({
"type": "speaker_busy",
"speaker": speaker
})
# user stops speaking
elif event == "stop_speak":
async with AsyncSessionLocal() as db:
await stop_speaking(db, group_id, user_id)
await manager.broadcast(
group_id,
{
"type": "speaker_released"
}
)
except WebSocketDisconnect:
manager.disconnect(group_id, websocket)
await user_leave_group(group_id, user_id)
await manager.broadcast(
group_id,
{
"type": "presence",
"users": await list_online_users(group_id)
}
)

View File

@ -0,0 +1,27 @@
from fastapi import WebSocket
from collections import defaultdict
class ConnectionManager:
    """Tracks open websockets per group and fans out JSON messages."""

    def __init__(self):
        # group_id -> set of live websockets currently in that group.
        self.groups: dict[str, set[WebSocket]] = defaultdict(set)

    async def connect(self, group_id: str, websocket: WebSocket):
        """Accept the socket and register it under the group."""
        await websocket.accept()
        self.groups[group_id].add(websocket)

    def disconnect(self, group_id: str, websocket: WebSocket):
        """Forget the socket; safe for unknown groups/sockets (discard)."""
        if group_id in self.groups:
            self.groups[group_id].discard(websocket)

    async def broadcast(self, group_id: str, message: dict):
        """Send message to every socket in the group, dropping dead ones.

        Iterates over a copy so the set may be mutated during the loop.
        """
        if group_id not in self.groups:
            return
        for ws in list(self.groups[group_id]):
            try:
                await ws.send_json(message)
            # Bug fix: the bare `except:` here also swallowed
            # CancelledError/KeyboardInterrupt; only treat ordinary send
            # failures as a dead connection.
            except Exception:
                self.groups[group_id].discard(ws)


# Module-level singleton shared by the websocket routes.
manager = ConnectionManager()

View File

58
Back/domains/users/api.py Normal file
View File

@ -0,0 +1,58 @@
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from db.session import get_db
from core.deps import get_current_admin
from domains.users.schemas import (
UserCreate,
UserCreateResult
)
from domains.users.service import (
create_user_by_admin,
reset_user_secret
)
from domains.users.repo import get_user_by_id
# User-administration endpoints live under the /users prefix.
router = APIRouter(
    prefix="/users",
    tags=["users"]
)
@router.post("/", response_model=UserCreateResult)
async def create_user(
payload: UserCreate,
db: AsyncSession = Depends(get_db),
admin = Depends(get_current_admin)
):
user, secret = await create_user_by_admin(
db,
payload.username,
payload.role
)
return {
"user": user,
"secret": secret
}
@router.post("/{user_id}/reset-secret")
async def reset_secret(
user_id: str,
db: AsyncSession = Depends(get_db),
admin = Depends(get_current_admin)
):
user = await get_user_by_id(db, user_id)
if not user:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="User not found"
)
new_secret = await reset_user_secret(db, user)
return {"secret": new_secret}

View File

@ -0,0 +1,38 @@
from enum import Enum
from sqlalchemy import String, Boolean, Enum as SQLEnum
from sqlalchemy.orm import Mapped, mapped_column
from db.base import Base
class UserRole(str, Enum):
    # Application-wide role; per-group roles are a separate enum.
    ADMIN = "admin"
    GROUP_MANAGER = "group_manager"
    MEMBER = "member"
class User(Base):
    """Application account; authenticates with username + hashed secret."""
    # NOTE(review): no __tablename__ here — presumably Base derives one;
    # confirm against db.base.
    username: Mapped[str] = mapped_column(
        String(50),
        unique=True,
        index=True,
        nullable=False,
    )
    # Hash of the login secret (see core.security.hash_password);
    # the raw secret is never stored.
    secret_hash: Mapped[str] = mapped_column(
        String(255),
        nullable=False,
    )
    role: Mapped[UserRole] = mapped_column(
        SQLEnum(UserRole, name="user_role"),
        default=UserRole.MEMBER,
        index=True,
        nullable=False,
    )
    # Suspension flag; authentication is refused when False.
    is_active: Mapped[bool] = mapped_column(
        Boolean,
        default=True,
        index=True,
        nullable=False,
    )

View File

@ -0,0 +1,25 @@
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from domains.users.models import User
async def get_user_by_id(db: AsyncSession, user_id):
    """Fetch a user by primary key; None when no row matches."""
    query = select(User).where(User.id == user_id)
    return (await db.execute(query)).scalar_one_or_none()
async def get_user_by_username(db: AsyncSession, username):
    """Fetch a user by unique username; None when no row matches."""
    query = select(User).where(User.username == username)
    return (await db.execute(query)).scalar_one_or_none()
async def create_user(db: AsyncSession, user: User):
    """Persist a new User and return it refreshed (PK/defaults loaded)."""
    db.add(user)
    await db.commit()
    await db.refresh(user)
    return user

View File

@ -0,0 +1,20 @@
import uuid
from pydantic import BaseModel
from domains.users.models import UserRole
class UserCreate(BaseModel):
    """Admin payload for creating a user (secret is generated server-side)."""
    username: str
    role: UserRole
class UserResponse(BaseModel):
    """User as exposed by the API (no secret material)."""
    id: uuid.UUID
    username: str
    role: str
    is_active: bool
    class Config:
        # Allow population from ORM objects.
        from_attributes = True
class UserCreateResult(BaseModel):
    """Creation response: the user plus the one-time plaintext secret."""
    user: UserResponse
    secret: str

View File

@ -0,0 +1,43 @@
import secrets
from sqlalchemy.ext.asyncio import AsyncSession
from core.security import hash_password
from domains.users.models import User
from domains.users.repo import create_user
from core.config import settings
def generate_user_secret():
    # Cryptographically secure URL-safe secret; SECRET_PASS_LENGTH is the
    # number of random bytes (output string is ~4/3 that many characters).
    return secrets.token_urlsafe(settings.SECRET_PASS_LENGTH)
async def create_user_by_admin(
    db: AsyncSession,
    username: str,
    role: str
):
    """Create a user with a freshly generated secret.

    Returns (user, plaintext_secret); only the hash is persisted.
    """
    plaintext = generate_user_secret()
    new_user = User(
        username=username,
        role=role,
        secret_hash=hash_password(plaintext)
    )
    await create_user(db, new_user)
    return new_user, plaintext
async def get_user(db: AsyncSession, user_id):
    """Fetch a user by id via the repo layer."""
    # Imported lazily; presumably to avoid an import cycle — confirm,
    # since the module already imports from domains.users.repo at the top.
    from domains.users.repo import get_user_by_id
    return await get_user_by_id(db, user_id)
async def reset_user_secret(db: AsyncSession, user: User):
    """Generate and store a new secret for `user`; returns the plaintext."""
    new_secret = generate_user_secret()
    user.secret_hash = hash_password(new_secret)
    await db.commit()
    return new_secret

View File

@ -0,0 +1,10 @@
from livekit import api
from core.config import settings
def get_livekit_api():
    """Build a LiveKit server API client from application settings."""
    return api.LiveKitAPI(
        settings.LIVEKIT_HOST,
        settings.LIVEKIT_API_KEY,
        settings.LIVEKIT_API_SECRET
    )

View File

@ -0,0 +1,28 @@
from livekit import api
from core.config import settings
def generate_join_token(
    user_id: str,
    group_id: str,
    can_publish: bool
):
    """Mint a LiveKit room-join JWT for user_id in the room group_id.

    can_publish controls whether the client may send media; every client
    may subscribe.
    """
    grants = api.VideoGrants(
        room=group_id,
        room_join=True,
        can_publish=can_publish,
        can_subscribe=True
    )
    access = api.AccessToken(
        settings.LIVEKIT_API_KEY,
        settings.LIVEKIT_API_SECRET
    )
    access.with_identity(user_id)
    access.with_grants(grants)
    return access.to_jwt()

7
Back/livekit.yaml Executable file
View File

@ -0,0 +1,7 @@
port: 7880
rtc:
udp_port: 7882
tcp_port: 7881
keys:
neda_key: neda_secret

71
Back/main.py Normal file
View File

@ -0,0 +1,71 @@
from contextlib import asynccontextmanager
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from domains.auth.api import router as auth_router
from domains.users.api import router as users_router
from domains.admin.api import router as admin_router
from domains.groups.api import router as groups_router
from domains.realtime.ws import router as realtime_router
from db.redis import redis_client
@asynccontextmanager
async def lifespan(app: FastAPI):
    """App lifespan: check Redis connectivity on startup, close it on shutdown."""
    # ---------- Startup ----------
    try:
        await redis_client.ping()
        print("Redis connected")
    except Exception as e:
        # Best-effort check: startup proceeds even if Redis is down.
        print("Redis connection failed:", e)
    yield
    # ---------- Shutdown ----------
    # NOTE(review): recent redis-py deprecates close() in favor of aclose()
    # on async clients — confirm the installed version.
    await redis_client.close()
app = FastAPI(
    title="NEDA API",
    description="Realtime Voice Communication Backend",
    version="1.0.0",
    lifespan=lifespan
)
# -------------------------
# CORS
# -------------------------
# NOTE(review): allow_origins=["*"] together with allow_credentials=True is
# a permissive combination — tighten the origin list before production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# -------------------------
# Routers
# -------------------------
app.include_router(auth_router)
app.include_router(users_router)
app.include_router(admin_router)
app.include_router(groups_router)
app.include_router(realtime_router)
# -------------------------
# Health Check
# -------------------------
@app.get("/health")
async def health_check():
return {"status": "ok"}

7
Back/pyproject.toml Normal file
View File

@ -0,0 +1,7 @@
[project]
name = "neda"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = []

22
Back/requirements.txt Executable file
View File

@ -0,0 +1,22 @@
fastapi
uvicorn[standard]
sqlalchemy
asyncpg
alembic
redis
python-jose[cryptography]
passlib[bcrypt]
pydantic-settings
python-dotenv
livekit-api
websockets
# Testing
pytest
pytest-asyncio
httpx

12
Back/tests/test_health.py Normal file
View File

@ -0,0 +1,12 @@
import pytest
from httpx import AsyncClient, ASGITransport
from main import app
@pytest.mark.asyncio
async def test_health_check():
    """GET /health returns 200 with the static ok payload (in-process ASGI)."""
    transport = ASGITransport(app=app)
    async with AsyncClient(transport=transport, base_url="http://test") as ac:
        response = await ac.get("/health")
        assert response.status_code == 200
        assert response.json() == {"status": "ok"}