Phase 2: Chat & AI Core — Claude API streaming, chat UI, admin context

Backend:
- Chat, Message, ContextFile models + Alembic migration
- Chat CRUD with per-user limit enforcement (max_chats)
- SSE streaming endpoint: saves user message, streams Claude response,
  saves assistant message with token usage metadata
- Context assembly: primary context file + conversation history
- Admin context CRUD (GET/PUT with version tracking)
- Anthropic SDK integration with async streaming
- Chat ownership isolation (users can't access each other's chats)

Frontend:
- Chat page with sidebar chat list + main chat window
- Real-time SSE streaming via fetch + ReadableStream
- Message bubbles with Markdown rendering (react-markdown)
- Auto-growing message input (Enter to send, Shift+Enter newline)
- Zustand chat store for streaming state management
- Admin primary context editor with unsaved changes warning
- Updated routing: /chat, /chat/:chatId, /admin/context
- Enabled Chat and Admin sidebar navigation
- English + Russian translations for all new UI

Infrastructure:
- nginx: disabled proxy buffering for SSE support
- Added ANTHROPIC_API_KEY and CLAUDE_MODEL to config

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
2026-03-19 12:38:30 +03:00
parent 7c752cae6b
commit 70469beef8
39 changed files with 4168 additions and 47 deletions

View File

@@ -0,0 +1,33 @@
from typing import Annotated
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.deps import require_admin
from app.database import get_db
from app.models.user import User
from app.schemas.chat import ContextFileResponse, UpdateContextRequest
from app.services import context_service
# All endpoints in this module are admin-only and live under the /admin prefix.
router = APIRouter(prefix="/admin", tags=["admin"])
@router.get("/context", response_model=ContextFileResponse | None)
async def get_primary_context(
    _admin: Annotated[User, Depends(require_admin)],
    db: Annotated[AsyncSession, Depends(get_db)],
):
    """Return the primary context file, or a null body if none exists yet.

    Requires an admin caller (enforced by the require_admin dependency).
    """
    ctx = await context_service.get_primary_context(db)
    # No context has been created yet -> respond with 200 and a null body.
    return ContextFileResponse.model_validate(ctx) if ctx else None
@router.put("/context", response_model=ContextFileResponse)
async def update_primary_context(
    data: UpdateContextRequest,
    admin: Annotated[User, Depends(require_admin)],
    db: Annotated[AsyncSession, Depends(get_db)],
):
    """Create or replace the primary context file.

    The acting admin's id is passed to the service (presumably recorded for
    version tracking — see the service for details).
    """
    saved = await context_service.upsert_primary_context(db, data.content, admin.id)
    return ContextFileResponse.model_validate(saved)

103
backend/app/api/v1/chats.py Normal file
View File

@@ -0,0 +1,103 @@
import uuid
from typing import Annotated
from fastapi import APIRouter, Depends, Query, status
from fastapi.responses import StreamingResponse
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.deps import get_current_user
from app.database import get_db
from app.models.user import User
from app.schemas.chat import (
ChatListResponse,
ChatResponse,
CreateChatRequest,
MessageListResponse,
MessageResponse,
SendMessageRequest,
UpdateChatRequest,
)
from app.services import chat_service
from app.services.ai_service import stream_ai_response
# Chat CRUD + message endpoints; every route is scoped to the authenticated user.
router = APIRouter(prefix="/chats", tags=["chats"])
@router.post("/", response_model=ChatResponse, status_code=status.HTTP_201_CREATED)
async def create_chat(
    data: CreateChatRequest,
    user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
):
    """Create a new chat owned by the current user and return it with 201.

    Any per-user chat limit is enforced inside the service layer, not here.
    """
    new_chat = await chat_service.create_chat(db, user, data.title)
    return ChatResponse.model_validate(new_chat)
@router.get("/", response_model=ChatListResponse)
async def list_chats(
    user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    archived: bool | None = Query(default=None),
):
    """List the current user's chats.

    ``archived`` filters by archive state; when omitted (None) the service
    receives None and decides what to return.
    """
    rows = await chat_service.get_user_chats(db, user.id, archived)
    serialized = [ChatResponse.model_validate(row) for row in rows]
    return ChatListResponse(chats=serialized)
@router.get("/{chat_id}", response_model=ChatResponse)
async def get_chat(
    chat_id: uuid.UUID,
    user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
):
    """Fetch one chat by id, scoped to the caller.

    Ownership isolation is handled by the service (it receives user.id
    alongside the chat id).
    """
    owned_chat = await chat_service.get_chat(db, chat_id, user.id)
    return ChatResponse.model_validate(owned_chat)
@router.patch("/{chat_id}", response_model=ChatResponse)
async def update_chat(
    chat_id: uuid.UUID,
    data: UpdateChatRequest,
    user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
):
    """Partially update a chat (title and/or archive flag) owned by the caller."""
    patched = await chat_service.update_chat(
        db, chat_id, user.id, data.title, data.is_archived
    )
    return ChatResponse.model_validate(patched)
@router.delete("/{chat_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_chat(
    chat_id: uuid.UUID,
    user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
):
    """Delete a chat owned by the caller; responds 204 with an empty body."""
    # Implicit None return -> FastAPI sends 204 No Content.
    await chat_service.delete_chat(db, chat_id, user.id)
@router.get("/{chat_id}/messages", response_model=MessageListResponse)
async def list_messages(
    chat_id: uuid.UUID,
    user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    # ge=1 added: previously limit=0 or a negative value passed validation and
    # was forwarded straight to the service. Now 1 <= limit <= 200 (default 50).
    limit: int = Query(default=50, ge=1, le=200),
    before: uuid.UUID | None = Query(default=None),
):
    """Return messages of a chat owned by the caller.

    ``before`` is a cursor for pagination (the service receives it as-is;
    presumably "messages older than this message id" — confirm in the service).
    Ownership isolation is enforced by the service via user.id.
    """
    messages = await chat_service.get_messages(db, chat_id, user.id, limit, before)
    return MessageListResponse(
        messages=[MessageResponse.model_validate(m) for m in messages]
    )
@router.post("/{chat_id}/messages")
async def send_message(
    chat_id: uuid.UUID,
    data: SendMessageRequest,
    user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
):
    """Send a user message and stream the AI reply as server-sent events.

    The heavy lifting (persisting the user message, calling the model,
    saving the assistant reply) happens inside stream_ai_response.
    """
    # Disable caching and intermediary buffering so SSE chunks are delivered
    # as they are produced; X-Accel-Buffering: no targets reverse proxies.
    sse_headers = {
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "X-Accel-Buffering": "no",
    }
    event_stream = stream_ai_response(db, chat_id, user.id, data.content)
    return StreamingResponse(
        event_stream,
        media_type="text/event-stream",
        headers=sse_headers,
    )

View File

@@ -1,10 +1,14 @@
from fastapi import APIRouter
from app.api.v1.auth import router as auth_router
from app.api.v1.chats import router as chats_router
from app.api.v1.admin import router as admin_router
# Aggregate all version-1 routers under a single /api/v1 prefix.
api_v1_router = APIRouter(prefix="/api/v1")
api_v1_router.include_router(auth_router)
api_v1_router.include_router(chats_router)
api_v1_router.include_router(admin_router)
@api_v1_router.get("/health")