Backend: - Chat, Message, ContextFile models + Alembic migration - Chat CRUD with per-user limit enforcement (max_chats) - SSE streaming endpoint: saves user message, streams Claude response, saves assistant message with token usage metadata - Context assembly: primary context file + conversation history - Admin context CRUD (GET/PUT with version tracking) - Anthropic SDK integration with async streaming - Chat ownership isolation (users can't access each other's chats) Frontend: - Chat page with sidebar chat list + main chat window - Real-time SSE streaming via fetch + ReadableStream - Message bubbles with Markdown rendering (react-markdown) - Auto-growing message input (Enter to send, Shift+Enter newline) - Zustand chat store for streaming state management - Admin primary context editor with unsaved changes warning - Updated routing: /chat, /chat/:chatId, /admin/context - Enabled Chat and Admin sidebar navigation - English + Russian translations for all new UI Infrastructure: - nginx: disabled proxy buffering for SSE support - Added ANTHROPIC_API_KEY and CLAUDE_MODEL to config Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
104 lines · 3.3 KiB · Python
import uuid
from typing import Annotated

from fastapi import APIRouter, Depends, Query, status
from fastapi.responses import StreamingResponse
from sqlalchemy.ext.asyncio import AsyncSession

from app.api.deps import get_current_user
from app.database import get_db
from app.models.user import User
from app.schemas.chat import (
    ChatListResponse,
    ChatResponse,
    CreateChatRequest,
    MessageListResponse,
    MessageResponse,
    SendMessageRequest,
    UpdateChatRequest,
)
from app.services import chat_service
from app.services.ai_service import stream_ai_response

# Chat API router; every endpoint requires an authenticated user via get_current_user.
router = APIRouter(prefix="/chats", tags=["chats"])
@router.post("/", response_model=ChatResponse, status_code=status.HTTP_201_CREATED)
async def create_chat(
    data: CreateChatRequest,
    user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
):
    """Create a new chat owned by the authenticated user.

    Persistence (and any per-user limit enforcement) lives in
    chat_service.create_chat; this handler only serializes the result.
    """
    created = await chat_service.create_chat(db, user, data.title)
    return ChatResponse.model_validate(created)
@router.get("/", response_model=ChatListResponse)
async def list_chats(
    user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    archived: bool | None = Query(default=None),
):
    """List the authenticated user's chats.

    The optional `archived` query parameter is passed through to the
    service layer; None presumably means "no archive filter" — confirm
    against chat_service.get_user_chats.
    """
    rows = await chat_service.get_user_chats(db, user.id, archived)
    serialized = [ChatResponse.model_validate(row) for row in rows]
    return ChatListResponse(chats=serialized)
@router.get("/{chat_id}", response_model=ChatResponse)
async def get_chat(
    chat_id: uuid.UUID,
    user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
):
    """Fetch a single chat by id.

    Ownership is enforced by passing user.id to the service lookup, so a
    user cannot read another user's chat.
    """
    found = await chat_service.get_chat(db, chat_id, user.id)
    return ChatResponse.model_validate(found)
@router.patch("/{chat_id}", response_model=ChatResponse)
async def update_chat(
    chat_id: uuid.UUID,
    data: UpdateChatRequest,
    user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
):
    """Update a chat's title and/or archived flag.

    Both fields come from UpdateChatRequest; the service receives user.id
    so only the chat's owner can modify it.
    """
    updated = await chat_service.update_chat(
        db, chat_id, user.id, data.title, data.is_archived
    )
    return ChatResponse.model_validate(updated)
@router.delete("/{chat_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_chat(
    chat_id: uuid.UUID,
    user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
):
    """Delete a chat owned by the authenticated user; responds 204 on success."""
    # Ownership check happens inside the service via user.id.
    await chat_service.delete_chat(db, chat_id, user.id)
@router.get("/{chat_id}/messages", response_model=MessageListResponse)
async def list_messages(
    chat_id: uuid.UUID,
    user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    # ge=1 added: the original only capped the upper bound (le=200), so
    # limit=0 or a negative value reached the service/DB layer unchecked.
    limit: int = Query(default=50, ge=1, le=200),
    before: uuid.UUID | None = Query(default=None),
):
    """List messages for a chat, newest-window paginated.

    Args:
        chat_id: Chat whose messages are fetched; ownership is enforced by
            passing user.id to the service.
        limit: Page size, validated to 1..200 (default 50).
        before: Optional message id cursor for pagination — exact semantics
            (exclusive/inclusive, ordering) live in chat_service.get_messages.
    """
    messages = await chat_service.get_messages(db, chat_id, user.id, limit, before)
    return MessageListResponse(
        messages=[MessageResponse.model_validate(m) for m in messages]
    )
@router.post("/{chat_id}/messages")
async def send_message(
    chat_id: uuid.UUID,
    data: SendMessageRequest,
    user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
):
    """Send a user message and stream the AI reply as server-sent events.

    stream_ai_response is an async generator that (per the module's intent)
    persists the user message, streams the model output, and saves the
    assistant message; this handler only wraps it in a StreamingResponse.
    """
    event_stream = stream_ai_response(db, chat_id, user.id, data.content)
    sse_headers = {
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        # Tell nginx not to buffer this response so SSE chunks flush immediately.
        "X-Accel-Buffering": "no",
    }
    return StreamingResponse(
        event_stream,
        media_type="text/event-stream",
        headers=sse_headers,
    )
|