diff --git a/backend/app/routes/chat.py b/backend/app/routes/chat.py
index 9222de206ecd9587192ad02289b17badb17c10f6..13bd01dee150c3171db976cd2c88dd671a99bb29 100644
--- a/backend/app/routes/chat.py
+++ b/backend/app/routes/chat.py
@@ -4,49 +4,38 @@
 import requests
 import os
 from dotenv import load_dotenv
 
-# Load API key from .env
 load_dotenv()
 API_KEY = os.getenv("OPENROUTER_API_KEY")
 
-# Mistral API details
 MODEL_NAME = "mistralai/mistral-small-24b-instruct-2501:free"
 API_URL = "https://openrouter.ai/api/v1/chat/completions"
 
-# Initialize FastAPI router
 chatRouter = APIRouter(prefix="/chat", tags=["chat"])
 
-# Message schema
+
 class ChatMessage(BaseModel):
     session_id: str
     role: str
     content: str
 
-# Temporary chat history storage
+
 chat_sessions = {}
 
+
 @chatRouter.post("/")
 async def chat_with_ai(message: ChatMessage):
     session_id = message.session_id
     user_message = {"role": message.role, "content": message.content}
 
-    # Store chat history per session
     if session_id not in chat_sessions:
         chat_sessions[session_id] = []
     chat_sessions[session_id].append(user_message)
 
-    # Prepare request
-    headers = {
-        "Authorization": f"Bearer {API_KEY}",
-        "Content-Type": "application/json"
-    }
+    headers = {"Authorization": f"Bearer {API_KEY}", "Content-Type": "application/json"}
 
-    data = {
-        "model": MODEL_NAME,
-        "messages": chat_sessions[session_id]
-    }
+    data = {"model": MODEL_NAME, "messages": chat_sessions[session_id]}
 
-    # Call Mistral AI
     response = requests.post(API_URL, headers=headers, json=data)
 
     if response.status_code == 200: