From 05603caa12ae8bace5ef5c84a5ee76159819002a Mon Sep 17 00:00:00 2001
From: DavePk04 <Dave.Pikop.Pokam@ulb.be>
Date: Sat, 15 Mar 2025 16:56:05 +0100
Subject: [PATCH] Black: fix linting

---
 backend/app/routes/chat.py | 21 +++++----------------
 1 file changed, 5 insertions(+), 16 deletions(-)

diff --git a/backend/app/routes/chat.py b/backend/app/routes/chat.py
index 9222de20..13bd01de 100644
--- a/backend/app/routes/chat.py
+++ b/backend/app/routes/chat.py
@@ -4,49 +4,38 @@
 import requests
 import os
 from dotenv import load_dotenv
 
-# Load API key from .env
 load_dotenv()
 API_KEY = os.getenv("OPENROUTER_API_KEY")
 
-# Mistral API details
 MODEL_NAME = "mistralai/mistral-small-24b-instruct-2501:free"
 API_URL = "https://openrouter.ai/api/v1/chat/completions"
 
-# Initialize FastAPI router
 chatRouter = APIRouter(prefix="/chat", tags=["chat"])
 
-# Message schema
+
 class ChatMessage(BaseModel):
     session_id: str
     role: str
     content: str
 
-# Temporary chat history storage
+
 chat_sessions = {}
 
+
 @chatRouter.post("/")
 async def chat_with_ai(message: ChatMessage):
     session_id = message.session_id
     user_message = {"role": message.role, "content": message.content}
 
-    # Store chat history per session
     if session_id not in chat_sessions:
         chat_sessions[session_id] = []
     chat_sessions[session_id].append(user_message)
 
-    # Prepare request
-    headers = {
-        "Authorization": f"Bearer {API_KEY}",
-        "Content-Type": "application/json"
-    }
+    headers = {"Authorization": f"Bearer {API_KEY}", "Content-Type": "application/json"}
 
-    data = {
-        "model": MODEL_NAME,
-        "messages": chat_sessions[session_id]
-    }
+    data = {"model": MODEL_NAME, "messages": chat_sessions[session_id]}
 
-    # Call Mistral AI
     response = requests.post(API_URL, headers=headers, json=data)
 
     if response.status_code == 200:
--
GitLab