from .base_llm import BaseLLM

import requests


class MistralLarge(BaseLLM):
    """Client for Mistral's ``mistral-large-latest`` chat-completion endpoint.

    Implements the hooks expected by :class:`BaseLLM` (base URL, API key,
    endpoint path, request-payload builder and response parser).
    """

    def __init__(self):
        super().__init__("mistral-large-latest")
        # Low temperature for mostly deterministic answers.
        self.configurer(temperature=0.2, top_p=1)

    def urlBase(self) -> str:
        """Return the root URL of the Mistral REST API."""
        return "https://api.mistral.ai/v1/"

    def cleAPI(self) -> str:
        """Return the Mistral API key.

        SECURITY: the key used to be hard-coded in this file (i.e. leaked
        into version control). It is now read from the ``MISTRAL_API_KEY``
        environment variable; the old literal remains only as a
        backward-compatible fallback and MUST be revoked/rotated.
        """
        import os  # local import: keeps the module's top-level imports unchanged
        # TODO(security): remove the fallback literal once the key is rotated.
        return os.environ.get("MISTRAL_API_KEY", "2iGzTzE9csRQ9IoASoUjplHwEjA200Vh")

    def urlFonction(self) -> str:
        """Return the endpoint path appended to the base URL."""
        return "chat/completions"

    def _preparer_contenu(self, question: str) -> dict:
        """Build the JSON payload for a chat-completion request.

        The system prompt and generation parameters come from the state
        managed by ``BaseLLM`` (``prompt_system``, ``params``).
        """
        return {
            "model": self.modele,
            "messages": [
                {"role": "system", "content": self.prompt_system},
                {"role": "user", "content": question},
            ],
            **self.params,
        }

    def _traiter_reponse(self, reponse: requests.Response) -> str:
        """Extract the assistant's message text from the API response.

        Assumes the standard chat-completion response shape
        (``choices[0].message.content``).
        """
        data = reponse.json()
        return data["choices"][0]["message"]["content"]