"""Use case: resolution check on a support-ticket transcript (text only).

Loads a pre-filtered, human-readable ticket transcript, asks the
"resolution_checker" agent whether the customer's issue was resolved,
and prints the model's answer in English and French.
"""
import os
import sys

# Make the project root importable when running this script directly.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))

from core.factory import LLMFactory
from agents.roles import AGENTS

ROLE = "resolution_checker"
PROMPT = (
    "Based on the support conversation below, has the issue been resolved? "
    "Explain your reasoning."
)
# Pre-filtered, human-readable version of the ticket (plain text, not JSON).
TICKET_PATH = "data/ticket_001.txt"
CUSTOM_PARAMS = {
    "temperature": 0.2,  # low temperature: we want a stable, factual verdict
    "top_p": 1.0,
    "format": "json",
}


def main() -> None:
    """Run the resolution check and print the EN/FR responses."""
    with open(TICKET_PATH, "r", encoding="utf-8") as f:
        ticket_text = f.read()

    model = LLMFactory.create("llama3.2-vision:90b")
    model.set_role(ROLE, AGENTS[ROLE])
    model.params.update(CUSTOM_PARAMS)

    # translate=True — presumably the model wrapper returns (english, french);
    # NOTE(review): confirm the tuple order against LLMFactory's generate().
    response_en, response_fr = model.generate(
        user_prompt=f"""{PROMPT}

{ticket_text}
""",
        translate=True,
    )

    print("[EN]", response_en)
    print("[FR]", response_fr)


if __name__ == "__main__":
    main()