mirror of https://github.com/Ladebeze66/llm_lab_perso.git (synced 2025-12-15 19:26:52 +01:00)
53 lines · 1.8 KiB · Python
from core.mistral7b import Mistral7B
from core.codellama13b_python import CodeLlama13bPython
from core.llama2_13b import Llama2_13b


class LLMFactory:
    """
    Factory that dynamically creates LLM model instances from a textual identifier.
    """

    _registry = {
        "mistral:latest": Mistral7B,
        "codellama:13b-python": CodeLlama13bPython,
        "llama2:13b": Llama2_13b,
        # Aliases for backward compatibility
        "mistral7b": Mistral7B,
        "codellama13b-python": CodeLlama13bPython,
        "llama2-13b": Llama2_13b,
        # Add other LLM models here
    }

    # Mapping between Ollama names and internal identifiers
    _model_aliases = {
        "mistral:latest": ["mistral7b", "mistral"],
        "codellama:13b-python": ["codellama13b-python", "codellama-python", "codellama"],
        "llama2:13b": ["llama2-13b", "llama2", "llama"],
    }

    @staticmethod
    def create(model_name: str):
        """
        Create an LLM model instance from its textual identifier.
        """
        model_name = model_name.lower()

        # Try a direct registry lookup first
        if model_name in LLMFactory._registry:
            return LLMFactory._registry[model_name]()

        # Fall back to the alias table
        for canonical, aliases in LLMFactory._model_aliases.items():
            if model_name in aliases:
                return LLMFactory._registry[canonical]()

        # Reaching this point means the model is not supported
        raise ValueError(f"Unsupported LLM model: {model_name}")

    @staticmethod
    def get_available_models():
        """
        Return the list of available models (Ollama names).
        """
        return [m for m in LLMFactory._registry
                if m not in sum(LLMFactory._model_aliases.values(), [])]  # Exclude aliases
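

# Illustrative usage sketch, not part of the original file. It assumes each
# core.* model class can be constructed with no arguments, as the registry
# entries above imply. Run this module directly to see the factory resolve
# both canonical Ollama names and backward-compatibility aliases.
if __name__ == "__main__":
    print("Available models:", LLMFactory.get_available_models())

    # A canonical name and its legacy alias resolve to the same class.
    model_a = LLMFactory.create("mistral:latest")
    model_b = LLMFactory.create("mistral7b")
    assert type(model_a) is type(model_b)

    # Unknown identifiers raise ValueError.
    try:
        LLMFactory.create("gpt-4")
    except ValueError as exc:
        print(exc)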