"""List the models available through the Mistral and Ollama APIs.

Standalone utility script: queries each API's model-listing endpoint
and prints one line per model id/name, or an error message with the
HTTP status and body when a request fails.
"""
import os

import requests

# SECURITY: a live Mistral API key was previously committed here in
# plaintext. It must be revoked/rotated; the key is now read from the
# environment instead of being hard-coded.
MISTRAL_API_KEY = os.environ.get("MISTRAL_API_KEY", "")
MISTRAL_API_URL = "https://api.mistral.ai/v1/models"

# NOTE(review): hard-coded remote host — presumably a team server; confirm.
OLLAMA_API_URL = "http://217.182.105.173:11434/api/tags"

# requests.get without a timeout can block forever on a stalled socket.
REQUEST_TIMEOUT = 10


def list_mistral_models() -> None:
    """Print the model ids exposed by the Mistral /v1/models endpoint."""
    headers = {
        "Authorization": f"Bearer {MISTRAL_API_KEY}"
    }
    response = requests.get(MISTRAL_API_URL, headers=headers, timeout=REQUEST_TIMEOUT)
    if response.status_code == 200:
        models = response.json().get('data', [])
        print("Modèles disponibles via Mistral :")
        for model in models:
            print(f"- {model['id']}")
    else:
        print(f"Erreur : {response.status_code} - {response.text}")


def list_ollama_models() -> None:
    """Print the model names exposed by the Ollama /api/tags endpoint."""
    response = requests.get(OLLAMA_API_URL, timeout=REQUEST_TIMEOUT)
    if response.status_code == 200:
        models = response.json().get('models', [])
        print("Modèles disponibles via Ollama :")
        for model in models:
            print(f"- {model['name']}")
    else:
        print(f"Erreur : {response.status_code} - {response.text}")


if __name__ == "__main__":
    list_mistral_models()
    list_ollama_models()