Mirror of https://github.com/Ladebeze66/llm_lab_perso.git (synced 2025-12-13 09:06:50 +01:00).
PowerShell script — 141 lines, 5.5 KiB.
# PowerShell script to launch the LLM Lab API server autonomously
# Starts Ollama if it is not already running, activates the Python virtual
# environment in .\llmlab, installs Flask / Flask-CORS when missing, then
# runs api_server.py (foreground by default; pass -background or -b to run
# it as a minimized background process).

Write-Host "=== LLM Lab API Server Automatic Launch for Cursor and Obsidian ===" -ForegroundColor Green
# Function to check if a process is running.
# Arguments: -ProcessName — process image name without extension (e.g. "ollama").
# Returns:   $true when at least one matching process exists, otherwise $false.
function Test-ProcessRunning {
    [OutputType([bool])]
    param (
        [string]$ProcessName
    )
    # Cast to [bool] so the function honors the Test-* verb convention
    # (returns a boolean) instead of leaking Get-Process output objects.
    # Truthiness for existing callers such as `-not (Test-ProcessRunning ...)`
    # is unchanged. SilentlyContinue: no process found is not an error here.
    return [bool](Get-Process -Name $ProcessName -ErrorAction SilentlyContinue)
}
|
# Function to start Ollama if not already running.
# Side effects: may launch ollama.exe minimized and sleep 5s for startup;
# prints status/diagnostics to the console. Returns nothing.
function Start-OllamaIfNeeded {
    if (-not (Test-ProcessRunning -ProcessName "ollama")) {
        Write-Host "Starting Ollama..." -ForegroundColor Yellow

        # Find Ollama location. The system-wide installer uses Program Files;
        # the default per-user installer puts it under %LOCALAPPDATA%. Fall
        # back to PATH lookup as a last resort.
        $candidatePaths = @(
            "C:\Program Files\Ollama\ollama.exe"
            (Join-Path $env:LOCALAPPDATA "Programs\Ollama\ollama.exe")
        )
        $ollamaPath = $candidatePaths | Where-Object { Test-Path -Path $_ } | Select-Object -First 1
        if (-not $ollamaPath) {
            $ollamaCmd = Get-Command ollama -ErrorAction SilentlyContinue
            if ($ollamaCmd) { $ollamaPath = $ollamaCmd.Source }
        }

        if ($ollamaPath) {
            # Start Ollama in background
            Start-Process -FilePath $ollamaPath -WindowStyle Minimized
            Write-Host "Ollama started. Waiting for server initialization..." -ForegroundColor Green
            Start-Sleep -Seconds 5 # Wait for Ollama server to start
        } else {
            Write-Host "Unable to find Ollama. Please ensure it is installed." -ForegroundColor Red
            Write-Host "You can download it from https://ollama.com/download/windows" -ForegroundColor Yellow
        }
    } else {
        Write-Host "Ollama is already running." -ForegroundColor Green
    }
}
|
# Check if virtual environment exists; only proceed (and start Ollama)
# when the .\llmlab venv directory is present.
if (Test-Path -Path ".\llmlab") {
    # Start Ollama automatically if needed
    Start-OllamaIfNeeded
} else {
    Write-Host "Virtual environment not found. Please run setup_env.bat to create it." -ForegroundColor Red
    exit 1
}
|
# Activate virtual environment
Write-Host "Activating virtual environment..." -ForegroundColor Cyan
try {
    # Activate.ps1 prepends llmlab\Scripts to PATH, so 'python' and 'pip'
    # below resolve to the venv's interpreter.
    & .\llmlab\Scripts\Activate.ps1
} catch {
    Write-Host "Error activating virtual environment: $_" -ForegroundColor Red
    Write-Host "Trying alternative activation..." -ForegroundColor Yellow
    # NOTE(review): this fallback activates the venv inside cmd and then
    # blocks in a nested interactive PowerShell ('-NoExit') until the user
    # closes it; afterwards the script exits with code 1 regardless, so the
    # API server is never started on this path — confirm this is intended.
    & cmd /c ".\llmlab\Scripts\activate.bat && powershell -NoExit"
    exit 1
}
|
# Check that Flask and Flask-CORS are installed
Write-Host "Checking dependencies..." -ForegroundColor Cyan
$flaskInstalled = $false
$flaskCorsInstalled = $false

try {
    # `pip list` prints one "Name    Version" line per package. Anchor the
    # match at start-of-line and require trailing whitespace so the
    # "Flask-CORS" line cannot satisfy the "Flask" check. (The previous
    # substring test `-match "flask" -and -notmatch "flask-cors"` conflated
    # the two packages and could report Flask's state incorrectly.)
    $modules = pip list
    $flaskInstalled = [bool]($modules -match '^(?i)flask\s')
    $flaskCorsInstalled = [bool]($modules -match '^(?i)flask-cors\s')
} catch {
    Write-Host "Error checking modules: $_" -ForegroundColor Red
}

# Install missing dependencies
if (-not $flaskInstalled) {
    Write-Host "Installing Flask..." -ForegroundColor Yellow
    pip install flask
}

if (-not $flaskCorsInstalled) {
    Write-Host "Installing Flask-CORS..." -ForegroundColor Yellow
    pip install flask-cors
}
|
# Check if Ollama is ready: poll the local Ollama HTTP API up to $maxRetries
# times, 3 seconds apart, and list the available models on success.
$ollamaReady = $false
$retryCount = 0
$maxRetries = 5

Write-Host "Checking if Ollama server is ready..." -ForegroundColor Cyan
while (-not $ollamaReady -and $retryCount -lt $maxRetries) {
    try {
        # -ErrorAction Stop makes connection failures throw into the catch
        # block. The previous SilentlyContinue could leave $ollamaResponse
        # as $null WITHOUT throwing, so $retryCount was never incremented
        # and the loop spun forever.
        $ollamaResponse = Invoke-WebRequest -Uri "http://localhost:11434/api/tags" -UseBasicParsing -ErrorAction Stop
        if ($ollamaResponse.StatusCode -eq 200) {
            $ollamaReady = $true
            # @(...) guards against a single-model response collapsing to a
            # scalar, which would make .Length/.Count unreliable.
            $models = @(($ollamaResponse.Content | ConvertFrom-Json).models)
            $modelCount = $models.Count

            Write-Host "Ollama is ready with $modelCount available models." -ForegroundColor Green

            # Display available models
            if ($modelCount -gt 0) {
                Write-Host "Available models:" -ForegroundColor Cyan
                foreach ($model in $models) {
                    Write-Host " - $($model.name)" -ForegroundColor Cyan
                }
            }
        }
    } catch {
        # Connection refused / timeout: fall through to the retry
        # bookkeeping below.
    }

    # Count the attempt on ANY non-success path (exception or non-200
    # response) so the loop is guaranteed to terminate.
    if (-not $ollamaReady) {
        $retryCount++
        if ($retryCount -lt $maxRetries) {
            Write-Host "Ollama not ready yet. Retrying in 3 seconds..." -ForegroundColor Yellow
            Start-Sleep -Seconds 3
        } else {
            Write-Host "Unable to connect to Ollama after several attempts." -ForegroundColor Red
            Write-Host "API server will start, but Ollama models might not be available." -ForegroundColor Yellow
        }
    }
}
|
# Start the API server: announce the address and the endpoints api_server.py
# exposes, then remind the user how to stop it.
Write-Host "`nStarting API server on http://localhost:8000..." -ForegroundColor Green
Write-Host "Available endpoints:" -ForegroundColor Cyan
$endpointLines = @(
    " - http://localhost:8000/v1/chat/completions (OpenAI/Cursor compatible)"
    " - http://localhost:8000/v1/models (model list)"
    " - http://localhost:8000/generate (simplified API for Obsidian)"
    " - http://localhost:8000/health (server status)"
)
foreach ($endpointLine in $endpointLines) {
    Write-Host $endpointLine -ForegroundColor Cyan
}
Write-Host "`nUse Ctrl+C to stop the server" -ForegroundColor Yellow
|
# Run the server in background if -Background parameter is specified
# (accepted via $args since the script declares no param block).
$backgroundRequested = ($args -contains "-background") -or ($args -contains "-b")

if (-not $backgroundRequested) {
    # Normal server execution
    python api_server.py

    # This code will only run after server shutdown
    Write-Host "`nAPI server stopped." -ForegroundColor Cyan
    Write-Host "Deactivating virtual environment..." -ForegroundColor Cyan
    deactivate
} else {
    Write-Host "Starting server in background..." -ForegroundColor Green
    Start-Process -FilePath "python" -ArgumentList "api_server.py" -WindowStyle Minimized
    Write-Host "API server started in background. Use 'Get-Process python' to check its status." -ForegroundColor Green

    # Keep virtual environment active
    Write-Host "Virtual environment remains active. Type 'deactivate' to deactivate it." -ForegroundColor Yellow
}