# Synced files: Grepai optimization documentation, Ollama Assistant MCP
# server implementation, session logs and context updates.
# Machine: ACG-M-L5090  Timestamp: 2026-01-22 19:22:24
# Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
# Setup Ollama MCP Server
# Run this script to install dependencies

# Abort on the first terminating error so a partial setup fails loudly.
$ErrorActionPreference = "Stop"

# NOTE: the repetition must be parenthesized. In argument mode PowerShell
# does not evaluate "="*80 as an expression -- it would pass the literal
# string '=*80' to Write-Host instead of an 80-character rule.
Write-Host ("=" * 80) -ForegroundColor Cyan
Write-Host "Ollama MCP Server Setup" -ForegroundColor Cyan
Write-Host ("=" * 80) -ForegroundColor Cyan
Write-Host ""
|
# Check if Python is available on PATH.
Write-Host "[INFO] Checking Python..." -ForegroundColor Cyan
try {
    $pythonVersion = python --version 2>&1
    # Native commands do not trigger $ErrorActionPreference = "Stop" on a
    # non-zero exit code. In particular the Windows Store 'python' alias
    # stub exits non-zero without throwing, which would otherwise let the
    # script print "[OK]" with the stub's message as the version string.
    if ($LASTEXITCODE -ne 0) {
        throw "python exited with code $LASTEXITCODE"
    }
    Write-Host "[OK] $pythonVersion" -ForegroundColor Green
}
catch {
    Write-Host "[ERROR] Python not found. Install Python 3.8+ from python.org" -ForegroundColor Red
    exit 1
}
|
# Create the virtual environment (idempotent: skipped when ./venv exists).
Write-Host "[INFO] Creating virtual environment..." -ForegroundColor Cyan
if (Test-Path "venv") {
    Write-Host "[SKIP] Virtual environment already exists" -ForegroundColor Yellow
}
else {
    python -m venv venv
    # venv creation is a native command; "Stop" preference does not catch
    # its non-zero exit code, so check explicitly before claiming success.
    if ($LASTEXITCODE -ne 0) {
        Write-Host "[ERROR] Failed to create virtual environment" -ForegroundColor Red
        exit 1
    }
    Write-Host "[OK] Virtual environment created" -ForegroundColor Green
}
|
|
# Activate the venv and install dependencies.
# Activation sets $env:PATH / $env:VIRTUAL_ENV, which persist process-wide,
# so the plain 'pip' below resolves inside the venv.
Write-Host "[INFO] Installing dependencies..." -ForegroundColor Cyan
& "venv\Scripts\activate.ps1"
python -m pip install --upgrade pip -q
pip install -r requirements.txt
# pip is a native command; a failed install would otherwise fall through
# to the "[OK]" message below.
if ($LASTEXITCODE -ne 0) {
    Write-Host "[ERROR] Dependency installation failed (see pip output above)" -ForegroundColor Red
    exit 1
}

Write-Host "[OK] Dependencies installed" -ForegroundColor Green
Write-Host ""
|
|
# Check Ollama installation. If the 'ollama' command is missing entirely,
# the CommandNotFoundException lands in the outer catch below.
Write-Host "[INFO] Checking Ollama installation..." -ForegroundColor Cyan
try {
    $ollamaVersion = ollama --version 2>&1
    Write-Host "[OK] Ollama installed: $ollamaVersion" -ForegroundColor Green

    # Check if the Ollama server is listening on its default port (11434).
    try {
        # Only reachability matters; discard the response instead of
        # leaving an unused $response variable behind.
        $null = Invoke-WebRequest -Uri "http://localhost:11434" -Method GET -TimeoutSec 2 -ErrorAction Stop
        Write-Host "[OK] Ollama server is running" -ForegroundColor Green
    }
    catch {
        Write-Host "[WARNING] Ollama is installed but not running" -ForegroundColor Yellow
        Write-Host "[INFO] Start Ollama with: ollama serve" -ForegroundColor Cyan
    }

    # Check for recommended models. Dots are escaped so '.' is matched
    # literally rather than as the regex any-character wildcard.
    Write-Host "[INFO] Checking for installed models..." -ForegroundColor Cyan
    $models = ollama list 2>&1
    if ($models -match "llama3\.1:8b|qwen2\.5-coder|codellama") {
        Write-Host "[OK] Found compatible models" -ForegroundColor Green
    }
    else {
        Write-Host "[WARNING] No recommended models found" -ForegroundColor Yellow
        Write-Host "[INFO] Pull a model with: ollama pull llama3.1:8b" -ForegroundColor Cyan
    }
}
catch {
    Write-Host "[WARNING] Ollama not installed" -ForegroundColor Yellow
    Write-Host "[INFO] Install from: https://ollama.ai/download" -ForegroundColor Cyan
    Write-Host "[INFO] Or run: winget install Ollama.Ollama" -ForegroundColor Cyan
}
|
|
Write-Host ""
# Parenthesize the repetition: a bare "="*80 in argument mode would print
# the literal string '=*80', not an 80-character rule.
Write-Host ("=" * 80) -ForegroundColor Cyan
Write-Host "Setup Complete!" -ForegroundColor Green
Write-Host ("=" * 80) -ForegroundColor Cyan
Write-Host ""
Write-Host "Next steps:" -ForegroundColor Cyan
Write-Host "1. Install Ollama if not already installed: winget install Ollama.Ollama"
Write-Host "2. Pull a model: ollama pull llama3.1:8b"
Write-Host "3. Start Ollama: ollama serve"
Write-Host "4. Add to .mcp.json and restart Claude Code"
Write-Host ""
|