LLM router - proxies to provider skills (claude, openai, ollama)
scripts/run.sh (new file, +25)
@@ -0,0 +1,25 @@
#!/bin/bash
set -e

LLM_PORT="${LLM_PORT:-8082}"
SKILL_DIR="$(dirname "$(dirname "$0")")"
VENV_DIR="$SKILL_DIR/.venv"

# Export config for Python
export LLM_PORT
export LLM_PROVIDER="${LLM_PROVIDER:-claude}"
export CLAUDE_URL="${CLAUDE_URL:-http://localhost:8888}"
export OPENAI_URL="${OPENAI_URL:-http://localhost:8889}"
export OLLAMA_URL="${OLLAMA_URL:-http://localhost:11434}"
export MEMORY_URL="${MEMORY_URL:-}"

echo "Starting LLM Router on port $LLM_PORT..."
echo "Provider: $LLM_PROVIDER"

case "$LLM_PROVIDER" in
  claude) echo "Backend: $CLAUDE_URL" ;;
  openai) echo "Backend: $OPENAI_URL" ;;
  ollama) echo "Backend: $OLLAMA_URL" ;;
esac

exec "$VENV_DIR/bin/python" "$SKILL_DIR/src/api.py"
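The router entry point, src/api.py, is not part of this diff. As a rough, hypothetical sketch only (not the actual implementation), a minimal version consistent with the environment variables exported above would read LLM_PROVIDER and forward request bodies to the matching backend URL:

# Hypothetical stand-in for src/api.py (not in this commit): a minimal proxy
# that selects a backend from LLM_PROVIDER and relays POST requests to it.
import os
import urllib.error
import urllib.request
from http.server import BaseHTTPRequestHandler, HTTPServer

BACKENDS = {
    "claude": os.environ.get("CLAUDE_URL", "http://localhost:8888"),
    "openai": os.environ.get("OPENAI_URL", "http://localhost:8889"),
    "ollama": os.environ.get("OLLAMA_URL", "http://localhost:11434"),
}
PROVIDER = os.environ.get("LLM_PROVIDER", "claude")
PORT = int(os.environ.get("LLM_PORT", "8082"))

class RouterHandler(BaseHTTPRequestHandler):
    def do_POST(self):
        # Read the incoming body and relay it to the configured backend.
        length = int(self.headers.get("Content-Length", 0))
        body = self.rfile.read(length)
        req = urllib.request.Request(
            BACKENDS[PROVIDER] + self.path,
            data=body,
            headers={"Content-Type": self.headers.get("Content-Type", "application/json")},
            method="POST",
        )
        try:
            with urllib.request.urlopen(req) as resp:
                payload = resp.read()
                self.send_response(resp.status)
                self.send_header("Content-Type", resp.headers.get("Content-Type", "application/json"))
                self.end_headers()
                self.wfile.write(payload)
        except urllib.error.HTTPError as err:
            # Pass backend errors through unchanged.
            self.send_response(err.code)
            self.end_headers()
            self.wfile.write(err.read())

if __name__ == "__main__":
    print(f"Routing {PROVIDER} traffic to {BACKENDS[PROVIDER]} on port {PORT}")
    HTTPServer(("", PORT), RouterHandler).serve_forever()

Because the script only defaults LLM_PROVIDER, the backend can be switched at launch time, e.g. LLM_PROVIDER=ollama scripts/run.sh. (MEMORY_URL is exported but not used in this sketch.)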