LLM router - proxies to provider skills (claude, openai, ollama)

This commit is contained in:
Azat
2026-02-03 00:09:06 +01:00
commit 1d9de9d770
4 changed files with 551 additions and 0 deletions

48
scripts/autorun.sh Normal file
View File

@@ -0,0 +1,48 @@
#!/bin/bash
# Bootstrap the LLM router skill: install Python 3 (Debian/Ubuntu) and
# create a virtualenv with the pinned service dependencies.
set -euo pipefail

# Resolve the skill root (parent of this scripts/ directory) to an ABSOLUTE
# path, so later references keep working even if a function changes cwd.
SKILL_DIR="$(cd "$(dirname "$(dirname "$0")")" && pwd)"
#######################################
# Ensure python3 is available, installing it via apt-get when missing.
# Globals:   none
# Outputs:   progress messages to stdout; errors to stderr
# Returns:   0 on success; 1 if no supported package manager is present
#######################################
install_python() {
  if command -v python3 &>/dev/null; then
    echo "Python already installed: $(python3 --version)"
    return 0
  fi
  # Auto-install only supports apt-based systems; fail with a clear message
  # elsewhere instead of letting apt-get die with "command not found".
  if ! command -v apt-get &>/dev/null; then
    echo "Error: python3 is missing and apt-get is unavailable; install Python 3 manually" >&2
    return 1
  fi
  echo "Installing Python..."
  apt-get update
  apt-get install -y python3 python3-pip python3-venv
  echo "Python installed: $(python3 --version)"
}
#######################################
# Create the skill's virtualenv and install pinned service dependencies.
# Globals:   SKILL_DIR (read)
# Outputs:   progress messages to stdout
# Returns:   0 on success, or immediately if the venv is already complete
#######################################
setup_python_env() {
  local venv_dir="$SKILL_DIR/.venv"
  # Probe for the venv's pip executable rather than the bare directory:
  # a directory left behind by an interrupted earlier run would otherwise
  # be mistaken for a complete environment and skipped forever.
  if [ -x "$venv_dir/bin/pip" ]; then
    echo "Python venv already exists"
    return 0
  fi
  # Discard any partial venv from a previous failed run before recreating.
  rm -rf -- "$venv_dir"
  echo "Creating Python virtual environment..."
  python3 -m venv "$venv_dir"
  echo "Installing Python dependencies..."
  "$venv_dir/bin/pip" install --upgrade pip
  "$venv_dir/bin/pip" install \
    fastapi==0.109.0 \
    uvicorn==0.27.0 \
    websockets==12.0 \
    httpx==0.26.0 \
    pydantic==2.5.0 \
    python-ulid==2.2.0
  echo "Python environment ready"
}
# Entry point: provision the interpreter first, then the virtualenv on
# top of it, and report overall success.
main() {
  install_python
  setup_python_env
  echo "LLM router setup complete"
}

main "$@"

25
scripts/run.sh Normal file
View File

@@ -0,0 +1,25 @@
#!/bin/bash
# Launch the LLM router API, proxying to the configured provider backend.
set -euo pipefail

# Port the router listens on (override with LLM_PORT).
LLM_PORT="${LLM_PORT:-8082}"
# Absolute path to the skill root (parent of this scripts/ directory),
# resolved eagerly so it survives any later cwd changes.
SKILL_DIR="$(cd "$(dirname "$(dirname "$0")")" && pwd)"
VENV_DIR="$SKILL_DIR/.venv"

# Export config for the Python service (src/api.py reads these from env).
export LLM_PORT
export LLM_PROVIDER="${LLM_PROVIDER:-claude}"
export CLAUDE_URL="${CLAUDE_URL:-http://localhost:8888}"
export OPENAI_URL="${OPENAI_URL:-http://localhost:8889}"
export OLLAMA_URL="${OLLAMA_URL:-http://localhost:11434}"
export MEMORY_URL="${MEMORY_URL:-}"

echo "Starting LLM Router on port $LLM_PORT..."
echo "Provider: $LLM_PROVIDER"
# Describe the selected backend; warn (but still proceed) on an unknown
# provider so the Python service can report the unsupported value itself.
case "$LLM_PROVIDER" in
  claude) echo "Backend: $CLAUDE_URL" ;;
  openai) echo "Backend: $OPENAI_URL" ;;
  ollama) echo "Backend: $OLLAMA_URL" ;;
  *) echo "Warning: unknown LLM_PROVIDER '$LLM_PROVIDER'" >&2 ;;
esac

# Fail with an actionable message if the venv was never provisioned,
# instead of exec's cryptic "no such file or directory".
if [ ! -x "$VENV_DIR/bin/python" ]; then
  echo "Error: $VENV_DIR is missing; run scripts/autorun.sh first" >&2
  exit 1
fi

exec "$VENV_DIR/bin/python" "$SKILL_DIR/src/api.py"