Files
dev-intel-poc/.env.example
2026-03-04 04:41:05 +00:00

18 lines
396 B
Plaintext

# LLM Backend: "ollama" or "openai"
LLM_BACKEND=ollama
# Ollama settings (used when LLM_BACKEND=ollama)
OLLAMA_URL=http://192.168.86.172:11434
OLLAMA_MODEL=qwen2.5:7b
# OpenAI-compatible settings (Kiro gateway, OpenRouter, etc.) — uncomment and set when LLM_BACKEND=openai
# OPENAI_URL=http://192.168.86.11:8000
# OPENAI_MODEL=claude-haiku-4
# OPENAI_API_KEY=not-needed
# Git repository URL to ingest
TARGET_REPO=https://github.com/labstack/echo.git
# Parallelism limit (presumably the max number of concurrent requests/workers — confirm against the consuming code)
MAX_CONCURRENT=4