# Source listing metadata (not part of the config itself); commented out
# so this file remains valid TOML:
#   Files
#   llm-proxy-go/llm-proxy.toml
#   15 lines
#   235 B
#   TOML
# LLM Proxy Configuration
# Flat key/value config consumed by llm-proxy-go (all keys live in the root table).

# Upstream LLM API endpoint that proxied requests are forwarded to.
upstream_url = "https://api.openai.com/v1/chat/completions"
# Local host:port the proxy binds and listens on.
listen_addr = "127.0.0.1:8080"
# API key used to authenticate against the upstream API.
# NOTE(review): empty by default — presumably must be filled in (or injected
# by the application) before the proxy can authenticate; confirm against the
# consuming code. TOML has no env-var interpolation, so a literal value is
# expected here.
api_key = ""
# Skip TLS certificate verification when connecting to the upstream.
# Leave false in production; true disables transport-security checks and
# should only be used against test endpoints with self-signed certificates.
insecure = false