# llm-proxy-go/llm-proxy.toml
# LLM Proxy Configuration

# Upstream LLM API endpoint that proxied requests are forwarded to.
# Must be the full URL including the path, not just the host.
upstream_url = "https://api.openai.com/v1/chat/completions"

# Address the proxy listens on. ":8080" binds port 8080 on all
# interfaces (Go net.Listen-style address — host part optional;
# TODO confirm against the Go source).
listen_addr = ":8080"

# API key presented to the upstream service. Empty by default — set
# before deploying. NOTE(review): presumably injected as a Bearer
# token on forwarded requests; verify against the proxy code.
api_key = ""

# When true, TLS certificate verification for the upstream connection
# is skipped. Leave false outside local testing: true exposes traffic
# (including api_key) to man-in-the-middle interception.
insecure = false