# Merlin Configuration Example
#
# NOTE: TOML has no variable interpolation — "${VAR}" values below are literal
# strings; the consuming application is expected to expand them from the
# environment at load time (including the shell-style default in
# "${AWS_REGION:-us-east-1}" — verify the loader supports that syntax).

[server]
# Bind address and port for the Merlin server.
host = "0.0.0.0"
port = 7777

# --- Providers -------------------------------------------------------------
# Each provider is disabled by default except the local Ollama instance.
# Enable a provider by setting `enabled = true` and exporting its API key.

[providers.openai]
enabled = false
api_key = "${OPENAI_API_KEY}"
model = "gpt-4-turbo"
base_url = "https://api.openai.com/v1"

[providers.anthropic]
enabled = false
api_key = "${ANTHROPIC_API_KEY}"
model = "claude-3-sonnet-20240229"
base_url = "https://api.anthropic.com"

[providers.mistral]
enabled = false
api_key = "${MISTRAL_API_KEY}"
model = "mistral-medium"
base_url = "https://api.mistral.ai/v1"

[providers.gemini]
enabled = false
api_key = "${GEMINI_API_KEY}"
model = "gemini-pro"

[providers.groq]
enabled = false
api_key = "${GROQ_API_KEY}"
model = "llama-3-70b"
base_url = "https://api.groq.com/openai/v1"

# Local Ollama instance — the only provider enabled out of the box.
# No API key required; `endpoint` replaces `base_url` for this provider.
[providers.ollama]
enabled = true
endpoint = "http://localhost:11434"
model = "llama2"

[providers.grok]
enabled = false
api_key = "${GROK_API_KEY}"
model = "grok-beta"
base_url = "https://api.x.ai/v1"

[providers.zai]
enabled = false
api_key = "${ZAI_API_KEY}"
model = "zai-13b"
base_url = "https://api.z.ai/v1"

[providers.moonshot]
enabled = false
api_key = "${MOONSHOT_API_KEY}"
model = "moonshot-v1-8k"
base_url = "https://api.moonshot.cn/v1"

# AWS Bedrock uses SigV4 credentials instead of a single API key.
[providers.bedrock]
enabled = false
access_key = "${AWS_ACCESS_KEY_ID}"
secret_key = "${AWS_SECRET_ACCESS_KEY}"
region = "${AWS_REGION:-us-east-1}"
model = "anthropic.claude-3-sonnet-20240229-v1:0"

[providers.lambdalabs]
enabled = false
api_key = "${LAMBDA_LABS_API_KEY}"
model = "hermes-2-pro-llama-3-8b"
base_url = "https://api.lambdalabs.com/v1"

# --- Routing ---------------------------------------------------------------

[routing]
policy = "epsilon_greedy" # or "thompson_sampling"
# Exploration rate for epsilon-greedy routing (fraction of requests routed
# randomly); ignored when policy = "thompson_sampling" — TODO confirm.
epsilon = 0.15
capabilities_file = "capabilities.toml"

[metrics]
redis_url = "redis://127.0.0.1:6379"

[telemetry]
prometheus_port = 9090
jaeger_endpoint = "http://localhost:14268/api/traces"