-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathclaude-code-proxy.env
More file actions
29 lines (25 loc) · 1.21 KB
/
claude-code-proxy.env
File metadata and controls
29 lines (25 loc) · 1.21 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
# Required API Keys
# Note: All API keys below are dummy values for local testing/evaluation
# They are not real secrets and are safe to commit to version control
ANTHROPIC_API_KEY="ignored-ansi-uses-network-auth"
# Dummy LiteLLM virtual key - generated for local testing only
OPENAI_API_KEY="st-aighie0ZahY6Ic"
GEMINI_API_KEY="ignored-not-needed"
# GROQ_API_KEY="gsk_... maybe your groq key here"

# Provider Preference and Model Mapping
# Controls which provider (google, openai, or anthropic) is preferred for
# mapping haiku/sonnet. Defaults to openai if not set.
# Set to "anthropic" for "just an Anthropic proxy" mode (no remapping).
# Use OpenAI provider since your ANSI endpoint is OpenAI-compatible.
# NOTE(review): this variable was previously declared twice with the same
# value; consolidated to a single declaration so loader precedence
# (first-wins vs last-wins) can never matter.
PREFERRED_PROVIDER="openai"

# Point to your ANSI Bedrock endpoint instead of OpenAI
OPENAI_BASE_URL="http://192.168.64.60:4000/v1"
# OPENAI_BASE_URL="https://llm-api-access.my-endpoint.example.com/v1"

# Map to your actual ANSI model
# Since you only have one model available, use it for both big and small
BIG_MODEL="groq-gpt-oss-120b"
SMALL_MODEL="groq-llama-3-8b"
## This didn't work! Tool calls went nowhere
# BIG_MODEL="groq-meta-llama-4-maverick"
# SMALL_MODEL="groq-meta-llama-4-scout"