functions/pipes/anthropic/main.py (75 changes: 57 additions & 18 deletions)
@@ -20,22 +20,52 @@
class Pipe:
class Valves(BaseModel):
ANTHROPIC_API_KEY: str = Field(default="")
ENABLE_1M_CONTEXT: bool = Field(
default=False,
description="Enable 1M token context window for compatible Sonnet 4/4.5 models (requires Tier 4 access)",
)

def __init__(self):
self.type = "manifold"
self.id = "anthropic"
self.name = "anthropic/"
self.valves = self.Valves(
**{"ANTHROPIC_API_KEY": os.getenv("ANTHROPIC_API_KEY", "")}
**{
"ANTHROPIC_API_KEY": os.getenv("ANTHROPIC_API_KEY", ""),
"ENABLE_1M_CONTEXT": os.getenv("ANTHROPIC_ENABLE_1M_CONTEXT", "false").lower() == "true"
}
)
self.MAX_IMAGE_SIZE = 5 * 1024 * 1024 # 5MB per image

# Model cache
self._model_cache: Optional[List[Dict[str, str]]] = None
self._model_cache_time: float = 0
self._cache_ttl = int(os.getenv("ANTHROPIC_MODEL_CACHE_TTL", "600"))
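
As a quick aside (not part of the diff): a minimal sketch of driving these defaults from the environment before the pipe is constructed. The values are illustrative; only the variable names come from the code above.

import os

os.environ["ANTHROPIC_API_KEY"] = "sk-ant-..."       # placeholder, substitute a real key
os.environ["ANTHROPIC_ENABLE_1M_CONTEXT"] = "true"   # opts in to the 1M context beta
os.environ["ANTHROPIC_MODEL_CACHE_TTL"] = "300"      # cache the model list for 5 minutes

pipe = Pipe()
assert pipe.valves.ENABLE_1M_CONTEXT is True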

def get_anthropic_models_from_api(self, force_refresh: bool = False) -> List[Dict[str, str]]:
def _is_1m_compatible_model(self, model_id: str) -> bool:
"""
Check if a model is compatible with the 1M token context window.
Currently supports Claude Sonnet 4 and Sonnet 4.5.

Args:
model_id: The model identifier

Returns:
True if the model supports 1M context window
"""
compatible_models = [
"claude-sonnet-4-5",
"claude-4-sonnet",
"claude-sonnet-4",
]

# Check whether model_id contains any compatible model identifier (substring match)
model_lower = model_id.lower()
return any(compatible in model_lower for compatible in compatible_models)
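
For illustration (outside the diff), how the substring match behaves on a couple of model ids; the ids below are examples only:

pipe = Pipe()
print(pipe._is_1m_compatible_model("claude-sonnet-4-5-20250929"))  # True: contains "claude-sonnet-4-5"
print(pipe._is_1m_compatible_model("claude-3-5-haiku-20241022"))   # False: no Sonnet 4/4.5 marker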

def get_anthropic_models_from_api(
self, force_refresh: bool = False
) -> List[Dict[str, str]]:
"""
Retrieve available Anthropic models from the API.
Uses caching to reduce API calls.
@@ -69,35 +99,38 @@ def get_anthropic_models_from_api(self, force_refresh: bool = False) -> List[Dict[str, str]]:
"anthropic-version": "2023-06-01",
"content-type": "application/json",
}

response = requests.get(
"https://api.anthropic.com/v1/models",
headers=headers,
timeout=10
"https://api.anthropic.com/v1/models", headers=headers, timeout=10
)

if response.status_code != 200:
raise Exception(f"HTTP Error {response.status_code}: {response.text}")

data = response.json()
models = []

for model in data.get("data", []):
models.append({
"id": model["id"],
"name": model.get("display_name", model["id"]),
})

models.append(
{
"id": model["id"],
"name": model.get("display_name", model["id"]),
}
)

# Update cache
self._model_cache = models
self._model_cache_time = current_time

return models

except Exception as e:
print(f"Error fetching Anthropic models: {e}")
return [
{"id": "error", "name": f"Could not fetch models from Anthropic: {str(e)}"}
{
"id": "error",
"name": f"Could not fetch models from Anthropic: {str(e)}",
}
]
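
For reference (outside the diff), a short illustration of the shape this method caches and returns; it assumes a valid ANTHROPIC_API_KEY is set, and the ids and display names shown are examples, not guaranteed values:

models = Pipe().get_anthropic_models_from_api()
# Roughly: [{"id": "claude-sonnet-4-5", "name": "Claude Sonnet 4.5"}, ...]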

def get_anthropic_models(self) -> List[Dict[str, str]]:
@@ -199,6 +232,12 @@ def pipe(self, body: dict) -> Union[str, Generator, Iterator]:
"content-type": "application/json",
}

# Add beta header for 1M context window if enabled and using compatible model
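# Strip everything up to the first "." (the manifold prefix) from the model id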
model_id = body["model"][body["model"].find(".") + 1 :]
if self.valves.ENABLE_1M_CONTEXT and self._is_1m_compatible_model(model_id):
headers["anthropic-beta"] = "context-1m-2025-08-07"
print(f"Using 1M context window for model: {model_id}")

url = "https://api.anthropic.com/v1/messages"

try:
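To round this out (not part of the diff): a minimal standalone sketch of a Messages API call carrying the same beta header. The header names, URL, and beta value come from the code above; the API key handling and payload are illustrative assumptions, not the pipe's actual request code.

import os
import requests

headers = {
    "x-api-key": os.environ["ANTHROPIC_API_KEY"],
    "anthropic-version": "2023-06-01",
    "content-type": "application/json",
    # Opt-in added by pipe() when ENABLE_1M_CONTEXT is on and the model is Sonnet 4/4.5
    "anthropic-beta": "context-1m-2025-08-07",
}
payload = {
    "model": "claude-sonnet-4-5",  # example model id
    "max_tokens": 1024,
    "messages": [{"role": "user", "content": "Hello"}],
}
response = requests.post(
    "https://api.anthropic.com/v1/messages", headers=headers, json=payload, timeout=10
)
print(response.status_code)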