
Commit d94459d

refactor: remove fallbacks from Anthropic context window detection
1 parent a9fc083

File tree

1 file changed: +25, -37 lines changed


packages/agent/src/core/llm/providers/anthropic.ts

Lines changed: 25 additions & 37 deletions
@@ -12,21 +12,8 @@ import {
   ProviderOptions,
 } from '../types.js';
 
-// Fallback model context window sizes for Anthropic models
-// Used only if models.list() call fails or returns incomplete data
-const ANTHROPIC_MODEL_LIMITS_FALLBACK: Record<string, number> = {
-  default: 200000,
-  'claude-3-7-sonnet-20250219': 200000,
-  'claude-3-7-sonnet-latest': 200000,
-  'claude-3-5-sonnet-20241022': 200000,
-  'claude-3-5-sonnet-latest': 200000,
-  'claude-3-haiku-20240307': 200000,
-  'claude-3-opus-20240229': 200000,
-  'claude-3-sonnet-20240229': 200000,
-  'claude-2.1': 100000,
-  'claude-2.0': 100000,
-  'claude-instant-1.2': 100000,
-};
+// Cache for model context window sizes
+const modelContextWindowCache: Record<string, number> = {};
 
 /**
  * Anthropic-specific options
@@ -97,9 +84,6 @@ function addCacheControlToMessages(
   });
 }
 
-// Cache for model context window sizes
-const modelContextWindowCache: Record<string, number> = {};
-
 function tokenUsageFromMessage(
   message: Anthropic.Message,
   model: string,
@@ -112,12 +96,15 @@ function tokenUsageFromMessage(
   usage.output = message.usage.output_tokens;
 
   const totalTokens = usage.input + usage.output;
-  // Use provided context window, or fallback to cached value, or use hardcoded fallback
-  const maxTokens =
-    contextWindow ||
-    modelContextWindowCache[model] ||
-    ANTHROPIC_MODEL_LIMITS_FALLBACK[model] ||
-    ANTHROPIC_MODEL_LIMITS_FALLBACK.default;
+
+  // Use provided context window or fallback to cached value
+  const maxTokens = contextWindow || modelContextWindowCache[model];
+
+  if (!maxTokens) {
+    throw new Error(
+      `Context window size not available for model: ${model}. Make sure to initialize the model properly.`,
+    );
+  }
 
   return {
     usage,
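
The new lookup in tokenUsageFromMessage reduces to a cache-or-throw resolution. A minimal standalone TypeScript sketch of that pattern (the helper name resolveContextWindow is illustrative, not part of the commit):

// Cache populated elsewhere, by the provider's initialization step.
const modelContextWindowCache: Record<string, number> = {};

// Resolve the context window from an explicit value or the cache, and throw
// instead of silently falling back to a hardcoded per-model table.
function resolveContextWindow(model: string, contextWindow?: number): number {
  const maxTokens = contextWindow || modelContextWindowCache[model];
  if (!maxTokens) {
    throw new Error(
      `Context window size not available for model: ${model}. Make sure to initialize the model properly.`,
    );
  }
  return maxTokens;
}

This is the logic that replaces the deleted ANTHROPIC_MODEL_LIMITS_FALLBACK table.
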
@@ -155,26 +142,28 @@ export class AnthropicProvider implements LLMProvider {
 
     // Initialize model context window detection
     // This is async but we don't need to await it here
-    // If it fails, we'll fall back to hardcoded limits
+    // If it fails, an error will be thrown when the model is used
     this.initializeModelContextWindow().catch((error) => {
-      console.warn(
-        `Failed to initialize model context window: ${error.message}`,
+      console.error(
+        `Failed to initialize model context window: ${error.message}. The model will not work until context window information is available.`,
       );
     });
   }
 
   /**
    * Fetches the model context window size from the Anthropic API
    *
-   * @returns The context window size if successfully fetched, otherwise undefined
+   * @returns The context window size
+   * @throws Error if the context window size cannot be determined
    */
-  private async initializeModelContextWindow(): Promise<number | undefined> {
+  private async initializeModelContextWindow(): Promise<number> {
     try {
       const response = await this.client.models.list();
 
       if (!response?.data || !Array.isArray(response.data)) {
-        console.warn(`Invalid response from models.list() for ${this.model}`);
-        return undefined;
+        throw new Error(
+          `Invalid response from models.list() for ${this.model}`,
+        );
       }
 
       // Try to find the exact model
@@ -208,15 +197,14 @@ export class AnthropicProvider implements LLMProvider {
         modelContextWindowCache[this.model] = contextWindow;
         return contextWindow;
       } else {
-        console.warn(`No context window information found for ${this.model}`);
-        return undefined;
+        throw new Error(
+          `No context window information found for model: ${this.model}`,
+        );
       }
     } catch (error) {
-      console.warn(
-        `Failed to fetch model context window for ${this.model}: ${(error as Error).message}`,
+      throw new Error(
+        `Failed to determine context window size for model ${this.model}: ${(error as Error).message}`,
       );
-      // Will fall back to hardcoded limits
-      return undefined;
     }
   }
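
Because initializeModelContextWindow() is still started from the constructor without being awaited, a detection failure is only logged there; the hard error now surfaces later, when token accounting needs a context window that was never cached. A self-contained TypeScript sketch of that timing (the names below are illustrative, not the provider's API):

// Simulates an initialization step that cannot determine the context window.
async function initializeContextWindow(model: string): Promise<number> {
  throw new Error(`No context window information found for model: ${model}`);
}

const cache: Record<string, number> = {};

// The stricter lookup: no hardcoded fallback, just cache-or-throw.
function requireContextWindow(model: string): number {
  const maxTokens = cache[model];
  if (!maxTokens) {
    throw new Error(`Context window size not available for model: ${model}.`);
  }
  return maxTokens;
}

// Fire-and-forget, as in the constructor: the failure is logged, not rethrown.
initializeContextWindow('claude-3-5-sonnet-latest').catch((error) =>
  console.error(`Failed to initialize model context window: ${error.message}`),
);

// A later use throws instead of quietly assuming the old 200000-token default.
try {
  requireContextWindow('claude-3-5-sonnet-latest');
} catch (error) {
  console.error((error as Error).message);
}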
