Skip to content

Commit 16837ec

Browse files
authored
Anirudh/add o3 (#452)
* add o3 * changeset * fix * reset example * remove modelswithvision * rm o3 from ci evals
1 parent d088a46 commit 16837ec

File tree

7 files changed

+17
-17
lines changed

7 files changed

+17
-17
lines changed

Diff for: .changeset/dull-buckets-cross.md

+5
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"@browserbasehq/stagehand": minor
3+
---
4+
5+
add o3-mini to AvailableModel

Diff for: examples/example.ts

+5-1
Original file line numberDiff line numberDiff line change
@@ -9,8 +9,12 @@ import { Stagehand } from "@/dist";
99
import StagehandConfig from "@/stagehand.config";
1010

1111
async function example() {
12-
const stagehand = new Stagehand(StagehandConfig);
12+
const stagehand = new Stagehand({
13+
...StagehandConfig,
14+
modelName: "o3-mini",
15+
});
1316
await stagehand.init();
17+
await stagehand.page.goto("https://www.google.com");
1418
}
1519

1620
(async () => {

Diff for: lib/handlers/actHandler.ts

+2-3
Original file line numberDiff line numberDiff line change
@@ -140,9 +140,8 @@ export class StagehandActHandler {
140140
// o1 is overkill for this task + this task uses a lot of tokens. So we switch it to 4o
141141
let verifyLLmClient = llmClient;
142142
if (
143-
llmClient.modelName === "o1-mini" ||
144-
llmClient.modelName === "o1-preview" ||
145-
llmClient.modelName.startsWith("o1-")
143+
llmClient.modelName.startsWith("o1") ||
144+
llmClient.modelName.startsWith("o3")
146145
) {
147146
verifyLLmClient = this.llmProvider.getClient(
148147
"gpt-4o",

Diff for: lib/llm/LLMClient.ts

-10
Original file line numberDiff line numberDiff line change
@@ -23,15 +23,6 @@ export interface ChatMessageTextContent {
2323
text: string;
2424
}
2525

26-
export const modelsWithVision: AvailableModel[] = [
27-
"gpt-4o",
28-
"gpt-4o-mini",
29-
"claude-3-5-sonnet-latest",
30-
"claude-3-5-sonnet-20240620",
31-
"claude-3-5-sonnet-20241022",
32-
"gpt-4o-2024-08-06",
33-
];
34-
3526
export const AnnotatedScreenshotText =
3627
"This is a screenshot of the current page state with the elements annotated on it. Each element id is annotated with a number to the top left of it. Duplicate annotations at the same location are under each other vertically.";
3728

@@ -98,7 +89,6 @@ export abstract class LLMClient {
9889

9990
constructor(modelName: AvailableModel, userProvidedInstructions?: string) {
10091
this.modelName = modelName;
101-
this.hasVision = modelsWithVision.includes(modelName);
10292
this.userProvidedInstructions = userProvidedInstructions;
10393
}
10494

Diff for: lib/llm/LLMProvider.ts

+1
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ export class LLMProvider {
1616
"gpt-4o-2024-08-06": "openai",
1717
"o1-mini": "openai",
1818
"o1-preview": "openai",
19+
"o3-mini": "openai",
1920
"claude-3-5-sonnet-latest": "anthropic",
2021
"claude-3-5-sonnet-20240620": "anthropic",
2122
"claude-3-5-sonnet-20241022": "anthropic",

Diff for: lib/llm/OpenAIClient.ts

+3-3
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ export class OpenAIClient extends LLMClient {
5959
// O1 models do not support most of the options. So we override them.
6060
// For schema and tools, we add them as user messages.
6161
let isToolsOverridedForO1 = false;
62-
if (this.modelName === "o1-mini" || this.modelName === "o1-preview") {
62+
if (this.modelName.startsWith("o1") || this.modelName.startsWith("o3")) {
6363
/* eslint-disable */
6464
// Remove unsupported options
6565
let {
@@ -111,7 +111,7 @@ export class OpenAIClient extends LLMClient {
111111
}
112112
if (
113113
options.temperature &&
114-
(this.modelName === "o1-mini" || this.modelName === "o1-preview")
114+
(this.modelName.startsWith("o1") || this.modelName.startsWith("o3"))
115115
) {
116116
throw new Error("Temperature is not supported for o1 models");
117117
}
@@ -207,7 +207,7 @@ export class OpenAIClient extends LLMClient {
207207
let responseFormat = undefined;
208208
if (options.response_model) {
209209
// For O1 models, we need to add the schema as a user message.
210-
if (this.modelName === "o1-mini" || this.modelName === "o1-preview") {
210+
if (this.modelName.startsWith("o1") || this.modelName.startsWith("o3")) {
211211
try {
212212
const parsedSchema = JSON.stringify(
213213
zodToJsonSchema(options.response_model.schema),

Diff for: types/model.ts

+1
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@ export const AvailableModelSchema = z.enum([
1111
"claude-3-5-sonnet-20240620",
1212
"o1-mini",
1313
"o1-preview",
14+
"o3-mini",
1415
]);
1516

1617
export type AvailableModel = z.infer<typeof AvailableModelSchema>;

0 commit comments

Comments
 (0)