
Commit 5899ec2

sameelarif and kamath authored
Pass LLM logger in createChatCompletion (#385)
* migrate to new options syntax
* fix
* input logger in extract/observe
* changeset
* patch -> minor
* import and override stagehandconfig

Co-authored-by: Anirudh Kamath <[email protected]>
1 parent 30e7d09 commit 5899ec2

File tree

11 files changed: +218 -225 lines

Diff for: .changeset/soft-snails-lick.md (+5)

@@ -0,0 +1,5 @@
+---
+"@browserbasehq/stagehand": minor
+---
+
+Moved the LLMClient logger parameter to the createChatCompletion method options.
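The net effect: the logger now travels with each createChatCompletion call instead of living on the client instance. A minimal sketch of the new options object, with field names taken from the diffs below; the real definitions live in lib/llm/LLMClient and types/log, and the placeholder types here stand in for the richer real ones:

// Sketch only: placeholder types inferred from this commit's diffs.
type LogLine = { category?: string; message: string; level?: number };
type ChatCompletionOptions = Record<string, unknown>; // real type is richer

interface CreateChatCompletionOptions {
  options: ChatCompletionOptions; // per-request options (messages, response_model, ...)
  logger: (message: LogLine) => void; // moved off the client, passed per call
  retries?: number; // the Ollama example defaults this to 3
}

// Before: client.createChatCompletion(options, retries), logging via this.logger
// After:  client.createChatCompletion({ options, logger, retries })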

Diff for: examples/external_client.ts (+8 -13)

@@ -1,20 +1,15 @@
-import { type ConstructorParams, Stagehand } from "../lib";
+import { Stagehand } from "../lib";
 import { z } from "zod";
 import { OllamaClient } from "./external_clients/ollama";
-
-const StagehandConfig: ConstructorParams = {
-  env: "BROWSERBASE",
-  apiKey: process.env.BROWSERBASE_API_KEY,
-  projectId: process.env.BROWSERBASE_PROJECT_ID,
-  verbose: 1,
-  llmClient: new OllamaClient({
-    modelName: "llama3.2",
-  }),
-  debugDom: true,
-};
+import StagehandConfig from "./stagehand.config";

 async function example() {
-  const stagehand = new Stagehand(StagehandConfig);
+  const stagehand = new Stagehand({
+    ...StagehandConfig,
+    llmClient: new OllamaClient({
+      modelName: "llama3.2",
+    }),
+  });

   await stagehand.init();
   await stagehand.page.goto("https://news.ycombinator.com");
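The example now pulls its base configuration from a shared file and overrides only the LLM client. A plausible shape for that file, reconstructed from the inline object this diff removes; this is a hypothetical sketch, and the actual examples/stagehand.config.ts shipped with the repo may differ:

// examples/stagehand.config.ts — hypothetical reconstruction of the shared config
import { type ConstructorParams } from "../lib";

const StagehandConfig: ConstructorParams = {
  env: "BROWSERBASE",
  apiKey: process.env.BROWSERBASE_API_KEY,
  projectId: process.env.BROWSERBASE_PROJECT_ID,
  verbose: 1,
  debugDom: true,
};

export default StagehandConfig;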

Diff for: examples/external_clients/ollama.ts (+23 -23)

@@ -1,14 +1,5 @@
 import OpenAI, { type ClientOptions } from "openai";
 import { zodResponseFormat } from "openai/helpers/zod";
-import type { LLMCache } from "../../lib/cache/LLMCache";
-import { validateZodSchema } from "../../lib/utils";
-import {
-  type ChatCompletionOptions,
-  type ChatMessage,
-  LLMClient,
-} from "../../lib/llm/LLMClient";
-import type { LogLine } from "../../types/log";
-import type { AvailableModel } from "../../types/model";
 import type {
   ChatCompletion,
   ChatCompletionAssistantMessageParam,
@@ -19,23 +10,28 @@ import type {
   ChatCompletionSystemMessageParam,
   ChatCompletionUserMessageParam,
 } from "openai/resources/chat";
+import type { LLMCache } from "../../lib/cache/LLMCache";
+import {
+  type ChatMessage,
+  CreateChatCompletionOptions,
+  LLMClient,
+} from "../../lib/llm/LLMClient";
+import { validateZodSchema } from "../../lib/utils";
+import type { AvailableModel } from "../../types/model";

 export class OllamaClient extends LLMClient {
   public type = "ollama" as const;
   private client: OpenAI;
   private cache: LLMCache | undefined;
-  public logger: (message: LogLine) => void;
   private enableCaching: boolean;
   public clientOptions: ClientOptions;

   constructor({
-    logger,
     enableCaching = false,
     cache = undefined,
     modelName = "llama3.2",
     clientOptions,
   }: {
-    logger?: (message: LogLine) => void;
     enableCaching?: boolean;
     cache?: LLMCache;
     modelName?: string;
@@ -47,16 +43,16 @@ export class OllamaClient extends LLMClient {
       baseURL: clientOptions?.baseURL || "http://localhost:11434/v1",
       apiKey: "ollama",
     });
-    this.logger = logger;
     this.cache = cache;
     this.enableCaching = enableCaching;
     this.modelName = modelName as AvailableModel;
   }

-  async createChatCompletion<T = ChatCompletion>(
-    options: ChatCompletionOptions,
+  async createChatCompletion<T = ChatCompletion>({
+    options,
     retries = 3,
-  ): Promise<T> {
+    logger,
+  }: CreateChatCompletionOptions): Promise<T> {
     const { image, requestId, ...optionsWithoutImageAndRequestId } = options;

     // TODO: Implement vision support
@@ -66,7 +62,7 @@ export class OllamaClient extends LLMClient {
       );
     }

-    this.logger({
+    logger({
       category: "ollama",
       message: "creating chat completion",
       level: 1,
@@ -122,7 +118,7 @@ export class OllamaClient extends LLMClient {
     );

     if (cachedResponse) {
-      this.logger({
+      logger({
         category: "llm_cache",
         message: "LLM cache hit - returning cached response",
         level: 1,
@@ -140,7 +136,7 @@ export class OllamaClient extends LLMClient {
       return cachedResponse;
     }

-    this.logger({
+    logger({
       category: "llm_cache",
       message: "LLM cache miss - no cached response found",
       level: 1,
@@ -168,7 +164,7 @@ export class OllamaClient extends LLMClient {
       model: this.modelName,
     };

-    this.logger({
+    logger({
       category: "ollama",
       message: "creating chat completion",
       level: 1,
@@ -257,7 +253,7 @@ export class OllamaClient extends LLMClient {
     const response = await this.client.chat.completions.create(body);

-    this.logger({
+    logger({
       category: "ollama",
       message: "response",
       level: 1,
@@ -279,7 +275,11 @@ export class OllamaClient extends LLMClient {

     if (!validateZodSchema(options.response_model.schema, parsedData)) {
       if (retries > 0) {
-        return this.createChatCompletion(options, retries - 1);
+        return this.createChatCompletion({
+          options,
+          logger,
+          retries: retries - 1,
+        });
       }

       throw new Error("Invalid response schema");
@@ -299,7 +299,7 @@ export class OllamaClient extends LLMClient {
     }

     if (this.enableCaching) {
-      this.logger({
+      logger({
         category: "llm_cache",
         message: "caching response",
         level: 1,
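For authors of custom clients, the takeaway is that logging is now an argument rather than client state. A bare-bones skeleton under the new contract, using only names visible in this diff; it is a hypothetical sketch, and the real LLMClient base class may require additional members (modelName, clientOptions, ...) not shown here:

// Hypothetical minimal client illustrating the per-call logger contract.
import {
  CreateChatCompletionOptions,
  LLMClient,
} from "../../lib/llm/LLMClient";

export class EchoClient extends LLMClient {
  public type = "echo" as const;

  async createChatCompletion<T>({
    options,
    logger,
    retries = 3, // same default the Ollama example uses
  }: CreateChatCompletionOptions): Promise<T> {
    // The logger arrives with each call; no constructor plumbing needed.
    logger({ category: "echo", message: "creating chat completion", level: 1 });

    // ...call your model here, retrying up to `retries` times on bad output...
    return options as unknown as T;
  }
}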

Diff for: lib/handlers/extractHandler.ts (+2)

@@ -306,6 +306,7 @@ export class StagehandExtractHandler {
       chunksTotal: 1,
       llmClient,
       requestId,
+      logger: this.logger,
     });

     const {
@@ -434,6 +435,7 @@ export class StagehandExtractHandler {
       chunksTotal: chunks.length,
       requestId,
       isUsingTextExtract: false,
+      logger: this.logger,
     });

     const {

Diff for: lib/handlers/observeHandler.ts (+1)

@@ -120,6 +120,7 @@ export class StagehandObserveHandler {
       llmClient,
       image: annotatedScreenshot,
       requestId,
+      logger: this.logger,
     });

     const elementsWithSelectors = observationResponse.elements.map(

Diff for: lib/index.ts (-3)

@@ -372,9 +372,6 @@ export class Stagehand {
         this.llmClient = undefined;
       }
     }
-    if (this.llmClient && !this.llmClient.logger) {
-      this.llmClient.logger = this.logger;
-    }
    this.domSettleTimeoutMs = domSettleTimeoutMs ?? 30_000;
    this.headless = headless ?? false;
    this.browserbaseSessionCreateParams = browserbaseSessionCreateParams;
