Skip to content

Commit 75c0e20

Browse files
kamatharihanv
and authored
update: LLMClient default inherits logger from Stagehand (#366) (#367)
* update: LLMClient default inherits logger from Stagehand

* update: add changeset

* update: export LLMClient

Co-authored-by: Arihan Varanasi <[email protected]>
1 parent a77efcc commit 75c0e20

File tree

7 files changed

+61
-36
lines changed

7 files changed

+61
-36
lines changed

Diff for: .changeset/mean-swans-fix.md

+5
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"@browserbasehq/stagehand": minor
3+
---
4+
5+
Logger in LLMClient is inherited by default from Stagehand. Named rather than positional arguments are used in implemented LLMClients.

Diff for: examples/external_client.ts

+4-8
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { type ConstructorParams, type LogLine, Stagehand } from "../lib";
1+
import { type ConstructorParams, Stagehand } from "../lib";
22
import { z } from "zod";
33
import { OllamaClient } from "./external_clients/ollama";
44

@@ -7,13 +7,9 @@ const StagehandConfig: ConstructorParams = {
77
apiKey: process.env.BROWSERBASE_API_KEY,
88
projectId: process.env.BROWSERBASE_PROJECT_ID,
99
verbose: 1,
10-
llmClient: new OllamaClient(
11-
(message: LogLine) =>
12-
console.log(`[stagehand::${message.category}] ${message.message}`),
13-
false,
14-
undefined,
15-
"llama3.2",
16-
),
10+
llmClient: new OllamaClient({
11+
modelName: "llama3.2",
12+
}),
1713
debugDom: true,
1814
};
1915

Diff for: examples/external_clients/ollama.ts

+12-6
Original file line numberDiff line numberDiff line change
@@ -28,13 +28,19 @@ export class OllamaClient extends LLMClient {
2828
private enableCaching: boolean;
2929
public clientOptions: ClientOptions;
3030

31-
constructor(
32-
logger: (message: LogLine) => void,
31+
constructor({
32+
logger,
3333
enableCaching = false,
34-
cache: LLMCache | undefined,
35-
modelName: "llama3.2",
36-
clientOptions?: ClientOptions,
37-
) {
34+
cache = undefined,
35+
modelName = "llama3.2",
36+
clientOptions,
37+
}: {
38+
logger?: (message: LogLine) => void;
39+
enableCaching?: boolean;
40+
cache?: LLMCache;
41+
modelName?: string;
42+
clientOptions?: ClientOptions;
43+
}) {
3844
super(modelName as AvailableModel);
3945
this.client = new OpenAI({
4046
...clientOptions,

Diff for: lib/index.ts

+6
Original file line numberDiff line numberDiff line change
@@ -365,6 +365,11 @@ export class Stagehand {
365365
modelName ?? DEFAULT_MODEL_NAME,
366366
modelClientOptions,
367367
);
368+
369+
if (!this.llmClient.logger) {
370+
this.llmClient.logger = this.logger;
371+
}
372+
368373
this.domSettleTimeoutMs = domSettleTimeoutMs ?? 30_000;
369374
this.headless = headless ?? false;
370375
this.browserbaseSessionCreateParams = browserbaseSessionCreateParams;
@@ -614,3 +619,4 @@ export * from "../types/model";
614619
export * from "../types/playwright";
615620
export * from "../types/stagehand";
616621
export * from "../types/page";
622+
export { LLMClient } from "./llm/LLMClient";

Diff for: lib/llm/AnthropicClient.ts

+12-6
Original file line numberDiff line numberDiff line change
@@ -19,13 +19,19 @@ export class AnthropicClient extends LLMClient {
1919
private enableCaching: boolean;
2020
public clientOptions: ClientOptions;
2121

22-
constructor(
23-
logger: (message: LogLine) => void,
22+
constructor({
23+
logger,
2424
enableCaching = false,
25-
cache: LLMCache | undefined,
26-
modelName: AvailableModel,
27-
clientOptions?: ClientOptions,
28-
) {
25+
cache,
26+
modelName,
27+
clientOptions,
28+
}: {
29+
logger: (message: LogLine) => void;
30+
enableCaching?: boolean;
31+
cache?: LLMCache;
32+
modelName: AvailableModel;
33+
clientOptions?: ClientOptions;
34+
}) {
2935
super(modelName);
3036
this.client = new Anthropic(clientOptions);
3137
this.logger = logger;

Diff for: lib/llm/LLMProvider.ts

+10-10
Original file line numberDiff line numberDiff line change
@@ -61,21 +61,21 @@ export class LLMProvider {
6161

6262
switch (provider) {
6363
case "openai":
64-
return new OpenAIClient(
65-
this.logger,
66-
this.enableCaching,
67-
this.cache,
64+
return new OpenAIClient({
65+
logger: this.logger,
66+
enableCaching: this.enableCaching,
67+
cache: this.cache,
6868
modelName,
6969
clientOptions,
70-
);
70+
});
7171
case "anthropic":
72-
return new AnthropicClient(
73-
this.logger,
74-
this.enableCaching,
75-
this.cache,
72+
return new AnthropicClient({
73+
logger: this.logger,
74+
enableCaching: this.enableCaching,
75+
cache: this.cache,
7676
modelName,
7777
clientOptions,
78-
);
78+
});
7979
default:
8080
throw new Error(`Unsupported provider: ${provider}`);
8181
}

Diff for: lib/llm/OpenAIClient.ts

+12-6
Original file line numberDiff line numberDiff line change
@@ -29,13 +29,19 @@ export class OpenAIClient extends LLMClient {
2929
private enableCaching: boolean;
3030
public clientOptions: ClientOptions;
3131

32-
constructor(
33-
logger: (message: LogLine) => void,
32+
constructor({
33+
logger,
3434
enableCaching = false,
35-
cache: LLMCache | undefined,
36-
modelName: AvailableModel,
37-
clientOptions?: ClientOptions,
38-
) {
35+
cache,
36+
modelName,
37+
clientOptions,
38+
}: {
39+
logger: (message: LogLine) => void;
40+
enableCaching?: boolean;
41+
cache?: LLMCache;
42+
modelName: AvailableModel;
43+
clientOptions?: ClientOptions;
44+
}) {
3945
super(modelName);
4046
this.clientOptions = clientOptions;
4147
this.client = new OpenAI(clientOptions);

0 commit comments

Comments (0)