Merged

32 commits
00aae08
add new flow logger
pirate Nov 18, 2025
9c7d138
hide unused extract args
pirate Nov 18, 2025
06472d6
fix the lint errors
pirate Nov 18, 2025
9d06f31
fix unused label var
pirate Nov 18, 2025
b788e4d
Write flow logs to configurable file paths instead of stdout
github-actions[bot] Dec 4, 2025
b289fa8
simplify flow logging
pirate Dec 4, 2025
6b632cc
improve flow logging timestamp prefixes
pirate Dec 4, 2025
aa02157
more accurate flow logging context var tracking
pirate Dec 5, 2025
d69fe15
fix id scoping for spans where no task is running, use uuidv7 for eve…
pirate Dec 5, 2025
121f463
move logging and formatting into flowLogger.ts and add logging for CU…
pirate Dec 5, 2025
cc03642
refactor to use pino for logging
pirate Dec 6, 2025
b2af3fe
better prompt preview for CUA llm calls
pirate Dec 6, 2025
ccd0a96
code cleanups
pirate Dec 6, 2025
b651f85
make truncation consistent across all lines
pirate Dec 6, 2025
5f34408
disable logging by default unless BROWSERBASE_CONFIG_DIR is set
pirate Dec 6, 2025
f05434e
use decorator for cleaner logging, add skip arrows back
pirate Dec 6, 2025
bc1c239
fix no-op screenshot logging in v3cua
pirate Dec 6, 2025
9005489
fix imports from bad rebase
pirate Dec 6, 2025
9453a31
reduce diff in page.ts by using decorator
pirate Dec 8, 2025
86a5484
reduce diff in v3.ts by using decorator
pirate Dec 8, 2025
3b62ab2
add safety guards to prevent errors in main logic
pirate Dec 8, 2025
a7863f9
fix indentation diffs
pirate Dec 8, 2025
4cb2a69
fix lint issues
pirate Dec 8, 2025
081c201
remove unneeded pkg
pirate Dec 8, 2025
1a4ba75
also log llm response even in the case of parsing error
pirate Dec 8, 2025
2f47d8a
log errors in the case of llm response parsing issue
pirate Dec 8, 2025
558aef1
fix missing SessionFileLogger.logAgentTaskCompleted in agent stream mode
pirate Dec 8, 2025
d8cd98b
bump lockfiles
pirate Dec 8, 2025
74f4567
dont modify root lockfiles
pirate Dec 8, 2025
705b957
comment why Page.setViewportSize is disabled
pirate Dec 8, 2025
23aa321
bump lockfiles
pirate Dec 8, 2025
81621b1
Update packages/core/lib/v3/flowLogger.ts
pirate Dec 9, 2025
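Several of the commits above describe behavior the diffs below depend on: uuidv7 ids for events (d69fe15), pino as the logging backend (cc03642), and logging disabled by default unless BROWSERBASE_CONFIG_DIR is set (5f34408). A minimal sketch of that gating follows; flowLogger.ts itself is not part of the visible diff, so everything here beyond the env var and the sessions/{id}/ layout mentioned in the review discussion is an assumption.

import path from "node:path";

// Sketch only: per commit 5f34408, flow logging is a no-op unless
// BROWSERBASE_CONFIG_DIR is set.
const configDir = process.env.BROWSERBASE_CONFIG_DIR;
const flowLoggingEnabled = Boolean(configDir);

// Hypothetical path layout, based on the sessions/{id}/session.json
// convention mentioned in the review comments further down.
function flowLogPath(sessionId: string): string | null {
  if (!flowLoggingEnabled) return null;
  return path.join(configDir!, "sessions", sessionId, "flow.log");
}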
1 change: 1 addition & 0 deletions .gitignore
@@ -9,6 +9,7 @@ screenshot.png
.env
downloads/
dist/
.browserbase/
packages/evals/**/public
packages/core/lib/dom/build/
packages/core/lib/v3/dom/build/
25 changes: 25 additions & 0 deletions packages/core/lib/v3/agent/AnthropicCUAClient.ts
@@ -19,6 +19,12 @@ import { mapKeyToPlaywright } from "./utils/cuaKeyMapping";
import { compressConversationImages } from "./utils/imageCompression";
import { toJsonSchema } from "../zodCompat";
import type { StagehandZodSchema } from "../zodCompat";
import {
SessionFileLogger,
formatCuaPromptPreview,
formatCuaResponsePreview,
} from "../flowLogger";
import { v7 as uuidv7 } from "uuid";

export type ResponseInputItem = AnthropicMessage | AnthropicToolResult;

@@ -481,6 +487,15 @@ export class AnthropicCUAClient extends AgentClient {
requestParams.thinking = thinking;
}

// Log LLM request
const llmRequestId = uuidv7();
SessionFileLogger.logLlmRequest({
requestId: llmRequestId,
model: this.modelName,
operation: "CUA.getAction",
prompt: formatCuaPromptPreview(messages),
});

const startTime = Date.now();
// Create the message using the Anthropic Messages API
// @ts-expect-error - The Anthropic SDK types are stricter than what we need
@@ -493,6 +508,16 @@
inference_time_ms: elapsedMs,
};

// Log LLM response
SessionFileLogger.logLlmResponse({
requestId: llmRequestId,
model: this.modelName,
operation: "CUA.getAction",
output: formatCuaResponsePreview(response.content),
inputTokens: response.usage.input_tokens,
outputTokens: response.usage.output_tokens,
});

// Store the message ID for future use
this.lastMessageId = response.id;

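Each of the three CUA client diffs in this PR adds the same bracketing around its provider call. Condensed from the AnthropicCUAClient hunks above, the pattern is the following; the provider call between the two log statements sits in a collapsed region of the diff, so that single line is an assumption about its shape.

// Excerpt-style sketch of the shared pattern: mint one uuidv7 per call, log
// the outbound prompt preview, then log the response preview and token usage
// under the same requestId so the two lines can be correlated later.
const llmRequestId = uuidv7();
SessionFileLogger.logLlmRequest({
  requestId: llmRequestId,
  model: this.modelName,
  operation: "CUA.getAction",
  prompt: formatCuaPromptPreview(messages),
});

const response = await this.client.messages.create(requestParams); // assumed call shape

SessionFileLogger.logLlmResponse({
  requestId: llmRequestId,
  model: this.modelName,
  operation: "CUA.getAction",
  output: formatCuaResponsePreview(response.content),
  inputTokens: response.usage.input_tokens,
  outputTokens: response.usage.output_tokens,
});

One consequence visible in the GoogleCUAClient hunk below: the request is logged once before the retry loop, so every retry attempt shares a single requestId.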
25 changes: 25 additions & 0 deletions packages/core/lib/v3/agent/GoogleCUAClient.ts
@@ -30,6 +30,12 @@ import {
convertToolSetToFunctionDeclarations,
} from "./utils/googleCustomToolHandler";
import { ToolSet } from "ai";
import {
SessionFileLogger,
formatCuaPromptPreview,
formatCuaResponsePreview,
} from "../flowLogger";
import { v7 as uuidv7 } from "uuid";

/**
* Client for Google's Computer Use Assistant API
@@ -300,6 +306,15 @@ export class GoogleCUAClient extends AgentClient {
let lastError: Error | null = null;
let response: GenerateContentResponse | null = null;

// Log LLM request
const llmRequestId = uuidv7();
SessionFileLogger.logLlmRequest({
requestId: llmRequestId,
model: this.modelName,
operation: "CUA.generateContent",
prompt: formatCuaPromptPreview(compressedHistory),
});

for (let attempt = 0; attempt < maxRetries; attempt++) {
try {
// Add exponential backoff delay for retries
@@ -357,6 +372,16 @@
const elapsedMs = endTime - startTime;
const { usageMetadata } = response;

// Log LLM response
SessionFileLogger.logLlmResponse({
requestId: llmRequestId,
model: this.modelName,
operation: "CUA.generateContent",
output: formatCuaResponsePreview(response),
inputTokens: usageMetadata?.promptTokenCount,
outputTokens: usageMetadata?.candidatesTokenCount,
});

// Process the response
const result = await this.processResponse(response, logger);

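Note that each client hands formatCuaResponsePreview a different shape: AnthropicCUAClient passes response.content, GoogleCUAClient passes the whole GenerateContentResponse, and OpenAICUAClient (below) passes response.output. The formatter therefore has to normalize arbitrary inputs and, per commit b651f85, truncate consistently. A hypothetical sketch, since flowLogger.ts is not shown in this diff:

// Hypothetical: the real implementation lives in flowLogger.ts. The call
// sites only imply that any JSON-ish input is accepted and that previews are
// truncated to a consistent length (commit b651f85).
function formatCuaResponsePreview(output: unknown, maxLen = 200): string {
  let text: string;
  try {
    text = typeof output === "string" ? output : JSON.stringify(output);
  } catch {
    text = String(output); // circular structures fall back to best effort
  }
  return text.length > maxLen ? `${text.slice(0, maxLen)}…` : text;
}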
25 changes: 25 additions & 0 deletions packages/core/lib/v3/agent/OpenAICUAClient.ts
@@ -14,6 +14,12 @@ import { ClientOptions } from "../types/public/model";
import { AgentClient } from "./AgentClient";
import { AgentScreenshotProviderError } from "../types/public/sdkErrors";
import { ToolSet } from "ai";
import {
SessionFileLogger,
formatCuaPromptPreview,
formatCuaResponsePreview,
} from "../flowLogger";
import { v7 as uuidv7 } from "uuid";

/**
* Client for OpenAI's Computer Use Assistant API
@@ -409,6 +415,15 @@ export class OpenAICUAClient extends AgentClient {
requestParams.previous_response_id = previousResponseId;
}

// Log LLM request
const llmRequestId = uuidv7();
SessionFileLogger.logLlmRequest({
requestId: llmRequestId,
model: this.modelName,
[Collaborator comment] note for the future, we might want to keep track of other params (non-sensitive) in model based on the ModelConfiguration type

[Author reply] note for compactness we can also store the default model config in sessions/{id}/session.json, and then only add extra fields to the individual log lines when they differ from the session-wide defaults (see the sketch after this file's diff).

operation: "CUA.getAction",
prompt: formatCuaPromptPreview(inputItems),
});

const startTime = Date.now();
// Create the response using the OpenAI Responses API
// @ts-expect-error - Force type to match what the OpenAI SDK expects
@@ -423,6 +438,16 @@
inference_time_ms: elapsedMs,
};

// Log LLM response
SessionFileLogger.logLlmResponse({
requestId: llmRequestId,
model: this.modelName,
operation: "CUA.getAction",
output: formatCuaResponsePreview(response.output),
inputTokens: response.usage.input_tokens,
outputTokens: response.usage.output_tokens,
});

// Store the response ID for future use
this.lastResponseId = response.id;
