chore: retain the source code from the underlying tools (#756)

Pavel Feldman 2025-07-24 17:08:35 -07:00 committed by GitHub
parent ecfa10448b
commit e934d5e23e
5 changed files with 25 additions and 20 deletions

View File

@@ -47,7 +47,7 @@ export interface LLMDelegate {
   checkDoneToolCall(toolCall: LLMToolCall): string | null;
 }
 
-export async function runTask(delegate: LLMDelegate, client: Client, task: string, oneShot: boolean = false): Promise<string> {
+export async function runTask(delegate: LLMDelegate, client: Client, task: string, oneShot: boolean = false): Promise<LLMMessage[]> {
   const { tools } = await client.listTools();
   const taskContent = oneShot ? `Perform following task: ${task}.` : `Perform following task: ${task}. Once the task is complete, call the "done" tool.`;
   const conversation = delegate.createConversation(taskContent, tools, oneShot);
@@ -60,10 +60,9 @@ export async function runTask(delegate: LLMDelegate, client: Client, task: strin
     const toolResults: Array<{ toolCallId: string; content: string; isError?: boolean }> = [];
 
     for (const toolCall of toolCalls) {
       // Check if this is the "done" tool
       const doneResult = delegate.checkDoneToolCall(toolCall);
       if (doneResult !== null)
-        return doneResult;
+        return conversation.messages;
 
       const { name, arguments: args, id } = toolCall;
       try {
@@ -100,10 +99,9 @@ export async function runTask(delegate: LLMDelegate, client: Client, task: strin
       }
     }
 
-    if (oneShot)
-      return toolResults.map(result => result.content).join('\n');
-    else
-      delegate.addToolResults(conversation, toolResults);
+    delegate.addToolResults(conversation, toolResults);
+    if (oneShot)
+      return conversation.messages;
   }
 
   throw new Error('Failed to perform step, max attempts reached');
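
For reference, the conversation handed back by runTask is simply the delegate's message list. The exact LLMMessage definition is not part of this diff; the sketch below only assumes the minimal shape that the callers in the other changed files rely on.

// Assumed shape only: the real LLMMessage type in the delegate module may carry
// extra fields (tool call ids, structured content, etc.). The callers in this
// change read message.role and message.content as plain strings.
type LLMMessage = {
  role: string;     // e.g. 'user', 'assistant', 'tool' (exact role names assumed)
  content: string;  // plain text rendered by the CLI and by Context.runTask
};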

View File

@@ -44,10 +44,7 @@ export class ClaudeDelegate implements LLMDelegate {
       description: 'Call this tool when the task is complete.',
       inputSchema: {
         type: 'object',
-        properties: {
-          result: { type: 'string', description: 'The result of the task.' },
-        },
-        required: ['result'],
+        properties: {},
       },
     });
   }

View File

@@ -44,10 +44,7 @@ export class OpenAIDelegate implements LLMDelegate {
       description: 'Call this tool when the task is complete.',
       inputSchema: {
         type: 'object',
-        properties: {
-          result: { type: 'string', description: 'The result of the task.' },
-        },
-        required: ['result'],
+        properties: {},
      },
    });
  }
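
Both delegates get the identical schema change: the completion tool no longer takes a "result" argument, because the result is now read from the retained conversation itself. As a consolidated sketch, the declaration now reads roughly as below; the tool name 'done' and the surrounding push call are assumptions inferred from the loop's comment, not shown in these hunks.

// Sketch, not the verbatim delegate source: only the description and the emptied
// inputSchema appear in the hunks above; the name and the tools.push wrapper are assumed.
tools.push({
  name: 'done',
  description: 'Call this tool when the task is complete.',
  inputSchema: {
    type: 'object',
    properties: {},  // formerly declared a required 'result' string; now intentionally empty
  },
});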

View File

@@ -49,10 +49,11 @@ async function run(delegate: LLMDelegate) {
   await client.connect(transport);
   await client.ping();
 
-  let lastResult: string | undefined;
-  for (const task of tasks)
-    lastResult = await runTask(delegate, client, task);
-  console.log(lastResult);
+  for (const task of tasks) {
+    const messages = await runTask(delegate, client, task);
+    for (const message of messages)
+      console.log(`${message.role}: ${message.content}`);
+  }
 
   await client.close();
 }
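
With this change the CLI prints the whole exchange instead of only the last result. A hypothetical transcript, purely for illustration (the wording is invented; only the "role: content" format comes from the loop above):

// user: Perform following task: <task>. Once the task is complete, call the "done" tool.
// assistant: <assistant text / tool calls>
// tool: <tool output>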

View File

@@ -53,9 +53,21 @@ export class Context {
   }
 
   async runTask(task: string, oneShot: boolean = false): Promise<mcpServer.ToolResponse> {
-    const result = await runTask(this._delegate, this._client!, task, oneShot);
+    const messages = await runTask(this._delegate, this._client!, task, oneShot);
+    const lines: string[] = [];
+    // Skip the first message, which is the user's task.
+    for (const message of messages.slice(1)) {
+      // Trim out all page snapshots.
+      if (!message.content.trim())
+        continue;
+      const index = oneShot ? -1 : message.content.indexOf('### Page state');
+      const trimmedContent = index === -1 ? message.content : message.content.substring(0, index);
+      lines.push(`[${message.role}]:`, trimmedContent);
+    }
     return {
-      content: [{ type: 'text', text: result }],
+      content: [{ type: 'text', text: lines.join('\n') }],
     };
   }
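
A small standalone sketch of how the '### Page state' trimming above behaves; the sample message content is invented, and only the marker string and the substring logic come from the diff.

// Hypothetical tool message: everything from the '### Page state' marker onward
// (the page snapshot) is dropped before the line is reported.
const content = 'Clicked "Submit".\n### Page state\n- button "Submit"\n- textbox "Name"';
const index = content.indexOf('### Page state');
const trimmedContent = index === -1 ? content : content.substring(0, index);
console.log(trimmedContent);  // prints: Clicked "Submit".  (snapshot omitted)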