async function streamResponseWithTools(res, messages, threadId, tools, toolFunctions) {
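  // Note: assumes an `openai` client (e.g. `new OpenAI()` from the official SDK) is
  // already initialized at module scope; it is not created inside this function.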
  try {
    // First, send a thinking step
    const thinkingStep = {
      id: `step-${Math.random().toString(36).substring(2, 15)}`,
      object: 'thread.run.step.delta',
      thread_id: threadId,
      model: 'agent-model',
      created: Math.floor(Date.now() / 1000),
      choices: [
        {
          delta: {
            role: 'assistant',
            step_details: {
              type: 'thinking',
              content: 'Analyzing the request and determining if tools are needed...'
            }
          }
        }
      ]
    };
    res.write(`event: thread.run.step.delta\n`);
    res.write(`data: ${JSON.stringify(thinkingStep)}\n\n`);
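    // Each SSE frame is an `event:` line followed by a `data:` line and a blank line,
    // which is the framing EventSource-style clients expect when parsing the stream.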
    // Call the OpenAI API with streaming enabled
    const stream = await openai.chat.completions.create({
      model: "gpt-4.1-mini", // Using the same model as in processMessagesWithTools
      messages: messages,
      tools: tools,
      tool_choice: "auto", // Let the LLM decide when to use tools
      stream: true
    });
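    // Accumulate the streamed deltas into a complete assistant message so it can be
    // replayed to the model together with the tool results after execution.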
    let assistantMessage = { role: "assistant", content: "", tool_calls: [] };
    let currentToolCall = null;
    // Process the stream
    for await (const chunk of stream) {
      const delta = chunk.choices[0]?.delta;
      // If there's content in the delta, add it to the assistant message
      if (delta?.content) {
        assistantMessage.content += delta.content;
        // Stream the content chunk
        const messageDelta = {
          id: `msg-${Math.random().toString(36).substring(2, 15)}`,
          object: 'thread.message.delta',
          thread_id: threadId,
          model: chunk.model,
          created: Math.floor(Date.now() / 1000),
          choices: [
            {
              delta: {
                role: 'assistant',
                content: delta.content
              }
            }
          ]
        };
        res.write(`event: thread.message.delta\n`);
        res.write(`data: ${JSON.stringify(messageDelta)}\n\n`);
      }
      // If there's a tool call in the delta, process it
      if (delta?.tool_calls && delta.tool_calls.length > 0) {
        const toolCallDelta = delta.tool_calls[0];
        // A delta that carries an id starts a new tool call; checking for the id (rather
        // than only index === 0) also handles parallel tool calls at later indices
        if (toolCallDelta.id) {
          currentToolCall = {
            id: toolCallDelta.id,
            type: "function",
            function: {
              name: "",
              arguments: ""
            }
          };
          assistantMessage.tool_calls.push(currentToolCall);
        }
        // Update the current tool call with the delta
        if (currentToolCall) {
          if (toolCallDelta.function?.name) {
            currentToolCall.function.name = toolCallDelta.function.name;
          }
          if (toolCallDelta.function?.arguments) {
            currentToolCall.function.arguments += toolCallDelta.function.arguments;
          }
        }
      }
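      // Tool call arguments arrive as incremental JSON fragments; they are only parsed
      // once the model signals completion with finish_reason === "tool_calls" below.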
      // If this is the end of the completion, check for tool calls
      if (chunk.choices[0]?.finish_reason === "tool_calls") {
        // Stream a tool call step for each tool call
        for (const toolCall of assistantMessage.tool_calls) {
          const toolCallStep = {
            id: `step-${Math.random().toString(36).substring(2, 15)}`,
            object: 'thread.run.step.delta',
            thread_id: threadId,
            model: chunk.model,
            created: Math.floor(Date.now() / 1000),
            choices: [
              {
                delta: {
                  role: 'assistant',
                  step_details: {
                    type: 'tool_calls',
                    tool_calls: [
                      {
                        id: toolCall.id,
                        name: toolCall.function.name,
                        args: JSON.parse(toolCall.function.arguments)
                      }
                    ]
                  }
                }
              }
            ]
          };
          res.write(`event: thread.run.step.delta\n`);
          res.write(`data: ${JSON.stringify(toolCallStep)}\n\n`);
        }
        // Execute each tool call and stream the results
        const updatedMessages = [...messages, assistantMessage];
        for (const toolCall of assistantMessage.tool_calls) {
          try {
            const { id, function: { name, arguments: argsString } } = toolCall;
            const args = JSON.parse(argsString);
            // Execute the tool function and normalize the result to a string,
            // since tool messages sent back to the API must carry string content
            const rawResult = await toolFunctions[name](args);
            const result = typeof rawResult === "string" ? rawResult : JSON.stringify(rawResult);
            // Stream the tool response
            const toolResponseStep = {
              id: `step-${Math.random().toString(36).substring(2, 15)}`,
              object: 'thread.run.step.delta',
              thread_id: threadId,
              model: chunk.model,
              created: Math.floor(Date.now() / 1000),
              choices: [
                {
                  delta: {
                    role: 'assistant',
                    step_details: {
                      type: 'tool_response',
                      content: result,
                      name: name,
                      tool_call_id: id
                    }
                  }
                }
              ]
            };
            res.write(`event: thread.run.step.delta\n`);
            res.write(`data: ${JSON.stringify(toolResponseStep)}\n\n`);
            // Add the tool result to the messages
            updatedMessages.push({
              role: "tool",
              tool_call_id: id,
              name: name,
              content: result
            });
          } catch (error) {
            console.error(`Error executing tool ${toolCall.function.name}:`, error);
            // Stream an error response
            const errorResponseStep = {
              id: `step-${Math.random().toString(36).substring(2, 15)}`,
              object: 'thread.run.step.delta',
              thread_id: threadId,
              model: chunk.model,
              created: Math.floor(Date.now() / 1000),
              choices: [
                {
                  delta: {
                    role: 'assistant',
                    step_details: {
                      type: 'tool_response',
                      content: JSON.stringify({ error: error.message }),
                      name: toolCall.function.name,
                      tool_call_id: toolCall.id
                    }
                  }
                }
              ]
            };
            res.write(`event: thread.run.step.delta\n`);
            res.write(`data: ${JSON.stringify(errorResponseStep)}\n\n`);
            // Add the error result to the messages
            updatedMessages.push({
              role: "tool",
              tool_call_id: toolCall.id,
              name: toolCall.function.name,
              content: JSON.stringify({ error: error.message })
            });
          }
        }
        // Call the LLM again with the updated messages including tool results
        const finalStream = await openai.chat.completions.create({
          model: "gpt-4.1-mini", // Using the same model as in the first call
          messages: updatedMessages,
          stream: true
        });
        // Stream the final response
        for await (const finalChunk of finalStream) {
          if (finalChunk.choices[0]?.delta?.content) {
            const messageDelta = {
              id: `msg-${Math.random().toString(36).substring(2, 15)}`,
              object: 'thread.message.delta',
              thread_id: threadId,
              model: finalChunk.model,
              created: Math.floor(Date.now() / 1000),
              choices: [
                {
                  delta: {
                    role: 'assistant',
                    content: finalChunk.choices[0].delta.content
                  }
                }
              ]
            };
            res.write(`event: thread.message.delta\n`);
            res.write(`data: ${JSON.stringify(messageDelta)}\n\n`);
          }
        }
      }
    }
    // End the stream
    res.end();
  } catch (error) {
    console.error("Error in streamResponseWithTools:", error);
    // Send an error message
    const errorMessage = {
      id: `error-${Math.random().toString(36).substring(2, 15)}`,
      object: 'thread.message.delta',
      thread_id: threadId,
      model: 'agent-model',
      created: Math.floor(Date.now() / 1000),
      choices: [
        {
          delta: {
            role: 'assistant',
            content: `An error occurred: ${error.message}`
          }
        }
      ]
    };
    res.write(`event: thread.message.delta\n`);
    res.write(`data: ${JSON.stringify(errorMessage)}\n\n`);
    res.end();
  }
}
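
// A minimal usage sketch (not part of the original implementation): it assumes an
// Express-style app and that `tools` / `toolFunctions` are defined as in
// processMessagesWithTools. The route path and request shape are illustrative
// assumptions; the important part is setting the SSE headers before
// streamResponseWithTools starts writing events (it ends the response itself).
//
//   app.post('/threads/:threadId/runs', async (req, res) => {
//     res.setHeader('Content-Type', 'text/event-stream');
//     res.setHeader('Cache-Control', 'no-cache');
//     res.setHeader('Connection', 'keep-alive');
//     await streamResponseWithTools(res, req.body.messages, req.params.threadId, tools, toolFunctions);
//   });
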
// Helper function to split text into chunks
function splitIntoChunks(text, chunkSize = 10) {
const words = text.split(' ');
const chunks = [];
let currentChunk = [];
for (const word of words) {
currentChunk.push(word);
if (currentChunk.length >= chunkSize) {
chunks.push(currentChunk.join(' '));
currentChunk = [];
}
}
if (currentChunk.length > 0) {
chunks.push(currentChunk.join(' '));
}
return chunks;
}
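
// Example: splitIntoChunks('the quick brown fox jumps', 2)
// -> ['the quick', 'brown fox', 'jumps']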