throw error all the way up
brnaba-aws committed Oct 15, 2024
1 parent e6e51c2 commit b5814a9
Showing 2 changed files with 138 additions and 126 deletions.
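
The pattern is the same in both files: each layer now catches failures, logs them, and rethrows instead of swallowing them, so the error reaches the caller that knows how to respond. Below is a minimal, self-contained TypeScript sketch of that shape; all names here are illustrative stand-ins, not the project's API:

async function callModel(input: string): Promise<string> {
  if (!input) throw new Error("empty input");
  return `echo: ${input}`;
}

async function processRequest(input: string): Promise<string> {
  try {
    return await callModel(input);
  } catch (error) {
    // Log locally, then rethrow so the layer above decides how to
    // surface the failure (fallback message, stream teardown, ...).
    console.error("Error processing request:", error);
    throw error;
  }
}

async function routeRequest(input: string): Promise<string> {
  try {
    return await processRequest(input);
  } catch (error) {
    // Top level: convert the propagated error into a user-facing reply.
    return `Routing error: ${error instanceof Error ? error.message : String(error)}`;
  }
}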
156 changes: 81 additions & 75 deletions typescript/src/agents/bedrockLLMAgent.ts
@@ -146,81 +146,87 @@ export class BedrockLLMAgent extends Agent {
     chatHistory: ConversationMessage[],
     additionalParams?: Record<string, string>
   ): Promise<ConversationMessage | AsyncIterable<any>> {
-    // Construct the user's message based on the provided inputText
-    const userMessage: ConversationMessage = {
-      role: ParticipantRole.USER,
-      content: [{ text: `${inputText}` }],
-    };
-
-    // Combine the existing chat history with the user's message
-    const conversation: ConversationMessage[] = [
-      ...chatHistory,
-      userMessage,
-    ];
-
-    this.updateSystemPrompt();
-
-    let systemPrompt = this.systemPrompt;
-
-    // Update the system prompt with the latest history, agent descriptions, and custom variables
-    if (this.retriever) {
-      // retrieve from Vector store
-      const response = await this.retriever.retrieveAndCombineResults(inputText);
-      const contextPrompt =
-        "\nHere is the context to use to answer the user's question:\n" +
-        response;
-      systemPrompt = systemPrompt + contextPrompt;
-    }
-
-    // Prepare the command to converse with the Bedrock API
-    const converseCmd = {
-      modelId: this.modelId,
-      messages: conversation, //Include the updated conversation history
-      system: [{ text: systemPrompt }],
-      inferenceConfig: {
-        maxTokens: this.inferenceConfig.maxTokens,
-        temperature: this.inferenceConfig.temperature,
-        topP: this.inferenceConfig.topP,
-        stopSequences: this.inferenceConfig.stopSequences,
-      },
-      guardrailConfig: this.guardrailConfig? this.guardrailConfig:undefined,
-      toolConfig: (this.toolConfig ? { tools:this.toolConfig.tool}:undefined)
-    };
-
-    if (this.streaming){
-      return this.handleStreamingResponse(converseCmd);
-    } else {
-      let continueWithTools = false;
-      let finalMessage:ConversationMessage = { role: ParticipantRole.USER, content:[]};
-      let maxRecursions = this.toolConfig?.toolMaxRecursions || this.defaultMaxRecursions;
-
-      do{
-        // send the conversation to Amazon Bedrock
-        const bedrockResponse = await this.handleSingleResponse(converseCmd);
-
-        // Append the model's response to the ongoing conversation
-        conversation.push(bedrockResponse);
-
-        // process model response
-        if (bedrockResponse?.content?.some((content) => 'toolUse' in content)){
-          // forward everything to the tool use handler
-          if (!this.toolConfig){
-            throw new Error("Tool config is not defined");
-          }
-          const toolResponse = await this.toolConfig.useToolHandler(bedrockResponse, conversation);
-          continueWithTools = true;
-          converseCmd.messages.push(toolResponse);
-        }
-        else {
-          continueWithTools = false;
-          finalMessage = bedrockResponse;
-        }
-        maxRecursions--;
-
-        converseCmd.messages = conversation;
-
-      }while (continueWithTools && maxRecursions > 0)
-      return finalMessage;
-    }
+    try {
+      // Construct the user's message based on the provided inputText
+      const userMessage: ConversationMessage = {
+        role: ParticipantRole.USER,
+        content: [{ text: `${inputText}` }],
+      };
+
+      // Combine the existing chat history with the user's message
+      const conversation: ConversationMessage[] = [
+        ...chatHistory,
+        userMessage,
+      ];
+
+      this.updateSystemPrompt();
+
+      let systemPrompt = this.systemPrompt;
+
+      // Update the system prompt with the latest history, agent descriptions, and custom variables
+      if (this.retriever) {
+        // retrieve from Vector store
+        const response = await this.retriever.retrieveAndCombineResults(inputText);
+        const contextPrompt =
+          "\nHere is the context to use to answer the user's question:\n" +
+          response;
+        systemPrompt = systemPrompt + contextPrompt;
+      }
+
+      // Prepare the command to converse with the Bedrock API
+      const converseCmd = {
+        modelId: this.modelId,
+        messages: conversation, //Include the updated conversation history
+        system: [{ text: systemPrompt }],
+        inferenceConfig: {
+          maxTokens: this.inferenceConfig.maxTokens,
+          temperature: this.inferenceConfig.temperature,
+          topP: this.inferenceConfig.topP,
+          stopSequences: this.inferenceConfig.stopSequences,
+        },
+        guardrailConfig: this.guardrailConfig? this.guardrailConfig:undefined,
+        toolConfig: (this.toolConfig ? { tools:this.toolConfig.tool}:undefined)
+      };
+
+      if (this.streaming){
+        return this.handleStreamingResponse(converseCmd);
+      } else {
+        let continueWithTools = false;
+        let finalMessage:ConversationMessage = { role: ParticipantRole.USER, content:[]};
+        let maxRecursions = this.toolConfig?.toolMaxRecursions || this.defaultMaxRecursions;
+
+        do{
+          // send the conversation to Amazon Bedrock
+          const bedrockResponse = await this.handleSingleResponse(converseCmd);
+
+          // Append the model's response to the ongoing conversation
+          conversation.push(bedrockResponse);
+
+          // process model response
+          if (bedrockResponse?.content?.some((content) => 'toolUse' in content)){
+            // forward everything to the tool use handler
+            if (!this.toolConfig){
+              throw new Error("Tool config is not defined");
+            }
+            const toolResponse = await this.toolConfig.useToolHandler(bedrockResponse, conversation);
+            continueWithTools = true;
+            converseCmd.messages.push(toolResponse);
+          }
+          else {
+            continueWithTools = false;
+            finalMessage = bedrockResponse;
+          }
+          maxRecursions--;

+          converseCmd.messages = conversation;
+
+        }while (continueWithTools && maxRecursions > 0)
+        return finalMessage;
+      }
+    } catch (error) {
+      Logger.logger.error("Error processing request:", error.message);
+      throw `Error processing request: ${error.message}`;
+    }
   }

@@ -234,8 +240,8 @@ export class BedrockLLMAgent extends Agent {
       }
       return response.output.message as ConversationMessage;
     } catch (error) {
-      Logger.logger.error("Error invoking Bedrock model:", error);
-      throw error;
+      Logger.logger.error("Error invoking Bedrock model:", error.message);
+      throw `Error invoking Bedrock model: ${error.message}`;
     }
   }

@@ -273,8 +279,8 @@ export class BedrockLLMAgent extends Agent {
       }
     } while (toolUse && --recursions > 0)
   } catch (error) {
-    Logger.logger.error("Error getting stream from Bedrock model:", error);
-    throw error;
+    Logger.logger.error("Error getting stream from Bedrock model:", error.message);
+    throw `Error getting stream from Bedrock model: ${error.message}`;
   }
 }

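Note that the rewritten catch blocks above rethrow template strings rather than Error objects. A thrown string carries no stack trace and fails instanceof Error checks, which is why the stream-error handler in orchestrator.ts below branches on both shapes. A short standalone TypeScript illustration of the difference (not project code):

try {
  throw `Error invoking Bedrock model: timeout`;
} catch (error) {
  console.log(error instanceof Error);    // false: it is a plain string
  console.log(typeof error === "string"); // true
  // Normalizing restores a stack trace and a uniform shape for callers:
  const err = error instanceof Error ? error : new Error(String(error));
  console.log(err.stack !== undefined);   // true
}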
108 changes: 57 additions & 51 deletions typescript/src/orchestrator.ts
@@ -290,49 +290,54 @@ export class MultiAgentOrchestrator {
       additionalParams = {},
     } = params;
 
-    if (!classifierResult.selectedAgent) {
-      return "I'm sorry, but I need more information to understand your request. Could you please be more specific?";
-    } else {
-      const { selectedAgent } = classifierResult;
-      const agentChatHistory = await this.storage.fetchChat(
-        userId,
-        sessionId,
-        selectedAgent.id
-      );
-
-      this.logger.printChatHistory(agentChatHistory, selectedAgent.id);
-
-      this.logger.info(
-        `Routing intent "${userInput}" to ${selectedAgent.id} ...`
-      );
-
-      const response = await this.measureExecutionTime(
-        `Agent ${selectedAgent.name} | Processing request`,
-        () =>
-          selectedAgent.processRequest(
-            userInput,
-            userId,
-            sessionId,
-            agentChatHistory,
-            additionalParams
-          )
-      );
-
-      //if (this.isStream(response)) {
-      if (this.isAsyncIterable(response)) {
-        return response;
-      }
-
-      let responseText = "No response content";
-      if (
-        response.content &&
-        response.content.length > 0 &&
-        response.content[0].text
-      ) {
-        responseText = response.content[0].text;
-      }
-
-      return responseText;
-    }
+    try {
+      if (!classifierResult.selectedAgent) {
+        return "I'm sorry, but I need more information to understand your request. Could you please be more specific?";
+      } else {
+        const { selectedAgent } = classifierResult;
+        const agentChatHistory = await this.storage.fetchChat(
+          userId,
+          sessionId,
+          selectedAgent.id
+        );
+
+        this.logger.printChatHistory(agentChatHistory, selectedAgent.id);
+
+        this.logger.info(
+          `Routing intent "${userInput}" to ${selectedAgent.id} ...`
+        );
+
+        const response = await this.measureExecutionTime(
+          `Agent ${selectedAgent.name} | Processing request`,
+          () =>
+            selectedAgent.processRequest(
+              userInput,
+              userId,
+              sessionId,
+              agentChatHistory,
+              additionalParams
+            )
+        );
+
+        //if (this.isStream(response)) {
+        if (this.isAsyncIterable(response)) {
+          return response;
+        }
+
+        let responseText = "No response content";
+        if (
+          response.content &&
+          response.content.length > 0 &&
+          response.content[0].text
+        ) {
+          responseText = response.content[0].text;
+        }
+
+        return responseText;
+      }
+    } catch (error) {
+      this.logger.error("Error during agent dispatch:", error);
+      throw error;
+    }
   }

@@ -362,21 +367,21 @@ export class MultiAgentOrchestrator {
       };
     }
 
-    // Handle case where no agent was selected
-    if (!classifierResult.selectedAgent) {
-      if (this.config.USE_DEFAULT_AGENT_IF_NONE_IDENTIFIED) {
-        classifierResult = this.getFallbackResult();
-        this.logger.info("Using default agent as no agent was selected");
-      } else {
-        return {
-          metadata: this.createMetadata(classifierResult, userInput, userId, sessionId, additionalParams),
-          output: this.config.NO_SELECTED_AGENT_MESSAGE!,
-          streaming: false,
-        };
-      }
-    }
-
-    try {
+    try {
+      // Handle case where no agent was selected
+      if (!classifierResult.selectedAgent) {
+        if (this.config.USE_DEFAULT_AGENT_IF_NONE_IDENTIFIED) {
+          classifierResult = this.getFallbackResult();
+          this.logger.info("Using default agent as no agent was selected");
+        } else {
+          return {
+            metadata: this.createMetadata(classifierResult, userInput, userId, sessionId, additionalParams),
+            output: this.config.NO_SELECTED_AGENT_MESSAGE!,
+            streaming: false,
+          };
+        }
+      }
+
       const agentResponse = await this.dispatchToAgent({
         userInput,
         userId,
@@ -425,6 +430,7 @@
       };
     } catch (error) {
       this.logger.error("Error during agent dispatch or processing:", error);
+
       return {
         metadata: this.createMetadata(classifierResult, userInput, userId, sessionId, additionalParams),
         output: this.config.GENERAL_ROUTING_ERROR_MSG_MESSAGE ? this.config.GENERAL_ROUTING_ERROR_MSG_MESSAGE: String(error),
@@ -482,7 +488,7 @@
       }
     } catch (error) {
       this.logger.error("Error processing stream:", error);
-
+      accumulatorTransform.end();
       if (error instanceof Error) {
         accumulatorTransform.destroy(error);
       } else if (typeof error === "string") {
