diff --git a/.env.sample b/.env.sample index 0757696f8..8cd7407af 100644 --- a/.env.sample +++ b/.env.sample @@ -27,7 +27,7 @@ AZURE_OPENAI_TOP_P=1.0 AZURE_OPENAI_MAX_TOKENS=1000 AZURE_OPENAI_STOP_SEQUENCE= AZURE_OPENAI_SYSTEM_MESSAGE=You are an AI assistant that helps people find information. -AZURE_OPENAI_API_VERSION=2023-07-01-preview +AZURE_OPENAI_API_VERSION=2023-12-01-preview AZURE_OPENAI_STREAM=True # Backend for processing the documents and application logging in the app AzureWebJobsStorage= diff --git a/code/app/app.py b/code/app/app.py index 5ec28fd4b..bfdf0da66 100644 --- a/code/app/app.py +++ b/code/app/app.py @@ -69,7 +69,7 @@ def get_config(): "You are an AI assistant that helps people find information.", ) AZURE_OPENAI_API_VERSION = os.environ.get( - "AZURE_OPENAI_API_VERSION", "2023-06-01-preview" + "AZURE_OPENAI_API_VERSION", "2023-12-01-preview" ) AZURE_OPENAI_STREAM = os.environ.get("AZURE_OPENAI_STREAM", "true") AZURE_OPENAI_MODEL_NAME = os.environ.get( @@ -144,9 +144,9 @@ def prepare_body_headers_with_data(request): chatgpt_url = f"https://{AZURE_OPENAI_RESOURCE}.openai.azure.com/openai/deployments/{AZURE_OPENAI_MODEL}" if is_chat_model(): - chatgpt_url += "/chat/completions?api-version=2023-03-15-preview" + chatgpt_url += "/chat/completions?api-version=2023-12-01-preview" else: - chatgpt_url += "/completions?api-version=2023-03-15-preview" + chatgpt_url += "/completions?api-version=2023-12-01-preview" headers = { "Content-Type": "application/json", @@ -243,7 +243,7 @@ def stream_without_data(response): def conversation_without_data(request): openai.api_type = "azure" openai.api_base = f"https://{AZURE_OPENAI_RESOURCE}.openai.azure.com/" - openai.api_version = "2023-03-15-preview" + openai.api_version = "2023-12-01-preview" openai.api_key = AZURE_OPENAI_KEY request_messages = request.json["messages"] diff --git a/code/batch/GetConversationResponse.py b/code/batch/GetConversationResponse.py new file mode 100644 index 000000000..c17d68808 --- /dev/null +++ b/code/batch/GetConversationResponse.py @@ -0,0 +1,58 @@ +import azure.functions as func +import logging +import json +import os +import sys +from utilities.helpers.OrchestratorHelper import Orchestrator + +sys.path.append("..") + +bp_get_conversation_response = func.Blueprint() + +@bp_get_conversation_response.route(route="GetConversationResponse") +def get_conversation_response(req: func.HttpRequest) -> func.HttpResponse: + logging.info("Python HTTP trigger function processed a request.") + + + message_orchestrator = Orchestrator() + + try: + req_body = req.get_json() + user_message = req_body["messages"][-1]["content"] + conversation_id = req_body["conversation_id"] + user_assistant_messages = list( + filter( + lambda x: x["role"] in ("user", "assistant"), req_body["messages"][0:-1] + ) + ) + chat_history = [] + for i, k in enumerate(user_assistant_messages): + if i % 2 == 0: + chat_history.append( + ( + user_assistant_messages[i]["content"], + user_assistant_messages[i + 1]["content"], + ) + ) + from utilities.helpers.ConfigHelper import ConfigHelper + + messages = message_orchestrator.handle_message( + user_message=user_message, + chat_history=chat_history, + conversation_id=conversation_id, + orchestrator=ConfigHelper.get_active_config_or_default().orchestrator, + ) + + response_obj = { + "id": "response.id", + "model": os.getenv("AZURE_OPENAI_MODEL"), + "created": "response.created", + "object": "response.object", + "choices": [{"messages": messages}], + } + + return func.HttpResponse(json.dumps(response_obj), 
status_code=200) + + except Exception as e: + logging.exception("Exception in /api/GetConversationResponse") + return func.HttpResponse(json.dumps({"error": str(e)}), status_code=500) \ No newline at end of file diff --git a/code/batch/function_app.py b/code/batch/function_app.py index fb8e84ce9..3e3d26e3c 100644 --- a/code/batch/function_app.py +++ b/code/batch/function_app.py @@ -2,6 +2,7 @@ from AddURLEmbeddings import bp_add_url_embeddings from BatchPushResults import bp_batch_push_results from BatchStartProcessing import bp_batch_start_processing +from GetConversationResponse import bp_get_conversation_response app = func.FunctionApp( http_auth_level=func.AuthLevel.FUNCTION @@ -9,3 +10,4 @@ app.register_functions(bp_add_url_embeddings) app.register_functions(bp_batch_push_results) app.register_functions(bp_batch_start_processing) +app.register_functions(bp_get_conversation_response) diff --git a/code/requirements.txt b/code/requirements.txt index 03703c69f..f88a9ea97 100644 --- a/code/requirements.txt +++ b/code/requirements.txt @@ -16,7 +16,7 @@ beautifulsoup4==4.12.3 fake-useragent==1.4.0 chardet==5.2.0 --extra-index-url https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-python/pypi/simple/ -azure-search-documents==11.4.0b8 +azure-search-documents==11.4.0 opencensus-ext-azure==1.1.13 pandas==2.2.0 python-docx==1.1.0 diff --git a/docs/LOCAL_DEPLOYMENT.md b/docs/LOCAL_DEPLOYMENT.md index 1a8d653bc..93b3807fb 100644 --- a/docs/LOCAL_DEPLOYMENT.md +++ b/docs/LOCAL_DEPLOYMENT.md @@ -187,7 +187,7 @@ docker push YOUR_DOCKER_REGISTRY/YOUR_DOCKER_IMAGE |AZURE_OPENAI_MAX_TOKENS|1000|The maximum number of tokens allowed for the generated answer.| |AZURE_OPENAI_STOP_SEQUENCE||Up to 4 sequences where the API will stop generating further tokens. Represent these as a string joined with "|", e.g. `"stop1|stop2|stop3"`| |AZURE_OPENAI_SYSTEM_MESSAGE|You are an AI assistant that helps people find information.|A brief description of the role and tone the model should use| -|AZURE_OPENAI_API_VERSION|2023-06-01-preview|API version when using Azure OpenAI on your data| +|AZURE_OPENAI_API_VERSION|2023-12-01-preview|API version when using Azure OpenAI on your data| |AzureWebJobsStorage||The connection string to the Azure Blob Storage for the Azure Functions Batch processing| |BACKEND_URL||The URL for the Backend Batch Azure Function. Use http://localhost:7071 for local execution and http://backend for docker compose| |DOCUMENT_PROCESSING_QUEUE_NAME|doc-processing|The name of the Azure Queue to handle the Batch processing| @@ -202,4 +202,4 @@ docker push YOUR_DOCKER_REGISTRY/YOUR_DOCKER_IMAGE |AZURE_CONTENT_SAFETY_KEY | | The key of the Azure AI Content Safety service| |AZURE_SPEECH_SERVICE_KEY | | The key of the Azure Speech service| |AZURE_SPEECH_SERVICE_REGION | | The region (location) of the Azure Speech service| -|AZURE_AUTH_TYPE | rbac | Change the value to 'keys' to authenticate using AZURE API keys. For more information refer to section [Authenticate using RBAC](#authenticate-using-rbac) +|AZURE_AUTH_TYPE | rbac | Change the value to 'keys' to authenticate using AZURE API keys. 
For more information refer to section [Authenticate using RBAC](#authenticate-using-rbac) \ No newline at end of file diff --git a/docs/TEAMS_EXTENSION.md b/docs/TEAMS_EXTENSION.md index efca5220c..72fbe6588 100644 --- a/docs/TEAMS_EXTENSION.md +++ b/docs/TEAMS_EXTENSION.md @@ -1,16 +1,14 @@ [Back to *Chat with your data* README](../README.md) # Teams extension -[**USER STORY**](#user-story) | [**ONE-CLICK DEPLOY**](#one-click-deploy) | [**SUPPORTING DOCUMENTATION**](#supporting-documentation) +[**USER STORY**](#user-story) | [**TEAMS DEPLOY**](#teams-deploy) | [**SUPPORTING DOCUMENTATION**](#supporting-documentation) \ \ ![User Story](/media/userStory.png) ## User story This extension enables users to experience Chat with your data within Teams, without having to switch platforms. It allows them to stay within their existing workflow and get the answers they need. Instead of building the Chat with your data solution accelerator from scratch within Teams, the same underlying backend used for the web application is leveraged within Teams. -\ -\ -![One-click Deploy](/media/oneClickDeploy.png) -## One-click deploy + +## Deployment to Teams **IMPORTANT**: Before you proceed, installation and configuration of the [Chat with your data with speech-to-text deployment](../README.md) is required. ### Pre-requisites @@ -22,31 +20,6 @@ This extension enables users to experience Chat with your data within Teams, wit - [Enable custom Teams apps and turn on custom app uploading](https://learn.microsoft.com/en-us/microsoftteams/platform/concepts/build-and-test/prepare-your-o365-tenant#enable-custom-teams-apps-and-turn-on-custom-app-uploading) (optional: Teams extension only) - In order to publish the App to the Teams Store, the Teams Administrator role is required. -### Deploy backend Azure Function - -[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure-Samples%2Fchat-with-your-data-solution-accelerator%2Fmain%2Fextensions%2Finfrastructure%2Fmain.json) - -Note: The (p) suffix on the App Setting (below) means that you should use the same resources and services deployed during the [Chat with your data with speech-to-text deployment](../README.md) - -| App Setting | Note | -| --- | ------------- | -|Resource group | The resource group that will contain the resources for this accelerator. You can select Create new to create a new group or use the existing resource group created with [Speech-to-text deployment](#speech-to-text-deployment). | -|Resource prefix | A text string that will be appended to each resource that gets created, and used as the website name for the web app. This name cannot contain spaces or special characters. | -|App Insights Connection String (p) | The Application Insights connection string to store the application logs. | -|Azure AI Search (p) | The **name** of your Azure AI Search resource. e.g. https://<**name**>.search.windows.net. | -|Azure Search Index (p) | The name of your Azure AI Search Index. | -|Azure Search Key (p) | An admin key for your Azure AI Search resource. | -|Azure OpenAI resource (p) | The name of your Azure OpenAI resource. This resource must have already been created previously. | -|Azure OpenAI key (p) | The access key is associated with your Azure OpenAI resource. | -|Orchestration strategy (p) | Use Azure OpenAI Functions (openai_functions) or LangChain (langchain) for messages orchestration. 
If you are using a new model version 0613 select "openai_functions" (or "langchain"), if you are using a model version 0314 select "langchain". | -|Azure Form Recognizer Endpoint (p) | The name of the Azure Form Recognizer for extracting the text from the documents. | -|Azure Form Recognizer Key (p) | The key of the Azure Form Recognizer for extracting the text from the documents. | -|Azure Blob Account Name (p) | The name of the Azure Blob Storage for storing the original documents to be processed. | -|Azure Blob Account Key (p) | The key of the Azure Blob Storage for storing the original documents to be processed. | -|Azure Blob Container Name (p) | The name of the Container in the Azure Blob Storage for storing the original documents to be processed. | - -You can find the [ARM template](/extensions/infrastructure/main.json) used, along with a [Bicep file](/extensions/infrastructure/main.bicep) for deploying this accelerator in the /infrastructure directory. - ### Deploy Teams application 1. Clone this GitHub repo. 2. Open the “extensions/teams” folder with Visual Studio Code @@ -57,8 +30,13 @@ You can find the [ARM template](/extensions/infrastructure/main.json) used, alon ![ENV](/media/teams-1.png) -4. Locate the environment variable AZURE_FUNCTION_URL. -5. Replace the with the name of your Function App resource (created in previous section) +4. Locate the environment variable _AZURE_FUNCTION_URL_. +5. Replace the `` and `` with the name of your Function App resource and its clientKey (created in previous section) + ```env + AZURE_FUNCTION_URL=https://-backend.azurewebsites.net/api/GetConversationResponse?code=&clientId=clientKey + + ``` + ![Env](/media/teams-deploy-env.png) 6. Save the file. 7. Select Teams Toolkit from the navigation panel. @@ -166,4 +144,4 @@ To customize the accelerator or run it locally, first, copy the .env.sample file ## Supporting documentation ### Resource links for Teams extension This solution accelerator deploys the following resources. It's crucial to comprehend the functionality of each. Below are the links to their respective documentation: -- [Bots in Microsoft Teams - Teams | Microsoft Learn](https://learn.microsoft.com/en-us/microsoftteams/platform/bots/what-are-bots) +- [Bots in Microsoft Teams - Teams | Microsoft Learn](https://learn.microsoft.com/en-us/microsoftteams/platform/bots/what-are-bots) \ No newline at end of file diff --git a/docs/TEAMS_LOCAL_DEPLOYMENT.md b/docs/TEAMS_LOCAL_DEPLOYMENT.md index 6e533c5cd..e40dd34cd 100644 --- a/docs/TEAMS_LOCAL_DEPLOYMENT.md +++ b/docs/TEAMS_LOCAL_DEPLOYMENT.md @@ -14,30 +14,26 @@ First, install [Azure Functions Core Tools](https://learn.microsoft.com/en-us/az ```shell -cd extensions -cd backend +cd code\batch func start ``` Or use the [Azure Functions VS Code extension](https://marketplace.visualstudio.com/items?itemName=ms-azuretools.vscode-azurefunctions). -#### Building the Teams Backend Docker image -```shell -docker build -f extensions\docker\Backend.Dockerfile -t YOUR_DOCKER_REGISTRY/YOUR_DOCKER_IMAGE . -docker run --env-file .env -p 7071:80 YOUR_DOCKER_REGISTRY/YOUR_DOCKER_IMAGE -docker push YOUR_DOCKER_REGISTRY/YOUR_DOCKER_IMAGE -``` ### Develop & run the Teams Frontend 1. Open the “extensions/teams” folder with Visual Studio Code ![Teams](/media/teams.png) 2. Open the file env\\.env.local -3. Locate the environment variable AZURE_FUNCTION_URL. -4. Replace the with your local Teams Backend URL (i.e., http://localhost:7071/api/http_cwyod) +3. Locate the environment variable _AZURE_FUNCTION_URL_. 
-![Env](/media/teams-local-3.png) +4. Replace the `` with your local Teams Backend URL (i.e., http://localhost:7071/api/GetConversationResponse) + ```env + AZURE_FUNCTION_URL=http://localhost:7071/api/GetConversationResponse + ``` + ![Env](/media/teams-local-3.png) 5. Save the file. 6. Select Teams Toolkit from the navigation panel. @@ -76,7 +72,7 @@ docker push YOUR_DOCKER_REGISTRY/YOUR_DOCKER_IMAGE |AZURE_OPENAI_MAX_TOKENS|1000|The maximum number of tokens allowed for the generated answer.| |AZURE_OPENAI_STOP_SEQUENCE||Up to 4 sequences where the API will stop generating further tokens. Represent these as a string joined with "|", e.g. `"stop1|stop2|stop3"`| |AZURE_OPENAI_SYSTEM_MESSAGE|You are an AI assistant that helps people find information.|A brief description of the role and tone the model should use| -|AZURE_OPENAI_API_VERSION|2023-06-01-preview|API version when using Azure OpenAI on your data| +|AZURE_OPENAI_API_VERSION|2023-12-01-preview|API version when using Azure OpenAI on your data| |AzureWebJobsStorage||The connection string to the Azure Blob Storage for the Azure Functions Batch processing| |BACKEND_URL||The URL for the Backend Batch Azure Function. Use http://localhost:7071 for local execution and http://backend for docker compose| |DOCUMENT_PROCESSING_QUEUE_NAME|doc-processing|The name of the Azure Queue to handle the Batch processing| @@ -90,4 +86,4 @@ docker push YOUR_DOCKER_REGISTRY/YOUR_DOCKER_IMAGE |AZURE_CONTENT_SAFETY_ENDPOINT | | The endpoint of the Azure AI Content Safety service | |AZURE_CONTENT_SAFETY_KEY | | The key of the Azure AI Content Safety service| |AZURE_SPEECH_SERVICE_KEY | | The key of the Azure Speech service| -|AZURE_SPEECH_SERVICE_REGION | | The region (location) of the Azure Speech service| +|AZURE_SPEECH_SERVICE_REGION | | The region (location) of the Azure Speech service| \ No newline at end of file diff --git a/extensions/infrastructure/main.bicep b/extensions/infrastructure/main.bicep index 0e8080717..a6ad89062 100644 --- a/extensions/infrastructure/main.bicep +++ b/extensions/infrastructure/main.bicep @@ -104,7 +104,7 @@ param AzureOpenAIStopSequence string = '\n' param AzureOpenAISystemMessage string = 'You are an AI assistant that helps people find information.' 
@description('Azure OpenAI Api Version - Created during the "Chat with your data" Solution Accelerator') -param AzureOpenAIApiVersion string = '2023-07-01-preview' +param AzureOpenAIApiVersion string = '2023-12-01-preview' @description('Whether or not to stream responses from Azure OpenAI - Created during the "Chat with your data" Solution Accelerator') param AzureOpenAIStream string = 'true' @@ -129,7 +129,7 @@ param AzureBlobAccountKey string @description('Storage Account Container Name - Created during the "Chat with your data" Solution Accelerator') param AzureBlobContainerName string -var BackendImageName = 'DOCKER|fruoccopublic.azurecr.io/cwyod_backend' +var BackendImageName = 'DOCKER|fruoccopublic.azurecr.io/rag-backend' resource HostingPlan 'Microsoft.Web/serverfarms@2020-06-01' = { name: HostingPlanName @@ -214,4 +214,4 @@ resource WaitFunctionDeploymentSection 'Microsoft.Resources/deploymentScripts@20 dependsOn: [ Function ] -} \ No newline at end of file +} diff --git a/extensions/infrastructure/main.json b/extensions/infrastructure/main.json index e01afd9e7..32c6949a9 100644 --- a/extensions/infrastructure/main.json +++ b/extensions/infrastructure/main.json @@ -217,7 +217,7 @@ }, "AzureOpenAIApiVersion": { "type": "string", - "defaultValue": "2023-07-01-preview", + "defaultValue": "2023-12-01-preview", "metadata": { "description": "Azure OpenAI Api Version - Created during the \"Chat with your data\" Solution Accelerator" } @@ -268,7 +268,7 @@ } }, "variables": { - "BackendImageName": "DOCKER|fruoccopublic.azurecr.io/cwyod_backend" + "BackendImageName": "DOCKER|fruoccopublic.azurecr.io//rag-backend" }, "resources": [ { diff --git a/extensions/teams/appPackage/manifest.json b/extensions/teams/appPackage/manifest.json index c48178e60..e0a8e8245 100644 --- a/extensions/teams/appPackage/manifest.json +++ b/extensions/teams/appPackage/manifest.json @@ -1,7 +1,7 @@ { "$schema": "https://developer.microsoft.com/en-us/json-schemas/teams/v1.16/MicrosoftTeams.schema.json", "manifestVersion": "1.16", - "version": "1.0.0", + "version": "1.1.4", "id": "${{TEAMS_APP_ID}}", "packageName": "com.microsoft.teams.extension", "developer": { diff --git a/extensions/teams/cards/cardBuilder.ts b/extensions/teams/cards/cardBuilder.ts new file mode 100644 index 000000000..b6087e16f --- /dev/null +++ b/extensions/teams/cards/cardBuilder.ts @@ -0,0 +1,112 @@ +import { Attachment, CardFactory } from "botbuilder"; +import { Citation, CardType } from "../model"; + +export function actionBuilder(citation: Citation, docId: number): any { + + const urlParts = citation.url.split("]"); + let url = urlParts[urlParts.length - 1].replaceAll("(", "").replaceAll(")", ""); + let title = citation.title.replaceAll("/documents/", ""); + let content = citation.content.replaceAll(citation.title, "").replaceAll("url", ""); + content = content.replaceAll(/(<([^>]+)>)/ig, "\n").replaceAll("<>", ""); + let citationCardAction = { + title: `Ref${docId}`, + type: CardType.ShowCard, + card: { + type: CardType.AdaptiveCard, + body: [ + { + type: CardType.TextBlock, + text: `Reference - Part ${parseInt(citation.chunk_id) + 1}`, + wrap: true, + size: "small", + }, + { + type: CardType.TextBlock, + text: title, + wrap: true, + weight: "Bolder", + size: "Large", + }, + { + type: CardType.TextBlock, + text: content, + wrap: true + } + ], + actions: [ + { + type: CardType.OpenUrl, + title: "Go to the source", + url: url, + } + ] + } + }; + + return citationCardAction; +} +export function cardBodyBuilder(citations: any[], assistantAnswer: 
string): any { + let answerCard = { + "$schema": "http://adaptivecards.io/schemas/adaptive-card.json", + "version": "1.6", + type: CardType.AdaptiveCard, + body: [ + { + type: CardType.TextBlock, + text: assistantAnswer, + wrap: true + + }, { + type: 'ActionSet', + actions: [] + } + ], + actions: [], + msteams: { + width: "Full" + } + }; + if (citations.length <= 6) { + answerCard["actions"] = citations; + } else { + const chunkSize = 5; + for (let i = 0; i < citations.length; i += chunkSize) { + const chunk = citations.slice(i, i + chunkSize); + answerCard["body"].push({ + type: 'ActionSet', + actions: chunk + }); + } + } + + return answerCard; +} +export function cwydResponseBuilder(citations: Citation[], assistantAnswer: string): Attachment { + let citationActions: any[] = []; + let docId = 1; + let deleteEnd = ""; + let deleteEndSpace = ""; + let refCount = 1; + let findPart = {}; + let reIndex = 0; + citations.map((citation: Citation) => { + if (!(citation.chunk_id in findPart)) { + reIndex = docId; + citationActions.push(actionBuilder(citation, reIndex)); + findPart[citation.chunk_id] = reIndex; + docId++; + } else { + reIndex = findPart[citation.chunk_id]; + } + + deleteEnd += `[${reIndex}]`; + deleteEndSpace += ` [${reIndex}]`; + assistantAnswer = assistantAnswer.replaceAll(`[doc${refCount}]`, `[${reIndex}]`); + + refCount++; + }); + assistantAnswer = assistantAnswer.replaceAll(deleteEnd, ""); + assistantAnswer = assistantAnswer.replaceAll(deleteEndSpace, ""); + let answerCard = CardFactory.adaptiveCard(cardBodyBuilder(citationActions, assistantAnswer)); + return answerCard; +} \ No newline at end of file diff --git a/extensions/teams/env/.env.dev b/extensions/teams/env/.env.dev index cc4e23e7b..cf35d4966 100644 --- a/extensions/teams/env/.env.dev +++ b/extensions/teams/env/.env.dev @@ -14,4 +14,4 @@ BOT_ID= TEAMS_APP_ID= BOT_AZURE_APP_SERVICE_RESOURCE_ID= BOT_DOMAIN= -AZURE_FUNCTION_URL=https://.azurewebsites.net/api/http_cwyod \ No newline at end of file +AZURE_FUNCTION_URL=https://-backend.azurewebsites.net/api/GetConversationResponse?code=&clientId=clientKey \ No newline at end of file diff --git a/extensions/teams/env/.env.test b/extensions/teams/env/.env.test index 2ddddaed7..56c299b9a 100644 --- a/extensions/teams/env/.env.test +++ b/extensions/teams/env/.env.test @@ -14,6 +14,6 @@ BOT_ID= TEAMS_APP_ID= BOT_AZURE_APP_SERVICE_RESOURCE_ID= BOT_DOMAIN= -AZURE_FUNCTION_URL=https://.azurewebsites.net/api/http_cwyod +AZURE_FUNCTION_URL=https://-backend.azurewebsites.net/api/GetConversationResponse?code=&clientId=clientKey TEAMS_APP_TENANT_ID= TEAMS_APP_PUBLISHED_APP_ID= \ No newline at end of file diff --git a/extensions/teams/env/.env.testtool b/extensions/teams/env/.env.testtool index 2b45eb48b..e7d1b25ab 100644 --- a/extensions/teams/env/.env.testtool +++ b/extensions/teams/env/.env.testtool @@ -5,4 +5,4 @@ TEAMSFX_ENV=testtool # Environment variables used by test tool TEAMSAPPTESTER_PORT=56150 -AZURE_FUNCTION_URL=https://.azurewebsites.net/api/http_cwyod \ No newline at end of file +AZURE_FUNCTION_URL=https://-backend.azurewebsites.net/api/GetConversationResponse?code=&clientId=clientKey \ No newline at end of file diff --git a/extensions/teams/model.ts b/extensions/teams/model.ts index eb9156d2f..71d42f2e8 100644 --- a/extensions/teams/model.ts +++ b/extensions/teams/model.ts @@ -15,6 +15,13 @@ export type Citation = { reindex_id: string | null; } +export enum CardType { + OpenUrl = "Action.OpenUrl", + ShowCard = "Action.ShowCard", + AdaptiveCard = "AdaptiveCard", + TextBlock 
= "TextBlock" +} + export type ToolMessageContent = { citations: Citation[]; intent: string; diff --git a/extensions/teams/teamsBot.ts b/extensions/teams/teamsBot.ts index 5ba4fce75..23b784153 100644 --- a/extensions/teams/teamsBot.ts +++ b/extensions/teams/teamsBot.ts @@ -2,7 +2,7 @@ import { TeamsActivityHandler, TurnContext, ActivityTypes, - MessageFactory, + MessageFactory } from "botbuilder"; import config from "./config"; import { @@ -11,6 +11,7 @@ import { ToolMessageContent, Citation, } from "./model"; +import { cwydResponseBuilder } from "./cards/cardBuilder"; const EMPTY_RESPONSE = "Sorry, I do not have an answer. Please try again."; @@ -19,6 +20,7 @@ export class TeamsBot extends TeamsActivityHandler { super(); let newActivity; let assistantAnswer = ""; + let activityUpdated = true; this.onMessage(async (context, next) => { console.log("Running with Message Activity."); @@ -118,32 +120,41 @@ export class TeamsBot extends TeamsActivityHandler { assistantAnswer = answer.content; if (assistantAnswer.startsWith("[doc")) { assistantAnswer = EMPTY_RESPONSE; + newActivity = MessageFactory.text(assistantAnswer); } else { - const citations = parseCitationFromMessage(answers[index - 1]); - let docId = 1; - citations.map((citation: Citation) => { - const urlParts = citation.url.split("]"); - const url = urlParts[urlParts.length - 1]; - assistantAnswer = assistantAnswer.replaceAll( - `[doc${docId}]`, - `[[${citation.filepath}]${url}]` - ); - docId++; - }); + const citations = parseCitationFromMessage(answers[index - 1]) as Citation[]; + if (citations.length === 0) { + newActivity = MessageFactory.text(assistantAnswer); + newActivity.id = reply.id; + } else { + newActivity = MessageFactory.attachment(cwydResponseBuilder(citations, assistantAnswer)); + activityUpdated = false; + } } - newActivity = MessageFactory.text(assistantAnswer); - newActivity.id = reply.id; } else if (answer.role === "error") { newActivity = MessageFactory.text( "Sorry, an error occurred. Try waiting a few minutes. If the issue persists, contact your system administrator. Error: " + - answer.content + answer.content ); newActivity.id = reply.id; } + }); newActivity.typing = false; // Stop the ellipses visual indicator - await context.updateActivity(newActivity); + + if (activityUpdated) { + await context.updateActivity(newActivity); + } else { + try { + await context.deleteActivity(reply.id); + } catch (error) { + console.log('Error in deleting message', error); + } + await context.sendActivity(newActivity); + } + } catch (error) { + console.log('Error in onMessage:', error); } finally { } @@ -164,4 +175,4 @@ export class TeamsBot extends TeamsActivityHandler { await next(); }); } -} +} \ No newline at end of file diff --git a/infra/deployment.bicep b/infra/deployment.bicep index 8f6da40ad..d4b3bdb09 100644 --- a/infra/deployment.bicep +++ b/infra/deployment.bicep @@ -104,7 +104,7 @@ param AzureOpenAIStopSequence string = '\n' param AzureOpenAISystemMessage string = 'You are an AI assistant that helps people find information.' 
@description('Azure OpenAI Api Version') -param AzureOpenAIApiVersion string = '2023-07-01-preview' +param AzureOpenAIApiVersion string = '2023-12-01-preview' @description('Whether or not to stream responses from Azure OpenAI') param AzureOpenAIStream string = 'true' @@ -510,6 +510,7 @@ resource Function 'Microsoft.Web/sites@2018-11-01' = { properties: { siteConfig: { appSettings: [ + { name: 'APPINSIGHTS_CONNECTION_STRING', value: reference(ApplicationInsights.id, '2015-05-01').ConnectionString } { name: 'FUNCTIONS_EXTENSION_VERSION', value: '~4' } { name: 'WEBSITES_ENABLE_APP_SERVICE_STORAGE', value: 'false' } { name: 'APPINSIGHTS_INSTRUMENTATIONKEY', value: reference(ApplicationInsights.id, '2015-05-01').InstrumentationKey } @@ -904,4 +905,4 @@ module searchIndexDataContUser 'security/role.bicep' = if (authType == 'rbac' && roleDefinitionId: '8ebe5a00-799e-43f5-93ac-243d3dce84a7' principalType: 'User' } -} \ No newline at end of file +} diff --git a/infra/deployment.json b/infra/deployment.json index f805e8c94..2012dc700 100644 --- a/infra/deployment.json +++ b/infra/deployment.json @@ -4,8 +4,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "8362420292927365599" + "version": "0.25.53.49325", + "templateHash": "10944925979843412005" } }, "parameters": { @@ -230,7 +230,7 @@ }, "AzureOpenAIApiVersion": { "type": "string", - "defaultValue": "2023-07-01-preview", + "defaultValue": "2023-12-01-preview", "metadata": { "description": "Azure OpenAI Api Version" } @@ -956,6 +956,10 @@ "properties": { "siteConfig": { "appSettings": [ + { + "name": "APPINSIGHTS_CONNECTION_STRING", + "value": "[reference(resourceId('Microsoft.Insights/components', parameters('ApplicationInsightsName')), '2015-05-01').ConnectionString]" + }, { "name": "FUNCTIONS_EXTENSION_VERSION", "value": "~4" @@ -1173,8 +1177,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -1242,8 +1246,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -1311,8 +1315,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -1380,8 +1384,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -1449,8 +1453,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -1518,8 +1522,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." 
}, @@ -1584,8 +1588,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -1653,8 +1657,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -1722,8 +1726,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -1791,8 +1795,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -1860,8 +1864,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -1929,8 +1933,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -1998,8 +2002,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -2064,8 +2068,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -2133,8 +2137,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -2202,8 +2206,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -2271,8 +2275,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -2340,8 +2344,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." 
}, @@ -2406,8 +2410,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -2475,8 +2479,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -2544,8 +2548,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -2613,8 +2617,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -2679,8 +2683,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -2748,8 +2752,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -2817,8 +2821,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, @@ -2886,8 +2890,8 @@ "metadata": { "_generator": { "name": "bicep", - "version": "0.24.24.22086", - "templateHash": "2184194315885104837" + "version": "0.25.53.49325", + "templateHash": "15698903649631098787" }, "description": "Creates a role assignment for a service principal." }, diff --git a/media/chat-app.png b/media/chat-app.png index f7bffa9d6..d7c6716d0 100644 Binary files a/media/chat-app.png and b/media/chat-app.png differ diff --git a/media/teams-deploy-env.png b/media/teams-deploy-env.png new file mode 100644 index 000000000..ea4bccb84 Binary files /dev/null and b/media/teams-deploy-env.png differ diff --git a/media/teams-local-3.png b/media/teams-local-3.png index 473869311..5e9de298d 100644 Binary files a/media/teams-local-3.png and b/media/teams-local-3.png differ