
Merge remote-tracking branch 'nuv-origin/main' into chat-bot-feature/cards
cargillnuvalence committed Aug 7, 2023
2 parents 5b63fa8 + c8427c4 commit 9adf586
Showing 17 changed files with 1,030 additions and 692 deletions.
52 changes: 30 additions & 22 deletions .env.sample
@@ -1,23 +1,31 @@
AZURE_SEARCH_SERVICE=
AZURE_SEARCH_INDEX=
AZURE_SEARCH_KEY=
AZURE_SEARCH_USE_SEMANTIC_SEARCH=False
AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG=default
AZURE_SEARCH_INDEX_IS_PRECHUNKED=False
AZURE_SEARCH_TOP_K=5
AZURE_SEARCH_ENABLE_IN_DOMAIN=False
AZURE_SEARCH_CONTENT_COLUMNS=
AZURE_SEARCH_FILENAME_COLUMN=
AZURE_SEARCH_TITLE_COLUMN=
AZURE_SEARCH_URL_COLUMN=
AZURE_OPENAI_RESOURCE=
AZURE_OPENAI_MODEL=
AZURE_OPENAI_KEY=
AZURE_OPENAI_MODEL_NAME=gpt-35-turbo
AZURE_OPENAI_TEMPERATURE=0
AZURE_OPENAI_TOP_P=1.0
AZURE_ENV_NAME="nyc-cogsearch-useast"
AZURE_FORMRECOGNIZER_RESOURCE_GROUP="rg-nyc-cogsearch-useast"
AZURE_FORMRECOGNIZER_SERVICE="cog-fr-p5nvosgo6yaw4"
AZURE_FORMRECOGNIZER_SKU_NAME="S0"
AZURE_LOCATION="eastus"
AZURE_OPENAI_KEY="========================== GET FROM OUR DEPLOYMENT"
AZURE_OPENAI_MAX_TOKENS=1000
AZURE_OPENAI_STOP_SEQUENCE=
AZURE_OPENAI_SYSTEM_MESSAGE=You are an AI assistant that helps people find information.
AZURE_OPENAI_PREVIEW_API_VERSION=2023-06-01-preview
AZURE_OPENAI_STREAM=True
AZURE_OPENAI_MODEL="turbo"
AZURE_OPENAI_MODEL_NAME="gpt-35-turbo"
AZURE_OPENAI_PREVIEW_API_VERSION="2023-06-01-preview"
AZURE_OPENAI_RESOURCE="cog-p5nvosgo6yaw4"
AZURE_OPENAI_RESOURCE_GROUP="rg-nyc-cogsearch-useast"
AZURE_OPENAI_SKU_NAME="S0"
AZURE_OPENAI_STOP_SEQUENCE=""
AZURE_OPENAI_STREAM="true"
AZURE_OPENAI_SYSTEM_MESSAGE="You are an AI assistant that helps people find information."
AZURE_OPENAI_TEMPERATURE=0
AZURE_RESOURCE_GROUP="rg-nyc-cogsearch-useast"
AZURE_SEARCH_CONTENT_COLUMNS="content"
AZURE_SEARCH_ENABLE_IN_DOMAIN="true"
AZURE_SEARCH_FILENAME_COLUMN="filepath"
AZURE_SEARCH_INDEX="gptkbindex"
AZURE_SEARCH_KEY="========================== GET FROM OUR DEPLOYMENT"
AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG="default"
AZURE_SEARCH_SERVICE="gptkb-p5nvosgo6yaw4"
AZURE_SEARCH_SERVICE_RESOURCE_GROUP="rg-nyc-cogsearch-useast"
AZURE_SEARCH_SKU_NAME="standard"
AZURE_SEARCH_TITLE_COLUMN="title"
AZURE_SEARCH_TOP_K=5
AZURE_SEARCH_URL_COLUMN="url"
AZURE_SEARCH_USE_SEMANTIC_SEARCH="false"
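
A minimal, illustrative sketch of how these settings can be loaded for local development — not part of this commit. app.py itself only reads the variables with os.environ.get(); the python-dotenv call here is an assumption about how a local .env (copied from .env.sample) might be exported before starting the server.

# Illustrative only -- not part of this commit.
import os

from dotenv import load_dotenv  # assumed dev dependency, not shown in this diff

load_dotenv()  # copies KEY=value pairs from a local .env into os.environ

AZURE_OPENAI_RESOURCE = os.environ.get("AZURE_OPENAI_RESOURCE")
AZURE_OPENAI_MODEL = os.environ.get("AZURE_OPENAI_MODEL")
AZURE_OPENAI_KEY = os.environ.get("AZURE_OPENAI_KEY")
AZURE_OPENAI_STREAM = os.environ.get("AZURE_OPENAI_STREAM", "true")

# Mirrors the app.py logic below: streaming is on unless AZURE_OPENAI_STREAM is "false".
SHOULD_STREAM = AZURE_OPENAI_STREAM.lower() == "true"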
3 changes: 3 additions & 0 deletions .gitignore
@@ -1,7 +1,10 @@
.venv
.idea
frontend/node_modules
.env
.env-*
static
.azure/
__pycache__/
node_modules
*.iml
26 changes: 19 additions & 7 deletions app.py
@@ -10,15 +10,18 @@

app = Flask(__name__, static_folder="static")


# Static Files
@app.route("/")
def index():
return app.send_static_file("index.html")


@app.route("/favicon.ico")
def favicon():
return app.send_static_file('favicon.ico')


@app.route("/assets/<path:path>")
def assets(path):
return send_from_directory("static/assets", path)
@@ -45,23 +48,29 @@ def assets(path):
AZURE_OPENAI_TOP_P = os.environ.get("AZURE_OPENAI_TOP_P", 1.0)
AZURE_OPENAI_MAX_TOKENS = os.environ.get("AZURE_OPENAI_MAX_TOKENS", 1000)
AZURE_OPENAI_STOP_SEQUENCE = os.environ.get("AZURE_OPENAI_STOP_SEQUENCE")
AZURE_OPENAI_SYSTEM_MESSAGE = os.environ.get("AZURE_OPENAI_SYSTEM_MESSAGE", "You are an AI assistant that helps people find information.")
AZURE_OPENAI_SYSTEM_MESSAGE = os.environ.get("AZURE_OPENAI_SYSTEM_MESSAGE",
"You are an AI assistant that helps people find information.")
AZURE_OPENAI_PREVIEW_API_VERSION = os.environ.get("AZURE_OPENAI_PREVIEW_API_VERSION", "2023-06-01-preview")
AZURE_OPENAI_STREAM = os.environ.get("AZURE_OPENAI_STREAM", "true")
AZURE_OPENAI_MODEL_NAME = os.environ.get("AZURE_OPENAI_MODEL_NAME", "gpt-35-turbo") # Name of the model, e.g. 'gpt-35-turbo' or 'gpt-4'
AZURE_OPENAI_MODEL_NAME = os.environ.get("AZURE_OPENAI_MODEL_NAME",
"gpt-35-turbo") # Name of the model, e.g. 'gpt-35-turbo' or 'gpt-4'

SHOULD_STREAM = True if AZURE_OPENAI_STREAM.lower() == "true" else False


def is_chat_model():
if 'gpt-4' in AZURE_OPENAI_MODEL_NAME.lower() or AZURE_OPENAI_MODEL_NAME.lower() in ['gpt-35-turbo-4k', 'gpt-35-turbo-16k']:
if 'gpt-4' in AZURE_OPENAI_MODEL_NAME.lower() or AZURE_OPENAI_MODEL_NAME.lower() in ['gpt-35-turbo-4k',
'gpt-35-turbo-16k']:
return True
return False


def should_use_data():
if AZURE_SEARCH_SERVICE and AZURE_SEARCH_INDEX and AZURE_SEARCH_KEY:
return True
return False


def prepare_body_headers_with_data(request):
request_messages = request.json["messages"]

@@ -138,7 +147,7 @@ def stream_with_data(body, headers, endpoint):
role = lineJson["choices"][0]["messages"][0]["delta"].get("role")
if role == "tool":
response["choices"][0]["messages"].append(lineJson["choices"][0]["messages"][0]["delta"])
elif role == "assistant":
elif role == "assistant":
response["choices"][0]["messages"].append({
"role": "assistant",
"content": ""
@@ -156,7 +165,7 @@ def stream_with_data(body, headers, endpoint):
def conversation_with_data(request):
body, headers = prepare_body_headers_with_data(request)
endpoint = f"https://{AZURE_OPENAI_RESOURCE}.openai.azure.com/openai/deployments/{AZURE_OPENAI_MODEL}/extensions/chat/completions?api-version={AZURE_OPENAI_PREVIEW_API_VERSION}"

if not SHOULD_STREAM:
r = requests.post(endpoint, headers=headers, json=body)
status_code = r.status_code
@@ -169,6 +178,7 @@ def conversation_with_data(request):
else:
return Response(None, mimetype='text/event-stream')


def stream_without_data(response):
responseText = ""
for line in response:
@@ -207,13 +217,13 @@ def conversation_without_data(request):

for message in request_messages:
messages.append({
"role": message["role"] ,
"role": message["role"],
"content": message["content"]
})

response = openai.ChatCompletion.create(
engine=AZURE_OPENAI_MODEL,
messages = messages,
messages=messages,
temperature=float(AZURE_OPENAI_TEMPERATURE),
max_tokens=int(AZURE_OPENAI_MAX_TOKENS),
top_p=float(AZURE_OPENAI_TOP_P),
@@ -242,6 +252,7 @@ def conversation_without_data(request):
else:
return Response(None, mimetype='text/event-stream')


@app.route("/conversation", methods=["GET", "POST"])
def conversation():
try:
@@ -254,5 +265,6 @@ def conversation():
logging.exception("Exception in /conversation")
return jsonify({"error": str(e)}), 500


if __name__ == "__main__":
app.run()
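
Once the server is running (Flask's default is http://127.0.0.1:5000), the /conversation route accepts a POST body with a "messages" list of role/content pairs, as read by request.json["messages"] above. The sketch below is illustrative only: the question text is hypothetical, and it assumes the defaults above with streaming enabled, so the reply arrives as a text/event-stream of JSON chunks rather than a single object.

# Illustrative client call -- not part of this commit. Assumes the Flask app is
# running locally on the default port with AZURE_OPENAI_STREAM left at "true".
import requests

payload = {
    "messages": [
        {"role": "user", "content": "How do I renew a parking permit?"}  # hypothetical question
    ]
}

resp = requests.post("http://127.0.0.1:5000/conversation", json=payload, stream=True)
print(resp.status_code)

# With streaming enabled, each non-empty line of the event stream carries a chunk
# of the assistant (or tool) message as it is generated.
for line in resp.iter_lines():
    if line:
        print(line.decode("utf-8"))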
2 changes: 1 addition & 1 deletion frontend/index.html
@@ -4,7 +4,7 @@
<meta charset="UTF-8" />
<link rel="icon" type="image/x-icon" href="/favicon.ico" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Azure AI</title>
<title>NYC CHAT BOT</title>
</head>
<body>
<div id="root"></div>
4 changes: 4 additions & 0 deletions frontend/src/assets/MyCityTitle.svg
(SVG asset; contents not displayed in the diff view.)