-
Notifications
You must be signed in to change notification settings - Fork 400
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
4 changed files
with
185 additions
and
85 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,109 @@ | ||
import streamlit as st | ||
import os | ||
import json | ||
import traceback | ||
import logging | ||
from dotenv import load_dotenv | ||
from utilities.ConfigHelper import ConfigHelper, ChunkingStrategy | ||
|
||
# Load environment variables from a local .env file (no-op if the file is absent).
load_dotenv()

# Quiet the Azure SDK's per-request HTTP logging.
# Fix: setLevel() returns None, so the original `logger = ...setLevel(...)`
# bound None to `logger`; configure the level without the bogus assignment.
logging.getLogger('azure.core.pipeline.policies.http_logging_policy').setLevel(logging.WARNING)

st.set_page_config(page_title="Configure Prompts", page_icon=os.path.join('images','favicon.ico'), layout="wide", menu_items=None)

# Hide Streamlit's default menu, footer and header chrome.
mod_page_style = """
<style>
#MainMenu {visibility: hidden;}
footer {visibility: hidden;}
header {visibility: hidden;}
</style>
"""
st.markdown(mod_page_style, unsafe_allow_html=True)

# Prefer the persisted active configuration; fall back to the built-in
# defaults when the config blob is missing or unreadable.
try:
    config = ConfigHelper.get_active_config()
except Exception:
    config = ConfigHelper.get_default_config()

# Seed the widget state with the stored prompt so the text area shows it on first load.
if 'condense_question_prompt' not in st.session_state:
    st.session_state['condense_question_prompt'] = config.prompts.condense_question_prompt
|
||
def check_variables_in_prompt():
    """Validate that the custom answering prompt contains the required template variables.

    Runs as the ``on_change`` callback of the prompt text areas. If the
    user-supplied answering prompt is missing ``{summaries}`` or ``{question}``,
    a warning is shown and the prompt is reset to ``""`` so the app falls back
    to its default prompt.
    """
    # An empty prompt means "use the default" — nothing to validate.
    # (Fix: the original warned even on an empty prompt, and after resetting
    # the prompt to "" the second check fired a spurious second warning.)
    if not st.session_state.answering_prompt:
        return
    # Check if "summaries" is present in the string answering_prompt
    if "{summaries}" not in st.session_state.answering_prompt:
        st.warning("""Your custom prompt doesn't contain the variable "{summaries}".
This variable is used to add the content of the documents retrieved from the VectorStore to the prompt.
Please add it to your custom prompt to use the app.
Reverting to default prompt.
""")
        st.session_state.answering_prompt = ""
        return
    if "{question}" not in st.session_state.answering_prompt:
        st.warning("""Your custom prompt doesn't contain the variable "{question}".
This variable is used to add the user's question to the prompt.
Please add it to your custom prompt to use the app.
Reverting to default prompt.
""")
        st.session_state.answering_prompt = ""
|
||
try:
    # Prompt initialisation: make sure every widget key exists in session
    # state before the widgets are instantiated.
    for prompt_key in ('condense_question_prompt', 'answering_prompt', 'post_answering_prompt'):
        if prompt_key not in st.session_state:
            st.session_state[prompt_key] = ""

    # Placeholder text shown in the (empty) prompt text areas.
    answering_prompt_placeholder = """{summaries}
Please reply to the question using only the text above.
Question: {question}
Answer:"""
    condense_question_prompt_placeholder = """"""
    post_answering_prompt_placeholder = """"""

    # Tooltip help for each prompt field.
    condense_question_prompt_help = """You can configure a pre prompt by defining how the documents retrieved from the VectorStore will be combined and sent to LLM."""
    answering_prompt_help = """You can configure a custom prompt by adding the variables {summaries} and {question} to the prompt.
{summaries} will be replaced with the content of the documents retrieved from the VectorStore.
{question} will be replaced with the user's question.
"""
    post_answering_prompt_help = """You can configure a post prompt by defining how the user's answer will be processed for fact checking or conflict resolution.
"""

    with st.expander("Prompt configuration", expanded=True):
        # Custom prompt — widget values live in st.session_state under their
        # keys, so the return values need not be bound to locals.
        st.text_area("Condense question prompt", key='condense_question_prompt', on_change=check_variables_in_prompt, placeholder=condense_question_prompt_placeholder, help=condense_question_prompt_help, height=50)
        st.text_area("Answering prompt", key='answering_prompt', on_change=check_variables_in_prompt, placeholder=answering_prompt_placeholder, help=answering_prompt_help, height=50)
        st.text_area("Post-answering prompt", key='post_answering_prompt', on_change=check_variables_in_prompt, placeholder=post_answering_prompt_placeholder, help=post_answering_prompt_help, height=50)

    with st.expander("Chunking configuration", expanded=True):
        # Chunking config input
        st.selectbox('Chunking strategy', [s.value for s in ChunkingStrategy], key="chunking_strategy")
        st.text_input("Chunk size (in tokens)", key='chunking_size', placeholder="500")
        st.text_input("Chunk overlap (in tokens)", key='chunking_overlap', placeholder="100")

    with st.expander("Logging configuration", expanded=True):
        st.checkbox('Log user input and output (questions, answers, chat history, sources)', value=True, key='log_user_interactions')
        st.checkbox('Log tokens', value=True, key='log_tokens')

    if st.button("Save configuration"):
        current_config = {
            "prompts": {
                "condense_question_prompt": st.session_state['condense_question_prompt'],
                "answering_prompt": st.session_state['answering_prompt'],
                "post_answering_prompt": st.session_state['post_answering_prompt']
            },
            "chunking": [{
                "strategy": st.session_state['chunking_strategy'],
                # Fix: text inputs return strings; persist the numeric chunking
                # parameters as ints (falling back to the placeholder defaults
                # when left blank) so saved configs match the default config's
                # types. A non-numeric entry raises and is surfaced by the
                # outer except via st.error.
                "size": int(st.session_state['chunking_size'] or 500),
                "overlap": int(st.session_state['chunking_overlap'] or 100)
            }],
            "logging": {
                "log_user_interactions": st.session_state['log_user_interactions'],
                "log_tokens": st.session_state['log_tokens']
            }
        }
        ConfigHelper.save_config_as_active(current_config)
        st.success("Configuration saved successfully!")

except Exception:
    # Surface the full traceback in the UI for easier debugging.
    st.error(traceback.format_exc())
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,72 @@ | ||
import os | ||
import json | ||
from enum import Enum | ||
from .azureblobstorage import AzureBlobStorageClient | ||
|
||
# Name of the blob-storage container that holds configuration blobs.
CONFIG_CONTAINER_NAME = "config"
|
||
class ChunkingStrategy(Enum):
    """Supported document-chunking strategies; values are the names used in stored config JSON."""
    LAYOUT = 'layout'
    PAGE = 'page'
    FIXED_SIZE_OVERLAP = 'fixed_size_overlap'
    SENTENCE = 'sentence'
|
||
class Config:
    """Typed view over a raw configuration dict (prompts, chunking, logging sections)."""

    def __init__(self, config):
        self.prompts = Prompts(config['prompts'])
        # One Chunking wrapper per entry of the 'chunking' list.
        self.chunking = list(map(Chunking, config['chunking']))
        self.logging = Logging(config['logging'])
|
||
class Prompts:
    """Typed view over the 'prompts' section of a configuration dict."""

    def __init__(self, prompts):
        # Copy the three prompt strings onto attributes of the same name;
        # a missing key raises KeyError, matching strict dict indexing.
        for field in ('condense_question_prompt', 'answering_prompt', 'post_answering_prompt'):
            setattr(self, field, prompts[field])
|
||
class Chunking:
    """Typed view over one entry of the 'chunking' section of a configuration dict."""

    def __init__(self, chunking):
        strategy, size, overlap = chunking['strategy'], chunking['size'], chunking['overlap']
        # Normalise the raw strategy value into the ChunkingStrategy enum.
        self.chunking_strategy = ChunkingStrategy(strategy)
        self.chunk_size = size
        self.chunk_overlap = overlap
|
||
class Logging:
    """Typed view over the 'logging' section of a configuration dict."""

    def __init__(self, logging):
        # Store the two logging flags exactly as provided.
        self.log_user_interactions, self.log_tokens = (
            logging['log_user_interactions'],
            logging['log_tokens'],
        )
|
||
class ConfigHelper:
    """Load and persist the application's active configuration in blob storage."""

    @staticmethod
    def get_active_config():
        """Download 'active.json' from the config container and parse it into a Config."""
        blob_client = AzureBlobStorageClient(container_name=CONFIG_CONTAINER_NAME)
        config = blob_client.download_file("active.json")
        return Config(json.loads(config))

    @staticmethod
    def save_config_as_active(config):
        """Serialize *config* (a plain dict) and upload it as 'active.json'."""
        blob_client = AzureBlobStorageClient(container_name=CONFIG_CONTAINER_NAME)
        # Fix: the original rebound blob_client to upload_file's return value
        # for no reason; just perform the upload.
        blob_client.upload_file(json.dumps(config, indent=2), "active.json", content_type='application/json')

    @staticmethod
    def get_default_config():
        """Return the built-in fallback Config used when no active config exists."""
        default_config = {
            "prompts": {
                "condense_question_prompt": """Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.
Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question:""",
                "answering_prompt": "",
                "post_answering_prompt": None,
            },
            "chunking": [{
                # Fix: store the enum's string value, not the enum member, so
                # the default dict is JSON-serializable and consistent with
                # configs round-tripped through save_config_as_active.
                # Backward-compatible: Chunking calls ChunkingStrategy(value).
                "strategy": ChunkingStrategy.FIXED_SIZE_OVERLAP.value,
                "size": 500,
                "overlap": 100
            }],
            "logging": {
                "log_user_interactions": True,
                "log_tokens": True
            }
        }
        return Config(default_config)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters