Skip to content

Commit

Permalink
Chat improvements (#414)
Browse files Browse the repository at this point in the history
* prompt updates

add ability to change temperature in the frontend

* fix bug in field_utils.js where block and item fields couldn't be set to 0 (introduced in #309 and #312)

* Fix typos in prompt

Co-authored-by: Matthew Evans <[email protected]>

tweak prompt to work well after typos

* pin openai model to current model
  • Loading branch information
jdbocarsly authored Jun 30, 2023
1 parent 9d6eba4 commit f820ba8
Show file tree
Hide file tree
Showing 3 changed files with 61 additions and 22 deletions.
19 changes: 10 additions & 9 deletions pydatalab/pydatalab/apps/chat/blocks.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from pydatalab.utils import CustomJSONEncoder

__all__ = "ChatBlock"
MODEL = "gpt-3.5-turbo"
MODEL = "gpt-3.5-turbo-0613"
MAX_CONTEXT_SIZE = 4097


Expand All @@ -35,14 +35,14 @@ def num_tokens_from_messages(messages: Sequence[dict]):

class ChatBlock(DataBlock):
blocktype = "chat"
description = "LLM Chat Block with contextual data (powered by GPT-3.5-turbo)"
description = "Virtual assistant"
accepted_file_extensions: Sequence[str] = []
__supports_collections = True
defaults = {
"system_prompt": """You are a virtual assistant that helps materials chemists manage their experimental data and plan experiments. You are deployed in the group of Professor Clare Grey in the Department of Chemistry at the University of Cambridge.
        "system_prompt": """You are whinchat (lowercase w), a virtual data management assistant that helps materials chemists manage their experimental data and plan experiments. You are deployed in the group of Professor Clare Grey in the Department of Chemistry at the University of Cambridge.
You are embedded within the program datalab, where you have access to JSON describing an ‘item’, or a collection of items, with connections to other items. These items may include experimental samples, starting materials, and devices (e.g. battery cells made out of experimental samples and starting materials).
Answer questions in markdown. Specify the language for all markdown code blocks. You can make diagrams by writing a mermaid code block or an svg code block.
Be as concise as possible. Start the conversion with a friendly greeting introducing yourself.
Answer questions in markdown. Specify the language for all markdown code blocks. You can make diagrams by writing a mermaid code block or an svg code block. When writing mermaid code, you must use quotations around each of the labels (e.g. A["label1"] --> B["label2"])
Be as concise as possible. When saying your name, type a bird emoji right after whinchat 🐦.
""",
"temperature": 0.2,
"error_message": None,
Expand All @@ -61,7 +61,6 @@ def plot_functions(self):

def render(self):
if not self.data.get("messages"):

if (item_id := self.data.get("item_id")) is not None:
info_json = self._prepare_item_json_for_chat(item_id)
elif (collection_id := self.data.get("collection_id")) is not None:
Expand All @@ -76,7 +75,8 @@ def render(self):
},
{
"role": "user",
"content": f"""Here is the JSON data for the current item(s): {info_json}""",
"content": f"""Here is the JSON data for the current item(s): {info_json}.
Start with a friendly introduction and give me a one sentence summary of what this is (not detailed, no information about specific masses). """,
},
]

Expand Down Expand Up @@ -107,7 +107,7 @@ def render(self):

try:
LOGGER.debug(
f"submitting request to OpenAI API for completion with last message role \"{self.data['messages'][-1]['role']}\" (message = {self.data['messages'][-1:]})"
f"submitting request to OpenAI API for completion with last message role \"{self.data['messages'][-1]['role']}\" (message = {self.data['messages'][-1:]}). Temperature = {self.data['temperature']} (type {type(self.data['temperature'])})"
)
responses = openai.ChatCompletion.create(
model=MODEL,
Expand All @@ -128,9 +128,10 @@ def render(self):
except AttributeError:
self.data["messages"].append(responses["choices"][0]["message"])

self.data["model_name"] = MODEL

token_count = num_tokens_from_messages(self.data["messages"])
self.data["token_count"] = token_count

return

def _prepare_item_json_for_chat(self, item_id: str):
Expand Down
60 changes: 49 additions & 11 deletions webapp/src/components/datablocks/ChatBlock.vue
Original file line number Diff line number Diff line change
Expand Up @@ -2,20 +2,37 @@
<!-- think about elegant two-way binding to DataBlockBase... or, just pass all the block data into
DataBlockBase as a prop, and save from within DataBlockBase -->
<DataBlockBase :item_id="item_id" :block_id="block_id">
<div v-if="contextHidden" @click="contextHidden = !contextHidden" class="context-button">
[show prompts]
<div v-if="advancedHidden" @click="advancedHidden = !advancedHidden" class="context-button">
[show advanced]
</div>
<div v-if="!contextHidden" @click="contextHidden = !contextHidden" class="context-button">
[hide prompts]
<div v-if="!advancedHidden" @click="advancedHidden = !advancedHidden" class="context-button">
[hide advanced]
</div>

<div class="row">
<div id="chatWindowContainer" class="col-xl-9 col-lg-10 col-md-12 mx-auto">
<div id="context-size-message" v-if="!contextHidden">
Conversation token count: {{ tokenCount }} (max: 4097)
</div>
<div class="advanced-information" v-if="!advancedHidden">
<label>Model</label>: {{ modelName }} <br />
<label>Current conversation token count</label>: {{ tokenCount }}/4097

<ChatWindow :chatMessages="messages.slice(contextHidden ? 2 : 0)" :isLoading="isLoading" />
<div class="input-group form-inline">
<label for="temperatureInput" class="mr-2"><b>temperature:</b></label>
<input
id="temperatureInput"
type="number"
min="0"
max="1"
step="0.1"
class="form-control-sm"
v-model="temperature"
:class="{ 'red-border': tempInvalid }"
/>
<small class="text-danger" v-show="tempInvalid">
            Temperature must be a number between 0 and 1
</small>
</div>
</div>
<ChatWindow :chatMessages="messages.slice(advancedHidden ? 2 : 0)" :isLoading="isLoading" />
<div class="d-flex justify-content-center">
<button
class="btn btn-default btn-sm regenerate-button"
Expand Down Expand Up @@ -73,19 +90,32 @@ export default {
return {
isLoading: false,
isRegenerating: false,
contextHidden: true,
advancedHidden: true,
};
},
computed: {
messages: createComputedSetterForBlockField("messages"),
prompt: createComputedSetterForBlockField("prompt"),
temperature: createComputedSetterForBlockField("temperature"),
tempInvalid() {
return (
this.temperature == null ||
isNaN(this.temperature) ||
this.temperature < 0 ||
this.temperature > 1
);
},
errorMessage() {
return this.$store.state.all_item_data[this.item_id]["blocks_obj"][this.block_id]
.error_message;
},
tokenCount() {
return this.$store.state.all_item_data[this.item_id]["blocks_obj"][this.block_id].token_count;
},
modelName() {
return this.$store.state.all_item_data[this.item_id]["blocks_obj"][this.block_id].model_name;
},
},
components: {
DataBlockBase,
Expand Down Expand Up @@ -145,8 +175,16 @@ export default {
margin-bottom: 1rem;
}
#context-size-message {
font-style: italic;
.advanced-information {
margin-left: 20%;
}
.advanced-information label {
font-weight: 600;
color: #2c3e50;
}
#model-information-messages {
font-style: italic;
}
</style>
4 changes: 2 additions & 2 deletions webapp/src/field_utils.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ export function createComputedSetterForBlockField(block_field) {
store.commit("updateBlockData", {
item_id: this.item_id,
block_id: this.block_id,
block_data: { [block_field]: value ? value : null },
block_data: { [block_field]: value === "" ? null : value },
});
},
};
Expand All @@ -34,7 +34,7 @@ export function createComputedSetterForItemField(item_field) {
console.log(value);
store.commit("updateItemData", {
item_id: this.item_id,
item_data: { [item_field]: value ? value : null },
item_data: { [item_field]: value === "" ? null : value },
});
},
};
Expand Down

0 comments on commit f820ba8

Please sign in to comment.