Commit

Merge pull request #333 from uiuc-ischool-accessible-computing-lab/LLM-suggested-prompts

LLM suggested prompts
ellvix authored Jan 4, 2024
2 parents 14e3300 + 2f44693 commit 6077593
Showing 3 changed files with 78 additions and 4 deletions.
3 changes: 1 addition & 2 deletions CHANGELOG.md
@@ -11,10 +11,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

- Added the ability to switch from assertive (default) to polite aria modes, in the help menu (#309).
- Added OpenAI GPT4-vision query system. Hit ? from the main chart to toggle on (#317).
- Added a suggestions system of clickable common prompts so users can more easily query the LLM.

### Fixed


- LLM popup now only triggered by ?, not /
- LLM truncating responses (#322).

@@ -27,7 +27,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Added lineplot, stacked bar, dodged bar, and normalized dodge bar info to the README (#310).
- Added Code of Conduct file in the project.


## [1.0.4] - 2023-11-30

### Added
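The aria-mode entry in the changelog above (switching announcements from assertive to polite) comes down to changing the aria-live attribute on the screen-reader live region. A minimal sketch of that kind of toggle, where the element id and function name are assumptions for illustration, not code from this repository:

// Hypothetical sketch: 'announcements' id and setAriaMode name are assumed.
function setAriaMode(mode) {
  // mode is expected to be 'assertive' (default) or 'polite'
  const liveRegion = document.getElementById('announcements'); // assumed id
  if (liveRegion) {
    liveRegion.setAttribute('aria-live', mode);
  }
}

// e.g. wired to a control in the help menu:
setAriaMode('polite');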
6 changes: 6 additions & 0 deletions src/css/styles.css
@@ -299,3 +299,9 @@ textarea {
.chatLLM_message_other {
align-self: flex-start;
}

.LLM_suggestions > button {
font-weight: normal;
border-radius: 0.5rem;
border: none;
}
73 changes: 71 additions & 2 deletions src/js/constants.js
@@ -672,6 +672,11 @@ class ChatLLM {
</div>
<div id="chatLLM_content">
<p><input type="text" id="chatLLM_input" class="form-control" name="chatLLM_input" aria-labelledby="chatLLM_title" size="50"></p>
<p class="LLM_suggestions">
<button type="button">What is the title?</button>
<button type="button">What are the high and low values?</button>
<button type="button">What is the general shape of the chart?</button>
</p>
<p><button type="button" id="chatLLM_submit">Submit</button></p>
</div>
</div>
@@ -744,6 +749,22 @@ class ChatLLM {
}
},
]);

// ChatLLM suggestion events
let suggestions = document.querySelectorAll(
'#chatLLM .LLM_suggestions button'
);
for (let i = 0; i < suggestions.length; i++) {
constants.events.push([
suggestions[i],
'click',
function (e) {
let text = e.target.innerHTML;
chatLLM.DisplayChatMessage('User', text);
chatLLM.Submit(text);
},
]);
}
}
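The suggestion click handlers above are pushed onto constants.events as [element, event, handler] triples rather than attached directly, so presumably a separate setup step walks that array and calls addEventListener. A sketch of what such a registration pass could look like; this is an assumption about the surrounding code, not something shown in this diff:

// Hypothetical registration pass over constants.events (assumed, not shown here).
function registerEvents(events) {
  for (const [element, eventName, handler] of events) {
    if (element) {
      element.addEventListener(eventName, handler);
    }
  }
}

// e.g. called once after the chat dialog DOM exists:
// registerEvents(constants.events);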

/**
@@ -765,8 +786,16 @@

let xhr = new XMLHttpRequest();

// start waiting sound
if (constants.sonifMode != 'off') {
chatLLM.WaitingSound(true);
}

if (constants.LLMDebugMode == 1) {
chatLLM.ProcessLLMResponse(this.fakeLLMResponseData());
// do the below with a 5 sec delay
setTimeout(function () {
chatLLM.ProcessLLMResponse(chatLLM.fakeLLMResponseData());
}, 5000);
} else {
fetch(url, {
method: 'POST',
@@ -781,18 +810,58 @@
chatLLM.ProcessLLMResponse(data);
})
.catch((error) => {
chatLLM.WaitingSound(false);
console.error('Error:', error);
// also todo: handle errors somehow
});
}
}

/**
* Sets a waiting sound to play while waiting for the LLM to respond.
* @function
* @name WaitingSound
* @memberof module:constants
* @param {boolean} [onoff=true] - Whether to turn the waiting sound on or off. Defaults to true (on).
* @returns {void}
*/
WaitingSound(onoff = true) {
// clear old intervals and timeouts
if (constants.waitingInterval) {
// destroy old waiting sound
clearInterval(constants.waitingInterval);
constants.waitingSound = null;
}
if (constants.waitingSoundOverride) {
clearTimeout(constants.waitingSoundOverride);
constants.waitingSoundOverride = null;
}

// assuming we're turning it on, start playing a new waiting sound
if (onoff) {
// create new waiting sound
let delay = 1000;
let freq = 440; // a440 babee
constants.waitingInterval = setInterval(function () {
if (audio) {
audio.playOscillator(freq, 0.2, 0);
}
}, delay);

// clear automatically after 30 sec, assuming no response
constants.waitingSoundOverride = setTimeout(function () {
chatLLM.WaitingSound(false);
}, 30000);
}
}

/**
* Processes the response from the LLM and displays it to the user.
* @function
* @returns {void}
*/
ProcessLLMResponse(data) {
chatLLM.WaitingSound(false);
console.log('LLM response: ', data);
let text = data.choices[0].message.content;
chatLLM.DisplayChatMessage('LLM', text);
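ProcessLLMResponse reads data.choices[0].message.content, which matches the OpenAI chat-completions response shape; fakeLLMResponseData(), used in debug mode above, presumably returns a stub of the same form. A minimal sketch of such a stub, with illustrative values only:

// Illustrative stub shaped like an OpenAI chat-completions response;
// only the fields read by ProcessLLMResponse matter here.
function fakeLLMResponseData() {
  return {
    choices: [
      {
        message: {
          role: 'assistant',
          content: 'Placeholder LLM response used in debug mode.',
        },
      },
    ],
  };
}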
@@ -869,7 +938,7 @@ class ChatLLM {
this.requestJson = {};
this.requestJson.model = 'gpt-4-vision-preview';
this.requestJson.max_tokens = constants.LLMmaxResponseTokens; // note: if this is too short (tested with less than 200), the response gets cut off
this.requestJson.detail = constants.LLMDetail;
//this.requestJson.detail = constants.LLMDetail;
this.requestJson.messages = [];
this.requestJson.messages[0] = {};
this.requestJson.messages[0].role = 'system';
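The request body assembled in the last hunk follows the OpenAI chat-completions format, with gpt-4-vision-preview accepting image content parts alongside text. A sketch of roughly what the finished payload could look like once a user prompt and chart screenshot are appended; the system prompt wording, token count, and base64 string are placeholders, not values from this code:

// Roughly the shape of the request body this code builds (illustrative values only).
const requestJson = {
  model: 'gpt-4-vision-preview',
  max_tokens: 1000, // constants.LLMmaxResponseTokens in the real code
  messages: [
    { role: 'system', content: 'You are a helpful assistant describing charts.' }, // assumed wording
    {
      role: 'user',
      content: [
        { type: 'text', text: 'What is the general shape of the chart?' },
        { type: 'image_url', image_url: { url: 'data:image/png;base64,...' } },
      ],
    },
  ],
};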
