
Commit

clean prompt
fix show text bug
JYC0413 committed Mar 17, 2024
1 parent 700f927 commit db9838c
Showing 4 changed files with 116 additions and 111 deletions.
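Reading the Chat.tsx diff below, the commit appears to decouple the network read from the UI update: the stream reader now pushes each delta chunk onto a shared textList queue, while a new showText() helper drains that queue, strips <|...|> control tokens, and writes the accumulated text into the selected conversation. The following is a minimal sketch of that producer/consumer pattern under simplifying assumptions; render() and consume() are hypothetical stand-ins for the component's homeDispatch/saveConversation calls and its SSE parsing loop, not code from the repository.

// Sketch of the queue-based streaming display (assumed simplification of Chat.tsx).
const textList: string[] = [];
let text = "";
let queryDone = false;
let showDone = true;

const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));

async function showText(render: (t: string) => void) {
  if (!showDone || textList.length === 0) return;
  showDone = false;
  while (textList.length > 0) {
    text += textList.shift() ?? "";
    text = text.replace(/<\|.*?\|>/g, ""); // drop <|...|> control tokens before rendering
    render(text);
    // drain quickly once the stream has finished, otherwise pace the output
    await delay(queryDone ? 50 : textList.length > 10 ? 300 : 100);
  }
  showDone = true;
}

async function consume(
  reader: ReadableStreamDefaultReader<Uint8Array>,
  render: (t: string) => void,
) {
  const decoder = new TextDecoder();
  while (!queryDone || textList.length > 0) {
    const { value, done } = await reader.read();
    if (done) queryDone = true;
    const chunk = decoder.decode(value);
    if (chunk) textList.push(chunk);      // producer: enqueue deltas (SSE parsing omitted)
    if (showDone) await showText(render); // consumer: drain while the reader waits
  }
}

The queue lets the response render at a steady pace even when chunks arrive in bursts, which is presumably the "show text bug" the commit message refers to.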
189 changes: 102 additions & 87 deletions components/Chat/Chat.tsx
@@ -57,26 +57,89 @@ export const Chat = memo(({stopConversationRef}: Props) => {
const chatContainerRef = useRef<HTMLDivElement>(null);
const textareaRef = useRef<HTMLTextAreaElement>(null);

function delay(time: number | undefined) {
let textList: string[] = [];
let text: string = "";
let isFirst: boolean = true;
let queryDone: boolean = false;
let showDone: boolean = true;
let updatedConversation: Conversation;


async function delay(time: number | undefined) {
return new Promise(resolve => setTimeout(resolve, time));
}

const handleSend = useCallback(
async (message: Message, deleteCount = 0, plugin: Plugin | null = null) => {
let formatBase64Message
if (typeof message.content !== "string") {
formatBase64Message = {
...message, content: message.content.map(item => {
if (item.type === "image_url" && item.image_url && item.image_url.url.startsWith("data:image/")) {
return {...item, image_url: {url: item.image_url.url.split(",")[1]}}
} else {
return item
}
})
async function showText() {
if (showDone && selectedConversation && textList.length > 0) {
showDone = false;
for (let i = 0; i < textList.length; i++) {
text += textList.shift();
text = text.replace(/<\|.*?\|>/g, "")
if (isFirst) {
isFirst = false;
homeDispatch({field: 'loading', value: false});
const updatedMessages: Message[] = [
...updatedConversation.messages,
{role: 'assistant', content: text || ""},
];
updatedConversation = {
...updatedConversation,
messages: updatedMessages,
};
homeDispatch({
field: 'selectedConversation',
value: updatedConversation,
});
} else {
const updatedMessages: Message[] =
updatedConversation.messages.map((message, index) => {
if (index === updatedConversation.messages.length - 1) {
return {
...message,
content: text,
};
}
return message;
});
updatedConversation = {
...updatedConversation,
messages: updatedMessages,
};
saveConversation(updatedConversation);
homeDispatch({
field: 'selectedConversation',
value: updatedConversation,
});
const updatedConversations: Conversation[] = conversations.map(
(conversation) => {
if (conversation.id === selectedConversation.id) {
return updatedConversation;
}
return conversation;
},
);
if (updatedConversations.length === 0) {
updatedConversations.push(updatedConversation);
}
homeDispatch({
field: 'conversations',
value: updatedConversations
});
saveConversations(updatedConversations);
if (queryDone) {
await delay(50)
} else {
await delay(textList.length > 10 ? 300 : 100)
}
}
}
showDone = true;
}
}

const handleSend = useCallback(
async (message: Message, deleteCount = 0, plugin: Plugin | null = null) => {
if (selectedConversation) {
let updatedConversation: Conversation;
let sandMessages: Message[];
if (deleteCount) {
const updatedMessages = [...selectedConversation.messages];
@@ -87,13 +150,11 @@ export const Chat = memo(({stopConversationRef}: Props) => {
...selectedConversation,
messages: [...updatedMessages, message],
};
sandMessages = [...updatedMessages, formatBase64Message || message];
} else {
updatedConversation = {
...selectedConversation,
messages: [...selectedConversation.messages, message],
};
sandMessages = [...selectedConversation.messages, formatBase64Message || message];
if (selectedConversation.messages.length === 0) {
updatedConversation = {
...updatedConversation,
@@ -150,14 +211,14 @@ export const Chat = memo(({stopConversationRef}: Props) => {
if (isStream) {
let response: ReadableStream<Uint8Array> | null = await ChatStream(model, promptToSend, temperatureToUse, api, key, messagesToSend);
if (response) {
let queryDone = false;
let isFirst: boolean = true;
let notFinishData = "";
let text = '';
const decoder = new TextDecoder();
const reader = response.getReader();
while (!queryDone) {
const {value} = await reader.read();
while (!queryDone || textList.length > 0) {
const {value, done} = await reader.read();
if (done) {
queryDone = true;
}
let chunkValue = decoder.decode(value);
if (chunkValue) {
const parts = chunkValue.split('\n\n');
@@ -166,22 +227,23 @@ export const Chat = memo(({stopConversationRef}: Props) => {
part = part.trim();
if (part.startsWith('data: ')) {
part = part.substring(6).trim();
if (part === "[DONE]") {
queryDone = true;
} else {
if (!part.startsWith('{')) {
if (notFinishData) {
part = notFinishData + part
notFinishData = ""
} else {
isError = true
}
} else if (!part.endsWith('}')) {
notFinishData = part
}
if (part === "[DONE]") {
queryDone = true;
} else {
if (!part.startsWith('{')) {
if (notFinishData) {
part = notFinishData + part
notFinishData = ""
} else {
isError = true
}
} else if (!part.endsWith('}')) {
notFinishData = part
isError = true
}
}

if (!isError && !queryDone) {
try {
if (part) {
@@ -190,59 +252,7 @@ export const Chat = memo(({stopConversationRef}: Props) => {
obj["choices"].forEach((obj1: { [x: string]: { [x: string]: any; }; }) => {
if (obj1) {
if (obj1["delta"] && obj1["delta"]["content"]) {
text += obj1["delta"]["content"];
if (isFirst) {
isFirst = false;
homeDispatch({field: 'loading', value: false});
const updatedMessages: Message[] = [
...updatedConversation.messages,
{role: 'assistant', content: text},
];
updatedConversation = {
...updatedConversation,
messages: updatedMessages,
};
homeDispatch({
field: 'selectedConversation',
value: updatedConversation,
});
} else {
const updatedMessages: Message[] =
updatedConversation.messages.map((message, index) => {
if (index === updatedConversation.messages.length - 1) {
return {
...message,
content: text,
};
}
return message;
});
updatedConversation = {
...updatedConversation,
messages: updatedMessages,
};
saveConversation(updatedConversation);
homeDispatch({
field: 'selectedConversation',
value: updatedConversation,
});
const updatedConversations: Conversation[] = conversations.map(
(conversation) => {
if (conversation.id === selectedConversation.id) {
return updatedConversation;
}
return conversation;
},
);
if (updatedConversations.length === 0) {
updatedConversations.push(updatedConversation);
}
homeDispatch({
field: 'conversations',
value: updatedConversations
});
saveConversations(updatedConversations);
}
textList.push(obj1["delta"]["content"]);
}
}
})
@@ -254,9 +264,15 @@ export const Chat = memo(({stopConversationRef}: Props) => {
}
});
}
if (showDone) {
await showText();
}
}
homeDispatch({field: 'messageIsStreaming', value: false});
controller.abort();
} else {
homeDispatch({field: 'loading', value: false});
homeDispatch({field: 'messageIsStreaming', value: false});
}
} else {
let response = await ChatWithoutStream(model, promptToSend, temperatureToUse, api, key, messagesToSend);
@@ -270,7 +286,6 @@ export const Chat = memo(({stopConversationRef}: Props) => {
...updatedConversation,
messages: updatedMessages,
};
console.log("updatedMessages")

homeDispatch({
field: 'selectedConversation',
2 changes: 0 additions & 2 deletions components/Chat/SystemPrompt.tsx
@@ -2,8 +2,6 @@ import {FC, KeyboardEvent, useCallback, useEffect, useRef, useState,} from 'react';

import {useTranslation} from 'next-i18next';

import {DEFAULT_SYSTEM_PROMPT} from '@/utils/app/const';

import {Conversation} from '@/types/chat';
import {Prompt} from '@/types/prompt';

14 changes: 1 addition & 13 deletions utils/app/clean.ts
@@ -1,7 +1,7 @@
import { Conversation } from '@/types/chat';
import { OpenAIModelID, OpenAIModels } from '@/types/openai';

import { DEFAULT_SYSTEM_PROMPT, DEFAULT_TEMPERATURE } from './const';
import { DEFAULT_TEMPERATURE } from './const';

export const cleanSelectedConversation = (conversation: Conversation) => {
// added model for each conversation (3/20/23)
@@ -20,14 +20,6 @@ export const cleanSelectedConversation = (conversation: Conversation) => {
};
}

// check for system prompt on each conversation
if (!updatedConversation.prompt) {
updatedConversation = {
...updatedConversation,
prompt: updatedConversation.prompt || DEFAULT_SYSTEM_PROMPT,
};
}

if (!updatedConversation.temperature) {
updatedConversation = {
...updatedConversation,
@@ -70,10 +62,6 @@ export const cleanConversationHistory = (history: any[]): Conversation[] => {
conversation.model = OpenAIModels[OpenAIModelID.GPT_3_5];
}

if (!conversation.prompt) {
conversation.prompt = DEFAULT_SYSTEM_PROMPT;
}

if (!conversation.temperature) {
conversation.temperature = DEFAULT_TEMPERATURE;
}
22 changes: 13 additions & 9 deletions utils/server/index.ts
@@ -1,8 +1,6 @@
import {Message} from '@/types/chat';
import {OpenAIModel} from '@/types/openai';

import {OPENAI_API_TYPE} from '../app/const';

export class OpenAIError extends Error {
type: string;
param: string;
@@ -25,7 +23,19 @@ export const ChatStream = async (
key: string,
messages: Message[]
) => {
let finalMessage
let queryUrl = `${api}/v1/chat/completions`;
if (systemPrompt) {
finalMessage = [
{
role: 'system',
content: systemPrompt,
},
...messages
]
} else {
finalMessage = messages;
}
const res = await fetch(queryUrl, {
headers: {
'accept': "application/json",
@@ -34,13 +44,7 @@ export const ChatStream = async (
method: 'POST',
body: JSON.stringify({
model: model.id,
messages: [
{
role: 'system',
content: systemPrompt,
},
...messages,
],
messages: finalMessage,
stream: true
}),
});
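The utils/server/index.ts hunk above is the "clean prompt" half of the commit: ChatStream only prepends a system message when a non-empty systemPrompt is passed, instead of always injecting one. A minimal sketch of the resulting payload assembly, reusing the parameter names visible in the diff (systemPrompt, messages, model); it illustrates the change rather than reproducing the file:

// Skip the system entry entirely when no system prompt was provided.
const finalMessage = systemPrompt
  ? [{ role: 'system', content: systemPrompt }, ...messages]
  : messages;

const body = JSON.stringify({
  model: model.id,        // id of the selected model
  messages: finalMessage, // an empty system message is no longer sent
  stream: true,
});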
