diff --git a/ui/app/api/chat-with-gemini-streaming-langchain/route.ts b/ui/app/api/chat-with-gemini-streaming-langchain/route.ts
new file mode 100644
index 00000000..e875b278
--- /dev/null
+++ b/ui/app/api/chat-with-gemini-streaming-langchain/route.ts
@@ -0,0 +1,47 @@
+import { NextRequest, NextResponse } from "next/server";
+import { Message as VercelChatMessage, StreamingTextResponse } from "ai";
+import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
+import { BytesOutputParser } from "langchain/schema/output_parser";
+import { PromptTemplate } from "langchain/prompts";
+
+export const runtime = "edge";
+
+const formatMessage = (message: VercelChatMessage) => {
+  return `${message.role}: ${message.content}`;
+};
+
+const TEMPLATE = `
+Current conversation:
+{chat_history}
+
+User: {input}
+AI:`;
+
+export async function POST(req: NextRequest) {
+
+
+  try {
+    const body = await req.json();
+    const messages = body.messages ?? [];
+    const formattedPreviousMessages = messages.slice(0, -1).map(formatMessage);
+    const currentMessageContent = messages[messages.length - 1].content;
+    const prompt = PromptTemplate.fromTemplate(TEMPLATE);
+
+
+    const model = new ChatGoogleGenerativeAI({
+      temperature: 0.8,
+    });
+
+    const outputParser = new BytesOutputParser();
+    const chain = prompt.pipe(model).pipe(outputParser);
+
+    const stream = await chain.stream({
+      chat_history: formattedPreviousMessages.join("\n"),
+      input: currentMessageContent,
+    });
+
+    return new StreamingTextResponse(stream);
+  } catch (e: any) {
+    return NextResponse.json({ error: e.message }, { status: 500 });
+  }
+}
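The route above composes a PromptTemplate, the Gemini chat model, and a BytesOutputParser into one streaming chain. Below is a minimal standalone sketch of the same pattern, assuming GOOGLE_API_KEY is set in the environment (the default credential the @langchain/google-genai client reads) and the same package versions this PR targets; it swaps in StringOutputParser so the chunks print as plain text instead of the bytes the route hands to StreamingTextResponse.

import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
import { PromptTemplate } from 'langchain/prompts';
import { StringOutputParser } from 'langchain/schema/output_parser';

async function main() {
  // Same template shape the route uses, inlined for the sketch.
  const prompt = PromptTemplate.fromTemplate(
    'Current conversation:\n{chat_history}\n\nUser: {input}\nAI:'
  );

  // Same construction as the route; with no apiKey option the client
  // falls back to the GOOGLE_API_KEY environment variable.
  const model = new ChatGoogleGenerativeAI({ temperature: 0.8 });

  // StringOutputParser instead of the route's BytesOutputParser, so each
  // streamed chunk is already a text fragment.
  const chain = prompt.pipe(model).pipe(new StringOutputParser());

  const stream = await chain.stream({
    chat_history: 'user: hi\nassistant: hello',
    input: 'What can you do?',
  });

  let output = '';
  for await (const chunk of stream) {
    output += chunk; // accumulate partial completions as they arrive
  }
  console.log(output);
}

main().catch(console.error);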
diff --git a/ui/app/components/stacks/chat-with-gemini-streaming-langchain.tsx b/ui/app/components/stacks/chat-with-gemini-streaming-langchain.tsx
new file mode 100644
index 00000000..8b3bcbdc
--- /dev/null
+++ b/ui/app/components/stacks/chat-with-gemini-streaming-langchain.tsx
@@ -0,0 +1,76 @@
+import React, { useState, useEffect } from 'react';
+import { IoSend } from 'react-icons/io5';
+import { useChat } from 'ai/react';
+
+export const ChatWithOpenAIStreaming = () => {
+  const [inputValue, setInputValue] = useState('');
+  const { messages, input, handleInputChange, handleSubmit } = useChat({
+    api: '/api/chat-with-gemini-streaming-langchain'
+  });
+
+  const [loading, setLoading] = useState(false);
+
+  const latestAssistantResponse = messages
+    .filter((m) => m.role === 'assistant')
+    .map((m) => m.content)
+    .pop();
+
+  const handleFormSubmit = async (event: React.FormEvent<HTMLFormElement>) => {
+    event.preventDefault();
+    setLoading(true);
+
+    try {
+      await handleSubmit(event);
+    } finally {
+      setLoading(false);
+    }
+  };
+
+  useEffect(() => {
+    if (latestAssistantResponse !== undefined) {
+      setInputValue('');
+    }
+  }, [latestAssistantResponse]);
+
+  return (
+    <div>
+      <form onSubmit={handleFormSubmit}>
+        <div>
+          <input
+            type="text"
+            value={inputValue}
+            onChange={(e) => {
+              setInputValue(e.target.value);
+              handleInputChange(e);
+            }}
+            placeholder="Ask anything..."
+            className="focus:shadow-outline w-full rounded-full border border-gray-400 py-2 pl-4 pr-10 focus:outline-none"
+            onKeyDown={(e) => {
+              if (e.key === 'Enter')
+                handleFormSubmit(e as unknown as React.FormEvent<HTMLFormElement>);
+            }}
+          />
+          <button type="submit">
+            <IoSend />
+          </button>
+        </div>
+      </form>
+      <div>
+        {loading ? (
+          <span>Generating...</span>
+        ) : latestAssistantResponse ? (
+          latestAssistantResponse
+        ) : (
+          <span>Output here...</span>
+        )}
+      </div>
+    </div>
+  );
+};
+
+export default ChatWithOpenAIStreaming;
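On the client, useChat from ai/react owns the message history and performs the POST: every submit sends the accumulated messages array to /api/chat-with-gemini-streaming-langchain, which is why the route reads body.messages and peels off the last entry as the current input. A sketch of the request body shape follows (approximate; the exact payload is defined by the ai package, not by this PR):

// Approximate shape of what useChat POSTs to the route (a sketch, not the
// package's authoritative type). The route's formatMessage flattens every
// earlier entry to "role: content" before filling {chat_history}.
type ChatRequestBody = {
  messages: { role: 'user' | 'assistant' | 'system'; content: string }[];
};

Note that the component mirrors useChat's input with its own inputValue state so the box can be cleared once latestAssistantResponse arrives; both are updated from the same onChange handler.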
diff --git a/ui/app/components/stacks/kushagra-stack.tsx b/ui/app/components/stacks/kushagra-stack.tsx
deleted file mode 100644
index 2fcfd8c3..00000000
--- a/ui/app/components/stacks/kushagra-stack.tsx
+++ /dev/null
@@ -1,69 +0,0 @@
-import { useState } from 'react';
-import { IoSend } from 'react-icons/io5';
-import ReactMarkdown from 'react-markdown';
-
-export const ChatWithOpenAIStreaming = () => {
-  const [inputValue, setInputValue] = useState('');
-  const [output, setOutput] = useState('');
-  const [loading, setLoading] = useState(false);
-
-  const handleSubmit = async (event: React.FormEvent) => {
-    event.preventDefault();
-    console.log('Submitting:', inputValue);
-    if (inputValue.trim()) {
-      setOutput('');
-      setLoading(true);
-
-      const response = await fetch('/api/boilerplate-basic', {
-        method: 'POST',
-        headers: {
-          'Content-Type': 'application/json',
-        },
-        body: JSON.stringify({ input: inputValue }),
-      });
-      const data = await response.json();
-      console.log('data', data);
-      setOutput(data.output);
-      setLoading(false);
-    }
-  };
-  return (
-    <div>
-      <form onSubmit={handleSubmit}>
-        <div>
-          <input
-            type="text"
-            value={inputValue}
-            onChange={(e) => setInputValue(e.target.value)}
-            placeholder="Ask anything..."
-            className="focus:shadow-outline w-full rounded-full border border-gray-400 py-2 pl-4 pr-10 focus:outline-none"
-            onKeyDown={(e) => {
-              if (e.key === 'Enter')
-                handleSubmit(e as unknown as React.FormEvent);
-            }}
-          />
-          <button type="submit">
-            <IoSend />
-          </button>
-        </div>
-      </form>
-      <div>
-        {loading ? (
-          <span>Generating...</span>
-        ) : output ? (
-          <ReactMarkdown>{output}</ReactMarkdown>
-        ) : (
-          <span>Output here...</span>
-        )}
-      </div>
-    </div>
-  );
-};
-
-export default ChatWithOpenAIStreaming;
diff --git a/ui/public/stack-pictures/chat-with-gemini-streaming-langchain.png b/ui/public/stack-pictures/chat-with-gemini-streaming-langchain.png
new file mode 100644
index 00000000..cbfd0035
Binary files /dev/null and b/ui/public/stack-pictures/chat-with-gemini-streaming-langchain.png differ
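To smoke-test the new endpoint without the UI, the streamed bytes can be read straight off the response body. A sketch assuming a local dev server on localhost:3000 (hypothetical host and port):

// Hedged sketch: POST a one-message history and decode the text stream
// chunk by chunk with the standard Streams API.
async function testGeminiRoute() {
  const res = await fetch(
    'http://localhost:3000/api/chat-with-gemini-streaming-langchain',
    {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        messages: [{ role: 'user', content: 'Say hello in five words.' }],
      }),
    }
  );
  if (!res.ok || !res.body) throw new Error(`request failed: ${res.status}`);

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let output = '';
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    output += decoder.decode(value, { stream: true }); // append each chunk
  }
  console.log(output);
}

testGeminiRoute().catch(console.error);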