diff --git a/ui/app/api/chat-with-gemini-streaming-langchain/route.ts b/ui/app/api/chat-with-gemini-streaming-langchain/route.ts
new file mode 100644
index 00000000..e875b278
--- /dev/null
+++ b/ui/app/api/chat-with-gemini-streaming-langchain/route.ts
@@ -0,0 +1,47 @@
+import { NextRequest, NextResponse } from "next/server";
+import { Message as VercelChatMessage, StreamingTextResponse } from "ai";
+import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
+import { BytesOutputParser } from "langchain/schema/output_parser";
+import { PromptTemplate } from "langchain/prompts";
+
+export const runtime = "edge";
+
+// Flatten a Vercel AI SDK message into a "role: content" line for the history.
+const formatMessage = (message: VercelChatMessage) => {
+  return `${message.role}: ${message.content}`;
+};
+
+const TEMPLATE = `
+Current conversation:
+{chat_history}
+
+User: {input}
+AI:`;
+
+export async function POST(req: NextRequest) {
+  try {
+    const body = await req.json();
+    const messages = body.messages ?? [];
+    // Everything except the last message is prior history; the last is the new input.
+    const formattedPreviousMessages = messages.slice(0, -1).map(formatMessage);
+    const currentMessageContent = messages[messages.length - 1].content;
+    const prompt = PromptTemplate.fromTemplate(TEMPLATE);
+    // Reads the Google API key from the GOOGLE_API_KEY environment variable.
+    const model = new ChatGoogleGenerativeAI({
+      temperature: 0.8,
+    });
+
+    // BytesOutputParser emits the token stream as bytes for StreamingTextResponse.
+    const outputParser = new BytesOutputParser();
+    const chain = prompt.pipe(model).pipe(outputParser);
+
+    const stream = await chain.stream({
+      chat_history: formattedPreviousMessages.join("\n"),
+      input: currentMessageContent,
+    });
+
+    return new StreamingTextResponse(stream);
+  } catch (e: any) {
+    return NextResponse.json({ error: e.message }, { status: 500 });
+  }
+}
diff --git a/ui/app/components/stacks/chat-with-gemini-streaming-langchain.tsx b/ui/app/components/stacks/chat-with-gemini-streaming-langchain.tsx
new file mode 100644
index 00000000..8b3bcbdc
--- /dev/null
+++ b/ui/app/components/stacks/chat-with-gemini-streaming-langchain.tsx
@@ -0,0 +1,76 @@
+import React, { useState, useEffect } from 'react';
+import { IoSend } from 'react-icons/io5';
+import { useChat } from 'ai/react';
+
+export const ChatWithGeminiStreaming = () => {
+  const [inputValue, setInputValue] = useState('');
+  const { messages, input, handleInputChange, handleSubmit } = useChat({
+    api: '/api/chat-with-gemini-streaming-langchain'
+  });
+
+  const [loading, setLoading] = useState(false);
+
+  // Show only the most recent assistant reply rather than the whole thread.
+  const latestAssistantResponse = messages
+    .filter((m) => m.role === 'assistant')
+    .map((m) => m.content)
+    .pop();
+
+  const handleFormSubmit = async (event: React.FormEvent<HTMLFormElement>) => {
+    event.preventDefault();
+    setLoading(true);
+
+    try {
+      await handleSubmit(event);
+    } finally {
+      setLoading(false);
+    }
+  };
+
+  // Reset the locally tracked input once an assistant reply has arrived.
+  useEffect(() => {
+    if (latestAssistantResponse !== undefined) {
+      setInputValue('');
+    }
+  }, [latestAssistantResponse]);
+
+  return (
+    <div className="flex h-full w-full flex-col justify-between p-4">
+      <div className="mb-4 flex-1 overflow-y-auto">
+        {latestAssistantResponse ? (
+          <div className="whitespace-pre-wrap">{latestAssistantResponse}</div>
+        ) : (
+          <div className="text-gray-400">Output here...</div>
+        )}
+      </div>
+      <form onSubmit={handleFormSubmit} className="flex items-center gap-2">
+        <input
+          type="text"
+          value={input}
+          onChange={handleInputChange}
+          placeholder="Say something..."
+          className="flex-1 rounded border border-gray-300 p-2"
+        />
+        <button
+          type="submit"
+          disabled={loading}
+          className="rounded bg-blue-500 p-2 text-white disabled:opacity-50"
+        >
+          <IoSend />
+        </button>
+      </form>
+    </div>
+  );
+};
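Note on prompt assembly: `formatMessage` flattens each prior message into a `role: content` line, and the route interpolates those lines into `TEMPLATE` as `{chat_history}`, with the newest message passed as `{input}`. A minimal sketch of what the chain ends up sending to the model (the example messages are made up for illustration):

```ts
// Sketch: how the route assembles the final prompt for a three-message thread.
// Mirrors formatMessage and TEMPLATE from the route above.
const messages = [
  { role: "user", content: "Hi!" },
  { role: "assistant", content: "Hello! How can I help?" },
  { role: "user", content: "Name three uses for LangChain." },
];

const chatHistory = messages
  .slice(0, -1)
  .map((m) => `${m.role}: ${m.content}`)
  .join("\n");

const prompt = `
Current conversation:
${chatHistory}

User: ${messages[messages.length - 1].content}
AI:`;

console.log(prompt);
// Prints (after the template's leading newline):
// Current conversation:
// user: Hi!
// assistant: Hello! How can I help?
//
// User: Name three uses for LangChain.
// AI:
```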
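On the client, `useChat` handles the request and stream wiring, but the new route can also be exercised directly. A minimal sketch, assuming the Next.js dev server is running on http://localhost:3000 and GOOGLE_API_KEY is set in the server environment; the body shape (`{ messages: [...] }`) matches what `useChat` posts:

```ts
// Hypothetical helper for testing the route outside the React UI.
async function askGemini(question: string): Promise<string> {
  const res = await fetch(
    "http://localhost:3000/api/chat-with-gemini-streaming-langchain",
    {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ messages: [{ role: "user", content: question }] }),
    }
  );
  if (!res.ok || !res.body) throw new Error(`Request failed: ${res.status}`);

  // BytesOutputParser streams plain UTF-8 text, so just decode and concatenate.
  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let answer = "";
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    answer += decoder.decode(value, { stream: true });
  }
  return answer;
}

askGemini("What is LangChain?").then(console.log);
```

Because the response is a plain text stream rather than a framed event protocol, any HTTP client that can read a response body incrementally will work.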