Skip to content

Commit

Permalink
add /api/ai/suggests and SuggestByCurrentLocation component
Browse files Browse the repository at this point in the history
  • Loading branch information
yuiseki committed Jan 7, 2024
1 parent 5e42fe6 commit 763f40c
Show file tree
Hide file tree
Showing 11 changed files with 558 additions and 125 deletions.
3 changes: 3 additions & 0 deletions .vscode/settings.json
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
{
"cSpell.words": [
"Afficher",
"ambassades",
"appname",
"arcgis",
"Bienvenue",
Expand All @@ -21,6 +23,7 @@
"kwargs",
"landuse",
"langchain",
"Liban",
"lightgreen",
"lightpink",
"lightyellow",
Expand Down
2 changes: 1 addition & 1 deletion src/app/api/ai/deep/route.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { NextResponse } from "next/server";
import { OpenAI, OpenAIChat } from "langchain/llms/openai";
import { OpenAIChat } from "langchain/llms/openai";
import { loadTridentDeepChain } from "@/utils/langchain/chains/deep";
import { OpenAIEmbeddings } from "langchain/embeddings/openai";

Expand Down
45 changes: 45 additions & 0 deletions src/app/api/ai/suggests/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
import { NextResponse } from "next/server";
import { OpenAIChat } from "langchain/llms/openai";
import { loadTridentSuggestChain } from "@/utils/langchain/chains/suggest";
import { OpenAIEmbeddings } from "langchain/embeddings/openai";

/**
 * POST /api/ai/suggests
 *
 * Accepts a JSON body of the shape `{ query: string }` and returns
 * `{ query, suggests }`, where `suggests` is the raw text produced by the
 * Trident suggest chain for that query.
 *
 * Responds 400 when the body is not valid JSON or `query` is missing/not a
 * non-empty string; otherwise 200 with the chain output.
 */
export async function POST(request: Request) {
  // request.json() throws on malformed JSON — convert that into a 400
  // instead of letting it surface as an unhandled 500.
  let body: unknown;
  try {
    body = await request.json();
  } catch {
    return NextResponse.json({ error: "Invalid JSON body." }, { status: 400 });
  }

  const query =
    typeof body === "object" && body !== null && "query" in body
      ? (body as { query?: unknown }).query
      : undefined;
  if (typeof query !== "string" || query.length === 0) {
    return NextResponse.json(
      { error: "Missing or invalid 'query' field." },
      { status: 400 }
    );
  }

  let embeddings: OpenAIEmbeddings;
  let llm: OpenAIChat;
  if (process.env.CLOUDFLARE_AI_GATEWAY) {
    // Route OpenAI traffic through the Cloudflare AI Gateway when configured.
    // Share one configuration object instead of duplicating the baseURL.
    const configuration = {
      baseURL: process.env.CLOUDFLARE_AI_GATEWAY + "/openai",
    };
    embeddings = new OpenAIEmbeddings({ configuration });
    llm = new OpenAIChat({ configuration, temperature: 0 });
  } else {
    embeddings = new OpenAIEmbeddings();
    // temperature 0 for deterministic suggestions.
    llm = new OpenAIChat({ temperature: 0 });
  }

  const chain = await loadTridentSuggestChain({ embeddings, llm });
  const result = await chain.call({ input: query });

  console.log("----- ----- -----");
  console.log("----- start suggest -----");
  console.log("Human:", query);
  console.log("AI:", result.text);
  console.log("");

  console.log("----- end suggest -----");
  console.log("----- ----- -----");

  return NextResponse.json({
    query: query,
    suggests: result.text,
  });
}
Loading

1 comment on commit 763f40c

@vercel
Copy link

@vercel vercel bot commented on 763f40c Jan 7, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please sign in to comment.