A provider for Vercel's AI SDK that enables seamless integration with Orama's search and chat capabilities.
- 🔍 Full-text, vector, and hybrid search
- 💬 Streaming chat/QA functionality
- 🚀 Framework agnostic
- 🔄 Real-time streaming responses
npm install @oramacloud/ai-sdk-provider
// Create an Orama provider instance
const provider = oramaProvider({
// Required configurations
endpoint: process.env.ORAMA_API_URL,
apiKey: process.env.ORAMA_API_KEY,
// Optional configurations
userContext?: string | Record<string, any>; // Context for QA sessions
inferenceType?: "documentation"; // Currently only supports "documentation"
searchMode?: "fulltext" | "vector" | "hybrid"; // Default: "fulltext"
searchOptions?: OramaSearchOptions; // Additional search parameters
})
/**
 * Additional search parameters accepted by the provider's `searchOptions`
 * configuration. All fields are optional.
 */
interface OramaSearchOptions {
/** Search strategy; overrides the provider-level `searchMode` when set. */
mode?: "fulltext" | "vector" | "hybrid";
/** Filter clause applied to the result set (field → condition). */
where?: Record<string, any>;
/** Sort results by one or more document properties. */
sortBy?: Array<{ property: string; order?: "asc" | "desc" }>;
/** Facet definitions for aggregated counts (field → facet config). */
facets?: Record<string, any>;
/** Maximum number of results to return. */
limit?: number;
/** Per-property relevance boost factors (field → multiplier). */
boost?: Record<string, number>;
/** Overall result ordering direction. */
order?: "asc" | "desc";
}
import { streamText } from 'ai';

// Ask a question through the provider's chat/QA model and stream the answer.
const question = {
  role: 'user',
  content: 'What is vector search?'
};

const response = await streamText({
  model: provider.ask(),
  messages: [question]
});
const response = await generateText({
model: provider.search(),
messages: [{
role: 'user',
content: 'vector search documentation'
}]
});
Contributions are welcome! Please feel free to submit a Pull Request.
Apache 2.0. Read the full license in the LICENSE file of this repository.