Commit

Merge pull request #19 from lkonga/feature/support-openrouter

feat: add OpenRouter AI provider support

jnsahaj authored Nov 16, 2024
2 parents 14c8807 + c20307e commit 0cbef5c
Showing 4 changed files with 110 additions and 2 deletions.
README.md (5 changes: 3 additions & 2 deletions)
@@ -50,7 +50,7 @@ lumen draft
lumen draft --context "match brand guidelines"
# Output: "feat(button.tsx): Update button color to align with brand identity"
```
To summarise a commit, pass in its SHA-1
```zsh
lumen explain HEAD
lumen explain cc50651f
@@ -72,7 +72,7 @@ lumen explain HEAD~2 --query "how can this be improved?"

AI Provider can be configured by using CLI arguments or Environment variables (see also: [Advanced Configuration](#advanced-configuration)).
```sh
-p, --provider <PROVIDER> [env: LUMEN_AI_PROVIDER] [default: phind] [possible values: openai, phind, groq, claude, ollama]
-p, --provider <PROVIDER> [env: LUMEN_AI_PROVIDER] [default: phind] [possible values: openai, phind, groq, claude, ollama, openrouter]
-k, --api-key <API_KEY> [env: LUMEN_API_KEY]
-m, --model <MODEL> [env: LUMEN_AI_MODEL]

@@ -90,6 +90,7 @@ AI Provider can be configured by using CLI arguments or Environment variables (s
| [OpenAI](https://platform.openai.com/docs/guides/text-generation/chat-completions-api) `openai` | Yes | `gpt-4o`, `gpt-4o-mini`, `gpt-4`, `gpt-3.5-turbo` (default: `gpt-4o-mini`) |
| [Claude](https://claude.ai/new) `claude` | Yes | [see list](https://docs.anthropic.com/en/docs/about-claude/models#model-names) (default: `claude-3-5-sonnet-20241022`) | |
| [Ollama](https://github.com/ollama/ollama) `ollama` | No (local) | [see list](https://github.com/ollama/ollama/blob/main/docs/api.md#model-names) (required) | |
| [OpenRouter](https://openrouter.ai/) `openrouter` | Yes | [see list](https://openrouter.ai/models) (default: `anthropic/claude-3.5-sonnet`) | |
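As a usage illustration (not part of this diff), the new provider is selected like any other, via the flags or environment variables shown above; the API key below is a placeholder and `-m` is optional, falling back to the default model listed in the table:

```sh
# Pick OpenRouter for a single invocation (placeholder key)
lumen draft -p openrouter -k "YOUR_OPENROUTER_API_KEY" -m "anthropic/claude-3.5-sonnet"

# Or set it once via environment variables
export LUMEN_AI_PROVIDER="openrouter"
export LUMEN_API_KEY="YOUR_OPENROUTER_API_KEY"
lumen explain HEAD
```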


# Installation 🔅
src/config/cli.rs (2 changes: 2 additions & 0 deletions)
@@ -30,6 +30,7 @@ pub enum ProviderType {
    Groq,
    Claude,
    Ollama,
    Openrouter
}

impl FromStr for ProviderType {
@@ -42,6 +43,7 @@ impl FromStr for ProviderType {
"groq" => Ok(ProviderType::Groq),
"claude" => Ok(ProviderType::Claude),
"ollama" => Ok(ProviderType::Ollama),
"openrouter" => Ok(ProviderType::Openrouter),
_ => Err(format!("Unknown provider: {}", s)),
}
}
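As a small illustration (not part of the commit), the new name round-trips through the `FromStr` impl above, and unknown names still hit the catch-all error arm; this assumes `ProviderType` from `src/config/cli.rs` is in scope:

```rust
use std::str::FromStr;

fn demo() {
    // "openrouter" now maps to the new variant.
    assert!(matches!(
        ProviderType::from_str("openrouter"),
        Ok(ProviderType::Openrouter)
    ));
    // Anything unrecognised still falls through to the error arm.
    assert!(ProviderType::from_str("not-a-provider").is_err());
}
```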
src/provider/mod.rs (13 changes: 13 additions & 0 deletions)
@@ -3,6 +3,7 @@ use claude::{ClaudeConfig, ClaudeProvider};
use groq::{GroqConfig, GroqProvider};
use ollama::{OllamaConfig, OllamaProvider};
use openai::{OpenAIConfig, OpenAIProvider};
use openrouter::{OpenRouterConfig, OpenRouterProvider};
use phind::{PhindConfig, PhindProvider};
use thiserror::Error;
use crate::config::cli::ProviderType;
@@ -17,6 +18,7 @@ pub mod claude;
pub mod groq;
pub mod ollama;
pub mod openai;
pub mod openrouter;
pub mod phind;

#[async_trait]
@@ -48,6 +50,7 @@ pub enum LumenProvider {
    Groq(Box<GroqProvider>),
    Claude(Box<ClaudeProvider>),
    Ollama(Box<OllamaProvider>),
    OpenRouter(Box<OpenRouterProvider>),
}

impl LumenProvider {
@@ -86,6 +89,13 @@ impl LumenProvider {
                let provider = LumenProvider::Ollama(Box::new(OllamaProvider::new(client, config)));
                Ok(provider)
            }
            ProviderType::Openrouter => {
                let api_key = api_key.ok_or(LumenError::MissingApiKey("OpenRouter".to_string()))?;
                let config = OpenRouterConfig::new(api_key, model);
                let provider =
                    LumenProvider::OpenRouter(Box::new(OpenRouterProvider::new(client, config)));
                Ok(provider)
            }
        }
    }

@@ -97,8 +107,10 @@ impl LumenProvider {
            LumenProvider::Groq(provider) => provider.complete(prompt).await,
            LumenProvider::Claude(provider) => provider.complete(prompt).await,
            LumenProvider::Ollama(provider) => provider.complete(prompt).await,
            LumenProvider::OpenRouter(provider) => provider.complete(prompt).await,
        }
    }

    pub async fn draft(&self, command: &DraftCommand) -> Result<String, ProviderError> {
        let prompt = AIPrompt::build_draft_prompt(command)?;
        match self {
@@ -107,6 +119,7 @@
            LumenProvider::Groq(provider) => provider.complete(prompt).await,
            LumenProvider::Claude(provider) => provider.complete(prompt).await,
            LumenProvider::Ollama(provider) => provider.complete(prompt).await,
            LumenProvider::OpenRouter(provider) => provider.complete(prompt).await,
        }
    }
}
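A rough sketch of how the new constructor arm is reached (hypothetical call site, not part of this commit; the parameter order and error type of `LumenProvider::new` are assumed from the match arms above and may differ from the real code):

```rust
// Illustrative wiring only: builds an OpenRouter-backed provider.
fn build_provider() -> Result<LumenProvider, LumenError> {
    let client = reqwest::Client::new();
    LumenProvider::new(
        client,
        ProviderType::Openrouter,
        Some("YOUR_OPENROUTER_API_KEY".to_string()), // required; None yields MissingApiKey("OpenRouter")
        None, // model: None falls back to OpenRouterConfig's default
    )
}
```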
src/provider/openrouter.rs (92 changes: 92 additions & 0 deletions)
@@ -0,0 +1,92 @@
use super::{AIProvider, ProviderError};
use crate::ai_prompt::AIPrompt;
use async_trait::async_trait;
use reqwest::StatusCode;
use serde_json::{json, Value};

#[derive(Clone)]
pub struct OpenRouterConfig {
    api_key: String,
    model: String,
    api_base_url: String,
}

impl OpenRouterConfig {
    pub fn new(api_key: String, model: Option<String>) -> Self {
        Self {
            api_key,
            model: model.unwrap_or_else(|| "anthropic/claude-3.5-sonnet".to_string()),
            api_base_url: "https://openrouter.ai/api/v1/chat/completions".to_string(),
        }
    }
}

pub struct OpenRouterProvider {
    client: reqwest::Client,
    config: OpenRouterConfig,
}

impl OpenRouterProvider {
    pub fn new(client: reqwest::Client, config: OpenRouterConfig) -> Self {
        Self { client, config }
    }

    async fn complete(&self, prompt: AIPrompt) -> Result<String, ProviderError> {
        let payload = json!({
            "model": self.config.model,
            "messages": [
                {
                    "role": "system",
                    "content": prompt.system_prompt
                },
                {
                    "role": "user",
                    "content": prompt.user_prompt
                }
            ]
        });

        let response = self
            .client
            .post(&self.config.api_base_url)
            .header("Authorization", format!("Bearer {}", self.config.api_key))
            .header("HTTP-Referer", "https://github.com/jnsahaj/lumen")
            .header("X-Title", "Lumen CLI")
            .json(&payload)
            .send()
            .await?;

        let status = response.status();
        match status {
            StatusCode::OK => {
                let response_json: Value = response.json().await?;
                let content = response_json
                    .get("choices")
                    .and_then(|choices| choices.get(0))
                    .and_then(|choice| choice.get("message"))
                    .and_then(|message| message.get("content"))
                    .and_then(|content| content.as_str())
                    .ok_or(ProviderError::NoCompletionChoice)?;

                Ok(content.to_string())
            }
            _ => {
                let error_json: Value = response.json().await?;
                let error_message = error_json
                    .get("error")
                    .and_then(|error| error.get("message"))
                    .and_then(|msg| msg.as_str())
                    .ok_or(ProviderError::UnexpectedResponse)?
                    .into();
                Err(ProviderError::APIError(status, error_message))
            }
        }
    }
}

#[async_trait]
impl AIProvider for OpenRouterProvider {
    async fn complete(&self, prompt: AIPrompt) -> Result<String, ProviderError> {
        self.complete(prompt).await
    }
}
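For reference, a standalone sketch (not part of the commit) of the success-path JSON shape the parsing in `complete` expects; OpenRouter mirrors the OpenAI chat-completions layout, and the literal content here is made up:

```rust
use serde_json::json;

fn main() {
    // Minimal response body matching the choices[0].message.content chain above.
    let response_json = json!({
        "choices": [
            { "message": { "role": "assistant", "content": "feat: add OpenRouter provider" } }
        ]
    });

    let content = response_json["choices"][0]["message"]["content"].as_str();
    assert_eq!(content, Some("feat: add OpenRouter provider"));
}
```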
