Skip to content

Commit

Permalink
Merge pull request #96 from firstbatchxyz/erhant/peer-print-and-openai-check
Browse files Browse the repository at this point in the history

OpenAI checks & better error reporting & libp2p update
  • Loading branch information
erhant authored Aug 19, 2024
2 parents e63fd18 + 010d3d7 commit 3fe904d
Show file tree
Hide file tree
Showing 8 changed files with 223 additions and 82 deletions.
52 changes: 26 additions & 26 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 2 additions & 4 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ edition = "2021"
license = "Apache-2.0"
readme = "README.md"

# profiling build for flamegraphs
[profile.profiling]
inherits = "release"
debug = true
Expand Down Expand Up @@ -43,7 +44,7 @@ fastbloom-rs = "0.5.9"
ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", rev = "25467d2" }

# peer-to-peer
libp2p = { git = "https://github.com/anilaltuner/rust-libp2p.git", rev = "84b6d6f", features = [
libp2p = { git = "https://github.com/anilaltuner/rust-libp2p.git", rev = "c5cefa1", features = [
"dcutr",
"ping",
"relay",
Expand All @@ -65,9 +66,6 @@ tracing = { version = "0.1.40" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
public-ip = "0.2.2"

# TODO: solves ecies dependency issue
# getrandom = "0.2.15"


[dev-dependencies]
colored = "2.1.0"
Expand Down
21 changes: 12 additions & 9 deletions src/config/mod.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
mod models;
mod ollama;
mod openai;

use crate::utils::crypto::to_address;
use libsecp256k1::{PublicKey, SecretKey};
use models::ModelConfig;
use ollama::OllamaConfig;
use ollama_workflows::ModelProvider;
use openai::OpenAIConfig;

use std::env;

Expand All @@ -26,6 +28,8 @@ pub struct DriaComputeNodeConfig {
/// Even if Ollama is not used, we store the host & port here.
/// If Ollama is used, this config will be respected during its instantiations.
pub ollama_config: OllamaConfig,
/// OpenAI API key & its service check implementation.
pub openai_config: OpenAIConfig,
}

/// The default P2P network listen address.
Expand Down Expand Up @@ -88,16 +92,15 @@ impl DriaComputeNodeConfig {
let p2p_listen_addr =
env::var("DKN_P2P_LISTEN_ADDR").unwrap_or(DEFAULT_P2P_LISTEN_ADDR.to_string());

let ollama_config = OllamaConfig::new();

Self {
admin_public_key,
secret_key,
public_key,
address,
model_config,
p2p_listen_addr,
ollama_config,
ollama_config: OllamaConfig::new(),
openai_config: OpenAIConfig::new(),
}
}

Expand All @@ -120,12 +123,12 @@ impl DriaComputeNodeConfig {

// if OpenAI is a provider, check that the API key is set
if unique_providers.contains(&ModelProvider::OpenAI) {
log::info!("Checking OpenAI requirements");
const OPENAI_API_KEY: &str = "OPENAI_API_KEY";

if std::env::var(OPENAI_API_KEY).is_err() {
return Err("OpenAI API key not found".into());
}
let openai_models = self
.model_config
.get_models_for_provider(ModelProvider::OpenAI);
self.openai_config
.check(openai_models.into_iter().map(|m| m.to_string()).collect())
.await?;
}

Ok(())
Expand Down
3 changes: 2 additions & 1 deletion src/config/ollama.rs
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ impl OllamaConfig {

let auto_pull = std::env::var("OLLAMA_AUTO_PULL").unwrap_or_default() == "true";

OllamaConfig {
Self {
host,
port,
hardcoded_models,
Expand Down Expand Up @@ -109,6 +109,7 @@ impl OllamaConfig {
}
}

log::info!("Ollama setup is all good.",);
Ok(())
}
}
Loading

0 comments on commit 3fe904d

Please sign in to comment.