Skip to content

Commit

Permalink
Run cargo fmt on project
Browse files Browse the repository at this point in the history
  • Loading branch information
vlovich committed Feb 13, 2025
1 parent 7a29ac4 commit 72c1255
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 7 deletions.
8 changes: 2 additions & 6 deletions examples/simple/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,14 +10,14 @@ use anyhow::{anyhow, bail, Context, Result};
use clap::Parser;
use hf_hub::api::sync::ApiBuilder;
use llama_cpp_2::context::params::LlamaContextParams;
use llama_cpp_2::{ggml_time_us, send_logs_to_tracing, LogOptions};
use llama_cpp_2::llama_backend::LlamaBackend;
use llama_cpp_2::llama_batch::LlamaBatch;
use llama_cpp_2::model::params::kv_overrides::ParamOverrideValue;
use llama_cpp_2::model::params::LlamaModelParams;
use llama_cpp_2::model::LlamaModel;
use llama_cpp_2::model::{AddBos, Special};
use llama_cpp_2::sampling::LlamaSampler;
use llama_cpp_2::{ggml_time_us, send_logs_to_tracing, LogOptions};

use std::ffi::CString;
use std::io::Write;
Expand Down Expand Up @@ -67,11 +67,7 @@ struct Args {
help = "size of the prompt context (default: loaded from the model)"
)]
ctx_size: Option<NonZeroU32>,
#[arg(
short = 'v',
long,
help = "enable verbose llama.cpp logs",
)]
#[arg(short = 'v', long, help = "enable verbose llama.cpp logs")]
verbose: bool,
}

Expand Down
3 changes: 2 additions & 1 deletion llama-cpp-2/src/log.rs
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,8 @@ impl State {
} else {
let level = self
.previous_level
.load(std::sync::atomic::Ordering::Acquire) as llama_cpp_sys_2::ggml_log_level;
.load(std::sync::atomic::Ordering::Acquire)
as llama_cpp_sys_2::ggml_log_level;
tracing::warn!(
inferred_level = level,
text = text,
Expand Down

0 comments on commit 72c1255

Please sign in to comment.