Skip to content

Commit

Permalink
fix clippy
Browse files Browse the repository at this point in the history
Signed-off-by: zenghua <[email protected]>
  • Loading branch information
zenghua committed Feb 17, 2025
1 parent 4d31896 commit bf2917c
Show file tree
Hide file tree
Showing 7 changed files with 12 additions and 16 deletions.
12 changes: 6 additions & 6 deletions rust/lakesoul-datafusion/src/catalog/lakesoul_catalog.rs
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ impl CatalogProvider for LakeSoulCatalog {
#[cfg(test)]
mod tests {
use super::*;
use crate::{catalog::lakesoul_namespace::LakeSoulNamespace, LakeSoulQueryPlanner};
use crate::LakeSoulQueryPlanner;
use datafusion::{execution::{context::{SessionContext, SessionState}, runtime_env::RuntimeEnv}, prelude::SessionConfig};
use lakesoul_metadata::MetaDataClient;
use datafusion::arrow::util::pretty::print_batches;
Expand All @@ -157,19 +157,19 @@ mod tests {
ctx.register_catalog("LAKESOUL".to_string(), Arc::new(catalog));


// Create a namespace for testing
let test_namespace = "test_namespace";
let schema = Arc::new(LakeSoulNamespace::new(client.clone(), ctx.clone(), test_namespace));
// // Create a namespace for testing
// let test_namespace = "test_namespace";
// let schema = Arc::new(LakeSoulNamespace::new(client.clone(), ctx.clone(), test_namespace));
// catalog.register_schema(test_namespace, schema)?;

// Execute the SHOW TABLES command
let sql = "SHOW CATALOGS";
// let sql = "SHOW CATALOGS";
let sql = "SHOW TABLES";
// let sql = "CREATE SCHEMA LAKESOUL.DEFAULT";
let df = ctx.sql(sql).await?;
// print_batches(&df.clone().explain(true, false)?.collect().await?);
let results = df.collect().await?;
print_batches(&results);
let _ = print_batches(&results);

// Verify the results
// assert!(!results.is_empty());
Expand Down
3 changes: 0 additions & 3 deletions rust/lakesoul-datafusion/src/datasource/table_provider.rs
Original file line number Diff line number Diff line change
Expand Up @@ -445,15 +445,12 @@ impl TableProvider for LakeSoulTableProvider {
input: Arc<dyn ExecutionPlan>,
insert_op: InsertOp,
) -> Result<Arc<dyn ExecutionPlan>> {
let table_path = &self.table_paths()[0];
// Get the object store for the table path.
// let url = Url::parse(table_path.as_str()).unwrap();
// let _store = state.runtime_env().object_store(ObjectStoreUrl::parse(&url[..url::Position::BeforePath])?);
// dbg!(&_store);
let state = state.as_any().downcast_ref::<SessionState>().unwrap();

let file_format = self.options().format.as_ref();

// let file_type_writer_options = match &self.options().file_type_write_options {
// Some(opt) => opt.clone(),
// None => FileTypeWriterOptions::build_default(&file_format.file_type(), state.config_options())?,
Expand Down
4 changes: 2 additions & 2 deletions rust/lakesoul-datafusion/src/test/catalog_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,8 @@ mod catalog_tests {
use rand_chacha::ChaCha8Rng;
use std::env;
use std::sync::Arc;
use test_log::test;
use tokio::runtime::Runtime;
use tracing::debug;
use log::debug;

fn create_batch_i32(names: Vec<&str>, values: Vec<&[i32]>) -> RecordBatch {
let values = values
Expand Down Expand Up @@ -54,6 +53,7 @@ mod catalog_tests {
},
properties: serde_json::to_string(&LakeSoulTableProperty {
hash_bucket_num: Some(hash_bucket_num),
datafusion_properties: None,
})
.unwrap(),
comment: "this is comment".to_string(),
Expand Down
2 changes: 1 addition & 1 deletion rust/lakesoul-datafusion/src/test/insert_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ mod insert_tests {
) -> Result<()> {
let lakesoul_table = LakeSoulTable::for_name(table_name).await?;

let builder = create_io_config_builder(client, None, false, "default").await?;
let builder = create_io_config_builder(client, None, false, "default", Default::default(), Default::default()).await?;
let sess_ctx = create_session_context(&mut builder.clone().build())?;

let dataframe = lakesoul_table.to_dataframe(&sess_ctx).await?;
Expand Down
2 changes: 1 addition & 1 deletion rust/lakesoul-datafusion/src/test/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

use arrow::array::RecordBatch;
use std::sync::Arc;
use tracing::debug;
use log::debug;

use lakesoul_metadata::MetaDataClient;

Expand Down
4 changes: 2 additions & 2 deletions rust/lakesoul-datafusion/src/test/upsert_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1691,7 +1691,7 @@ mod upsert_with_metadata_tests {
) -> Result<()> {
let lakesoul_table = LakeSoulTable::for_name(table_name).await?;
lakesoul_table.execute_upsert(batch).await?;
let builder = create_io_config_builder(client, None, false, "default").await?;
let builder = create_io_config_builder(client, None, false, "default", Default::default(), Default::default()).await?;
let sess_ctx = create_session_context(&mut builder.clone().build())?;

let dataframe = lakesoul_table.to_dataframe(&sess_ctx).await?;
Expand Down Expand Up @@ -1746,7 +1746,7 @@ mod upsert_with_metadata_tests {
) -> Result<()> {
let lakesoul_table = LakeSoulTable::for_name(table_name).await?;
lakesoul_table.execute_upsert(batch).await?;
let builder = create_io_config_builder(client, None, false, "default").await?;
let builder = create_io_config_builder(client, None, false, "default", Default::default(), Default::default()).await?;
let sess_ctx = create_session_context(&mut builder.clone().build())?;

let dataframe = lakesoul_table.to_dataframe(&sess_ctx).await?;
Expand Down
1 change: 0 additions & 1 deletion rust/lakesoul-io/src/lakesoul_writer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,6 @@ impl SyncSendableMutableLakeSoulWriter {
pub fn write_batch(&mut self, record_batch: RecordBatch) -> Result<()> {
let runtime = self.runtime.clone();

println!("at SyncSendableMutableLakeSoulWriter::write_batch\n{}", arrow_cast::pretty::pretty_format_batches(&[record_batch.clone()])?);
if record_batch.num_rows() == 0 {
runtime.block_on(async move { self.write_batch_async(record_batch, false).await })
} else {
Expand Down

0 comments on commit bf2917c

Please sign in to comment.