Support CREATE TABLE in optimizer
PakhomovAlexander committed Jan 22, 2025
1 parent 6cf8099 commit c1280f4
Showing 6 changed files with 114 additions and 59 deletions.
4 changes: 3 additions & 1 deletion db/src/analyzer/tree.rs
@@ -1,10 +1,12 @@
use std::fmt::Display;

use strum::Display;

use crate::types::ColType;

//#[allow(clippy::unused)]
#[allow(dead_code)]
#[derive(Debug, PartialEq, Clone)]
#[derive(Debug, PartialEq, Clone, Display)]
pub enum Operator {
Project {
columns: Vec<Column>,
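Note on the new derive: strum's Display implements std::fmt::Display by writing the variant's name, so an Operator value can be printed by name (presumably for diagnostics and plan printing). A minimal standalone sketch with a stand-in enum, not the real Operator:

use strum::Display;

#[derive(Display)]
enum Node {
    Scan,
    CreateTable,
}

fn main() {
    // strum's derive writes the variant name by default.
    assert_eq!(Node::CreateTable.to_string(), "CreateTable");
}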
20 changes: 10 additions & 10 deletions db/src/catalog/catalog.rs
@@ -1,6 +1,8 @@
use std::collections::HashMap;

use super::types::{ColumnId, ColumnSchema, DataType, TableId, TableSchema};
use crate::types::ColType;

use super::types::{ColumnId, ColumnSchema, TableId, TableSchema};

pub struct Catalog {
store: MemoryCatalogStore,
@@ -91,7 +93,7 @@ impl TableSchemaBuilder {
self
}

pub fn col(&mut self, col_name: &str, data_type: DataType) -> &mut Self {
pub fn col(&mut self, col_name: &str, data_type: ColType) -> &mut Self {
self.columns.push(ColumnSchema::new(
&ColumnId::new(self.id.as_ref().unwrap(), col_name),
data_type,
@@ -107,8 +109,6 @@ impl TableSchemaBuilder {

#[cfg(test)]
mod tests {
use crate::catalog::types::DataType;

use super::*;

fn samle_schema() -> TableSchema {
@@ -117,11 +117,11 @@ mod tests {
vec![
ColumnSchema::new(
&ColumnId::new(&TableId::public("table1"), "col1"),
DataType::Int,
ColType::Int,
),
ColumnSchema::new(
&ColumnId::new(&TableId::public("table1"), "col2"),
DataType::String,
ColType::Text,
),
],
)
@@ -214,8 +214,8 @@ mod tests {
fn builder() {
let table = TableSchemaBuilder::public()
.table("table1")
.col("col1", DataType::Int)
.col("col2", DataType::String)
.col("col1", ColType::Int)
.col("col2", ColType::Text)
.build();

assert_eq!(
@@ -225,11 +225,11 @@ mod tests {
vec![
ColumnSchema::new(
&ColumnId::new(&TableId::public("table1"), "col1"),
DataType::Int,
ColType::Int,
),
ColumnSchema::new(
&ColumnId::new(&TableId::public("table1"), "col2"),
DataType::String,
ColType::Text,
),
],
)
12 changes: 4 additions & 8 deletions db/src/catalog/types.rs
@@ -1,3 +1,5 @@
use crate::types::ColType;

#[derive(Debug, PartialEq, Hash, Eq, Clone)]
pub struct TableId {
schema_name: String,
@@ -90,20 +92,14 @@ impl TableSchema {
#[derive(Debug, PartialEq, Clone)]
pub struct ColumnSchema {
pub id: ColumnId,
pub data_type: DataType,
pub data_type: ColType,
}

impl ColumnSchema {
pub fn new(id: &ColumnId, data_type: DataType) -> ColumnSchema {
pub fn new(id: &ColumnId, data_type: ColType) -> ColumnSchema {
ColumnSchema {
id: id.clone(),
data_type,
}
}
}

#[derive(Debug, PartialEq, Clone)]
pub enum DataType {
Int,
String,
}
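The catalog's own DataType enum (removed here) is superseded by the shared crate::types::ColType, whose definition is not part of this commit. Judging from the variants the tests use (ColType::Int, ColType::Text), a minimal sketch of it might look like:

// Assumed shape of ColType in db/src/types.rs; not shown in this diff.
// It needs at least these derives, because ColumnSchema, which holds it,
// derives Debug, PartialEq, and Clone.
#[derive(Debug, PartialEq, Clone)]
pub enum ColType {
    Int,
    Text,
}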
26 changes: 15 additions & 11 deletions db/src/embedded/mod.rs
@@ -1,15 +1,19 @@
use std::rc::Rc;
use std::{cell::RefCell, rc::Rc};

use crate::{
analyzer::Analyzer,
catalog::{types::DataType, Catalog, TableSchemaBuilder},
optimizer::{types::StorageEngine, types::Tuple, types::Val, Optimizer},
catalog::{Catalog, TableSchemaBuilder},
optimizer::{
types::{StorageEngine, Tuple, Val},
Optimizer,
},
parser::{Lexer, Parser},
types::ColType,
};

pub struct Db {
catalog_rc: Rc<Catalog>,
storage_rc: Rc<StorageEngine>,
catalog_rc: Rc<RefCell<Catalog>>,
storage_rc: Rc<RefCell<StorageEngine>>,
analyzer: Analyzer,
optimizer: Optimizer,
}
@@ -27,9 +31,9 @@ impl Db {

let ts = TableSchemaBuilder::public()
.table("table1")
.col("name", DataType::Int)
.col("address", DataType::Int)
.col("email", DataType::Int)
.col("name", ColType::Int)
.col("address", ColType::Int)
.col("email", ColType::Int)
.build();
let _ = catalog.register_table(&ts);

@@ -59,8 +63,8 @@ impl Db {
],
);

let catalog_rc = Rc::new(catalog);
let storage_rc = Rc::new(storage);
let catalog_rc = Rc::new(RefCell::new(catalog));
let storage_rc = Rc::new(RefCell::new(storage));

let optimizer = Optimizer::new(Rc::clone(&catalog_rc));

@@ -81,6 +85,6 @@ impl Db {

let mut p_plan = self.optimizer.optimize(l_plan);

p_plan.execute_all(Rc::clone(&self.storage_rc))
p_plan.execute_all(Rc::clone(&self.storage_rc), Rc::clone(&self.catalog_rc))
}
}
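execute_all now takes the catalog handle alongside the storage engine, because executing a CreateTable op has to register a schema at run time; wrapping both in Rc<RefCell<...>> gives the plan shared, mutable access. The executor internals are not shown in this commit; a hypothetical sketch of the create-table path, with paths assumed from the imports above:

use std::{cell::RefCell, rc::Rc};

use crate::{
    catalog::{types::TableSchema, Catalog},
    optimizer::types::Tuple,
};

// Hypothetical helper, not part of this commit.
fn run_create_table(catalog: &Rc<RefCell<Catalog>>, schema: &TableSchema) -> Vec<Tuple> {
    // RefCell provides interior mutability behind the shared Rc handle, which is
    // why Db now stores Rc<RefCell<Catalog>> instead of Rc<Catalog>.
    // borrow_mut() assumes registration mutates the catalog; if register_table
    // takes &self, a plain borrow() would also work.
    let _ = catalog.borrow_mut().register_table(schema);
    Vec::new() // DDL produces no result rows
}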
69 changes: 53 additions & 16 deletions db/src/optimizer/optimizer.rs
@@ -1,17 +1,16 @@
use core::panic;
use std::rc::Rc;

use std::{cell::RefCell, rc::Rc};

use crate::{
analyzer::{LogicalNode, LogicalPlan, Operator},
catalog::{types::TableId, Catalog},
catalog::{types::TableId, Catalog, TableSchemaBuilder},
optimizer::types::{Column, FullScanState, PhysicalPlan},
};

use super::types::Op;

pub struct Optimizer {
catalog: Rc<Catalog>,
catalog: Rc<RefCell<Catalog>>,
}

impl Optimizer {
@@ -30,13 +29,13 @@ impl Optimizer {
}
}

pub fn new(catalog: Rc<Catalog>) -> Self {
pub fn new(catalog: Rc<RefCell<Catalog>>) -> Self {
Optimizer { catalog }
}
}

struct PhysicalPlanBuilder {
catalog: Rc<Catalog>,
catalog: Rc<RefCell<Catalog>>,
}

impl PhysicalPlanBuilder {
@@ -46,7 +45,7 @@ impl PhysicalPlanBuilder {
let mut cols = Vec::new();
// FIXME: table is hardcoded!
let table_id = TableId::public("table1");
let table_schema = self.catalog.get_table(&table_id).unwrap();
let table_schema = self.catalog.as_ref().borrow().get_table(&table_id).unwrap();

for c in columns {
cols.push(Column::new(
@@ -79,25 +78,38 @@ impl PhysicalPlanBuilder {
children: self.walk(&node.children[0]),
}]
}
Operator::CreateTable {
table_name,
columns,
} => {
let mut ts = TableSchemaBuilder::public();
ts.table(table_name);
for c in columns {
ts.col(&c.column_name, c.column_type);
}

vec![Op::create_table(ts.build())]
}
_ => {
panic!("Unsopported node")
panic!("Unsopported node {:?}", node)
}
}
}
}

#[cfg(test)]
mod tests {
use std::{rc::Rc, vec};
use std::{cell::RefCell, rc::Rc, vec};

use crate::{
analyzer::{Analyzer, LogicalPlan},
catalog::{types::DataType, Catalog, TableSchemaBuilder},
catalog::{Catalog, TableSchemaBuilder},
optimizer::{
types::{Column, Op, PhysicalPlan, StorageEngine, Tuple, Val},
Optimizer,
},
parser::{Lexer, Parser},
types::ColType,
};

fn analyze(input: &str) -> LogicalPlan {
@@ -116,12 +128,12 @@ mod tests {

let ts = TableSchemaBuilder::public()
.table("table1")
.col("col1", DataType::Int)
.col("col1", ColType::Int)
.build();
let _ = catalog.register_table(&ts);
let cs = ts.get_column("col1").unwrap();

let optimizer = Optimizer::new(Rc::new(catalog));
let optimizer = Optimizer::new(Rc::new(RefCell::new(catalog)));

let p_plan = optimizer.optimize(l_plan);

@@ -142,7 +154,7 @@ mod tests {

let ts = TableSchemaBuilder::public()
.table("table1")
.col("col1", DataType::Int)
.col("col1", ColType::Int)
.build();
let _ = catalog.register_table(&ts);

@@ -156,15 +168,40 @@ mod tests {
],
);

let catalog_rc = Rc::new(catalog);
let storage_rc = Rc::new(storage);
let catalog_rc = Rc::new(RefCell::new(catalog));
let storage_rc = Rc::new(RefCell::new(storage));

let optimizer = Optimizer::new(Rc::clone(&catalog_rc));

let mut p_plan = optimizer.optimize(l_plan);

let tuples = p_plan.execute_all(Rc::clone(&storage_rc));
let tuples = p_plan.execute_all(Rc::clone(&storage_rc), Rc::clone(&catalog_rc));

assert_eq!(tuples.len(), 4);
}

#[test]
fn create_table() {
let l_plan = analyze("CREATE TABLE table1 (col1 INT, col2 INT, col3 INT)");

let catalog = Catalog::mem();

let optimizer = Optimizer::new(Rc::new(RefCell::new(catalog)));

let p_plan = optimizer.optimize(l_plan);

assert_eq!(
p_plan,
PhysicalPlan {
root: Op::create_table(
TableSchemaBuilder::public()
.table("table1")
.col("col1", ColType::Int)
.col("col2", ColType::Int)
.col("col3", ColType::Int)
.build()
)
}
);
}
}
(The diff for the remaining changed file was not rendered on this page.)
