From 6f6faa126ede5f787eb6201f2c84e99114f63404 Mon Sep 17 00:00:00 2001
From: Zain Hoda <7146154+zainhoda@users.noreply.github.com>
Date: Fri, 1 Mar 2024 23:44:52 -0500
Subject: [PATCH 1/2] add chroma test
---
docs/CNAME | 1 -
docs/databases.md | 142 --
docs/index.md | 239 ---
docs/intro-to-vanna.md | 64 -
docs/onboarding.md | 30 -
docs/reference.md | 4 -
docs/sidebar.py | 130 --
docs/sidebar.yaml | 77 -
docs/streamlit.md | 13 -
docs/support.md | 5 -
docs/vanna/types.html | 1717 -----------------
docs/workflow.md | 19 -
nb-theme/conf.json | 12 -
nb-theme/index.html.j2 | 189 --
nb-theme/static/custom_theme.css | 3 -
notebooks/Chinook.sqlite | Bin 1067008 -> 0 bytes
notebooks/app.ipynb | 48 -
notebooks/bigquery-mistral-chromadb.ipynb | 1 -
notebooks/bigquery-mistral-marqo.ipynb | 1 -
.../bigquery-mistral-other-vectordb.ipynb | 1 -
notebooks/bigquery-mistral-vannadb.ipynb | 1 -
.../bigquery-openai-azure-chromadb.ipynb | 1 -
notebooks/bigquery-openai-azure-marqo.ipynb | 1 -
...bigquery-openai-azure-other-vectordb.ipynb | 1 -
notebooks/bigquery-openai-azure-vannadb.ipynb | 1 -
.../bigquery-openai-standard-chromadb.ipynb | 1 -
.../bigquery-openai-standard-marqo.ipynb | 1 -
...query-openai-standard-other-vectordb.ipynb | 1 -
.../bigquery-openai-standard-vannadb.ipynb | 1 -
.../bigquery-openai-vanna-chromadb.ipynb | 1 -
notebooks/bigquery-openai-vanna-marqo.ipynb | 1 -
...bigquery-openai-vanna-other-vectordb.ipynb | 1 -
notebooks/bigquery-openai-vanna-vannadb.ipynb | 1 -
notebooks/bigquery-other-llm-chromadb.ipynb | 1 -
notebooks/bigquery-other-llm-marqo.ipynb | 1 -
.../bigquery-other-llm-other-vectordb.ipynb | 1 -
notebooks/bigquery-other-llm-vannadb.ipynb | 1 -
notebooks/configure.ipynb | 18 -
notebooks/connect-to-bigquery.ipynb | 93 -
notebooks/connect-to-postgres.ipynb | 52 -
notebooks/connect-to-snowflake.ipynb | 48 -
notebooks/databases.ipynb | 176 --
notebooks/getting-started.ipynb | 292 ---
notebooks/index.ipynb | 63 -
notebooks/local.ipynb | 374 ----
notebooks/manual-train.ipynb | 400 ----
.../other-database-mistral-chromadb.ipynb | 1 -
notebooks/other-database-mistral-marqo.ipynb | 1 -
...ther-database-mistral-other-vectordb.ipynb | 1 -
.../other-database-mistral-vannadb.ipynb | 1 -
...other-database-openai-azure-chromadb.ipynb | 1 -
.../other-database-openai-azure-marqo.ipynb | 1 -
...database-openai-azure-other-vectordb.ipynb | 1 -
.../other-database-openai-azure-vannadb.ipynb | 1 -
...er-database-openai-standard-chromadb.ipynb | 1 -
...other-database-openai-standard-marqo.ipynb | 1 -
...abase-openai-standard-other-vectordb.ipynb | 1 -
...her-database-openai-standard-vannadb.ipynb | 1 -
...other-database-openai-vanna-chromadb.ipynb | 1 -
.../other-database-openai-vanna-marqo.ipynb | 1 -
...database-openai-vanna-other-vectordb.ipynb | 1 -
.../other-database-openai-vanna-vannadb.ipynb | 1 -
.../other-database-other-llm-chromadb.ipynb | 1 -
.../other-database-other-llm-marqo.ipynb | 1 -
...er-database-other-llm-other-vectordb.ipynb | 1 -
.../other-database-other-llm-vannadb.ipynb | 1 -
notebooks/postgres-mistral-chromadb.ipynb | 1 -
notebooks/postgres-mistral-marqo.ipynb | 1 -
.../postgres-mistral-other-vectordb.ipynb | 1 -
notebooks/postgres-mistral-vannadb.ipynb | 1 -
.../postgres-openai-azure-chromadb.ipynb | 1 -
notebooks/postgres-openai-azure-marqo.ipynb | 1 -
...postgres-openai-azure-other-vectordb.ipynb | 1 -
notebooks/postgres-openai-azure-vannadb.ipynb | 1 -
.../postgres-openai-standard-chromadb.ipynb | 1 -
.../postgres-openai-standard-marqo.ipynb | 1 -
...tgres-openai-standard-other-vectordb.ipynb | 1 -
.../postgres-openai-standard-vannadb.ipynb | 1 -
.../postgres-openai-vanna-chromadb.ipynb | 1 -
notebooks/postgres-openai-vanna-marqo.ipynb | 1 -
...postgres-openai-vanna-other-vectordb.ipynb | 1 -
notebooks/postgres-openai-vanna-vannadb.ipynb | 1 -
notebooks/postgres-other-llm-chromadb.ipynb | 1 -
notebooks/postgres-other-llm-marqo.ipynb | 1 -
.../postgres-other-llm-other-vectordb.ipynb | 1 -
notebooks/postgres-other-llm-vannadb.ipynb | 1 -
notebooks/slack.ipynb | 42 -
notebooks/snowflake-mistral-chromadb.ipynb | 1 -
notebooks/snowflake-mistral-marqo.ipynb | 1 -
.../snowflake-mistral-other-vectordb.ipynb | 1 -
notebooks/snowflake-mistral-vannadb.ipynb | 1 -
.../snowflake-openai-azure-chromadb.ipynb | 1 -
notebooks/snowflake-openai-azure-marqo.ipynb | 1 -
...nowflake-openai-azure-other-vectordb.ipynb | 1 -
.../snowflake-openai-azure-vannadb.ipynb | 1 -
.../snowflake-openai-standard-chromadb.ipynb | 1 -
.../snowflake-openai-standard-marqo.ipynb | 1 -
...flake-openai-standard-other-vectordb.ipynb | 1 -
.../snowflake-openai-standard-vannadb.ipynb | 1 -
.../snowflake-openai-vanna-chromadb.ipynb | 1 -
notebooks/snowflake-openai-vanna-marqo.ipynb | 1 -
...nowflake-openai-vanna-other-vectordb.ipynb | 1 -
.../snowflake-openai-vanna-vannadb.ipynb | 1 -
notebooks/snowflake-other-llm-chromadb.ipynb | 1 -
notebooks/snowflake-other-llm-marqo.ipynb | 1 -
.../snowflake-other-llm-other-vectordb.ipynb | 1 -
notebooks/snowflake-other-llm-vannadb.ipynb | 1 -
notebooks/sqlite-mistral-chromadb.ipynb | 1 -
notebooks/sqlite-mistral-marqo.ipynb | 1 -
notebooks/sqlite-mistral-other-vectordb.ipynb | 1 -
notebooks/sqlite-mistral-vannadb.ipynb | 1 -
notebooks/sqlite-openai-azure-chromadb.ipynb | 1 -
notebooks/sqlite-openai-azure-marqo.ipynb | 1 -
.../sqlite-openai-azure-other-vectordb.ipynb | 1 -
notebooks/sqlite-openai-azure-vannadb.ipynb | 1 -
.../sqlite-openai-standard-chromadb.ipynb | 1 -
notebooks/sqlite-openai-standard-marqo.ipynb | 1 -
...qlite-openai-standard-other-vectordb.ipynb | 1 -
.../sqlite-openai-standard-vannadb.ipynb | 1 -
notebooks/sqlite-openai-vanna-chromadb.ipynb | 1 -
notebooks/sqlite-openai-vanna-marqo.ipynb | 1 -
.../sqlite-openai-vanna-other-vectordb.ipynb | 1 -
notebooks/sqlite-openai-vanna-vannadb.ipynb | 1 -
notebooks/sqlite-other-llm-chromadb.ipynb | 1 -
notebooks/sqlite-other-llm-marqo.ipynb | 1 -
.../sqlite-other-llm-other-vectordb.ipynb | 1 -
notebooks/sqlite-other-llm-vannadb.ipynb | 1 -
notebooks/streamlit.ipynb | 30 -
notebooks/vn-ask.ipynb | 572 ------
notebooks/vn-connect-to-bigquery.ipynb | 532 -----
notebooks/vn-connect-to-postgres.ipynb | 1336 -------------
notebooks/vn-train.ipynb | 394 ----
pyproject.toml | 4 +-
tests/test_vanna.py | 23 +-
134 files changed, 24 insertions(+), 7218 deletions(-)
delete mode 100644 docs/CNAME
delete mode 100644 docs/databases.md
delete mode 100644 docs/index.md
delete mode 100644 docs/intro-to-vanna.md
delete mode 100644 docs/onboarding.md
delete mode 100644 docs/reference.md
delete mode 100644 docs/sidebar.py
delete mode 100644 docs/sidebar.yaml
delete mode 100644 docs/streamlit.md
delete mode 100644 docs/support.md
delete mode 100644 docs/vanna/types.html
delete mode 100644 docs/workflow.md
delete mode 100644 nb-theme/conf.json
delete mode 100644 nb-theme/index.html.j2
delete mode 100644 nb-theme/static/custom_theme.css
delete mode 100644 notebooks/Chinook.sqlite
delete mode 100644 notebooks/app.ipynb
delete mode 100644 notebooks/bigquery-mistral-chromadb.ipynb
delete mode 100644 notebooks/bigquery-mistral-marqo.ipynb
delete mode 100644 notebooks/bigquery-mistral-other-vectordb.ipynb
delete mode 100644 notebooks/bigquery-mistral-vannadb.ipynb
delete mode 100644 notebooks/bigquery-openai-azure-chromadb.ipynb
delete mode 100644 notebooks/bigquery-openai-azure-marqo.ipynb
delete mode 100644 notebooks/bigquery-openai-azure-other-vectordb.ipynb
delete mode 100644 notebooks/bigquery-openai-azure-vannadb.ipynb
delete mode 100644 notebooks/bigquery-openai-standard-chromadb.ipynb
delete mode 100644 notebooks/bigquery-openai-standard-marqo.ipynb
delete mode 100644 notebooks/bigquery-openai-standard-other-vectordb.ipynb
delete mode 100644 notebooks/bigquery-openai-standard-vannadb.ipynb
delete mode 100644 notebooks/bigquery-openai-vanna-chromadb.ipynb
delete mode 100644 notebooks/bigquery-openai-vanna-marqo.ipynb
delete mode 100644 notebooks/bigquery-openai-vanna-other-vectordb.ipynb
delete mode 100644 notebooks/bigquery-openai-vanna-vannadb.ipynb
delete mode 100644 notebooks/bigquery-other-llm-chromadb.ipynb
delete mode 100644 notebooks/bigquery-other-llm-marqo.ipynb
delete mode 100644 notebooks/bigquery-other-llm-other-vectordb.ipynb
delete mode 100644 notebooks/bigquery-other-llm-vannadb.ipynb
delete mode 100644 notebooks/configure.ipynb
delete mode 100644 notebooks/connect-to-bigquery.ipynb
delete mode 100644 notebooks/connect-to-postgres.ipynb
delete mode 100644 notebooks/connect-to-snowflake.ipynb
delete mode 100644 notebooks/databases.ipynb
delete mode 100644 notebooks/getting-started.ipynb
delete mode 100644 notebooks/index.ipynb
delete mode 100644 notebooks/local.ipynb
delete mode 100644 notebooks/manual-train.ipynb
delete mode 100644 notebooks/other-database-mistral-chromadb.ipynb
delete mode 100644 notebooks/other-database-mistral-marqo.ipynb
delete mode 100644 notebooks/other-database-mistral-other-vectordb.ipynb
delete mode 100644 notebooks/other-database-mistral-vannadb.ipynb
delete mode 100644 notebooks/other-database-openai-azure-chromadb.ipynb
delete mode 100644 notebooks/other-database-openai-azure-marqo.ipynb
delete mode 100644 notebooks/other-database-openai-azure-other-vectordb.ipynb
delete mode 100644 notebooks/other-database-openai-azure-vannadb.ipynb
delete mode 100644 notebooks/other-database-openai-standard-chromadb.ipynb
delete mode 100644 notebooks/other-database-openai-standard-marqo.ipynb
delete mode 100644 notebooks/other-database-openai-standard-other-vectordb.ipynb
delete mode 100644 notebooks/other-database-openai-standard-vannadb.ipynb
delete mode 100644 notebooks/other-database-openai-vanna-chromadb.ipynb
delete mode 100644 notebooks/other-database-openai-vanna-marqo.ipynb
delete mode 100644 notebooks/other-database-openai-vanna-other-vectordb.ipynb
delete mode 100644 notebooks/other-database-openai-vanna-vannadb.ipynb
delete mode 100644 notebooks/other-database-other-llm-chromadb.ipynb
delete mode 100644 notebooks/other-database-other-llm-marqo.ipynb
delete mode 100644 notebooks/other-database-other-llm-other-vectordb.ipynb
delete mode 100644 notebooks/other-database-other-llm-vannadb.ipynb
delete mode 100644 notebooks/postgres-mistral-chromadb.ipynb
delete mode 100644 notebooks/postgres-mistral-marqo.ipynb
delete mode 100644 notebooks/postgres-mistral-other-vectordb.ipynb
delete mode 100644 notebooks/postgres-mistral-vannadb.ipynb
delete mode 100644 notebooks/postgres-openai-azure-chromadb.ipynb
delete mode 100644 notebooks/postgres-openai-azure-marqo.ipynb
delete mode 100644 notebooks/postgres-openai-azure-other-vectordb.ipynb
delete mode 100644 notebooks/postgres-openai-azure-vannadb.ipynb
delete mode 100644 notebooks/postgres-openai-standard-chromadb.ipynb
delete mode 100644 notebooks/postgres-openai-standard-marqo.ipynb
delete mode 100644 notebooks/postgres-openai-standard-other-vectordb.ipynb
delete mode 100644 notebooks/postgres-openai-standard-vannadb.ipynb
delete mode 100644 notebooks/postgres-openai-vanna-chromadb.ipynb
delete mode 100644 notebooks/postgres-openai-vanna-marqo.ipynb
delete mode 100644 notebooks/postgres-openai-vanna-other-vectordb.ipynb
delete mode 100644 notebooks/postgres-openai-vanna-vannadb.ipynb
delete mode 100644 notebooks/postgres-other-llm-chromadb.ipynb
delete mode 100644 notebooks/postgres-other-llm-marqo.ipynb
delete mode 100644 notebooks/postgres-other-llm-other-vectordb.ipynb
delete mode 100644 notebooks/postgres-other-llm-vannadb.ipynb
delete mode 100644 notebooks/slack.ipynb
delete mode 100644 notebooks/snowflake-mistral-chromadb.ipynb
delete mode 100644 notebooks/snowflake-mistral-marqo.ipynb
delete mode 100644 notebooks/snowflake-mistral-other-vectordb.ipynb
delete mode 100644 notebooks/snowflake-mistral-vannadb.ipynb
delete mode 100644 notebooks/snowflake-openai-azure-chromadb.ipynb
delete mode 100644 notebooks/snowflake-openai-azure-marqo.ipynb
delete mode 100644 notebooks/snowflake-openai-azure-other-vectordb.ipynb
delete mode 100644 notebooks/snowflake-openai-azure-vannadb.ipynb
delete mode 100644 notebooks/snowflake-openai-standard-chromadb.ipynb
delete mode 100644 notebooks/snowflake-openai-standard-marqo.ipynb
delete mode 100644 notebooks/snowflake-openai-standard-other-vectordb.ipynb
delete mode 100644 notebooks/snowflake-openai-standard-vannadb.ipynb
delete mode 100644 notebooks/snowflake-openai-vanna-chromadb.ipynb
delete mode 100644 notebooks/snowflake-openai-vanna-marqo.ipynb
delete mode 100644 notebooks/snowflake-openai-vanna-other-vectordb.ipynb
delete mode 100644 notebooks/snowflake-openai-vanna-vannadb.ipynb
delete mode 100644 notebooks/snowflake-other-llm-chromadb.ipynb
delete mode 100644 notebooks/snowflake-other-llm-marqo.ipynb
delete mode 100644 notebooks/snowflake-other-llm-other-vectordb.ipynb
delete mode 100644 notebooks/snowflake-other-llm-vannadb.ipynb
delete mode 100644 notebooks/sqlite-mistral-chromadb.ipynb
delete mode 100644 notebooks/sqlite-mistral-marqo.ipynb
delete mode 100644 notebooks/sqlite-mistral-other-vectordb.ipynb
delete mode 100644 notebooks/sqlite-mistral-vannadb.ipynb
delete mode 100644 notebooks/sqlite-openai-azure-chromadb.ipynb
delete mode 100644 notebooks/sqlite-openai-azure-marqo.ipynb
delete mode 100644 notebooks/sqlite-openai-azure-other-vectordb.ipynb
delete mode 100644 notebooks/sqlite-openai-azure-vannadb.ipynb
delete mode 100644 notebooks/sqlite-openai-standard-chromadb.ipynb
delete mode 100644 notebooks/sqlite-openai-standard-marqo.ipynb
delete mode 100644 notebooks/sqlite-openai-standard-other-vectordb.ipynb
delete mode 100644 notebooks/sqlite-openai-standard-vannadb.ipynb
delete mode 100644 notebooks/sqlite-openai-vanna-chromadb.ipynb
delete mode 100644 notebooks/sqlite-openai-vanna-marqo.ipynb
delete mode 100644 notebooks/sqlite-openai-vanna-other-vectordb.ipynb
delete mode 100644 notebooks/sqlite-openai-vanna-vannadb.ipynb
delete mode 100644 notebooks/sqlite-other-llm-chromadb.ipynb
delete mode 100644 notebooks/sqlite-other-llm-marqo.ipynb
delete mode 100644 notebooks/sqlite-other-llm-other-vectordb.ipynb
delete mode 100644 notebooks/sqlite-other-llm-vannadb.ipynb
delete mode 100644 notebooks/streamlit.ipynb
delete mode 100644 notebooks/vn-ask.ipynb
delete mode 100644 notebooks/vn-connect-to-bigquery.ipynb
delete mode 100644 notebooks/vn-connect-to-postgres.ipynb
delete mode 100644 notebooks/vn-train.ipynb
diff --git a/docs/CNAME b/docs/CNAME
deleted file mode 100644
index ad1c2a23..00000000
--- a/docs/CNAME
+++ /dev/null
@@ -1 +0,0 @@
-docs.vanna.ai
\ No newline at end of file
diff --git a/docs/databases.md b/docs/databases.md
deleted file mode 100644
index 6924d34c..00000000
--- a/docs/databases.md
+++ /dev/null
@@ -1,142 +0,0 @@
-# How to use Vanna with various databases
-
-You can use Vanna with any database that you can connect to via Python. Here are some examples of how to connect to various databases.
-
-All you have to do is provide Vanna with a function that takes in a SQL query and returns a Pandas DataFrame. Here are some examples of how to do that.
-
-## **PostgreSQL**
-
-```python
-import pandas as pd
-import psycopg2
-
-conn_details = {...} # fill this with your connection details
-conn_postgres = psycopg2.connect(**conn_details)
-
-def run_sql_postgres(sql: str) -> pd.DataFrame:
- df = pd.read_sql_query(sql, conn_postgres)
- return df
-
-vn.run_sql = run_sql_postgres
-```
-
-## **Snowflake**
-
-We have a built-in function for Snowflake, so you don't need to write your own.
-
-```python
-vn.connect_to_snowflake(account='my-account', username='my-username', password='my-password', database='my-database')
-```
-
-If you prefer to manage the connection yourself, you can instead supply your own `run_sql` function using the Snowflake connector:
-
-```python
-import pandas as pd
-from snowflake.connector.pandas_tools import pd_read_sql
-from snowflake.connector import connect
-
-conn_details = {...} # fill this with your connection details
-conn_snowflake = connect(**conn_details)
-
-def run_sql_snowflake(sql: str) -> pd.DataFrame:
- df = pd_read_sql(sql, conn_snowflake)
- return df
-
-vn.run_sql = run_sql_snowflake
-```
-
-## **Google BigQuery**
-
-```python
-from google.cloud import bigquery
-import pandas as pd
-
-project_id = 'your-project-id' # replace with your Project ID
-client_bigquery = bigquery.Client(project=project_id)
-
-def run_sql_bigquery(sql: str) -> pd.DataFrame:
- df = client_bigquery.query(sql).to_dataframe()
- return df
-
-vn.run_sql = run_sql_bigquery
-```
-
-## **Amazon Athena**
-
-```python
-import pandas as pd
-from pyathena import connect
-
-conn_details = {...} # fill this with your connection details
-conn_athena = connect(**conn_details)
-
-def run_sql_athena(sql: str) -> pd.DataFrame:
- df = pd.read_sql(sql, conn_athena)
- return df
-
-vn.run_sql = run_sql_athena
-```
-
-## **Amazon Redshift**
-
-```python
-import pandas as pd
-import psycopg2
-
-conn_details = {...} # fill this with your connection details
-conn_redshift = psycopg2.connect(**conn_details)
-
-def run_sql_redshift(sql: str) -> pd.DataFrame:
- df = pd.read_sql_query(sql, conn_redshift)
- return df
-
-vn.run_sql = run_sql_redshift
-```
-
-## **Google Cloud SQL (MySQL)**
-
-```python
-import pandas as pd
-import mysql.connector
-
-conn_details = {...} # fill this with your connection details
-conn_google_cloud_sql = mysql.connector.connect(**conn_details)
-
-def run_sql_google_cloud_sql(sql: str) -> pd.DataFrame:
- df = pd.read_sql(sql, conn_google_cloud_sql)
-    return df
-
-vn.run_sql = run_sql_google_cloud_sql
-```
-
-Note: Google Cloud SQL supports MySQL, PostgreSQL, and SQL Server. The above example uses MySQL. If you are using PostgreSQL or SQL Server, you should use the appropriate connector.
-
-## **SQLite**
-
-```python
-import sqlite3
-import pandas as pd
-
-db_path = 'path_to_your_db' # replace with your SQLite DB path
-conn_sqlite = sqlite3.connect(db_path)
-
-def run_sql_sqlite(sql: str) -> pd.DataFrame:
- df = pd.read_sql_query(sql, conn_sqlite)
- return df
-
-vn.run_sql = run_sql_sqlite
-```
-
-## **Microsoft SQL Server**
-
-```python
-import pandas as pd
-import pyodbc
-
-conn_details = {...} # fill this with your connection details
-conn_sql_server = pyodbc.connect(**conn_details)
-
-def run_sql_sql_server(sql: str) -> pd.DataFrame:
- df = pd.read_sql(sql, conn_sql_server)
- return df
-
-vn.run_sql = run_sql_sql_server
-```
diff --git a/docs/index.md b/docs/index.md
deleted file mode 100644
index 02d2fc63..00000000
--- a/docs/index.md
+++ /dev/null
@@ -1,239 +0,0 @@
-# Vanna.AI - Personalized AI SQL Agent
-
-**Let Vanna.AI write your nasty SQL for you**. Vanna is a Python-based AI SQL agent trained on your schema that writes complex SQL in seconds. `pip install vanna` to get started now.
-
-
-
-## An example
-
-A business user asks you **"who are the top 2 customers in each region?"**. Right in the middle of lunch. And they need it for a presentation this afternoon. 😡😡😡
-
-### The old way 😡 😫 💩
-Simple question to ask, not so fun to answer. You spend over an hour a) finding the tables, b) figuring out the joins, c) looking up the syntax for ranking, d) putting this into a CTE, e) filtering by rank, and f) choosing the correct metrics. Finally, you come up with this ugly mess -
-
-```sql
-with ranked_customers as (SELECT c.c_name as customer_name,
- r.r_name as region_name,
- row_number() OVER (PARTITION BY r.r_name
- ORDER BY sum(l.l_quantity * l.l_extendedprice) desc) as rank
- FROM snowflake_sample_data.tpch_sf1.customer c join snowflake_sample_data.tpch_sf1.orders o
- ON c.c_custkey = o.o_custkey join snowflake_sample_data.tpch_sf1.lineitem l
- ON o.o_orderkey = l.l_orderkey join snowflake_sample_data.tpch_sf1.nation n
- ON c.c_nationkey = n.n_nationkey join snowflake_sample_data.tpch_sf1.region r
- ON n.n_regionkey = r.r_regionkey
- GROUP BY customer_name, region_name)
-SELECT region_name,
- customer_name
-FROM ranked_customers
-WHERE rank <= 2;
-```
-
-And you had to skip your lunch. **HANGRY!**
-
-### The Vanna way 😍 🌟 🚀
-With Vanna, you train up a custom model on your data warehouse, and simply enter this in your Jupyter Notebook -
-
-```python
-import vanna as vn
-vn.set_model('your-model')
-vn.ask('who are the top 2 customers in each region?')
-```
-
-Vanna generates that nasty SQL above for you, runs it (locally & securely), and gives you back a DataFrame in seconds:
-
-| region_name | customer_name | total_sales |
-| ----------- | ------------- | ----------- |
-| ASIA | Customer#000000001 | 68127.72 |
-| ASIA | Customer#000000002 | 65898.69 |
-...
-
-And you ate your lunch in peace. **YUMMY!**
-
-## How Vanna works
-Vanna works in two easy steps - train a model on your data, and then ask questions.
-
-1. **Train a model on your data**.
-2. **Ask questions**.
-
-When you ask a question, we utilize a custom model for your dataset to generate SQL, as seen below. Your model's performance and accuracy depend on the quality and quantity of the training data you use to train it.
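-
-Concretely, a minimal sketch of the two steps (using the same calls covered in the sections below; the table DDL is just a placeholder) looks like this:
-
-```python
-import vanna as vn
-
-vn.set_model('your-model')
-
-# Step 1: train the model on your data (DDL, documentation, or example SQL)
-vn.train(ddl="CREATE TABLE customers (id INT, name VARCHAR(100), region VARCHAR(50))")  # placeholder table
-
-# Step 2: ask questions in plain English
-vn.ask('who are the top 2 customers in each region?')
-```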
-
-
-
-
-## Why Vanna?
-
-1. **High accuracy on complex datasets.**
- - Vanna’s capabilities are tied to the training data you give it
- - More training data means better accuracy for large and complex datasets
-2. **Secure and private.**
- - Your database contents are never sent to Vanna’s servers
- - We only see the bare minimum - schemas & queries.
-3. **Isolated, custom model.**
- - You train a custom model specific to your database and your schema.
- - Nobody else can use your model or view your model’s training data unless you choose to add members to your model or make it public
- - We use a combination of third-party foundational models (OpenAI, Google) and our own LLM.
-4. **Self learning.**
- - As you use Vanna more, your model continuously improves as we augment your training data
-5. **Supports many databases.**
-   - We have out-of-the-box support for Snowflake, BigQuery, and Postgres
- - You can easily make a connector for any [database](https://docs.vanna.ai/databases/)
-6. **Pretrained models.**
- - If you’re a data provider you can publish your models for anyone to use
- - As part of our roadmap, we are in the process of pre-training models for common datasets (Google Ads, Facebook ads, etc)
-7. **Choose your front end.**
- - Start in a Jupyter Notebook.
- - Expose to business users via Slackbot, web app, Streamlit app, or Excel plugin.
- - Even integrate in your web app for customers.
-
-## Getting started
-You can start by [automatically training Vanna (currently works for Snowflake)](https://docs.vanna.ai/notebooks/vn-train/) or by adding manual training data.
-
-### Train with DDL Statements
-If you prefer to train manually, you do not need to connect to a database. You can use the train function with other parameters like `ddl`:
-
-
-```python
-vn.train(ddl="""
- CREATE TABLE IF NOT EXISTS my-table (
- id INT PRIMARY KEY,
- name VARCHAR(100),
- age INT
- )
-""")
-```
-
-### Train with Documentation
-Sometimes you may want to add documentation about your business terminology or definitions.
-
-```python
-vn.train(documentation="Our business defines OTIF score as the percentage of orders that are delivered on time and in full")
-```
-
-### Train with SQL
-You can also add SQL queries to your training data. This is useful if you have some queries already lying around. You can just copy and paste those from your editor to begin generating new SQL.
-
-```python
-vn.train(sql="SELECT * FROM my-table WHERE name = 'John Doe'")
-```
-
-
-
-## Asking questions
-```python
-vn.ask("What are the top 10 customers by sales?")
-```
-
- SELECT c.c_name as customer_name,
- sum(l.l_extendedprice * (1 - l.l_discount)) as total_sales
- FROM snowflake_sample_data.tpch_sf1.lineitem l join snowflake_sample_data.tpch_sf1.orders o
- ON l.l_orderkey = o.o_orderkey join snowflake_sample_data.tpch_sf1.customer c
- ON o.o_custkey = c.c_custkey
- GROUP BY customer_name
- ORDER BY total_sales desc limit 10;
-
-| CUSTOMER_NAME | TOTAL_SALES |
-| ------------- | ----------- |
-| Customer#000143500 | 6757566.0218 |
-| Customer#000095257 | 6294115.3340 |
-| Customer#000087115 | 6184649.5176 |
-| Customer#000131113 | 6080943.8305 |
-| Customer#000134380 | 6075141.9635 |
-| Customer#000103834 | 6059770.3232 |
-| Customer#000069682 | 6057779.0348 |
-| Customer#000102022 | 6039653.6335 |
-| Customer#000098587 | 6027021.5855 |
-| Customer#000064660 | 5905659.6159 |
-
-![png](notebooks/vn-ask_files/vn-ask_10_2.png)
-
-
-
-
-AI-generated follow-up questions:
-
-* What is the country name for each of the top 10 customers by sales?
-* How many orders does each of the top 10 customers by sales have?
-* What is the total revenue for each of the top 10 customers by sales?
-* What are the customer names and total sales for customers in the United States?
-* Which customers in Africa have returned the most parts with a gross value?
-* What are the total sales for the top 3 customers?
-* What are the customer names and total sales for the top 5 customers?
-* What are the total sales for customers in Europe?
-* How many customers are there in each country?
-
-## More resources
- - [Full Documentation](https://docs.vanna.ai)
- - [Website](https://vanna.ai)
- - [Slack channel for support](https://join.slack.com/t/vanna-ai/shared_invite/zt-1unu0ipog-iE33QCoimQiBDxf2o7h97w)
- - [LinkedIn](https://www.linkedin.com/company/vanna-ai/)
diff --git a/docs/intro-to-vanna.md b/docs/intro-to-vanna.md
deleted file mode 100644
index d935082e..00000000
--- a/docs/intro-to-vanna.md
+++ /dev/null
@@ -1,64 +0,0 @@
-# Intro to Vanna: A Python-based AI SQL co-pilot
-
-**TLDR**: We help data people that know Python write SQL faster using AI. [See our starter notebook here](notebooks/vn-ask.md).
-
-## The deluge of data
-
-We are bathing in an ocean of data, sitting in Snowflake or BigQuery, that is brimming with potential insights. Yet only a small fraction of people in an enterprise have the two skills required to harness the data —
-
-1. A solid comprehension of advanced SQL, and
-2. A comprehensive knowledge of the data structure & schema
-
-## The burden of being data-savvy
-
-Since you are reading this, chances are you are one of those fortunate few (data analysts, data scientists, data engineers, etc) with those abilities. It’s an invaluable skill, but you also get hit with tons of requests requiring you to write complex SQL queries. Annoying!
-
-## Introducing Vanna, the SQL co-pilot
-
-Vanna, at its core, is a co-pilot for Python- and SQL-savvy data people that streamlines the process of writing custom SQL on your company’s data warehouse using AI and LLMs. Most of our users use our Python package directly via Jupyter Notebooks ([starter notebook here](notebooks/vn-ask.md)) —
-
-```python
-sql = vn.generate_sql(question='What are the top 10 customers by Sales?')
-print(sql)
-```
-
-And here are the results —
-
-```sql
-SELECT customer_name,
- total_sales
-FROM (SELECT c.c_name as customer_name,
- sum(l.l_extendedprice * (1 - l.l_discount)) as total_sales,
- row_number() OVER (ORDER BY sum(l.l_extendedprice * (1 - l.l_discount)) desc) as rank
- FROM snowflake_sample_data.tpch_sf1.lineitem l join snowflake_sample_data.tpch_sf1.orders o
- ON l.l_orderkey = o.o_orderkey join snowflake_sample_data.tpch_sf1.customer c
- ON o.o_custkey = c.c_custkey
- GROUP BY customer_name)
-WHERE rank <= 10;
-```
-
-## Getting started with Vanna in a Notebook
-
-Vanna is super easy to get started with —
-
-1. **Grab an API key** directly through the notebook
-2. **Train a custom model** on some past queries from your data warehouse
-3. **Ask questions in plain English** and get back SQL that you can run in your workflow
-
-Check out the full starter notebook here.
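-
-As a rough sketch, those three steps look something like this (the API-key helpers shown here are illustrative; `vn.set_model`, `vn.train`, and `vn.generate_sql` are the same calls used elsewhere in these docs):
-
-```python
-import vanna as vn
-
-# 1. Grab an API key (helper names are illustrative; the starter notebook walks you through this)
-api_key = vn.get_api_key('my-email@example.com')
-vn.set_api_key(api_key)
-
-# 2. Train a custom model on past queries from your data warehouse
-vn.set_model('my-model')
-vn.train(sql="SELECT customer_name, total_sales FROM sales ORDER BY total_sales DESC LIMIT 10")  # placeholder query
-
-# 3. Ask questions in plain English and get back SQL you can run in your workflow
-sql = vn.generate_sql(question='What are the top 10 customers by Sales?')
-print(sql)
-```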
-
-Vanna is built with a privacy-first and security-first design — **your data never leaves your environment**.
-
-## Using Vanna with a Streamlit front end
-
-[Streamlit](https://streamlit.io/) is an open-source, pure-Python front end. We have built a UI for Vanna on top of Streamlit that you can either use directly (e.g. our hosted version) or clone, download, optionally modify, and self-host.
-
-If you choose to self-host it, you can run Vanna with a UI without any data leaving your environment.
-
-![Image](https://miro.medium.com/v2/resize:fit:640/format:webp/1*PmScp647UWIaxUatib_4SQ.png)
-
-[Check out the Streamlit UI here](https://github.com/vanna-ai/vanna-streamlit).
-
-## Conclusion
-
-Vanna is a powerful tool that helps data people who know Python write SQL faster using AI. It's easy to get started with, and you can even use it with a Streamlit front end for a more interactive experience. Best of all, it's built with a privacy-first and security-first design, so your data never leaves your environment. Give it a try and see how it can streamline your SQL-writing process.
\ No newline at end of file
diff --git a/docs/onboarding.md b/docs/onboarding.md
deleted file mode 100644
index 8f15aa78..00000000
--- a/docs/onboarding.md
+++ /dev/null
@@ -1,30 +0,0 @@
-## What do I need to do to use **Vanna.AI**?
-Vanna.AI uses a combination of documentation and historical question and SQL pairs to generate SQL from natural language.
-
-### Step 1: Train **Vanna.AI**
-- Give **Vanna.AI** sample SQL
-- **Vanna.AI** will try to guess the question
-- Verify the question is correct
-```mermaid
-flowchart LR
- Generate[vn.generate_question]
- Question[Question]
- Verify{Is the question correct?}
- SQL --> Generate
- Generate --> Question
- Question --> Verify
- Verify -- Yes --> Store[vn.store_sql]
- Verify -- No --> Update[Update the Question]
- Update --> Store
-
-```
-
-### Step 2: Ask **Vanna.AI** a Question
-```mermaid
-flowchart LR
- Question[Question]
- Generate[vn.generate_sql]
- SQL[SQL]
- Question --> Generate
- Generate --> SQL
-```
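-
-A rough sketch of these two steps in Python (the function names come from the diagrams above; the argument names are illustrative):
-
-```python
-import vanna as vn
-
-# Step 1: give Vanna sample SQL, verify the guessed question, then store the pair
-sql = "SELECT customer_name, total_sales FROM sales ORDER BY total_sales DESC LIMIT 10"  # placeholder query
-question = vn.generate_question(sql=sql)  # Vanna.AI guesses the question
-print(question)                           # verify (and correct) the question before storing it
-vn.store_sql(question=question, sql=sql)
-
-# Step 2: ask a question in natural language and get SQL back
-print(vn.generate_sql(question='Who are our top 10 customers by sales?'))
-```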
diff --git a/docs/reference.md b/docs/reference.md
deleted file mode 100644
index fc4459af..00000000
--- a/docs/reference.md
+++ /dev/null
@@ -1,4 +0,0 @@
-# Vanna Package Full Reference
-::: vanna
- options:
- show_source: false
\ No newline at end of file
diff --git a/docs/sidebar.py b/docs/sidebar.py
deleted file mode 100644
index f6923a38..00000000
--- a/docs/sidebar.py
+++ /dev/null
@@ -1,130 +0,0 @@
-import yaml
-import sys
-import nbformat
-from nbconvert import HTMLExporter
-
-# Get the yaml file path from the command line
-file_path = sys.argv[1]
-
-# Get the directory to search for the .ipynb files from the command line
-notebook_dir = sys.argv[2]
-
-# Get the output directory from the command line
-output_dir = sys.argv[3]
-
-def generate_html(sidebar_data, current_path: str):
-    html = '\n'
-    for entry in sidebar_data:
-        html += '\n'
-        if 'sub_entries' in entry:
-            # Dropdown menu with sub-entries
-            html += f'\n'
-            html += f'\n'
-            for sub_entry in entry['sub_entries']:
-                html += f'\n'
-                highlighted = 'bg-indigo-100 dark:bg-indigo-700' if sub_entry['link'] == current_path else ''
-                html += f'{sub_entry["title"]}\n'
-                html += '