Skip to content

Commit

Permalink
deleted revision files
Browse files Browse the repository at this point in the history
  • Loading branch information
geemi725 committed Jan 20, 2025
1 parent 7e0ebd1 commit faab002
Show file tree
Hide file tree
Showing 232 changed files with 68 additions and 30,504 deletions.
17 changes: 14 additions & 3 deletions app.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import os
import streamlit as st
import sys

__import__("pysqlite3")
sys.modules["sqlite3"] = sys.modules.pop("pysqlite3")
load_dotenv()
Expand All @@ -23,11 +24,13 @@
unsafe_allow_html=True,
)


def on_api_key_change():
    """Propagate the user-supplied OpenAI API key to the environment and SDK.

    Prefers the key typed into the Streamlit widget (session state ``ss``),
    falling back to the ``OPENAI_API_KEY`` environment variable.
    """
    api_key = ss.get("api_key") or os.getenv("OPENAI_API_KEY")
    # Bug fix: both sources may be unset, leaving api_key as None.
    # os.environ values must be str, so assigning None raises TypeError.
    # Only propagate when a key is actually available.
    if api_key:
        os.environ["OPENAI_API_KEY"] = api_key
        openai.api_key = api_key


def save_uploadfile(uploadedfile):
dirpath = os.path.join("data", "lit_dir")
if os.path.exists(dirpath):
Expand All @@ -36,6 +39,7 @@ def save_uploadfile(uploadedfile):
with open(os.path.join(dirpath, uploadedfile.name), "wb") as f:
f.write(uploadedfile.getbuffer())


st.write(
"## Xpert AI: Extract human interpretable structure-property relationships from raw data"
)
Expand All @@ -44,11 +48,13 @@ def save_uploadfile(uploadedfile):
Currently, GPT-4o model is used to generate natural language explanations."""
)


def run_autofill():
    # Demo autofill: seed the form's session-state keys with a canned
    # toxicity example so the app can be tried without user-provided data.
    st.session_state.auto_target = "toxicity of small molecules"
    st.session_state.auto_df = "tests/toxicity_sample_data.csv"
    # Rerun the script so the widgets pick up the new session-state values.
    # NOTE(review): st.experimental_rerun() is deprecated/removed in recent
    # Streamlit releases in favor of st.rerun() — confirm the pinned version.
    st.experimental_rerun()


auto_target = st.session_state.get("auto_target", None)
auto_arxiv = st.session_state.get("auto_arxiv", None)

Expand Down Expand Up @@ -93,7 +99,8 @@ def run_autofill():
"### Provide literature to generate scientific explanations! \nIf you don't provide literature, you will receive an explanation based on XAI tools."
)
lit_files = st.file_uploader(
"Upload your literature here. Files must be in `pdf` format (Suggested):", accept_multiple_files=True
"Upload your literature here. Files must be in `pdf` format (Suggested):",
accept_multiple_files=True,
)
arxiv_keywords = st.text_input(
"If you want to scrape arxiv, provide keywords for arxiv scraping:",
Expand Down Expand Up @@ -190,7 +197,11 @@ def run_autofill():

# scrape arxiv.org
if arxiv_keywords:
arg_dict_arxiv = {"key_words": arxiv_keywords, "max_papers": max_papers,"lit_files":lit_files_given}
arg_dict_arxiv = {
"key_words": arxiv_keywords,
"max_papers": max_papers,
"lit_files": lit_files_given,
}

scrape_arxiv(arg_dict_arxiv)

Expand Down Expand Up @@ -228,4 +239,4 @@ def run_autofill():
with open("./data/figs.zip", "rb") as f:
st.download_button(
"Download the outputs!", f, file_name="XpertAI_output.zip"
)
)
321 changes: 0 additions & 321 deletions cross_reference_analysis.ipynb

This file was deleted.

3,735 changes: 0 additions & 3,735 deletions data/datasets/OMS.csv

This file was deleted.

3,735 changes: 0 additions & 3,735 deletions data/datasets/PLD.csv

This file was deleted.

Loading

0 comments on commit faab002

Please sign in to comment.