Skip to content

Commit

Permalink
mindmap iter 12
Browse files Browse the repository at this point in the history
  • Loading branch information
Sudhendra committed Oct 22, 2024
1 parent d2f6f7b commit e7e0c21
Show file tree
Hide file tree
Showing 2 changed files with 17 additions and 136 deletions.
83 changes: 13 additions & 70 deletions medapp.py
Original file line number Diff line number Diff line change
Expand Up @@ -282,90 +282,32 @@ def mindmap_tab_content(video_data):
with st.spinner("Generating Mindmap..."):
try:
# Generate Mindmap
mindmap_data = get_mindmap_data(
mindmap_html, mindmap_analysis = get_mindmap_data(
st.session_state.user_query,
st.session_state.relevant_passages,
st.session_state.answer,
video_data
)
st.session_state.mindmap_data = mindmap_data

# Create and display the network graph
network_html = create_network_graph(mindmap_data)
st.components.v1.html(network_html, height=600)
st.session_state.mindmap_html = mindmap_html
st.session_state.mindmap_analysis = mindmap_analysis

st.success("Mindmap generated successfully!")
logging.info("Mindmap generated successfully")
except Exception as e:
st.error(f"Error generating mindmap: {str(e)}")
logging.error(f"Error generating mindmap: {str(e)}", exc_info=True)

if st.session_state.get('mindmap_html'):
st.components.v1.html(st.session_state.mindmap_html, height=600)

st.subheader("Mindmap Analysis")
if st.session_state.mindmap_analysis:
st.markdown(st.session_state.mindmap_analysis)
else:
st.info("No mindmap analysis available yet.")
else:
st.info("Generate a mindmap by submitting a query in the Main tab and then clicking the 'Generate Mindmap' button above.")

def create_network_graph(mindmap_data):
    """Render mindmap node data as an interactive Pyvis network and return its HTML.

    Each entry of *mindmap_data* is a dict carrying ``id``, ``label`` and
    ``group`` keys; an optional ``parent`` id adds an edge from parent to node.
    """
    # Assemble the topology in networkx first, then mirror it into Pyvis.
    graph = nx.Graph()
    for entry in mindmap_data:
        graph.add_node(entry['id'], label=entry['label'], group=entry['group'])
        if 'parent' in entry:
            graph.add_edge(entry['parent'], entry['id'])

    # Dark-themed Pyvis canvas matching the Streamlit embed size.
    net = Network(height="600px", width="100%", bgcolor="#222222", font_color="white")

    # Copy nodes across; the label doubles as the hover tooltip (title).
    for node_id, attrs in graph.nodes(data=True):
        net.add_node(node_id, label=attrs['label'], title=attrs['label'], group=attrs['group'])

    # Copy edges across.
    for src, dst in graph.edges():
        net.add_edge(src, dst)

    # vis.js options: styling and force-layout physics only — no topology change.
    net.set_options("""
    var options = {
    "nodes": {
    "font": {
    "size": 20
    },
    "borderWidth": 2,
    "borderWidthSelected": 4,
    "color": {
    "border": "#ffffff",
    "background": "#666666"
    },
    "shape": "dot",
    "size": 30
    },
    "edges": {
    "color": {
    "color": "#ffffff",
    "highlight": "#3498db"
    },
    "width": 2,
    "smooth": {
    "type": "continuous"
    }
    },
    "physics": {
    "forceAtlas2Based": {
    "gravitationalConstant": -100,
    "centralGravity": 0.015,
    "springLength": 150,
    "springConstant": 0.08
    },
    "maxVelocity": 50,
    "solver": "forceAtlas2Based",
    "timestep": 0.35,
    "stabilization": {"iterations": 150}
    }
    }
    """)

    # Hand back the standalone HTML document for st.components embedding.
    return net.generate_html()

def disclosures_tab_content():
st.header("Disclosures")
with open("disclosures.txt", "r") as f:
Expand All @@ -381,3 +323,4 @@ def mermaid_to_html(mermaid_code):

if __name__ == "__main__":
main()

70 changes: 4 additions & 66 deletions mindmap.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ def extract_key_terms(relevant_passages):
context = " ".join([p['text'] for p in relevant_passages])
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
response = client.chat.completions.create(
model="gpt-4o-mini",
model="gpt-4",
messages=[
{
"role": "system",
Expand Down Expand Up @@ -184,68 +184,6 @@ def create_mindmap(mindmap_structure):
return content

def get_mindmap_data(query, relevant_passages, answer, all_data):
    """Build mindmap node data for *query* using OpenAI.

    Parameters:
        query: The user's question; becomes the mindmap's root node.
        relevant_passages: Retrieved passages whose key terms ground the
            generated nodes (see extract_key_terms).
        answer: Answer text supplied to the model as extra context.
        all_data: Unused here; kept for signature compatibility with callers.

    Returns:
        A list of node dicts with ``id``/``label``/``group`` keys and an
        optional ``parent`` key. On any model or parsing failure a minimal
        locally-built fallback mindmap is returned instead of raising.
    """
    client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

    # Ground the mindmap in key terms extracted from the retrieved passages.
    key_terms = extract_key_terms(relevant_passages)
    key_terms_text = "\n".join(f"- {term}" for term in key_terms)

    # Generate mindmap content using OpenAI.
    try:
        response = client.chat.completions.create(
            model="gpt-4o-mini",
            messages=[
                {
                    "role": "system",
                    "content": (
                        "Create a detailed and accurate mindmap structure based on the given query and key terms. "
                        "The output should be a JSON array of objects, where each object represents a node in the mindmap. "
                        "Each node should have the following properties: "
                        "id (integer), label (string), group (integer), and optionally parent (integer). "
                        "The main topic (query) should have id 1 and no parent. "
                        "Subtopics should have increasing ids and reference their parent's id. "
                        "Use group 1 for the main topic, group 2 for primary subtopics, and group 3 for secondary subtopics. "
                        "Ensure the content is grounded in the provided key terms and aligns with the USMLE Step 1 syllabus."
                    )
                },
                {
                    "role": "user",
                    "content": (
                        f"Query: {query}\n\nKey Terms:\n{key_terms_text}\n\n"
                        f"Answer: {answer}\n\n"
                        "Create a detailed mindmap structure in the specified JSON format, ensuring it is comprehensive, informative, and grounded in the key terms:"
                    )
                }
            ],
            max_tokens=1000,
            temperature=0.2,
        )

        # BUG FIX: chat models frequently wrap JSON in ```json fences; the old
        # code fed the fenced text straight to json.loads, which raised
        # JSONDecodeError and dropped a perfectly valid mindmap to the fallback.
        raw = _strip_code_fences(response.choices[0].message.content)
        mindmap_data = json.loads(raw)

        # Validate the structure of mindmap_data before handing it on.
        if not isinstance(mindmap_data, list):
            raise ValueError("Mindmap data is not a list")

        for node in mindmap_data:
            if not all(key in node for key in ['id', 'label', 'group']):
                raise ValueError("Mindmap node is missing required keys")

    except json.JSONDecodeError:
        # Unparseable model output — degrade gracefully to a local mindmap.
        print("Error: Invalid JSON response from OpenAI")
        mindmap_data = generate_fallback_mindmap(query, key_terms)
    except Exception as e:
        # Any other failure (API error, validation error) also falls back.
        print(f"Error generating mindmap: {str(e)}")
        mindmap_data = generate_fallback_mindmap(query, key_terms)

    return mindmap_data


def _strip_code_fences(text):
    """Return *text* with any surrounding Markdown code fences removed.

    Handles both a bare ``` fence and a language-tagged ```json fence by
    discarding the entire opening fence line and a trailing ``` marker.
    """
    text = text.strip()
    if text.startswith("```"):
        # Drop the whole opening fence line (covers ``` and ```json alike).
        text = text.split("\n", 1)[1] if "\n" in text else text[3:]
    if text.endswith("```"):
        text = text[:-3]
    return text.strip()

def generate_fallback_mindmap(query, key_terms):
    """Build a minimal mindmap when the OpenAI response is unusable.

    The query becomes the single root node (id 1, group 1) and at most the
    first five key terms are attached as its direct children (group 2).
    """
    nodes = [{"id": 1, "label": query, "group": 1}]
    nodes.extend(
        {"id": node_id, "label": term, "group": 2, "parent": 1}
        for node_id, term in enumerate(key_terms[:5], start=2)
    )
    return nodes
mindmap_structure, analysis = generate_mindmap(query, relevant_passages, answer, all_data)
mindmap_html = create_mindmap(mindmap_structure)
return mindmap_html, analysis

0 comments on commit e7e0c21

Please sign in to comment.