import streamlit as st
from transformers import pipeline
import os
import glob
import re
import pytz
from datetime import datetime
import base64

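# --- Streamlit page setup ---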
st.set_page_config(
    page_title="AI Knowledge Tree Builder 🌳🌿",
    page_icon="🌳✨",
    layout="wide",
    initial_sidebar_state="auto",
)

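# Knowledge-tree outlines. Each tree is a plain-text outline; nesting is
# expressed with 4 spaces of indentation per level, which is what
# parse_outline_to_mermaid() uses to infer parent/child edges.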
BiologyAndLevel36MagicUsers = """
0. Biology Core Rules and Future Exceptions
    1. Central Dogma DNA RNA Protein
        - Current CRISPR RNA editing 🧪
        - Research Gene therapy siRNA 🔬
        - Future Programmable genetics 🚀
    2. Cell Origin
        - Current iPSCs organoids 🦠
        - Research Synthetic cells 🔬
        - Future De novo cell creation 🚀
"""

AITopicsToInnovate1 = """
1. Major AI Industry Players 🚀
    1. Research Leaders 🎯
        - OpenAI: GPT-4 DALL-E Foundation Models 🔵
        - Google: PaLM Gemini LLMs 🦜
        - Anthropic: Claude Constitutional AI ⚡
"""

MultiplayerGames = """
0. Fantasy Domain Introduction
    1. Setting the Scene
        - Current Create a high-fantasy realm 🏛️
        - Research Add domain-specific entities 🧙‍♂️
        - Future AI-generated worldbuilding 🌍
"""

RootNode = """
0. Research Hub 🔍
    1. Awacke1 Profile
        - Link: [Hugging Face Profile](https://huggingface.co/awacke1) 🔗
    2. TeachingCV App
        - Link: [TeachingCV](https://huggingface.co/spaces/awacke1/TeachingCV) 🖥️
    3. DeepResearchEvaluator App
        - Link: [DeepResearchEvaluator](https://huggingface.co/spaces/awacke1/DeepResearchEvaluator) 🔍
"""

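# --- Filename helpers: sanitized, US/Central-timestamped markdown filenames ---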
def sanitize_filename(text):
    safe_text = re.sub(r'[^\w\s-]', ' ', text)
    safe_text = re.sub(r'\s+', ' ', safe_text)
    return safe_text.strip()[:50]


def generate_timestamp_filename(query):
    central = pytz.timezone('US/Central')
    current_time = datetime.now(central)
    time_str = current_time.strftime("%I%M%p")
    date_str = current_time.strftime("%m%d%Y")
    safe_query = sanitize_filename(query)
    return f"{time_str} {date_str} ({safe_query}).md"

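# Convert an indented outline into Mermaid "graph TD" syntax. A stack of
# (level, node_id) pairs tracks the current ancestry; each line's level is
# its leading indentation divided by 4.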
def parse_outline_to_mermaid(outline_text):
    lines = outline_text.strip().split('\n')
    nodes = []
    edges = []
    stack = []
    for line in lines:
        indent = len(line) - len(line.lstrip())
        level = indent // 4
        text = line.strip()
        label = re.sub(r'^[#*\->\d\.\s]+', '', text).strip()
        if label:
            node_id = f"N{len(nodes)}"
            nodes.append(f'{node_id}["{label}"]')
            if stack:
                parent_level = stack[-1][0]
                if level > parent_level:
                    parent_id = stack[-1][1]
                    edges.append(f"{parent_id} --> {node_id}")
                    stack.append((level, node_id))
                else:
                    while stack and stack[-1][0] >= level:
                        stack.pop()
                    if stack:
                        parent_id = stack[-1][1]
                        edges.append(f"{parent_id} --> {node_id}")
                    stack.append((level, node_id))
            else:
                stack.append((level, node_id))
    return "graph TD\n" + "\n".join(nodes + edges)

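# Insert a new child bullet directly under the first line that mentions
# parent_node, indented one level (4 spaces) deeper than its parent.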
def grow_tree(base_tree, new_node_name, parent_node):
    lines = base_tree.strip().split('\n')
    new_lines = []
    added = False
    for line in lines:
        new_lines.append(line)
        if parent_node in line and not added:
            indent = len(line) - len(line.lstrip())
            new_lines.append(f"{' ' * (indent + 4)}- {new_node_name} 🌱")
            added = True
    return "\n".join(new_lines)

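# Merge two outlines: keep tree1 as-is and append lines from tree2 that are
# neither the shared intersect_node nor already present in tree1.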
def breed_trees(tree1, tree2, intersect_node):
    lines1 = tree1.strip().split('\n')
    lines2 = tree2.strip().split('\n')
    new_lines = lines1.copy()
    for line in lines2:
        if intersect_node not in line and not any(line.strip() in l for l in lines1):
            new_lines.append(line)
    return "\n".join(new_lines)

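# Static Mermaid diagram of a typical ML model-building pipeline.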
def generate_model_pipeline():
    return """
graph TD
    A[Load Data 📂] --> B[Preprocess Data 🛠️]
    B --> C[Train Model 🤖]
    C --> D[Evaluate Model 📊]
    D --> E[Deploy Model 🚀]
"""

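# Cache the text-generation pipeline so the distilgpt2 model is loaded only
# once per session.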
@st.cache_resource
def load_generator():
    return pipeline("text-generation", model="distilgpt2")

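# Streamlit has no built-in st.mermaid renderer, so the Mermaid calls below go
# through this small helper instead. It is a minimal sketch that embeds the
# diagram with streamlit.components.v1.html and loads mermaid.js from the
# jsdelivr CDN (assumes the browser has internet access); the helper name,
# default height, and CDN URL are choices made here, not part of the original
# app.
def render_mermaid(code, height=400):
    # Local import keeps the helper self-contained.
    import streamlit.components.v1 as components
    components.html(
        f"""
        <pre class="mermaid">{code}</pre>
        <script type="module">
            import mermaid from 'https://cdn.jsdelivr.net/npm/mermaid@10/dist/mermaid.esm.min.mjs';
            mermaid.initialize({{ startOnLoad: true }});
        </script>
        """,
        height=height,
    )
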
if 'selected_file' not in st.session_state:
    st.session_state.selected_file = None

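# --- Sidebar: browse saved notes or create a new timestamped note ---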
st.sidebar.title("📝 Saved Interactions")
md_files = glob.glob("*.md")
for file in md_files:
    if st.sidebar.button(file):
        st.session_state.selected_file = file

if st.sidebar.button("Create New Note"):
    filename = generate_timestamp_filename("New Note")
    with open(filename, 'w') as f:
        f.write("# New Note\n")
    st.sidebar.success(f"Created {filename}")
    st.session_state.selected_file = filename

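# --- Main area: show a selected note, or the tree-building UI ---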
st.title("🌳 AI Knowledge Tree Builder 🌱")
st.markdown("Grow and visualize knowledge trees, build ML pipelines, and explore research!")

if st.session_state.selected_file:
    with open(st.session_state.selected_file, 'r') as f:
        content = f.read()
    st.markdown(content)
else:
    trees = {
        "Research Hub": RootNode,
        "Biology": BiologyAndLevel36MagicUsers,
        "AI Topics": AITopicsToInnovate1,
        "Multiplayer Games": MultiplayerGames
    }
    selected_tree = st.selectbox("Select Knowledge Tree", list(trees.keys()))
    current_tree = trees[selected_tree]

    new_node = st.text_input("Add New Node (e.g., 'ML Pipeline')")
    parent_node = st.text_input("Parent Node to Attach To (e.g., 'Research Leaders')")
    if st.button("Grow Tree 🌱") and new_node and parent_node:
        current_tree = grow_tree(current_tree, new_node, parent_node)
        trees[selected_tree] = current_tree
        st.success(f"Added '{new_node}' under '{parent_node}'!")

    breed_with = st.selectbox("Breed With Another Tree", [t for t in trees.keys() if t != selected_tree])
    intersect_node = st.text_input("Common Node for Breeding (e.g., 'Research')")
    if st.button("Breed Trees 🌳"):
        new_tree = breed_trees(current_tree, trees[breed_with], intersect_node)
        trees[f"{selected_tree} + {breed_with}"] = new_tree
        st.success(f"Created new tree: {selected_tree} + {breed_with}")

    mermaid_code = parse_outline_to_mermaid(current_tree)
    st.markdown("### Knowledge Tree Visualization")
    render_mermaid(mermaid_code)

st.markdown("### ML Model Building Pipeline") |
|
st.mermaid(generate_model_pipeline()) |
|
|
|
|
|
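    # AI lookup: run a short distilgpt2 completion for the query and save the
    # exchange as a timestamped markdown note.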
    query = st.text_input("Enter Query for AI Lookup")
    if st.button("Perform AI Lookup 🤖") and query:
        generator = load_generator()
        response = generator(query, max_length=50)[0]['generated_text']
        st.write(f"**AI Response:** {response}")
        filename = generate_timestamp_filename(query)
        with open(filename, 'w') as f:
            f.write(f"# Query: {query}\n\n## AI Response\n{response}")
        st.success(f"Saved to {filename}")

if __name__ == "__main__":
    st.sidebar.markdown("Explore, grow, and innovate!")