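# Gradio app: turn an uploaded research-paper PDF into a markdown mindmap and a
# rendered SVG mindmap using a locally loaded LLM (built on llama-cpp-python).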
import os
import sys
from generate_markdown import load_llm_model, generate_markdown
from generate_mindmap import generate_mindmap_svg
import gradio as gr
import subprocess

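# Gradio callback: for the uploaded PDF, return the summary text, the markdown
# mindmap, and the mindmap rendered as an SVG image.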
def generate(file):
    print(f"Generating mindmap for {file}")
    summary = "This is a summary of the research paper"   # placeholder summary text
    mindmap_markdown = generate_markdown(llm, file)        # LLM-generated markdown outline
    mindmap_svg = generate_mindmap_svg(mindmap_markdown)   # render the outline as an SVG mindmap
    print("Mindmap generated successfully")
    return summary, mindmap_markdown, mindmap_svg

theme = gr.themes.Soft(
    primary_hue="purple",
    secondary_hue="cyan",
    neutral_hue="slate",
    font=[gr.themes.GoogleFont('Syne'), gr.themes.GoogleFont('Poppins')],
)

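# UI layout: PDF upload, read-only summary and markdown mindmap boxes, the
# rendered SVG mindmap, and a submit button.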
with gr.Blocks(theme=theme, title="Binary Biology") as app:
    file = gr.File(file_count='single', label='Upload Research Paper PDF file', file_types=['.pdf'])
    summary = gr.TextArea(label='Summary', lines=5, interactive=False, show_copy_button=True)
    markdown_mindmap = gr.Textbox(label='Mindmap', lines=5, interactive=False, show_copy_button=True)
    graphical_mindmap = gr.Image(label='Graphical Mindmap', interactive=False, show_download_button=True, format='svg')
    submit = gr.Button(value='Submit')

    submit.click(
        generate,
        inputs=[file],
        outputs=[summary, markdown_mindmap, graphical_mindmap],
        scroll_to_output=True,
        show_progress=True,
        queue=True,
    )

if __name__ == "__main__":
    # Build llama-cpp-python with OpenBLAS acceleration; fall back to a plain install if the build fails.
    # check=True makes subprocess.run raise on a non-zero exit so the fallbacks actually trigger.
    try:
        env = os.environ.copy()
        env["CMAKE_ARGS"] = "-DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS"
        subprocess.run(["pip", "install", "llama-cpp-python"], env=env, check=True)
    except (subprocess.CalledProcessError, OSError):
        subprocess.run(["pip", "install", "llama-cpp-python"], check=True)
    # Graphviz is needed to render the SVG mindmap; try apt, then sudo apt, then give up.
    try:
        subprocess.run(["apt", "install", "-y", "graphviz"], check=True)
        print("Graphviz installed successfully")
    except (subprocess.CalledProcessError, OSError):
        try:
            subprocess.run(["sudo", "apt", "install", "-y", "graphviz"], check=True)
            print("Graphviz installed successfully using sudo")
        except (subprocess.CalledProcessError, OSError):
            print("Graphviz installation failed")
            sys.exit(1)
    print("Graphviz loaded successfully")
    llm = load_llm_model()  # module-level model used by generate()
    print("Model loaded successfully")
    app.queue(default_concurrency_limit=1).launch(show_error=True)