import os
import sys

# Make the words2wisdom package importable when this script is run directly
# (adds the parent of this file's directory to sys.path).
sys.path.append(os.path.dirname(os.path.dirname(__file__)))

import gradio as gr

from words2wisdom import CONFIG_DIR, ROOT
from words2wisdom.config import Config
from words2wisdom.pipeline import Pipeline
from words2wisdom.utils import dump_all

# Example inputs surfaced in the "Examples" accordion of the UI below.
example_file = os.path.join(ROOT, "demo", "example.txt")
example_text = "The quick brown fox jumps over the lazy dog. The cat sits on the mat."


def w2w_from_string(openai_api_key: str, input_text: str):
    """Run the Words2Wisdom pipeline on raw text; return the knowledge graph and a ZIP path."""
    config = Config.read_ini(os.path.join(CONFIG_DIR, "default_config.ini"))
    config.llm["openai_api_key"] = openai_api_key
    pipeline = Pipeline(config)
    text_batches, knowledge_graph = pipeline.run(input_text)
    zip_path = dump_all(config, text_batches, knowledge_graph)
    return knowledge_graph, zip_path


def w2w_from_file(openai_api_key: str, input_file):
    """Run the Words2Wisdom pipeline on an uploaded text file."""
    with open(input_file.name) as f:
        input_text = f.read()
    return w2w_from_string(openai_api_key, input_text)
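

# Illustrative only: the helpers above can also be called outside the UI, e.g.
# from a notebook. Assuming an OpenAI key is exported in an OPENAI_API_KEY
# environment variable (not required by this script itself):
#
#     graph, zip_path = w2w_from_string(os.environ["OPENAI_API_KEY"], example_text)
#     print(zip_path)  # ZIP with the knowledge graph, text batches, and config used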


with gr.Blocks(title="Words2Wisdom") as demo:
    gr.Markdown("# Words2Wisdom")

    with gr.Column(variant="panel"):
        openai_api_key = gr.Textbox(label="OpenAI API Key", placeholder="sk-...", type="password")

    with gr.Row(equal_height=False):
        # Left panel: direct text input or text-file upload.
        with gr.Column(variant="panel"):
            gr.Markdown("## Input (Text or Text File)")
            #gr.Markdown("A knowledge graph will be generated for the provided text.")
            with gr.Tab("Direct Input"):
                text_string = gr.Textbox(lines=2, placeholder="Text Here...", label="Text")
                submit_str = gr.Button()
            with gr.Tab("File Upload"):
                text_file = gr.File(file_types=["text"], label="Text File")
                submit_file = gr.Button()

        # Right panel: downloadable ZIP plus a tabular preview of the extracted triples.
        with gr.Column(variant="panel"):
            gr.Markdown("## Output (ZIP Archive)")
            #gr.Markdown("The ZIP contains the generated knowledge graph, the text batches (indexed), and a configuration file.")
            output_zip = gr.File(label="ZIP")
            with gr.Accordion(label="Preview of Knowledge Graph", open=False):
                output_graph = gr.DataFrame(headers=["batch_id", "subject", "relation", "object"], label="Knowledge Graph")

    with gr.Accordion(label="Examples", open=False):
        gr.Markdown("### Text Example")
        gr.Examples(
            examples=[[None, example_text]],
            inputs=[openai_api_key, text_string],
            outputs=[output_graph, output_zip],
            fn=w2w_from_string,
            preprocess=False,
            postprocess=False
        )
        gr.Markdown("### File Example")
        gr.Examples(
            examples=[[None, example_file]],
            inputs=[openai_api_key, text_file],
            outputs=[output_graph, output_zip],
            fn=w2w_from_file,
            preprocess=False,
            postprocess=False
        )

    # Wire both submit buttons to the pipeline helpers defined above.
    submit_str.click(fn=w2w_from_string, inputs=[openai_api_key, text_string], outputs=[output_graph, output_zip])
    submit_file.click(fn=w2w_from_file, inputs=[openai_api_key, text_file], outputs=[output_graph, output_zip])

demo.launch(inbrowser=True, width="75%")
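
# Note: inbrowser=True opens a local browser tab automatically, and width="75%"
# only affects inline rendering (e.g. inside a notebook). When running locally,
# Gradio's launch() also accepts share=True to create a temporary public link.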