# mCodeGPT / app.py
# Source: Hugging Face Space by paopaoka3325 — commit 1c477ba ("Add application files").
# (The lines above were scraped web-page chrome; kept here as comments so the file parses.)
import os
import shlex
import subprocess

import gradio as gr
import pandas as pd
'''
import gradio as gr
def greet(name):
return "Hello " + name + "!"
demo = gr.Interface(fn=greet, inputs="text", outputs="text")
demo.launch()
'''
def greet(name1, name2):
    """Run the cancerontogpt extraction pipeline over a pasted cancer report.

    Parameters
    ----------
    name1 : str
        The caller's OpenAI API key (forwarded to ``runoak set-apikey``).
    name2 : str
        The cancer-report text to annotate.

    Returns
    -------
    str
        An HTML table (``DataFrame.to_html``) with up to 19
        "Ontology Attribute" / "Value" rows extracted from the report,
        or a ``<pre>`` error dump if extraction produced no output.
    """
    openai_key = name1
    # SECURITY NOTE(review): these credentials were hard-coded in the source.
    # Environment variables now take precedence; the literals remain only as
    # fallbacks so existing deployments keep working. Rotate these keys.
    bioportal_key = os.environ.get(
        "BIOPORTAL_API_KEY", "213e22ba-4c3b-402b-bd36-6e9d4e86b1b5")
    huggingface_key = os.environ.get(
        "HUGGINGFACE_API_KEY", "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")

    # Persist the report where the extractor command expects to find it.
    with open('abstractsave.txt', 'w') as f:
        f.write(name2)

    def run_command(command):
        """Run `command` in a shell; return (stdout, stderr) as text."""
        result = subprocess.run(command, shell=True, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE, text=True)
        return result.stdout, result.stderr

    # The poetry installer genuinely needs a shell (pipe into python3); the
    # command is a fixed constant, so shell=True carries no injection risk here.
    run_command("curl -sSL https://install.python-poetry.org | python3 -")

    # shlex.quote() every interpolated value: name1 is user-supplied, and the
    # original bare f-string + shell=True was a shell-injection vector.
    run_command(
        f"poetry run runoak set-apikey -e openai {shlex.quote(openai_key)}")
    run_command(
        f"poetry run runoak set-apikey -e bioportal {shlex.quote(bioportal_key)}")
    run_command(
        f"poetry run runoak set-apikey -e hfhub-key {shlex.quote(huggingface_key)}")

    stdout, stderr = run_command(
        "cancerontogpt extract -t cancer.CancerAnnotations -i ./abstractsave.txt")
    # The tool emits literal backslash-n sequences; turn them into real newlines.
    output = stdout.replace('\\n', '\n')

    # The annotations sit between these two markers in the tool's YAML output.
    start_marker = "raw_completion_output: |-"
    end_marker = "prompt: "
    start_position = output.find(start_marker)
    end_position = output.find(end_marker)
    if start_position == -1 or end_position == -1:
        # Guard: the original code sliced with find()'s -1 sentinel and
        # rendered garbage when extraction failed; surface the error instead.
        return f"<pre>Extraction failed.\n{stderr}</pre>"
    output = output[start_position + len(start_marker):end_position].strip()

    def format_identifier(identifier: str) -> str:
        """Turn a snake_case key into a spaced, capitalized label."""
        return ' '.join(w.capitalize() for w in identifier.strip().split('_'))

    # The first line after the marker is a header; each remaining non-blank
    # line is a "key: value" pair.
    key_value_pairs = [line.split(": ", 1)
                       for line in output.split("\n")[1:] if line.strip()]
    key_value_pairs = [[format_identifier(kv[0]), *kv[1:]]
                       for kv in key_value_pairs]

    # Keep at most the first 19 attributes (matches the original behavior).
    df_pred = pd.DataFrame(
        key_value_pairs, columns=["Ontology Attribute", "Value"]).iloc[:19, :]
    return df_pred.to_html()
# Two text inputs: the user's OpenAI API key and the cancer report to annotate.
# Uses the modern top-level component API (gr.Textbox / gr.HTML); the old
# gr.inputs.* / gr.outputs.* namespaces were removed in Gradio 4.
input_boxes = [
    gr.Textbox(label="openai api key"),
    gr.Textbox(lines=20, label="Input cancer report",
               placeholder='Type text here...'),
]

# greet() returns an HTML table, so render it with an HTML output component.
# Each example row must supply a value for BOTH inputs (key, report text).
iface = gr.Interface(
    fn=greet,
    inputs=input_boxes,
    outputs=gr.HTML(label="Output Table"),
    examples=[
        ["", "this is a sample text"],
        ["", "gradio is great"],
    ],
)
iface.launch()