from OpenAITools.ExpertTools import GetPubmedSummaryDf, generate, search
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, SummaryIndex
from llama_index.core import Document
from llama_index.llms.groq import Groq
from llama_index.core import ServiceContext, set_global_service_context
from llama_index.llms.llama_cpp.llama_utils import messages_to_prompt, completion_to_prompt
import gradio as gr

# Models available on Groq
LLAMA3_8B = "Llama3-8b-8192"
LLAMA3_70B = "Llama3-70b-8192"
Mixtral = "mixtral-8x7b-32768"


def custom_completion_to_prompt(completion: str) -> str:
    """Wrap a raw completion with the Q&A system prompt."""
    return completion_to_prompt(
        completion,
        system_prompt=(
            "You are a Q&A assistant. Your goal is to answer questions as "
            "accurately as possible based on the instructions and context provided."
        ),
    )


def getMutationEffect(cancer_name, gene_name):
    # Build the PubMed query and fetch matching study summaries
    searchWords = "(" + str(cancer_name) + ") AND (" + str(gene_name) + ") AND (treatment)"
    studies = search(searchWords)
    df, abstracts = GetPubmedSummaryDf(studies)

    # Define the LLM served by Groq
    llm = Groq(
        model=LLAMA3_8B,
        temperature=0.01,
        context_window=4096,
        completion_to_prompt=custom_completion_to_prompt,
        messages_to_prompt=messages_to_prompt,
    )

    # Set the global service context so the index uses this LLM
    ctx = ServiceContext.from_defaults(llm=llm)
    set_global_service_context(ctx)

    # Index the first ten abstracts and summarize them with a tree-summarize query
    documents = [Document(text=t) for t in abstracts[:10]]
    index = SummaryIndex.from_documents(documents)
    query_engine = index.as_query_engine(response_mode="tree_summarize")
    prompt = (
        "Please prepare a single summary of the abstracts of the following papers. "
        "Pay particular attention to the {} gene".format(gene_name)
    )
    response = query_engine.query(prompt)
    # Return plain text so the Gradio "text" output renders the summary directly
    return str(response)


demo = gr.Interface(
    fn=getMutationEffect,
    inputs=[
        gr.Textbox(label="CancerName"),
        gr.Textbox(label="GeneName"),
    ],
    outputs="text",
)

if __name__ == "__main__":
    demo.launch()
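
# Minimal sketch of calling the pipeline directly, without launching the Gradio UI.
# "Lung Cancer" and "EGFR" are hypothetical placeholder inputs, not values from the
# original script; any cancer/gene pair accepted by the PubMed search should work.
#
#     summary = getMutationEffect("Lung Cancer", "EGFR")
#     print(summary)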