import gradio as gr
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain.chat_models import ChatOpenAI
from langchain.utilities import GoogleSearchAPIWrapper
from groq import Groq
# Initialize the Groq client (the groq SDK exposes a `Groq` class, not `GroqClient`)
groq_client = Groq(api_key="your_groq_api_key")
# Extract search keywords from the user's query with a Groq chat completion
# (the groq SDK has no `keywords` endpoint; adjust the model name to one available on Groq)
def extract_keywords(query):
    response = groq_client.chat.completions.create(
        model="llama3-8b-8192",  # assumed model name; swap for any model currently offered by Groq
        messages=[{"role": "user", "content": "Extract the key search terms from this legal question. Reply with the keywords only, separated by spaces: " + query}],
    )
    return response.choices[0].message.content.split()
# Search noticiasjuridicas.es through the Google Custom Search wrapper
def search_noticiasjuridicas(keywords):
    search_query = "site:www.noticiasjuridicas.es " + " ".join(keywords)
    search_tool = GoogleSearchAPIWrapper(google_api_key="your_google_api_key", google_cse_id="your_search_engine_id")
    results = search_tool.results(search_query, num_results=5)
    return results
# Generate a response from the retrieved context with an OpenAI chat model
def generate_response(query, context):
    template = """Based on the following information:
{context}

Answer this query:
{query}

Response:
"""
    prompt_template = PromptTemplate(template=template, input_variables=["context", "query"])
    llm = ChatOpenAI(model_name="gpt-4", openai_api_key="your_openai_api_key")
    chain = LLMChain(prompt=prompt_template, llm=llm)
    response = chain.run({"context": context, "query": query})
    return response
# Main chatbot pipeline: extract keywords, run the site-restricted search, generate the answer
def chatbot(query):
    keywords = extract_keywords(query)
    search_results = search_noticiasjuridicas(keywords)
    context = "\n".join(result.get("snippet", "") for result in search_results)
    response = generate_response(query, context)
    return response
# Create the Gradio interface
iface = gr.Interface(
    fn=chatbot,
    inputs=gr.Textbox(lines=2, placeholder="Enter your legal query here..."),
    outputs="text",
    title="Legal Assistant Chatbot",
    description="Ask any legal question and get an answer based on the latest information from noticiasjuridicas.es",
)
# Launch the interface
iface.launch()
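
# Usage note (an assumption about deployment, not part of the original file): replace the
# "your_*_api_key" placeholders with real credentials, then either run `python app.py`
# locally (Gradio prints a local URL) or let Hugging Face Spaces pick up app.py and serve
# the interface automatically.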