import gradio as gr
from langchain_community.agent_toolkits.load_tools import load_tools  # Updated import
from langchain.agents import initialize_agent
from langchain.agents import AgentType
from langchain_openai import ChatOpenAI  # Updated import
import os

# Set your OpenAI API key (store it securely as a Hugging Face Spaces secret / environment variable)
# os.environ["OPENAI_API_KEY"] = "your_openai_api_key"

import warnings
from tqdm import TqdmWarning

# Suppress tqdm progress-bar warnings
warnings.filterwarnings("ignore", category=TqdmWarning)
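
# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file so the key
# never has to be hard-coded; on Spaces the repository secrets fill the same role.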
from dotenv import load_dotenv
_ = load_dotenv()

# Define the LLM model
llm_model = "gpt-3.5-turbo"
llm = ChatOpenAI(temperature=0, model=llm_model, openai_api_key=os.getenv("OPENAI_API_KEY"))  # Read the key from the environment

# Load tools
tools = load_tools(["llm-math", "wikipedia"], llm=llm)
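# "llm-math" wraps an LLM-backed calculator chain for arithmetic questions;
# "wikipedia" looks up articles via the wikipedia package for factual questions.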

# Initialize agent
agent = initialize_agent(
    tools,
    llm,
    agent=AgentType.CHAT_ZERO_SHOT_REACT_DESCRIPTION,
    handle_parsing_errors=True,
    verbose=True
)
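# CHAT_ZERO_SHOT_REACT_DESCRIPTION runs a ReAct loop with the chat model: the LLM
# reasons about the question, picks a tool by its description, observes the result,
# and repeats until it reaches a final answer. handle_parsing_errors=True lets the
# agent recover when the model's output cannot be parsed; verbose=True logs each step.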


def chatbot(query):
    """Handles user query and returns agent response."""
    try:
        response = agent.run(query)
        return response
    except Exception as e:
        return str(e)
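
# Example queries (illustrative): chatbot("What is 2 to the 10th power?") exercises the
# llm-math tool, while chatbot("Who founded Wikipedia?") is answered via the wikipedia tool.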

# Create Gradio interface
demo = gr.Interface(
    fn=chatbot,
    inputs=gr.Textbox(label="Your Question", placeholder="Ask me anything..."),
    outputs=gr.Textbox(label="Response"),
    title="LangChain AI Chatbot",
    description="A smart AI chatbot powered by OpenAI and LangChain.",
    theme="compact"
)

# Launch the app
demo.launch()
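# launch() starts the Gradio server; on Hugging Face Spaces the app is served automatically,
# and when run locally it defaults to http://127.0.0.1:7860.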