import gradio as gr
from langchain_community.agent_toolkits.load_tools import load_tools # Updated import
from langchain.agents import initialize_agent
from langchain.agents import AgentType
from langchain_openai import ChatOpenAI # Updated import
import os
# Set your OpenAI API key (ensure to store it securely in Hugging Face Spaces environment variables)
# os.environ["OPENAI_API_KEY"] = "your_openai_api_key"
import warnings
warnings.filterwarnings("ignore", message=".*TqdmWarning.*")
from dotenv import load_dotenv
_ = load_dotenv()
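# Locally, load_dotenv() reads a .env file in the working directory; it is
# expected to define the key used below, e.g. (example value, not a real key):
#   OPENAI_API_KEY=sk-...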
# Define the LLM model
llm_model = "gpt-3.5-turbo"
llm = ChatOpenAI(temperature=0, model=llm_model, openai_api_key=os.getenv("OPENAI_API_KEY"))  # Pass the API key explicitly (same variable name as above)
# Load tools
tools = load_tools(["llm-math", "wikipedia"], llm=llm)
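# llm-math: calculator-style arithmetic driven by the LLM.
# wikipedia: factual lookups via the Wikipedia API wrapper.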
# Initialize agent
agent = initialize_agent(
    tools,
    llm,
    agent=AgentType.CHAT_ZERO_SHOT_REACT_DESCRIPTION,
    handle_parsing_errors=True,
    verbose=True
)
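# CHAT_ZERO_SHOT_REACT_DESCRIPTION is a ReAct-style agent for chat models that
# chooses tools from their descriptions; handle_parsing_errors=True feeds
# malformed model output back to the agent instead of raising.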
def chatbot(query):
    """Handles user query and returns agent response."""
    try:
        response = agent.run(query)
        return response
    except Exception as e:
        return str(e)
# Create Gradio interface
demo = gr.Interface(
    fn=chatbot,
    inputs=gr.Textbox(label="Your Question", placeholder="Ask me anything..."),
    outputs=gr.Textbox(label="Response"),
    title="LangChain AI Chatbot",
    description="A smart AI chatbot powered by OpenAI and LangChain.",
    theme="compact"
)
# Launch the app
demo.launch()
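# Example queries once the app is running (one per tool loaded above):
#   "What is 25% of 300?"    -> typically routed to the llm-math tool
#   "Who was Alan Turing?"   -> typically routed to the wikipedia tool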