import streamlit as st
import pandas as pd
from langchain.agents.agent_types import AgentType
from langchain_experimental.agents.agent_toolkits import create_pandas_dataframe_agent
from langchain_community.llms import Ollama

# Streamlit UI
st.title("Excel ChatBot (Free - Local Model)")
st.subheader("Stack: LangChain Agent, Streamlit, Ollama (Mistral)")

uploaded_file = st.file_uploader("Upload CSV or Excel", type=['csv', 'xlsx'])

# Load dataframe (falls back to a bundled Titanic CSV if nothing is uploaded)
if uploaded_file is None:
    df = pd.read_csv("titanic.csv")
    st.write("Using default Titanic dataset.")
else:
    if uploaded_file.name.endswith(".csv"):
        df = pd.read_csv(uploaded_file)
    elif uploaded_file.name.endswith(".xlsx"):
        df = pd.read_excel(uploaded_file)

st.dataframe(df, height=300)
# Load the local LLM via Ollama (the Ollama server must be running and the
# model pulled, e.g. `ollama run mistral`)
llm = Ollama(model="mistral")

# Create the LangChain pandas dataframe agent.
# AgentType.OPENAI_FUNCTIONS needs an OpenAI function-calling model, so with a
# plain local LLM use the ReAct-style ZERO_SHOT_REACT_DESCRIPTION agent instead.
agent = create_pandas_dataframe_agent(
    llm,
    df,
    verbose=True,
    agent_type=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    allow_dangerous_code=True,  # required by recent langchain-experimental releases
    agent_executor_kwargs={"handle_parsing_errors": True},  # small local models often break the ReAct format
)
# Chat input: show the user's question right away, then the agent's answer
if question := st.chat_input("Ask a question about the data"):
    st.chat_message("user").markdown(question)
    response = agent.invoke(question)
    st.chat_message("assistant").markdown(response["output"])