Update app.py #1
by wweavishayaknin - opened

app.py CHANGED
@@ -1,33 +1,40 @@
 import streamlit as st
 import pandas as pd
-
 from langchain.agents.agent_types import AgentType
 from langchain_experimental.agents.agent_toolkits import create_pandas_dataframe_agent
-from
+from langchain_community.llms import Ollama
+
+# Streamlit UI
+st.title("Excel ChatBot (Free - Local Model)")
+st.subheader("Stack: LangChain Agent, Streamlit, Ollama (Mistral)")
 
-st.
-st.subheader("Stack used: LangChain Agent, Streamlit, OpenAI LLM - by https://github.com/jaglinux", divider='rainbow')
+uploaded_file = st.file_uploader("Upload CSV or Excel", type=['csv','xlsx'])
 
-
+# Load dataframe
 if uploaded_file is None:
     df = pd.read_csv("titanic.csv")
-    st.write("
+    st.write("Using default Titanic dataset.")
 else:
-    # Can be used wherever a "file-like" object is accepted:
     if uploaded_file.name.endswith(".csv"):
         df = pd.read_csv(uploaded_file)
     elif uploaded_file.name.endswith(".xlsx"):
         df = pd.read_excel(uploaded_file)
-st.dataframe(df, height=5)
 
+st.dataframe(df, height=300)
+
+# Load local LLM using Ollama (make sure `ollama run mistral` is running)
+llm = Ollama(model="mistral")
+
+# Create LangChain agent
 agent = create_pandas_dataframe_agent(
-
+    llm,
     df,
     verbose=True,
-    agent_type=AgentType.OPENAI_FUNCTIONS,
+    agent_type=AgentType.OPENAI_FUNCTIONS  # Still works, just the name
 )
-
+
+# Chat input
+if question := st.chat_input("Ask a question about the data"):
     response = agent.invoke(question)
-    print(response['output'])
     st.chat_message("user").markdown(question)
-    st.chat_message("assistant").markdown(response[
+    st.chat_message("assistant").markdown(response["output"])
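The change above swaps the hosted OpenAI LLM for a locally served Mistral model via Ollama. Below is a minimal, Streamlit-free sketch of the same wiring, useful as a quick smoke test of the agent before deploying the Space. It is not part of the PR itself: it assumes an Ollama server is running locally with the mistral model pulled (`ollama run mistral`), that titanic.csv sits next to the script, and recent langchain-community / langchain-experimental releases; it also swaps in AgentType.ZERO_SHOT_REACT_DESCRIPTION, since the plain Ollama LLM class has no OpenAI-style function-calling endpoint.

import pandas as pd

from langchain.agents.agent_types import AgentType
from langchain_community.llms import Ollama
from langchain_experimental.agents.agent_toolkits import create_pandas_dataframe_agent

# Same dataset the Space falls back to when nothing is uploaded.
df = pd.read_csv("titanic.csv")

# Local model served by Ollama; no API key required.
llm = Ollama(model="mistral")

agent = create_pandas_dataframe_agent(
    llm,
    df,
    verbose=True,
    # Swapped in for OPENAI_FUNCTIONS because Mistral served through the plain
    # Ollama LLM class cannot do OpenAI function calling (assumption, not in the PR).
    agent_type=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    # Recent langchain-experimental releases require this explicit opt-in because the
    # agent executes generated Python; drop it on older versions that reject the kwarg.
    allow_dangerous_code=True,
)

# agent.invoke accepts a plain string and returns a dict with an "output" key,
# mirroring how app.py reads response["output"].
response = agent.invoke("How many passengers survived, grouped by passenger class?")
print(response["output"])

If this smoke test answers sensibly, the Streamlit app should behave the same way, since app.py feeds each st.chat_input question through the same agent.invoke call and renders response["output"] in the assistant chat message.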