pratikshahp committed
Commit 07c8970 · verified · 1 Parent(s): 8af0b87

Update app.py

Files changed (1)
  1. app.py +17 -11
app.py CHANGED
@@ -19,24 +19,27 @@ model = ChatOpenAI(
 # ✅ Initialize HuggingFace Embeddings
 embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
 
-# ✅ Initialize Chroma Vector Store (No need for chromadb.PersistentClient)
+# ✅ Initialize Chroma Vector Store
 vector_store = Chroma(
     collection_name="chat_collection",  # Specify the collection name
     embedding_function=embeddings,
-    persist_directory="./chroma_langchain_db",  # Directory to store data locally
+    persist_directory="./chroma_db",  # Directory to store data locally
 )
 
 # ✅ Step 1: Helper Functions for Chat Memory
 def get_chat_history(user_id):
     """Fetches stored messages for a given user from the vector store."""
-    # Use Chroma's retriever to fetch documents filtered by user_id
     retriever = vector_store.as_retriever(
-        search_type="mmr",  # Modify search type as needed (e.g., "mmr" for Maximal Marginal Relevance)
-        search_kwargs={"k": 100, "fetch_k": 100}  # Adjust the number of results to fetch and how many to filter
+        search_type="mmr",
+        search_kwargs={"k": 100, "fetch_k": 100}
     )
 
-    # Use the retriever to fetch results, filtered by the user_id
+    # Ensure filter is applied to user_id correctly
     results = retriever.invoke("Chat history", filter={"user_id": user_id})
+
+    if not results:  # If no results, return empty string
+        return ""
+
     # Extract the page content (chat messages) from the results
     user_history = [doc.page_content for doc in results]
     return "\n".join(user_history) if user_history else ""
@@ -52,24 +55,27 @@ def generate_response(username, user_input):
     """Generates a chatbot response using GPT-4 and stores chat history."""
     user_id = username.lower().strip()
     history = get_chat_history(user_id)
-    # Append the latest user input to the conversation history
     messages = [{"role": "system", "content": "You are a helpful AI assistant. Please provide answer in 20 words only"}]
+
     if history:
         messages.append({"role": "user", "content": f"Chat history:\n{history}"})
-    # Include the latest user input in the conversation
+
     messages.append({"role": "user", "content": user_input})
-    print(messages)
-    response = model.invoke(messages)  # Using the model to generate response
+
+    # Generate the response
+    response = model.invoke(messages)
     bot_response = response.content
+
     # Store the conversation for future reference
     store_chat_message(user_id, user_input, bot_response)
+
     # Return the entire conversation including the user's input and bot's response
     return f"{history}\nUser: {user_input}\nBot: {bot_response}"
 
 # ✅ Step 3: Gradio UI with User Dropdown
 with gr.Blocks() as demo:
     gr.Markdown("# 🔥 Multi-User Chatbot with GPT-4 and Memory (ChromaDB)")
-
+
     # Dropdown for selecting user
     username_input = gr.Dropdown(
         label="Select User",
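generate_response() calls store_chat_message(user_id, user_input, bot_response), which is not part of this diff. A minimal sketch of what such a helper might look like, assuming it writes one document per exchange to the same vector_store and tags it with a "user_id" metadata key so that get_chat_history() can filter on it (the actual implementation in app.py may differ):

# Hypothetical helper (not shown in this diff): persist one chat turn per document,
# tagged with the user's id for later metadata filtering.
def store_chat_message(user_id, user_input, bot_response):
    vector_store.add_texts(
        texts=[f"User: {user_input}\nBot: {bot_response}"],
        metadatas=[{"user_id": user_id}],
    )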
 
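The hunk ends inside the gr.Dropdown(...) call, so the rest of the UI is not visible here. A sketch of how the remaining components might be wired to generate_response(); the component names, user choices, and layout are assumptions, not part of the commit:

    # Hypothetical continuation of the gr.Blocks() UI; names and choices are assumed.
    username_input = gr.Dropdown(
        label="Select User",
        choices=["alice", "bob"],
        value="alice",
    )
    user_input = gr.Textbox(label="Your message")
    chat_output = gr.Textbox(label="Conversation", lines=12)
    send_btn = gr.Button("Send")

    # Send the selected user and message through generate_response defined above.
    send_btn.click(fn=generate_response, inputs=[username_input, user_input], outputs=chat_output)

demo.launch()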