Sbnos committed
Commit ba25116 · verified · 1 Parent(s): f7842a2

mainchange cgpt 1

Files changed (1):
  app.py  +4 -8
app.py CHANGED
@@ -2,12 +2,12 @@ import streamlit as st
 import os
 from langchain.vectorstores import Chroma
 from langchain.embeddings import HuggingFaceBgeEmbeddings
-from langchain_together import Together  # Updated import
+from langchain_together import Together
 from langchain import hub
 from operator import itemgetter
-from langchain.schema import RunnableParallel, format_document  # Updated import paths
+from langchain.schema import RunnableParallel, format_document
 from typing import List, Tuple
-from langchain.chains import LLMChain, RetrievalQA, ConversationalRetrievalChain
+from langchain.chains import LLMChain, ConversationalRetrievalChain
 from langchain.schema.output_parser import StrOutputParser
 from langchain.memory import StreamlitChatMessageHistory, ConversationBufferMemory, ConversationSummaryMemory
 from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder, PromptTemplate
@@ -22,10 +22,6 @@ embedding_function = HuggingFaceBgeEmbeddings(
     encode_kwargs=encode_kwargs
 )
 
-# Load the ChromaDB vector store
-# persist_directory="./mrcpchromadb/"
-# vectordb = Chroma(persist_directory=persist_directory, embedding_function=embedding_function, collection_name="mrcppassmednotes")
-
 # Load the LLM
 llm = Together(
     model="mistralai/Mixtral-8x22B-Instruct-v0.1",
@@ -122,7 +118,7 @@ def app():
     }
     conversational_qa_chain = _inputs | _context | ANSWER_PROMPT | llm
 
-    st.header("Hello Doctor, How can I help?")
+    st.header("Ask Away!")
     for message in st.session_state.messages:
         with st.chat_message(message["role"]):
             st.write(message["content"])
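The second hunk only shows the first argument of the Together(...) call. As a rough sketch of how the updated langchain_together import is typically wired up — the temperature, max_tokens and API-key lookup below are assumptions, since they sit outside the visible diff:

import os
from langchain_together import Together

# Sketch only: the model name comes from the diff; every other parameter
# here is an assumed placeholder, and TOGETHER_API_KEY must be set in the
# environment for the call to succeed.
llm = Together(
    model="mistralai/Mixtral-8x22B-Instruct-v0.1",
    temperature=0.2,
    max_tokens=512,
    together_api_key=os.environ["TOGETHER_API_KEY"],
)

print(llm.invoke("Reply with the single word: ready"))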
 
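For the conversational_qa_chain line in the last hunk, here is a minimal sketch of the same piping pattern; _inputs, _context and ANSWER_PROMPT below are hypothetical stand-ins, since their real definitions in app.py are outside this diff (the real _context would pull documents from the Chroma retriever):

from operator import itemgetter
from langchain.prompts import ChatPromptTemplate
from langchain.schema.runnable import RunnableParallel
from langchain_together import Together

# Hypothetical prompt; app.py's ANSWER_PROMPT is defined elsewhere.
ANSWER_PROMPT = ChatPromptTemplate.from_template(
    "Answer using the context below.\n\nContext:\n{context}\n\nQuestion: {question}"
)

llm = Together(model="mistralai/Mixtral-8x22B-Instruct-v0.1")  # assumes TOGETHER_API_KEY is set

# Stand-ins for app.py's _inputs and _context maps.
_inputs = RunnableParallel(question=itemgetter("question"))
_context = {
    "context": lambda x: "No retriever in this sketch.",
    "question": itemgetter("question"),
}

# Same composition as the diff: inputs -> context -> prompt -> LLM.
conversational_qa_chain = _inputs | _context | ANSWER_PROMPT | llm

print(conversational_qa_chain.invoke({"question": "What is LCEL?"}))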
 
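Finally, a self-contained sketch of the chat-history loop the last hunk renders, assuming st.session_state.messages holds {"role", "content"} dicts as in app.py; the st.chat_input handling is an assumption, since it is not part of this diff:

import streamlit as st

# app.py initialises this elsewhere; the default here is an assumption.
if "messages" not in st.session_state:
    st.session_state.messages = []

st.header("Ask Away!")  # header text introduced by this commit

# Replay the stored conversation, exactly as in the hunk above.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.write(message["content"])

# Hypothetical input handling; the real handler in app.py is not shown here.
if prompt := st.chat_input("Ask a question"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.write(prompt)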