I'm trying to add memory to this model that works using langchain and Qdrant I tried adding Conversation Buffer But there are problems, can anyone help me with this? Can I add streamlit session state Or any of the types of memory in Langechen
from dotenv import load_dotenv
import streamlit as st
import os
from langchain.vectorstores import qdrant
from langchain.embeddings.openai import OpenAIEmbeddings
import qdrant_client
from langchain.chat_models import ChatOpenAI
from langchain.chains import RetrievalQA
from langchain.llms import openai
def get_vector_store():
client= qdrant_client.QdrantClient(
os.getenv('QDRANT_HOST'),
api_key=os.getenv('QDRANT_API_KEY')
)
embeddings= OpenAIEmbeddings()
vector_store = qdrant.Qdrant(
client=client,
collection_name=os.getenv('QDRANT_COLLECTION_NAME'),
embeddings=embeddings,
)
return vector_store
def main():
load_dotenv()
st.set_page_config(page_title="Ask Your Data")
st.header("ask what do you want to know")
vector_store= get_vector_store()
qa= RetrievalQA.from_chain_type(
llm=ChatOpenAI(temperature=0.7),
chain_type="stuff",
retriever=vector_store.as_retriever()
)
user_question = st.text_input("write your Question")
if user_question:
st.write(f"Question: {user_question}")
answer= qa.run(user_question)
st.write(f"Answer: {answer}")
if __name__ == '__main__':
main()