use chromadb

This commit is contained in:
n4ze3m 2023-04-11 21:07:42 +05:30
parent dd2d3b1d97
commit e94092e7b2
2 changed files with 7 additions and 6 deletions

View File

@@ -12,6 +12,8 @@ from langchain.prompts.chat import (
SystemMessagePromptTemplate, SystemMessagePromptTemplate,
HumanMessagePromptTemplate HumanMessagePromptTemplate
) )
from langchain.vectorstores import Chroma
async def chat_extension_handler(body: ChatBody): async def chat_extension_handler(body: ChatBody):
try: try:
@@ -35,11 +37,11 @@ async def chat_extension_handler(body: ChatBody):
print(f'Number of documents: {len(doc)}') print(f'Number of documents: {len(doc)}')
vectorstore = FAISS.from_documents(doc, OpenAIEmbeddings()) vectorstore = Chroma.from_documents(doc, OpenAIEmbeddings())
messages = [ messages = [
SystemMessagePromptTemplate.from_template("""I want you to act as a webpage that I am having a conversation with. Your name is "OpenChatX". You will provide me with answers from the given text from webpage. Your answer should be original, concise, accurate, and helpful. You can recommend, translate and can do anything based on the context given. If the answer is not included in the text and you know the answer you can respond the answer otherwise say exactly "I don't know the answer " and stop after that. Never break character. Answer must be in markdown format. SystemMessagePromptTemplate.from_template("""You are PageAssist bot. Follow the user's instructions carefully and generate answer from given context and You can recommend, translate and can do anything on the given context. If the answer is not included in the context say exactly "Sorry, I don't know" and if you know the answer you can respond it. Respond using markdown
----------------- -----------------
{context} {context}
"""), """),
@@ -49,12 +51,10 @@ async def chat_extension_handler(body: ChatBody):
prompt = ChatPromptTemplate.from_messages(messages) prompt = ChatPromptTemplate.from_messages(messages)
chat = ConversationalRetrievalChain.from_llm(OpenAI(temperature=0, model_name="gpt-3.5-turbo"), vectorstore.as_retriever(), return_source_documents=True, qa_prompt=prompt,) chat = ConversationalRetrievalChain.from_llm(OpenAI(temperature=0, model_name="gpt-3.5-turbo"), vectorstore.as_retriever(search_kwargs={"k": 1}), return_source_documents=True, qa_prompt=prompt,)
history = [(d["human_message"], d["bot_response"]) for d in body.history] history = [(d["human_message"], d["bot_response"]) for d in body.history]
print(history)
response = chat({ response = chat({
"question": body.user_message, "question": body.user_message,
"chat_history": history "chat_history": history

View File

@@ -11,3 +11,4 @@ lxml
faiss-cpu faiss-cpu
supabase supabase
tiktoken tiktoken
chromadb