added supabase

This commit is contained in:
n4ze3m 2023-04-11 15:19:39 +05:30
parent a3535bb5c5
commit 00e6d71727
13 changed files with 211 additions and 90 deletions

0
py_server/db/__init__.py Normal file

25
py_server/db/supa.py Normal file

@@ -0,0 +1,25 @@
import supabase
import os


class SupaService:
    def __init__(self):
        self.supabase_url = os.environ.get("SUPABASE_URL")
        self.supabase_key = os.environ.get("SUPABASE_KEY")
        self.supabase = supabase.create_client(self.supabase_url, self.supabase_key)

    def validate_user(self, token):
        # Return the "User" rows whose access_token matches the given token.
        user = self.supabase.table("User").select("*").eq("access_token", token).execute()
        return user

    def save_website(self, title: str, icon: str, html: str, url: str, user_id: str):
        # Persist a scraped page for the given user in the "Website" table.
        result = self.supabase.table("Website").insert({
            "title": title,
            "icon": icon,
            "html": html,
            "url": url,
            "user_id": user_id
        }).execute()
        return result
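A minimal usage sketch (not from the commit), assuming SUPABASE_URL and SUPABASE_KEY point at a real Supabase project that has the "User" and "Website" tables used above; the values below are placeholders:

import os

# Placeholder credentials; SupaService reads both variables in __init__.
os.environ["SUPABASE_URL"] = "https://your-project.supabase.co"
os.environ["SUPABASE_KEY"] = "your-anon-or-service-key"

from db.supa import SupaService

supa = SupaService()
res = supa.validate_user("some-access-token")
print(res.data)  # an empty list means no User row has this access_token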

py_server/handlers/__init__.py Normal file

79
py_server/handlers/chat.py Normal file

@@ -0,0 +1,79 @@
from models import ChatBody
from bs4 import BeautifulSoup
from langchain.docstore.document import Document as LDocument
from langchain.vectorstores.faiss import FAISS
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.llms import OpenAI
from langchain.text_splitter import CharacterTextSplitter
from langchain.chains import ConversationalRetrievalChain
from langchain.prompts.chat import (
    ChatPromptTemplate,
    SystemMessagePromptTemplate,
    HumanMessagePromptTemplate
)


async def chat_extension_handler(body: ChatBody):
    try:
        # Remove the extension's own UI nodes before extracting the page text.
        soup = BeautifulSoup(body.html, 'lxml')
        iframe = soup.find('iframe', id='pageassist-iframe')
        if iframe:
            iframe.decompose()
        div = soup.find('div', id='pageassist-icon')
        if div:
            div.decompose()
        div = soup.find('div', id='__plasmo-loading__')
        if div:
            div.decompose()
        text = soup.get_text()

        # Chunk the page text and index it in an in-memory FAISS store.
        result = [LDocument(page_content=text, metadata={"source": "test"})]
        token_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
        doc = token_splitter.split_documents(result)
        print(f'Number of documents: {len(doc)}')
        vectorstore = FAISS.from_documents(doc, OpenAIEmbeddings())

        messages = [
            SystemMessagePromptTemplate.from_template("""I want you to act as the webpage that I am having a conversation with. Your name is "OpenChatX". You will provide me with answers from the given webpage text. Your answers should be original, concise, accurate, and helpful. You can recommend, translate and do anything based on the given context. If the answer is not included in the text but you know it, you can respond with the answer; otherwise say exactly "I don't know the answer" and stop after that. Never break character. The answer must be in markdown format.
-----------------
{context}
"""),
            HumanMessagePromptTemplate.from_template("{question}")
        ]
        prompt = ChatPromptTemplate.from_messages(messages)
        chat = ConversationalRetrievalChain.from_llm(
            OpenAI(temperature=0, model_name="gpt-3.5-turbo"),
            vectorstore.as_retriever(),
            return_source_documents=True,
            qa_prompt=prompt,
        )
        history = [(d["human_message"], d["bot_response"]) for d in body.history]
        print(history)
        response = chat({
            "question": body.user_message,
            "chat_history": history
        })
        answer = response["answer"]
        answer = answer[answer.find(":") + 1:].strip()
        return {
            "bot_response": answer,
            "human_message": body.user_message,
        }
    except Exception as e:
        print(e)
        return {
            "bot_response": "Something went wrong, please try again later",
            "human_message": body.user_message,
        }
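The handler expects each history entry to carry exactly the keys it unpacks into (human, ai) tuples. A sketch of a request body that satisfies ChatBody, with example values only:

from models import ChatBody  # the same model the handler receives

example_body = {
    "user_message": "What is this page about?",
    "html": "<html><body><h1>Docs</h1><p>Example page text.</p></body></html>",
    "history": [
        # Prior turns must expose these two keys, per the list comprehension above.
        {"human_message": "Hi", "bot_response": "Hello!"},
    ],
}
body = ChatBody(**example_body)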

57
py_server/handlers/user.py Normal file

@@ -0,0 +1,57 @@
from fastapi import HTTPException
from models import UserValidation, SaveChatToApp
from db.supa import SupaService
from bs4 import BeautifulSoup

supabase = SupaService()


async def validate_user_handler(user: UserValidation):
    if user.token is None or user.token == "":
        raise HTTPException(status_code=400, detail="Token is required")
    result = supabase.validate_user(user.token)
    data = result.data
    if len(data) == 0:
        raise HTTPException(status_code=400, detail="Invalid token")
    return {
        "status": "success",
    }


async def save_website_handler(body: SaveChatToApp, x_auth_token: str):
    try:
        if x_auth_token is None or x_auth_token == "":
            raise HTTPException(status_code=400, detail="Token is required")
        user = supabase.validate_user(x_auth_token)
        data = user.data
        if len(data) == 0:
            raise HTTPException(status_code=400, detail="Invalid token")

        # Remove the extension's own UI nodes before extracting title, icon and text.
        soup = BeautifulSoup(body.html, 'lxml')
        title = soup.title.string if soup.title else "Untitled Page"
        icon = soup.find('link', rel='icon').get('href') if soup.find('link', rel='icon') else None
        iframe = soup.find('iframe', id='pageassist-iframe')
        if iframe:
            iframe.decompose()
        div = soup.find('div', id='pageassist-icon')
        if div:
            div.decompose()
        div = soup.find('div', id='__plasmo-loading__')
        if div:
            div.decompose()
        text = soup.get_text()

        result = supabase.save_website(html=text, title=title, icon=icon, url=body.url, user_id=data[0]["id"])
        return {
            "status": "Success"
        }
    except HTTPException:
        # Let the 400 responses above propagate instead of turning them into a 500.
        raise
    except Exception:
        raise HTTPException(status_code=500, detail="Internal server error")
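The queries above imply a minimal shape for the two Supabase tables: "User" rows need at least an id and an access_token column, and "Website" rows are inserted with the fields shown. A sketch with placeholder values (the real schema is not part of this diff):

# Assumed row shapes, inferred from the handler code; values are placeholders.
user_row = {
    "id": "user-uuid",            # copied into Website.user_id
    "access_token": "token-123",  # matched by SupaService.validate_user
}
website_row = {
    "title": "Example Docs",
    "icon": "/favicon.ico",
    "html": "plain text extracted with BeautifulSoup",
    "url": "https://example.com/docs",
    "user_id": user_row["id"],
}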

py_server/main.py

@@ -1,23 +1,8 @@
-from fastapi import FastAPI, Header, UploadFile, File
+from fastapi import FastAPI
 import os
-from pydantic import BaseModel
 from uvicorn import run
 from fastapi.middleware.cors import CORSMiddleware
-from bs4 import BeautifulSoup
-from langchain.docstore.document import Document as LDocument
-from langchain.vectorstores.faiss import FAISS
-from langchain.embeddings.openai import OpenAIEmbeddings
-from langchain.llms import OpenAI
-from langchain.text_splitter import CharacterTextSplitter
-from langchain.chains import ConversationalRetrievalChain
-from langchain.prompts.chat import (
-    ChatPromptTemplate,
-    SystemMessagePromptTemplate,
-    HumanMessagePromptTemplate
-)
+from routers import chat, user
 os.environ["OPENAI_API_KEY"] = os.environ.get("OPENAI_API_KEY")
@@ -36,79 +21,9 @@ app.add_middleware(
     allow_headers=headers
 )
+app.include_router(chat.router)
+app.include_router(user.router)
-class ChatBody(BaseModel):
-    user_message: str
-    html: str
-    history: list
-@app.post("/chat")
-async def chat(body: ChatBody):
-    try:
-        soup = BeautifulSoup(body.html, 'lxml')
-        iframe = soup.find('iframe', id='pageassist-iframe')
-        if iframe:
-            iframe.decompose()
-        div = soup.find('div', id='pageassist-icon')
-        if div:
-            div.decompose()
-        div = soup.find('div', id='__plasmo-loading__')
-        if div:
-            div.decompose()
-        text = soup.get_text()
-        result = [LDocument(page_content=text, metadata={"source": "test"})]
-        token_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
-        doc = token_splitter.split_documents(result)
-        print(f'Number of documents: {len(doc)}')
-        vectorstore = FAISS.from_documents(doc, OpenAIEmbeddings())
-        messages = [
-            SystemMessagePromptTemplate.from_template("""I want you to act as a webpage that I am having a conversation witu. Your name is "OpenChatX". You will provide me with answers from the given text from webpage. Your answer should be original, concise, accurate, and helpful. You can recommend, translate and can do anything based on the context given. If the answer is not included in the text and you know the answer you can resonpond the answer othwerwise say exactly "I don't know the answer " and stop after that. Never break character. Answer must be in markdown format.
------------------
-{context}
-"""),
-            HumanMessagePromptTemplate.from_template("{question}")
-        ]
-        prompt = ChatPromptTemplate.from_messages(messages)
-        chat = ConversationalRetrievalChain.from_llm(OpenAI(temperature=0, model_name="gpt-3.5-turbo"), vectorstore.as_retriever(), return_source_documents=True, qa_prompt=prompt,)
-        history = [(d["human_message"], d["bot_response"]) for d in body.history]
-        print(history)
-        response = chat({
-            "question": body.user_message,
-            "chat_history": history
-        })
-        answer = response["answer"]
-        answer = answer[answer.find(":")+1:].strip()
-        return {
-            "bot_response": answer,
-            "human_message": body.user_message,
-        }
-    except Exception as e:
-        print(e)
-        return {
-            "bot_response": "Something went wrong please try again later",
-            "human_message": body.user_message,
-        }
 if __name__ == "__main__":
     port = int(os.environ.get('PORT', 5000))

2
py_server/models/__init__.py Normal file

@@ -0,0 +1,2 @@
from .chat import ChatBody
from .user import UserValidation, SaveChatToApp

7
py_server/models/chat.py Normal file

@@ -0,0 +1,7 @@
from pydantic import BaseModel

class ChatBody(BaseModel):
    user_message: str
    html: str
    history: list
    # url: str

11
py_server/models/user.py Normal file

@@ -0,0 +1,11 @@
from pydantic import BaseModel


class UserValidation(BaseModel):
    token: str


class SaveChatToApp(BaseModel):
    html: str
    url: str

py_server/requirements.txt

@@ -8,4 +8,5 @@ numpy
 pydantic
 langchain
 lxml
 faiss-cpu
+supabase

py_server/routers/__init__.py Normal file

9
py_server/routers/chat.py Normal file

@@ -0,0 +1,9 @@
from fastapi import APIRouter
from models import ChatBody
from handlers.chat import chat_extension_handler

router = APIRouter(prefix="/api/v1")


@router.post("/chat/chrome", tags=["chat"])
async def chat_extension(body: ChatBody):
    return await chat_extension_handler(body)
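A hedged request sketch against the new route, assuming the FastAPI app from main.py is running locally on port 5000 and the requests package is installed:

import requests

resp = requests.post(
    "http://localhost:5000/api/v1/chat/chrome",
    json={
        "user_message": "Summarize this page",
        "html": "<html><body><p>Example page.</p></body></html>",
        "history": [],
    },
)
print(resp.json()["bot_response"])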

15
py_server/routers/user.py Normal file

@@ -0,0 +1,15 @@
from fastapi import APIRouter, Header
from models import UserValidation, SaveChatToApp
from handlers.user import validate_user_handler, save_website_handler

router = APIRouter(prefix="/api/v1")


@router.post("/user/validate", tags=["user"])
async def validate_user(user: UserValidation):
    return await validate_user_handler(user)


@router.post("/user/save", tags=["user"])
async def save_website(body: SaveChatToApp, x_auth_token: str = Header(None)):
    return await save_website_handler(body, x_auth_token)
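The user routes work the same way; FastAPI maps the x_auth_token header parameter to an x-auth-token HTTP header. A sketch with a placeholder token, again assuming a local server on port 5000:

import requests

BASE = "http://localhost:5000/api/v1"

# Validate an extension token (field name comes from UserValidation).
print(requests.post(f"{BASE}/user/validate", json={"token": "token-123"}).json())

# Save the current page for that user; the token travels in the x-auth-token header.
print(requests.post(
    f"{BASE}/user/save",
    json={"html": "<html><body>Example</body></html>", "url": "https://example.com"},
    headers={"x-auth-token": "token-123"},
).json())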