added supabase
This commit is contained in:
parent
a3535bb5c5
commit
00e6d71727
0
py_server/db/__init__.py
Normal file
0
py_server/db/__init__.py
Normal file
25
py_server/db/supa.py
Normal file
25
py_server/db/supa.py
Normal file
@ -0,0 +1,25 @@
|
||||
import supabase
|
||||
import os
|
||||
|
||||
class SupaService:
    """Thin wrapper around the Supabase client for user lookup and page storage.

    Connection settings come from the SUPABASE_URL / SUPABASE_KEY environment
    variables; ``supabase.create_client`` fails fast at construction time if
    they are missing, surfacing misconfiguration at startup.
    """

    def __init__(self):
        # os.environ.get returns None when unset — create_client raises in
        # that case rather than silently connecting nowhere.
        self.supabase_url = os.environ.get("SUPABASE_URL")
        self.supabase_key = os.environ.get("SUPABASE_KEY")
        self.supabase = supabase.create_client(self.supabase_url, self.supabase_key)

    def validate_user(self, token):
        """Look up users whose ``access_token`` equals *token*.

        Returns the raw query result; callers inspect ``result.data``
        (an empty list means the token is unknown).
        """
        user = self.supabase.table("User").select("*").eq("access_token", token).execute()
        return user

    def save_webiste(self, title: str, icon: str, html: str, url: str, user_id: str):
        """Insert one row into the Website table and return the insert result.

        NOTE(review): the name is misspelled but kept because existing
        callers use it; new code should call :meth:`save_website`.
        """
        result = self.supabase.table("Website").insert({
            "title": title,
            "icon": icon,
            "html": html,
            "url": url,
            "user_id": user_id,
        }).execute()
        return result

    def save_website(self, title: str, icon: str, html: str, url: str, user_id: str):
        """Correctly spelled alias for :meth:`save_webiste` (same behavior)."""
        return self.save_webiste(title=title, icon=icon, html=html, url=url, user_id=user_id)
|
0
py_server/handlers/__init__.py
Normal file
0
py_server/handlers/__init__.py
Normal file
79
py_server/handlers/chat.py
Normal file
79
py_server/handlers/chat.py
Normal file
@ -0,0 +1,79 @@
|
||||
from models import ChatBody
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
from langchain.docstore.document import Document as LDocument
|
||||
from langchain.vectorstores.faiss import FAISS
|
||||
from langchain.embeddings.openai import OpenAIEmbeddings
|
||||
from langchain.llms import OpenAI
|
||||
from langchain.text_splitter import CharacterTextSplitter
|
||||
from langchain.chains import ConversationalRetrievalChain
|
||||
from langchain.prompts.chat import (
|
||||
ChatPromptTemplate,
|
||||
SystemMessagePromptTemplate,
|
||||
HumanMessagePromptTemplate
|
||||
)
|
||||
|
||||
def _strip_injected_nodes(soup):
    """Remove DOM nodes injected by the extension itself (chat iframe,
    launcher icon, plasmo loader) so they don't leak into the page text."""
    iframe = soup.find('iframe', id='pageassist-iframe')
    if iframe:
        iframe.decompose()
    for div_id in ('pageassist-icon', '__plasmo-loading__'):
        div = soup.find('div', id=div_id)
        if div:
            div.decompose()


async def chat_extension_handler(body: ChatBody):
    """Answer ``body.user_message`` using the text of the page in ``body.html``.

    Pipeline: strip extension-injected markup -> extract visible text ->
    split into 1000-char chunks -> embed into an in-memory FAISS index ->
    run a ConversationalRetrievalChain with ``body.history`` as chat history.

    Returns a dict with ``bot_response`` and ``human_message``.  This is a
    best-effort endpoint: any failure is logged and turned into a generic
    error message instead of raising.
    """
    try:
        soup = BeautifulSoup(body.html, 'lxml')
        _strip_injected_nodes(soup)
        text = soup.get_text()

        result = [LDocument(page_content=text, metadata={"source": "test"})]
        token_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
        doc = token_splitter.split_documents(result)

        print(f'Number of documents: {len(doc)}')

        vectorstore = FAISS.from_documents(doc, OpenAIEmbeddings())

        # Prompt typos fixed ("witu" -> "with", "resonpond" -> "respond",
        # "othwerwise" -> "otherwise"); the exact refusal phrase
        # "I don't know the answer " is kept verbatim in case the client
        # matches on it.
        messages = [
            SystemMessagePromptTemplate.from_template("""I want you to act as a webpage that I am having a conversation with. Your name is "OpenChatX". You will provide me with answers from the given text from webpage. Your answer should be original, concise, accurate, and helpful. You can recommend, translate and can do anything based on the context given. If the answer is not included in the text and you know the answer you can respond the answer otherwise say exactly "I don't know the answer " and stop after that. Never break character. Answer must be in markdown format.
-----------------
{context}
"""),
            HumanMessagePromptTemplate.from_template("{question}")
        ]

        prompt = ChatPromptTemplate.from_messages(messages)

        chat = ConversationalRetrievalChain.from_llm(
            OpenAI(temperature=0, model_name="gpt-3.5-turbo"),
            vectorstore.as_retriever(),
            return_source_documents=True,
            qa_prompt=prompt,
        )

        history = [(d["human_message"], d["bot_response"]) for d in body.history]
        print(history)

        response = chat({
            "question": body.user_message,
            "chat_history": history
        })

        # Some replies come back prefixed "<speaker>:"; keep only the text
        # after the first colon.  str.find returns -1 when no colon exists,
        # so answer[0:] (the whole string) is used in that case.
        answer = response["answer"]
        answer = answer[answer.find(":") + 1:].strip()

        return {
            "bot_response": answer,
            "human_message": body.user_message,
        }
    except Exception as e:
        # Deliberate best-effort fallback: log and degrade gracefully
        # instead of surfacing a 500 to the browser extension.
        print(e)
        return {
            "bot_response": "Something went wrong please try again later",
            "human_message": body.user_message,
        }
|
||||
|
57
py_server/handlers/user.py
Normal file
57
py_server/handlers/user.py
Normal file
@ -0,0 +1,57 @@
|
||||
from fastapi import HTTPException, Header
|
||||
from models import UserValidation, SaveChatToApp
|
||||
from db.supa import SupaService
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
|
||||
supabase = SupaService()
|
||||
|
||||
async def validate_user_handler(user: UserValidation):
    """Check that the supplied access token belongs to a known user.

    Raises HTTPException(400) when the token is missing or unknown;
    otherwise returns a ``{"status": "success"}`` payload.
    """
    token = user.token
    if token is None or token == "":
        raise HTTPException(status_code=400, detail="Token is required")

    # Avoid rebinding `user` (the request model) with the DB result.
    lookup = supabase.validate_user(token)
    if len(lookup.data) == 0:
        raise HTTPException(status_code=400, detail="Invalid token")

    return {"status": "success"}
|
||||
|
||||
|
||||
async def save_website_handler(body: SaveChatToApp, x_auth_token):
    """Persist the cleaned text of a page for the authenticated user.

    Validates ``x_auth_token`` against the User table, strips
    extension-injected markup from ``body.html``, then stores the page's
    visible text plus title/icon/url via SupaService.

    Raises HTTPException 400 for a missing or invalid token and 500 for
    any unexpected failure.
    """
    # Auth checks live OUTSIDE the try block: previously the blanket
    # `except Exception` also caught these HTTPExceptions and remapped the
    # intended 400 responses to a generic 500.
    if x_auth_token is None or x_auth_token == "":
        raise HTTPException(status_code=400, detail="Token is required")

    user = supabase.validate_user(x_auth_token)
    data = user.data
    if len(data) == 0:
        raise HTTPException(status_code=400, detail="Invalid token")

    try:
        soup = BeautifulSoup(body.html, 'lxml')

        title = soup.title.string if soup.title else "Untitled Page"
        # Look the icon <link> up once instead of twice.
        icon_link = soup.find('link', rel='icon')
        icon = icon_link.get('href') if icon_link else None

        # Drop DOM nodes injected by the extension itself so they don't
        # pollute the stored text.
        iframe = soup.find('iframe', id='pageassist-iframe')
        if iframe:
            iframe.decompose()
        div = soup.find('div', id='pageassist-icon')
        if div:
            div.decompose()
        div = soup.find('div', id='__plasmo-loading__')
        if div:
            div.decompose()
        text = soup.get_text()

        # NOTE(review): SupaService's method name is misspelled
        # ("save_webiste"); the call matches the service's current interface.
        result = supabase.save_webiste(html=text, title=title, icon=icon,
                                       url=body.url, user_id=data[0]["id"])

        return {
            "status": "Success"
        }
    except Exception as e:
        # Chain the cause so server logs show what actually failed.
        raise HTTPException(status_code=500, detail="Internal server error") from e
|
@ -1,23 +1,8 @@
|
||||
from fastapi import FastAPI, Header, UploadFile, File
|
||||
from fastapi import FastAPI
|
||||
import os
|
||||
from pydantic import BaseModel
|
||||
from uvicorn import run
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
from langchain.docstore.document import Document as LDocument
|
||||
from langchain.vectorstores.faiss import FAISS
|
||||
from langchain.embeddings.openai import OpenAIEmbeddings
|
||||
from langchain.llms import OpenAI
|
||||
from langchain.text_splitter import CharacterTextSplitter
|
||||
from langchain.chains import ConversationalRetrievalChain
|
||||
from langchain.prompts.chat import (
|
||||
ChatPromptTemplate,
|
||||
SystemMessagePromptTemplate,
|
||||
HumanMessagePromptTemplate
|
||||
)
|
||||
|
||||
|
||||
from routers import chat, user
|
||||
|
||||
os.environ["OPENAI_API_KEY"] = os.environ.get("OPENAI_API_KEY")
|
||||
|
||||
@ -36,79 +21,9 @@ app.add_middleware(
|
||||
allow_headers=headers
|
||||
)
|
||||
|
||||
app.include_router(chat.router)
|
||||
|
||||
class ChatBody(BaseModel):
|
||||
user_message: str
|
||||
html: str
|
||||
history: list
|
||||
|
||||
|
||||
@app.post("/chat")
|
||||
async def chat(body: ChatBody):
|
||||
try:
|
||||
soup = BeautifulSoup(body.html, 'lxml')
|
||||
|
||||
iframe = soup.find('iframe', id='pageassist-iframe')
|
||||
if iframe:
|
||||
iframe.decompose()
|
||||
div = soup.find('div', id='pageassist-icon')
|
||||
if div:
|
||||
div.decompose()
|
||||
div = soup.find('div', id='__plasmo-loading__')
|
||||
if div:
|
||||
div.decompose()
|
||||
text = soup.get_text()
|
||||
|
||||
result = [LDocument(page_content=text, metadata={"source": "test"})]
|
||||
token_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
|
||||
doc = token_splitter.split_documents(result)
|
||||
|
||||
print(f'Number of documents: {len(doc)}')
|
||||
|
||||
|
||||
vectorstore = FAISS.from_documents(doc, OpenAIEmbeddings())
|
||||
|
||||
|
||||
messages = [
|
||||
SystemMessagePromptTemplate.from_template("""I want you to act as a webpage that I am having a conversation witu. Your name is "OpenChatX". You will provide me with answers from the given text from webpage. Your answer should be original, concise, accurate, and helpful. You can recommend, translate and can do anything based on the context given. If the answer is not included in the text and you know the answer you can resonpond the answer othwerwise say exactly "I don't know the answer " and stop after that. Never break character. Answer must be in markdown format.
|
||||
-----------------
|
||||
{context}
|
||||
"""),
|
||||
HumanMessagePromptTemplate.from_template("{question}")
|
||||
]
|
||||
|
||||
prompt = ChatPromptTemplate.from_messages(messages)
|
||||
|
||||
|
||||
chat = ConversationalRetrievalChain.from_llm(OpenAI(temperature=0, model_name="gpt-3.5-turbo"), vectorstore.as_retriever(), return_source_documents=True, qa_prompt=prompt,)
|
||||
|
||||
history = [(d["human_message"], d["bot_response"]) for d in body.history]
|
||||
|
||||
print(history)
|
||||
|
||||
response = chat({
|
||||
"question": body.user_message,
|
||||
"chat_history": history
|
||||
})
|
||||
|
||||
|
||||
answer = response["answer"]
|
||||
answer = answer[answer.find(":")+1:].strip()
|
||||
|
||||
|
||||
return {
|
||||
"bot_response": answer,
|
||||
"human_message": body.user_message,
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
print(e)
|
||||
return {
|
||||
"bot_response": "Something went wrong please try again later",
|
||||
"human_message": body.user_message,
|
||||
}
|
||||
|
||||
|
||||
app.include_router(user.router)
|
||||
|
||||
if __name__ == "__main__":
|
||||
port = int(os.environ.get('PORT', 5000))
|
||||
|
2
py_server/models/__init__.py
Normal file
2
py_server/models/__init__.py
Normal file
@ -0,0 +1,2 @@
|
||||
from .chat import ChatBody
|
||||
from .user import UserValidation, SaveChatToApp
|
7
py_server/models/chat.py
Normal file
7
py_server/models/chat.py
Normal file
@ -0,0 +1,7 @@
|
||||
from pydantic import BaseModel
|
||||
|
||||
class ChatBody(BaseModel):
    """Request payload for the chat endpoints."""

    # Latest message typed by the user.
    user_message: str
    # Raw HTML of the page the user is chatting about.
    html: str
    # Prior turns; each item presumably holds "human_message" /
    # "bot_response" keys (the chat handler indexes them that way).
    history: list
    # url: str
|
11
py_server/models/user.py
Normal file
11
py_server/models/user.py
Normal file
@ -0,0 +1,11 @@
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class UserValidation(BaseModel):
    """Request body for POST /api/v1/user/validate."""

    # Supabase access token checked against the User table.
    token: str
|
||||
|
||||
|
||||
|
||||
class SaveChatToApp(BaseModel):
    """Request body for POST /api/v1/user/save."""

    # Raw page HTML; the server extracts the visible text before storing.
    html: str
    # URL of the page being saved.
    url: str
|
@ -8,4 +8,5 @@ numpy
|
||||
pydantic
|
||||
langchain
|
||||
lxml
|
||||
faiss-cpu
|
||||
faiss-cpu
|
||||
supabase
|
0
py_server/routers/__init__.py
Normal file
0
py_server/routers/__init__.py
Normal file
9
py_server/routers/chat.py
Normal file
9
py_server/routers/chat.py
Normal file
@ -0,0 +1,9 @@
|
||||
from fastapi import APIRouter
|
||||
from models import ChatBody
|
||||
from handlers.chat import chat_extension_handler
|
||||
|
||||
router = APIRouter(prefix="/api/v1")
|
||||
|
||||
@router.post("/chat/chrome", tags=["chat"])
async def chat_extension(body: ChatBody):
    # Route shim: all chat logic lives in handlers.chat.
    return await chat_extension_handler(body)
|
15
py_server/routers/user.py
Normal file
15
py_server/routers/user.py
Normal file
@ -0,0 +1,15 @@
|
||||
from fastapi import APIRouter, Header
|
||||
from models import UserValidation, SaveChatToApp
|
||||
from handlers.user import validate_user_handler, save_website_handler
|
||||
|
||||
|
||||
router = APIRouter(prefix="/api/v1")
|
||||
|
||||
@router.post("/user/validate", tags=["user"])
async def validate_user(user: UserValidation):
    # Route shim: token validation logic lives in handlers.user.
    return await validate_user_handler(user)
|
||||
|
||||
|
||||
@router.post("/user/save", tags=["user"])
async def save_website(body: SaveChatToApp, x_auth_token: str = Header(None)):
    # x-auth-token header defaults to None; the handler rejects missing/empty.
    return await save_website_handler(body, x_auth_token)
|
Loading…
x
Reference in New Issue
Block a user