Remove console.log statements and unused dependencies

n4ze3m 2024-03-02 19:50:51 +05:30
parent 88e7e7521b
commit e0c2c0c745
8 changed files with 167 additions and 182 deletions

View File

@@ -21,7 +21,6 @@
"@tailwindcss/forms": "^0.5.7", "@tailwindcss/forms": "^0.5.7",
"@tailwindcss/typography": "^0.5.10", "@tailwindcss/typography": "^0.5.10",
"@tanstack/react-query": "^5.17.19", "@tanstack/react-query": "^5.17.19",
"@types/pdf-parse": "^1.1.4",
"antd": "^5.13.3", "antd": "^5.13.3",
"axios": "^1.6.7", "axios": "^1.6.7",
"dayjs": "^1.11.10", "dayjs": "^1.11.10",
@@ -39,7 +38,6 @@
"rehype-mathjax": "4.0.3", "rehype-mathjax": "4.0.3",
"remark-gfm": "3.0.1", "remark-gfm": "3.0.1",
"remark-math": "5.1.1", "remark-math": "5.1.1",
"voy-search": "^0.6.3",
"zustand": "^4.5.0" "zustand": "^4.5.0"
}, },
"devDependencies": { "devDependencies": {

View File

@@ -96,7 +96,6 @@ chrome.runtime.onMessage.addListener(async (message) => {
clearBadge() clearBadge()
}, 5000) }, 5000)
} }
console.log("Pulling model", message.modelName)
await streamDownload(ollamaURL, message.modelName) await streamDownload(ollamaURL, message.modelName)
} }

View File

@@ -28,8 +28,7 @@ export const PlaygroundForm = ({ dropedFile }: Props) => {
initialValues: { initialValues: {
message: "", message: "",
image: "" image: ""
}, }
}) })
React.useEffect(() => { React.useEffect(() => {
@@ -93,6 +92,43 @@ export const PlaygroundForm = ({ dropedFile }: Props) => {
} }
}) })
const handleKeyDown = (e: KeyboardEvent) => {
if (e.key === "Process" || e.key === "229") return
if (
e.key === "Enter" &&
!e.shiftKey &&
!isSending &&
sendWhenEnter &&
!e.isComposing
) {
e.preventDefault()
form.onSubmit(async (value) => {
if (value.message.trim().length === 0) {
return
}
if (!selectedModel || selectedModel.length === 0) {
form.setFieldError("message", "Please select a model")
return
}
if (webSearch) {
const defaultEM = await defaultEmbeddingModelForRag()
if (!defaultEM) {
form.setFieldError(
"message",
"Please set an embedding model on the Settings > Ollama page"
)
return
}
}
form.reset()
textAreaFocus()
await sendMessage({
image: value.image,
message: value.message.trim()
})
})()
}
}
return ( return (
<div className="px-3 pt-3 md:px-6 md:pt-6 md:bg-white dark:bg-[#262626] border rounded-t-xl border-black/10 dark:border-gray-600"> <div className="px-3 pt-3 md:px-6 md:pt-6 md:bg-white dark:bg-[#262626] border rounded-t-xl border-black/10 dark:border-gray-600">
<div <div
@@ -140,7 +176,6 @@ export const PlaygroundForm = ({ dropedFile }: Props) => {
image: value.image, image: value.image,
message: value.message.trim() message: value.message.trim()
}) })
})} })}
className="shrink-0 flex-grow flex flex-col items-center "> className="shrink-0 flex-grow flex flex-col items-center ">
<input <input
@@ -155,41 +190,7 @@ export const PlaygroundForm = ({ dropedFile }: Props) => {
/> />
<div className="w-full border-x border-t flex flex-col dark:border-gray-600 rounded-t-xl p-2"> <div className="w-full border-x border-t flex flex-col dark:border-gray-600 rounded-t-xl p-2">
<textarea <textarea
onKeyDown={(e) => { onKeyDown={(e) => handleKeyDown(e as unknown as KeyboardEvent)}
if (
e.key === "Enter" &&
!e.shiftKey &&
!isSending &&
sendWhenEnter
) {
e.preventDefault()
form.onSubmit(async (value) => {
if (value.message.trim().length === 0) {
return
}
if (!selectedModel || selectedModel.length === 0) {
form.setFieldError("message", "Please select a model")
return
}
if (webSearch) {
const defaultEM = await defaultEmbeddingModelForRag()
if (!defaultEM) {
form.setFieldError(
"message",
"Please set an embedding model on the Settings > Ollama page"
)
return
}
}
form.reset()
textAreaFocus()
await sendMessage({
image: value.image,
message: value.message.trim()
})
})()
}
}}
ref={textareaRef} ref={textareaRef}
className="px-2 py-2 w-full resize-none bg-transparent focus-within:outline-none focus:ring-0 focus-visible:ring-0 ring-0 dark:ring-0 border-0 dark:text-gray-100" className="px-2 py-2 w-full resize-none bg-transparent focus-within:outline-none focus:ring-0 focus-visible:ring-0 ring-0 dark:ring-0 border-0 dark:text-gray-100"
required required
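The hunks above move the Enter-to-send logic out of the inline onKeyDown handler into a shared handleKeyDown and add an IME guard (the "Process"/"229" key values plus isComposing). A minimal standalone sketch of that guard pattern, assuming a submitMessage callback that is not part of the commit:

const handleEnterToSend = (
  e: KeyboardEvent,
  canSend: boolean,
  submitMessage: () => void
) => {
  // Ignore keystrokes produced during IME composition
  // (isComposing, or the "Process"/"229" key values the commit also checks).
  if (e.isComposing || e.key === "Process" || e.key === "229") return
  // Plain Enter submits; Shift+Enter falls through and inserts a newline.
  if (e.key === "Enter" && !e.shiftKey && canSend) {
    e.preventDefault()
    submitMessage()
  }
}

In the form components above, canSend corresponds to !isSending && sendWhenEnter, and submitMessage wraps the form.onSubmit(...)() call shown in the added block.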

View File

@@ -72,6 +72,44 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
} }
}) })
const handleKeyDown = (e: KeyboardEvent) => {
if (e.key === "Process" || e.key === "229") return
if (
e.key === "Enter" &&
!e.shiftKey &&
!isSending &&
sendWhenEnter &&
!e.isComposing
) {
e.preventDefault()
form.onSubmit(async (value) => {
if (value.message.trim().length === 0) {
return
}
if (!selectedModel || selectedModel.length === 0) {
form.setFieldError("message", "Please select a model")
return
}
if (chatMode === "rag") {
const defaultEM = await defaultEmbeddingModelForRag()
if (!defaultEM) {
form.setFieldError(
"message",
"Please set an embedding model on the settings page"
)
return
}
}
form.reset()
textAreaFocus()
await sendMessage({
image: value.image,
message: value.message.trim()
})
})()
}
}
return ( return (
<div className="px-3 pt-3 md:px-6 md:pt-6 md:bg-white dark:bg-[#262626] border rounded-t-xl border-black/10 dark:border-gray-600"> <div className="px-3 pt-3 md:px-6 md:pt-6 md:bg-white dark:bg-[#262626] border rounded-t-xl border-black/10 dark:border-gray-600">
<div <div
@@ -133,41 +171,7 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
/> />
<div className="w-full border-x border-t flex flex-col dark:border-gray-600 rounded-t-xl p-2"> <div className="w-full border-x border-t flex flex-col dark:border-gray-600 rounded-t-xl p-2">
<textarea <textarea
onKeyDown={(e) => { onKeyDown={(e) => handleKeyDown(e as unknown as KeyboardEvent)}
if (
e.key === "Enter" &&
!e.shiftKey &&
!isSending &&
sendWhenEnter
) {
e.preventDefault()
form.onSubmit(async (value) => {
if (value.message.trim().length === 0) {
return
}
if (!selectedModel || selectedModel.length === 0) {
form.setFieldError("message", "Please select a model")
return
}
if (chatMode === "rag") {
const defaultEM = await defaultEmbeddingModelForRag()
if (!defaultEM) {
form.setFieldError(
"message",
"Please set an embedding model on the settings page"
)
return
}
}
form.reset()
textAreaFocus()
await sendMessage({
image: value.image,
message: value.message.trim()
})
})()
}
}}
ref={textareaRef} ref={textareaRef}
className="px-2 py-2 w-full resize-none bg-transparent focus-within:outline-none focus:ring-0 focus-visible:ring-0 ring-0 dark:ring-0 border-0 dark:text-gray-100" className="px-2 py-2 w-full resize-none bg-transparent focus-within:outline-none focus:ring-0 focus-visible:ring-0 ring-0 dark:ring-0 border-0 dark:text-gray-100"
required required

View File

@@ -250,7 +250,6 @@ export const useMessage = () => {
setIsProcessing(false) setIsProcessing(false)
} catch (e) { } catch (e) {
console.log(e)
setIsProcessing(false) setIsProcessing(false)
setStreaming(false) setStreaming(false)
@@ -388,7 +387,6 @@ ${e?.message}
setIsProcessing(false) setIsProcessing(false)
} catch (e) { } catch (e) {
console.log(e)
setIsProcessing(false) setIsProcessing(false)
setStreaming(false) setStreaming(false)

View File

@@ -310,7 +310,7 @@ export const useMessageOption = () => {
setIsProcessing(false) setIsProcessing(false)
setStreaming(false) setStreaming(false)
} catch (e) { } catch (e) {
console.log(e) (e)
if (e?.name === "AbortError") { if (e?.name === "AbortError") {
newMessage[appendingIndex].message = newMessage[ newMessage[appendingIndex].message = newMessage[
@@ -526,7 +526,6 @@ export const useMessageOption = () => {
setIsProcessing(false) setIsProcessing(false)
setStreaming(false) setStreaming(false)
} catch (e) { } catch (e) {
console.log(e)
if (e?.name === "AbortError") { if (e?.name === "AbortError") {
newMessage[appendingIndex].message = newMessage[ newMessage[appendingIndex].message = newMessage[
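Both hunks in this file drop console.log from the catch path while keeping the AbortError branch that preserves the partially streamed reply. A hedged sketch of that shape, slightly restructured with finally and with streamResponse/setStreaming/setError as assumed placeholders rather than the hook's real internals:

async function runGeneration(
  streamResponse: () => Promise<void>,
  setStreaming: (value: boolean) => void,
  setError: (message: string) => void
) {
  try {
    await streamResponse()
  } catch (e: any) {
    if (e?.name === "AbortError") {
      // Generation was cancelled by the user; keep whatever text already streamed in.
      return
    }
    // Report other failures through UI state instead of the console.
    setError(e?.message ?? String(e))
  } finally {
    setStreaming(false)
  }
}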

View File

@@ -1,176 +1,172 @@
import { useRef, useEffect, useState, useCallback } from "react"; import { useRef, useEffect, useState, useCallback } from "react"
type SpeechRecognitionEvent = { type SpeechRecognitionEvent = {
results: SpeechRecognitionResultList; results: SpeechRecognitionResultList
resultIndex: number; resultIndex: number
}; }
declare global { declare global {
interface SpeechRecognitionErrorEvent extends Event { interface SpeechRecognitionErrorEvent extends Event {
error: string; error: string
} }
interface Window { interface Window {
SpeechRecognition: any; SpeechRecognition: any
webkitSpeechRecognition: any; webkitSpeechRecognition: any
} }
} }
type SpeechRecognition = { type SpeechRecognition = {
lang: string; lang: string
interimResults: boolean; interimResults: boolean
continuous: boolean; continuous: boolean
maxAlternatives: number; maxAlternatives: number
grammars: any; grammars: any
onresult: (event: SpeechRecognitionEvent) => void; onresult: (event: SpeechRecognitionEvent) => void
onerror: (event: Event) => void; onerror: (event: Event) => void
onend: () => void; onend: () => void
start: () => void; start: () => void
stop: () => void; stop: () => void
}; }
type SpeechRecognitionProps = { type SpeechRecognitionProps = {
onEnd?: () => void; onEnd?: () => void
onResult?: (transcript: string) => void; onResult?: (transcript: string) => void
onError?: (event: Event) => void; onError?: (event: Event) => void
}; }
type ListenArgs = { type ListenArgs = {
lang?: string; lang?: string
interimResults?: boolean; interimResults?: boolean
continuous?: boolean; continuous?: boolean
maxAlternatives?: number; maxAlternatives?: number
grammars?: any; grammars?: any
}; }
type SpeechRecognitionHook = { type SpeechRecognitionHook = {
start: (args?: ListenArgs) => void; start: (args?: ListenArgs) => void
isListening: boolean; isListening: boolean
stop: () => void; stop: () => void
supported: boolean; supported: boolean
transcript: string; transcript: string
}; }
const useEventCallback = <T extends (...args: any[]) => any>( const useEventCallback = <T extends (...args: any[]) => any>(
fn: T, fn: T,
dependencies: any[] dependencies: any[]
) => { ) => {
const ref = useRef<T>(); const ref = useRef<T>()
useEffect(() => { useEffect(() => {
ref.current = fn; ref.current = fn
}, [fn, ...dependencies]); }, [fn, ...dependencies])
return useCallback( return useCallback(
(...args: Parameters<T>) => { (...args: Parameters<T>) => {
const fn = ref.current; const fn = ref.current
return fn!(...args); return fn!(...args)
}, },
[ref] [ref]
); )
}; }
export const useSpeechRecognition = ( export const useSpeechRecognition = (
props: SpeechRecognitionProps = {} props: SpeechRecognitionProps = {}
): SpeechRecognitionHook => { ): SpeechRecognitionHook => {
const { onEnd = () => {}, onResult = () => {}, onError = () => {} } = props; const { onEnd = () => {}, onResult = () => {}, onError = () => {} } = props
const recognition = useRef<SpeechRecognition | null>(null); const recognition = useRef<SpeechRecognition | null>(null)
const [listening, setListening] = useState<boolean>(false); const [listening, setListening] = useState<boolean>(false)
const [supported, setSupported] = useState<boolean>(false); const [supported, setSupported] = useState<boolean>(false)
const [liveTranscript, setLiveTranscript] = useState<string>(""); const [liveTranscript, setLiveTranscript] = useState<string>("")
useEffect(() => { useEffect(() => {
if (typeof window === "undefined") return; if (typeof window === "undefined") return
window.SpeechRecognition = window.SpeechRecognition =
window.SpeechRecognition || window.webkitSpeechRecognition; window.SpeechRecognition || window.webkitSpeechRecognition
console.log("window.SpeechRecognition", window.SpeechRecognition);
if (window.SpeechRecognition) { if (window.SpeechRecognition) {
setSupported(true); setSupported(true)
recognition.current = new window.SpeechRecognition(); recognition.current = new window.SpeechRecognition()
} }
}, []); }, [])
const processResult = (event: SpeechRecognitionEvent) => { const processResult = (event: SpeechRecognitionEvent) => {
const transcript = Array.from(event.results) const transcript = Array.from(event.results)
.map((result) => result[0]) .map((result) => result[0])
.map((result) => result.transcript) .map((result) => result.transcript)
.join(""); .join("")
onResult(transcript); onResult(transcript)
}; }
const handleError = (event: Event) => { const handleError = (event: Event) => {
if ((event as SpeechRecognitionErrorEvent).error === "not-allowed") { if ((event as SpeechRecognitionErrorEvent).error === "not-allowed") {
if (recognition.current) { if (recognition.current) {
recognition.current.onend = null; recognition.current.onend = null
} }
setListening(false); setListening(false)
} }
onError(event); onError(event)
}; }
const listen = useEventCallback( const listen = useEventCallback(
(args: ListenArgs = {}) => { (args: ListenArgs = {}) => {
if (listening || !supported) return; if (listening || !supported) return
const { const {
lang = "", lang = "",
interimResults = true, interimResults = true,
continuous = false, continuous = false,
maxAlternatives = 1, maxAlternatives = 1,
grammars, grammars
} = args; } = args
setListening(true); setListening(true)
setLiveTranscript(""); setLiveTranscript("")
if (recognition.current) { if (recognition.current) {
recognition.current.lang = lang; recognition.current.lang = lang
recognition.current.interimResults = interimResults; recognition.current.interimResults = interimResults
recognition.current.onresult = (event) => { recognition.current.onresult = (event) => {
processResult(event); processResult(event)
const transcript = Array.from(event.results) const transcript = Array.from(event.results)
.map((result) => result[0]) .map((result) => result[0])
.map((result) => result.transcript) .map((result) => result.transcript)
.join(""); .join("")
setLiveTranscript(transcript); setLiveTranscript(transcript)
}; }
recognition.current.onerror = handleError; recognition.current.onerror = handleError
recognition.current.continuous = continuous; recognition.current.continuous = continuous
recognition.current.maxAlternatives = maxAlternatives; recognition.current.maxAlternatives = maxAlternatives
if (grammars) { if (grammars) {
recognition.current.grammars = grammars; recognition.current.grammars = grammars
} }
recognition.current.onend = () => { recognition.current.onend = () => {
if (recognition.current) { if (recognition.current) {
recognition.current.start(); recognition.current.start()
} }
}; }
if (recognition.current) { if (recognition.current) {
recognition.current.start(); recognition.current.start()
} }
} }
}, },
[listening, supported, recognition] [listening, supported, recognition]
); )
const stop = useEventCallback(() => { const stop = useEventCallback(() => {
if (!listening || !supported) return; if (!listening || !supported) return
if (recognition.current) { if (recognition.current) {
recognition.current.onresult = null; recognition.current.onresult = null
recognition.current.onend = null; recognition.current.onend = null
recognition.current.onerror = null; recognition.current.onerror = null
setListening(false); setListening(false)
recognition.current.stop(); recognition.current.stop()
} }
onEnd(); onEnd()
}, [listening, supported, recognition, onEnd]); }, [listening, supported, recognition, onEnd])
return { return {
start: listen, start: listen,
isListening: listening, isListening: listening,
stop, stop,
supported, supported,
transcript: liveTranscript, transcript: liveTranscript
}; }
}; }
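Apart from dropping the window.SpeechRecognition console.log, the hook above only changes formatting, so its public surface (start, stop, isListening, supported, transcript) is untouched. A hedged usage sketch of that surface; the component name and import path are illustrative, not part of the commit:

import React from "react"
// Import path is assumed; point it at wherever useSpeechRecognition lives in the project.
import { useSpeechRecognition } from "@/hooks/useSpeechRecognition"

export const DictationButton = () => {
  const [finalText, setFinalText] = React.useState("")
  const { start, stop, isListening, supported, transcript } = useSpeechRecognition({
    onResult: (t) => setFinalText(t)
  })

  if (!supported) {
    return <p>Speech recognition is not available in this browser.</p>
  }

  return (
    <div>
      <button
        onClick={() =>
          isListening ? stop() : start({ lang: "en-US", interimResults: true })
        }>
        {isListening ? "Stop" : "Dictate"}
      </button>
      {/* transcript mirrors the hook's liveTranscript state while listening */}
      <p>{transcript || finalText}</p>
    </div>
  )
}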

View File

@@ -2456,11 +2456,6 @@
resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.2.tgz#5950e50960793055845e956c427fc2b0d70c5239" resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.2.tgz#5950e50960793055845e956c427fc2b0d70c5239"
integrity sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw== integrity sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==
"@types/pdf-parse@^1.1.4":
version "1.1.4"
resolved "https://registry.yarnpkg.com/@types/pdf-parse/-/pdf-parse-1.1.4.tgz#21a539efd2f16009d08aeed3350133948b5d7ed1"
integrity sha512-+gbBHbNCVGGYw1S9lAIIvrHW47UYOhMIFUsJcMkMrzy1Jf0vulBN3XQIjPgnoOXveMuHnF3b57fXROnY/Or7eg==
"@types/prop-types@*": "@types/prop-types@*":
version "15.7.11" version "15.7.11"
resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.11.tgz#2596fb352ee96a1379c657734d4b913a613ad563" resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.11.tgz#2596fb352ee96a1379c657734d4b913a613ad563"
@@ -7631,11 +7626,6 @@ vfile@^5.0.0:
unist-util-stringify-position "^3.0.0" unist-util-stringify-position "^3.0.0"
vfile-message "^3.0.0" vfile-message "^3.0.0"
voy-search@^0.6.3:
version "0.6.3"
resolved "https://registry.yarnpkg.com/voy-search/-/voy-search-0.6.3.tgz#5fed7744aac990c99a57b88bf4e7431ee6ba9ddd"
integrity sha512-GRwrXcT3Qmzr/CuwpwX55XWpgqM2hUqLipSwI8bGcfsDTJGa+mFxsOXzWHNMRpcYd+U2RP73f2USLDWQu5yFdQ==
vue@3.3.4: vue@3.3.4:
version "3.3.4" version "3.3.4"
resolved "https://registry.yarnpkg.com/vue/-/vue-3.3.4.tgz#8ed945d3873667df1d0fcf3b2463ada028f88bd6" resolved "https://registry.yarnpkg.com/vue/-/vue-3.3.4.tgz#8ed945d3873667df1d0fcf3b2463ada028f88bd6"