Remove console.log statements and unused dependencies
This commit is contained in:
parent 88e7e7521b
commit e0c2c0c745
@@ -21,7 +21,6 @@
"@tailwindcss/forms": "^0.5.7",
"@tailwindcss/typography": "^0.5.10",
"@tanstack/react-query": "^5.17.19",
"@types/pdf-parse": "^1.1.4",
"antd": "^5.13.3",
"axios": "^1.6.7",
"dayjs": "^1.11.10",
@@ -39,7 +38,6 @@
"rehype-mathjax": "4.0.3",
"remark-gfm": "3.0.1",
"remark-math": "5.1.1",
"voy-search": "^0.6.3",
"zustand": "^4.5.0"
},
"devDependencies": {
@@ -96,7 +96,6 @@ chrome.runtime.onMessage.addListener(async (message) => {
clearBadge()
}, 5000)
}
console.log("Pulling model", message.modelName)

await streamDownload(ollamaURL, message.modelName)
}
@@ -28,8 +28,7 @@ export const PlaygroundForm = ({ dropedFile }: Props) => {
initialValues: {
message: "",
image: ""
},

}
})

React.useEffect(() => {
@@ -93,6 +92,43 @@ export const PlaygroundForm = ({ dropedFile }: Props) => {
}
})

const handleKeyDown = (e: KeyboardEvent) => {
if (e.key === "Process" || e.key === "229") return
if (
e.key === "Enter" &&
!e.shiftKey &&
!isSending &&
sendWhenEnter &&
!e.isComposing
) {
e.preventDefault()
form.onSubmit(async (value) => {
if (value.message.trim().length === 0) {
return
}
if (!selectedModel || selectedModel.length === 0) {
form.setFieldError("message", "Please select a model")
return
}
if (webSearch) {
const defaultEM = await defaultEmbeddingModelForRag()
if (!defaultEM) {
form.setFieldError(
"message",
"Please set an embedding model on the Settings > Ollama page"
)
return
}
}
form.reset()
textAreaFocus()
await sendMessage({
image: value.image,
message: value.message.trim()
})
})()
}
}
return (
<div className="px-3 pt-3 md:px-6 md:pt-6 md:bg-white dark:bg-[#262626] border rounded-t-xl border-black/10 dark:border-gray-600">
<div
@@ -140,7 +176,6 @@ export const PlaygroundForm = ({ dropedFile }: Props) => {
image: value.image,
message: value.message.trim()
})

})}
className="shrink-0 flex-grow flex flex-col items-center ">
<input
@@ -155,41 +190,7 @@ export const PlaygroundForm = ({ dropedFile }: Props) => {
/>
<div className="w-full border-x border-t flex flex-col dark:border-gray-600 rounded-t-xl p-2">
<textarea
onKeyDown={(e) => {
if (
e.key === "Enter" &&
!e.shiftKey &&
!isSending &&
sendWhenEnter
) {
e.preventDefault()
form.onSubmit(async (value) => {
if (value.message.trim().length === 0) {
return
}
if (!selectedModel || selectedModel.length === 0) {
form.setFieldError("message", "Please select a model")
return
}
if (webSearch) {
const defaultEM = await defaultEmbeddingModelForRag()
if (!defaultEM) {
form.setFieldError(
"message",
"Please set an embedding model on the Settings > Ollama page"
)
return
}
}
form.reset()
textAreaFocus()
await sendMessage({
image: value.image,
message: value.message.trim()
})
})()
}
}}
onKeyDown={(e) => handleKeyDown(e as unknown as KeyboardEvent)}
ref={textareaRef}
className="px-2 py-2 w-full resize-none bg-transparent focus-within:outline-none focus:ring-0 focus-visible:ring-0 ring-0 dark:ring-0 border-0 dark:text-gray-100"
required
@@ -72,6 +72,44 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
}
})

const handleKeyDown = (e: KeyboardEvent) => {
if (e.key === "Process" || e.key === "229") return
if (
e.key === "Enter" &&
!e.shiftKey &&
!isSending &&
sendWhenEnter &&
!e.isComposing
) {
e.preventDefault()
form.onSubmit(async (value) => {
if (value.message.trim().length === 0) {
return
}
if (!selectedModel || selectedModel.length === 0) {
form.setFieldError("message", "Please select a model")
return
}
if (chatMode === "rag") {
const defaultEM = await defaultEmbeddingModelForRag()
if (!defaultEM) {
form.setFieldError(
"message",
"Please set an embedding model on the settings page"
)
return
}
}
form.reset()
textAreaFocus()
await sendMessage({
image: value.image,
message: value.message.trim()
})
})()
}
}

return (
<div className="px-3 pt-3 md:px-6 md:pt-6 md:bg-white dark:bg-[#262626] border rounded-t-xl border-black/10 dark:border-gray-600">
<div
@@ -133,41 +171,7 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
/>
<div className="w-full border-x border-t flex flex-col dark:border-gray-600 rounded-t-xl p-2">
<textarea
onKeyDown={(e) => {
if (
e.key === "Enter" &&
!e.shiftKey &&
!isSending &&
sendWhenEnter
) {
e.preventDefault()
form.onSubmit(async (value) => {
if (value.message.trim().length === 0) {
return
}
if (!selectedModel || selectedModel.length === 0) {
form.setFieldError("message", "Please select a model")
return
}
if (chatMode === "rag") {
const defaultEM = await defaultEmbeddingModelForRag()
if (!defaultEM) {
form.setFieldError(
"message",
"Please set an embedding model on the settings page"
)
return
}
}
form.reset()
textAreaFocus()
await sendMessage({
image: value.image,
message: value.message.trim()
})
})()
}
}}
onKeyDown={(e) => handleKeyDown(e as unknown as KeyboardEvent)}
ref={textareaRef}
className="px-2 py-2 w-full resize-none bg-transparent focus-within:outline-none focus:ring-0 focus-visible:ring-0 ring-0 dark:ring-0 border-0 dark:text-gray-100"
required
@@ -250,7 +250,6 @@ export const useMessage = () => {

setIsProcessing(false)
} catch (e) {
console.log(e)
setIsProcessing(false)
setStreaming(false)

@@ -388,7 +387,6 @@ ${e?.message}

setIsProcessing(false)
} catch (e) {
console.log(e)
setIsProcessing(false)
setStreaming(false)

@@ -310,7 +310,7 @@ export const useMessageOption = () => {
setIsProcessing(false)
setStreaming(false)
} catch (e) {
console.log(e)
(e)

if (e?.name === "AbortError") {
newMessage[appendingIndex].message = newMessage[
@@ -526,7 +526,6 @@ export const useMessageOption = () => {
setIsProcessing(false)
setStreaming(false)
} catch (e) {
console.log(e)

if (e?.name === "AbortError") {
newMessage[appendingIndex].message = newMessage[
@@ -1,176 +1,172 @@
import { useRef, useEffect, useState, useCallback } from "react";
import { useRef, useEffect, useState, useCallback } from "react"

type SpeechRecognitionEvent = {
results: SpeechRecognitionResultList;
resultIndex: number;
};
results: SpeechRecognitionResultList
resultIndex: number
}

declare global {
interface SpeechRecognitionErrorEvent extends Event {
error: string;
error: string
}
interface Window {
SpeechRecognition: any;
webkitSpeechRecognition: any;
SpeechRecognition: any
webkitSpeechRecognition: any
}
}

type SpeechRecognition = {
lang: string;
interimResults: boolean;
continuous: boolean;
maxAlternatives: number;
grammars: any;
onresult: (event: SpeechRecognitionEvent) => void;
onerror: (event: Event) => void;
onend: () => void;
start: () => void;
stop: () => void;
};
lang: string
interimResults: boolean
continuous: boolean
maxAlternatives: number
grammars: any
onresult: (event: SpeechRecognitionEvent) => void
onerror: (event: Event) => void
onend: () => void
start: () => void
stop: () => void
}

type SpeechRecognitionProps = {
onEnd?: () => void;
onResult?: (transcript: string) => void;
onError?: (event: Event) => void;
};
onEnd?: () => void
onResult?: (transcript: string) => void
onError?: (event: Event) => void
}

type ListenArgs = {
lang?: string;
interimResults?: boolean;
continuous?: boolean;
maxAlternatives?: number;
grammars?: any;
};
lang?: string
interimResults?: boolean
continuous?: boolean
maxAlternatives?: number
grammars?: any
}

type SpeechRecognitionHook = {
start: (args?: ListenArgs) => void;
isListening: boolean;
stop: () => void;
supported: boolean;
transcript: string;
};
start: (args?: ListenArgs) => void
isListening: boolean
stop: () => void
supported: boolean
transcript: string
}

const useEventCallback = <T extends (...args: any[]) => any>(
fn: T,
dependencies: any[]
) => {
const ref = useRef<T>();
const ref = useRef<T>()

useEffect(() => {
ref.current = fn;
}, [fn, ...dependencies]);
ref.current = fn
}, [fn, ...dependencies])

return useCallback(
(...args: Parameters<T>) => {
const fn = ref.current;
return fn!(...args);
const fn = ref.current
return fn!(...args)
},
[ref]
);
};
)
}

export const useSpeechRecognition = (
props: SpeechRecognitionProps = {}
): SpeechRecognitionHook => {
const { onEnd = () => {}, onResult = () => {}, onError = () => {} } = props;
const recognition = useRef<SpeechRecognition | null>(null);
const [listening, setListening] = useState<boolean>(false);
const [supported, setSupported] = useState<boolean>(false);
const [liveTranscript, setLiveTranscript] = useState<string>("");
const { onEnd = () => {}, onResult = () => {}, onError = () => {} } = props
const recognition = useRef<SpeechRecognition | null>(null)
const [listening, setListening] = useState<boolean>(false)
const [supported, setSupported] = useState<boolean>(false)
const [liveTranscript, setLiveTranscript] = useState<string>("")

useEffect(() => {
if (typeof window === "undefined") return;
if (typeof window === "undefined") return
window.SpeechRecognition =
window.SpeechRecognition || window.webkitSpeechRecognition;
console.log("window.SpeechRecognition", window.SpeechRecognition);
window.SpeechRecognition || window.webkitSpeechRecognition
if (window.SpeechRecognition) {
setSupported(true);
recognition.current = new window.SpeechRecognition();
setSupported(true)
recognition.current = new window.SpeechRecognition()
}
}, []);
}, [])

const processResult = (event: SpeechRecognitionEvent) => {
const transcript = Array.from(event.results)
.map((result) => result[0])
.map((result) => result.transcript)
.join("");
.join("")

onResult(transcript);
};
onResult(transcript)
}

const handleError = (event: Event) => {
if ((event as SpeechRecognitionErrorEvent).error === "not-allowed") {
if (recognition.current) {
recognition.current.onend = null;
recognition.current.onend = null
}
setListening(false);
setListening(false)
}
onError(event)
}
onError(event);
};

const listen = useEventCallback(
(args: ListenArgs = {}) => {
if (listening || !supported) return;
if (listening || !supported) return
const {
lang = "",
interimResults = true,
continuous = false,
maxAlternatives = 1,
grammars,
} = args;
setListening(true);
setLiveTranscript("");
grammars
} = args
setListening(true)
setLiveTranscript("")
if (recognition.current) {
recognition.current.lang = lang;
recognition.current.interimResults = interimResults;
recognition.current.lang = lang
recognition.current.interimResults = interimResults
recognition.current.onresult = (event) => {
processResult(event);
processResult(event)
const transcript = Array.from(event.results)
.map((result) => result[0])
.map((result) => result.transcript)
.join("");
setLiveTranscript(transcript);
};
recognition.current.onerror = handleError;
recognition.current.continuous = continuous;
recognition.current.maxAlternatives = maxAlternatives;
.join("")
setLiveTranscript(transcript)
}
recognition.current.onerror = handleError
recognition.current.continuous = continuous
recognition.current.maxAlternatives = maxAlternatives

if (grammars) {
recognition.current.grammars = grammars;
recognition.current.grammars = grammars
}
recognition.current.onend = () => {
if (recognition.current) {
recognition.current.start();
recognition.current.start()
}
}
};
if (recognition.current) {
recognition.current.start();
recognition.current.start()
}
}
},
[listening, supported, recognition]
);
)

const stop = useEventCallback(() => {
if (!listening || !supported) return;
if (!listening || !supported) return
if (recognition.current) {
recognition.current.onresult = null;
recognition.current.onend = null;
recognition.current.onerror = null;
setListening(false);
recognition.current.stop();
recognition.current.onresult = null
recognition.current.onend = null
recognition.current.onerror = null
setListening(false)
recognition.current.stop()
}
onEnd();
}, [listening, supported, recognition, onEnd]);
onEnd()
}, [listening, supported, recognition, onEnd])

return {
start: listen,
isListening: listening,
stop,
supported,
transcript: liveTranscript,
};
};
transcript: liveTranscript
}
}
yarn.lock
@@ -2456,11 +2456,6 @@
resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.2.tgz#5950e50960793055845e956c427fc2b0d70c5239"
integrity sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==

"@types/pdf-parse@^1.1.4":
version "1.1.4"
resolved "https://registry.yarnpkg.com/@types/pdf-parse/-/pdf-parse-1.1.4.tgz#21a539efd2f16009d08aeed3350133948b5d7ed1"
integrity sha512-+gbBHbNCVGGYw1S9lAIIvrHW47UYOhMIFUsJcMkMrzy1Jf0vulBN3XQIjPgnoOXveMuHnF3b57fXROnY/Or7eg==

"@types/prop-types@*":
version "15.7.11"
resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.11.tgz#2596fb352ee96a1379c657734d4b913a613ad563"
@@ -7631,11 +7626,6 @@ vfile@^5.0.0:
unist-util-stringify-position "^3.0.0"
vfile-message "^3.0.0"

voy-search@^0.6.3:
version "0.6.3"
resolved "https://registry.yarnpkg.com/voy-search/-/voy-search-0.6.3.tgz#5fed7744aac990c99a57b88bf4e7431ee6ba9ddd"
integrity sha512-GRwrXcT3Qmzr/CuwpwX55XWpgqM2hUqLipSwI8bGcfsDTJGa+mFxsOXzWHNMRpcYd+U2RP73f2USLDWQu5yFdQ==

vue@3.3.4:
version "3.3.4"
resolved "https://registry.yarnpkg.com/vue/-/vue-3.3.4.tgz#8ed945d3873667df1d0fcf3b2463ada028f88bd6"