// SidepanelForm — chat input form for the browser-extension side panel.
// Wires together: text entry with a dynamically sized textarea, image
// attachment (file input / clipboard paste / dropped file prop), speech-to-
// text transcription, quick-prompt insertion, and message submission through
// a react-query mutation, with Enter-to-send governed by `sendWhenEnter`.
//
// NOTE(review): this file is whitespace-mangled — the original newlines were
// collapsed into spaces (statements below are not semicolon-separated, so the
// long lines are not valid syntax as printed), and the JSX markup inside the
// `return (...)` expression (the last two lines) has had its element tags
// stripped and is truncated mid-attribute. Code is left byte-identical;
// only full-line comments are added at points outside the JSX region.
//
// Next line: all imports; `Props` (optional dropped `File`); refs and local
// state (`typing`, i18n); `useForm` with `message`/`image` fields;
// `useSpeechRecognition` wiring and `stopListening`; `onInputChange`, which
// accepts either a `File` or a file-input change event and stores the file
// as a base64 string in the `image` field; `textAreaFocus`; and the start of
// `handleKeyDown` (the `"Process"` / `"229"` early return is presumably an
// IME-composition guard — TODO confirm).
import { useForm } from "@mantine/form" import { useMutation } from "@tanstack/react-query" import React from "react" import useDynamicTextareaSize from "~/hooks/useDynamicTextareaSize" import { useMessage } from "~/hooks/useMessage" import { toBase64 } from "~/libs/to-base64" import { Checkbox, Dropdown, Image, Switch, Tooltip } from "antd" import { useWebUI } from "~/store/webui" import { defaultEmbeddingModelForRag } from "~/services/ollama" import { ImageIcon, MicIcon, StopCircleIcon, X, EyeIcon, EyeOffIcon } from "lucide-react" import { useTranslation } from "react-i18next" import { ModelSelect } from "@/components/Common/ModelSelect" import { useSpeechRecognition } from "@/hooks/useSpeechRecognition" import { PiGlobeX, PiGlobe } from "react-icons/pi" import { handleChatInputKeyDown } from "@/utils/key-down" type Props = { dropedFile: File | undefined } export const SidepanelForm = ({ dropedFile }: Props) => { const textareaRef = React.useRef(null) const inputRef = React.useRef(null) const { sendWhenEnter, setSendWhenEnter } = useWebUI() const [typing, setTyping] = React.useState(false) const { t } = useTranslation(["playground", "common"]) const form = useForm({ initialValues: { message: "", image: "" } }) const { transcript, isListening, resetTranscript, start: startListening, stop: stopSpeechRecognition, supported: browserSupportsSpeechRecognition } = useSpeechRecognition() const stopListening = async () => { if (isListening) { stopSpeechRecognition() } } const onInputChange = async ( e: React.ChangeEvent | File ) => { if (e instanceof File) { const base64 = await toBase64(e) form.setFieldValue("image", base64) } else { if (e.target.files) { const base64 = await toBase64(e.target.files[0]) form.setFieldValue("image", base64) } } } const textAreaFocus = () => { if (textareaRef.current) { textareaRef.current.focus() } } const handleKeyDown = (e: React.KeyboardEvent) => { if (e.key === "Process" || e.key === "229") return if ( handleChatInputKeyDown({ e, 
// Next line: the rest of `handleKeyDown` — when `handleChatInputKeyDown`
// approves (given `sendWhenEnter`, `typing`, `isSending`), it submits the
// form: skips empty message+image, stops the microphone, requires a selected
// model, and for `chatMode === "rag"` or `webSearch` requires a configured
// embedding model (`defaultEmbeddingModelForRag`) before resetting the form
// and calling `sendMessage`. Then: `handlePaste` (routes pasted files into
// `onInputChange`); the large `useMessage()` destructuring; the
// `dropedFile` effect; `useDynamicTextareaSize` (max 120); transcript →
// message-field sync while listening; quick-prompt insertion
// (NOTE(review): the handle from `setTimeout` is cleared with
// `clearInterval` — works at runtime since the IDs share a pool in
// browsers, but `clearTimeout` is the correct call); the `sendMessage`
// mutation (NOTE(review): `onError` receives `error` but ignores it —
// failures are silently swallowed apart from refocusing the textarea); and
// the start of a mount-time effect.
sendWhenEnter, typing, isSending }) ) { e.preventDefault() form.onSubmit(async (value) => { if (value.message.trim().length === 0 && value.image.length === 0) { return } await stopListening() if (!selectedModel || selectedModel.length === 0) { form.setFieldError("message", t("formError.noModel")) return } if (chatMode === "rag") { const defaultEM = await defaultEmbeddingModelForRag() if (!defaultEM) { form.setFieldError("message", t("formError.noEmbeddingModel")) return } } if (webSearch) { const defaultEM = await defaultEmbeddingModelForRag() if (!defaultEM) { form.setFieldError("message", t("formError.noEmbeddingModel")) return } } form.reset() textAreaFocus() await sendMessage({ image: value.image, message: value.message.trim() }) })() } } const handlePaste = (e: React.ClipboardEvent) => { if (e.clipboardData.files.length > 0) { onInputChange(e.clipboardData.files[0]) } } const { onSubmit, selectedModel, chatMode, stopStreamingRequest, streaming, setChatMode, webSearch, setWebSearch, selectedQuickPrompt, setSelectedQuickPrompt, speechToTextLanguage, useOCR, setUseOCR, defaultInternetSearchOn } = useMessage() React.useEffect(() => { if (dropedFile) { onInputChange(dropedFile) } }, [dropedFile]) useDynamicTextareaSize(textareaRef, form.values.message, 120) React.useEffect(() => { if (isListening) { form.setFieldValue("message", transcript) } }, [transcript]) React.useEffect(() => { if (selectedQuickPrompt) { const word = getVariable(selectedQuickPrompt) form.setFieldValue("message", selectedQuickPrompt) if (word) { textareaRef.current?.focus() const interval = setTimeout(() => { textareaRef.current?.setSelectionRange(word.start, word.end) setSelectedQuickPrompt(null) }, 100) return () => { clearInterval(interval) } } } }, [selectedQuickPrompt]) const { mutateAsync: sendMessage, isPending: isSending } = useMutation({ mutationFn: onSubmit, onSuccess: () => { textAreaFocus() }, onError: (error) => { textAreaFocus() } }) React.useEffect(() => { const handleDrop = (e: 
// Next line: the rest of the mount effect — attaches `drop`/`dragover`
// listeners to the textarea so plain-text drops replace the message field
// (NOTE(review): the cleanup re-reads `textareaRef.current`, which can be a
// different node — or null — by unmount time; the attached node should be
// captured in a local before registering). It also sets `webSearch` from
// `defaultInternetSearchOn` both here (empty dependency array) and again in
// a dedicated `[defaultInternetSearchOn]` effect — the mount-time copy is
// redundant. Ends with `return (` opening the JSX, whose markup on the two
// lines below has been stripped of tags and truncated; they are preserved
// verbatim and must not be edited until the original markup is recovered.
DragEvent) => { e.preventDefault() if (e.dataTransfer?.items) { for (let i = 0; i < e.dataTransfer.items.length; i++) { if (e.dataTransfer.items[i].type === "text/plain") { e.dataTransfer.items[i].getAsString((text) => { form.setFieldValue("message", text) }) } } } } const handleDragOver = (e: DragEvent) => { e.preventDefault() } textareaRef.current?.addEventListener("drop", handleDrop) textareaRef.current?.addEventListener("dragover", handleDragOver) if (defaultInternetSearchOn) { setWebSearch(true) } return () => { textareaRef.current?.removeEventListener("drop", handleDrop) textareaRef.current?.removeEventListener("dragover", handleDragOver) } }, []) React.useEffect(() => { if (defaultInternetSearchOn) { setWebSearch(true) } }, [defaultInternetSearchOn]) return (
Uploaded Image
{ if (!selectedModel || selectedModel.length === 0) { form.setFieldError("message", t("formError.noModel")) return } if (chatMode === "rag") { const defaultEM = await defaultEmbeddingModelForRag() if (!defaultEM) { form.setFieldError("message", t("formError.noEmbeddingModel")) return } } if (webSearch) { const defaultEM = await defaultEmbeddingModelForRag() if (!defaultEM) { form.setFieldError("message", t("formError.noEmbeddingModel")) return } } await stopListening() if ( value.message.trim().length === 0 && value.image.length === 0 ) { return } form.reset() textAreaFocus() await sendMessage({ image: value.image, message: value.message.trim() }) })} className="shrink-0 flex-grow flex flex-col items-center ">