feat: change token get
parent 7b8879a7a8
commit 90228512f7
@@ -6,10 +6,10 @@ import { formatDate } from "@/utils/date"
 const columns: TableProps<ChatMessage>["columns"] = [
   {
-    title: "id",
-    dataIndex: "id",
-    key: "id",
-    width: "13%"
+    title: '序号',
+    key: 'index',
+    width: 100,
+    render: (_text, _record, index) => index + 1, // index is 0-based; +1 so display starts at 1
   },
   {
     title: "问题",
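For reference, the render callback in the new column above maps the zero-based row index to a 1-based sequence number, independent of the record's id. A minimal standalone sketch (the helper name below is illustrative, not from the diff):

const renderIndex = (_text: unknown, _record: unknown, index: number): number => index + 1

// Rows 0, 1, 2 display as 1, 2, 3 regardless of each record's id.
console.log([{}, {}, {}].map((row, i) => renderIndex(null, row, i))) // [1, 2, 3]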
@@ -84,7 +84,7 @@ const columns: TableProps<ChatMessage>["columns"] = [
     dataIndex: "date",
     key: "date",
     render: (date) => {
-      return <div>{formatDate(date)}</div>
+      return <div>{formatDate(date ?? new Date())}</div>
     }
   },
   {
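The ?? fallback above keeps the date cell rendering even when a stored message has no date. A minimal sketch, assuming formatDate from "@/utils/date" has a (date: Date) => string shape (stubbed here for illustration):

// Stub standing in for formatDate from "@/utils/date"; only the assumed signature matters.
const formatDate = (d: Date): string => d.toISOString().replace("T", " ").slice(0, 19)

// A missing date falls back to "now" instead of passing undefined into the formatter.
const renderDateCell = (date?: Date): string => formatDate(date ?? new Date())

console.log(renderDateCell(new Date(0))) // "1970-01-01 00:00:00"
console.log(renderDateCell())            // current timestamp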
@@ -192,7 +192,8 @@ export const useMessageOption = () => {
     let generateMessageId = generateID()
     const chatMessage: ChatMessage = {
       id: generateMessageId,
-      queryContent: message
+      queryContent: message,
+      date: new Date()
     } as ChatMessage
 
     if (!isRegenerate) {
@@ -484,17 +485,16 @@ export const useMessageOption = () => {
       setIsProcessing(false)
       setStreaming(false)
 
-      chatMessage.modelInputTokenCount = generationInfo?.prompt_eval_count ?? 0
-      chatMessage.modelOutputTokenCount = generationInfo?.eval_count ?? 0
-      chatMessage.model = generationInfo?.model ?? ""
+      chatMessage.modelInputTokenCount = prompt.length
+      chatMessage.modelOutputTokenCount = fullText.length
+      chatMessage.model = ollama.modelName
       chatMessage.relatedDataCount = iodData?.length ?? 0
-      chatMessage.timeTaken = timetaken
-      chatMessage.date = reasoningStartTime
+      chatMessage.timeTaken = new Date().getTime() - chatMessage.date.getTime()
       const { think, content } = responseResolver(fullText)
       chatMessage.thinkingChain = think
       chatMessage.responseContent = content
       chatMessage.modelResponseContent = fullText
-      setChatMessages([...chatMessages, chatMessage])
+      setChatMessages([chatMessage, ...chatMessages])
     } catch (e) {
       const errorSave = await saveMessageOnError({
         e,
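Because date is now stamped when the chat message is created (hunk at line 192 above), timeTaken can be derived from it once generation finishes. A small self-contained sketch of that pattern, with only the two field names taken from the diff and everything else illustrative:

interface TimedMessage {
  date: Date          // stamped at creation
  timeTaken?: number  // elapsed milliseconds, filled in at completion
}

async function withTiming(work: () => Promise<void>): Promise<TimedMessage> {
  const msg: TimedMessage = { date: new Date() }
  await work()
  msg.timeTaken = new Date().getTime() - msg.date.getTime()
  return msg
}

// Example: a 250 ms "generation" reports roughly 250 ms taken.
withTiming(() => new Promise((resolve) => setTimeout(resolve, 250))).then((m) =>
  console.log(`took ~${m.timeTaken} ms`)
)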
@@ -72,7 +72,7 @@ export const pageAssistModel = async ({
       configuration: {
         apiKey: providerInfo.apiKey || "temp",
         baseURL: providerInfo.baseUrl || ""
       }
     },
   }) as any
 }
@@ -85,7 +85,7 @@ export const pageAssistModel = async ({
       configuration: {
         apiKey: providerInfo.apiKey || "temp",
         baseURL: providerInfo.baseUrl || ""
       }
     },
   }) as any
 }
   return new ChatOllama({