feat: change how token counts are obtained

zhaoweijie 2025-02-24 10:10:21 +08:00
parent 70d1f40333
commit c5fa739a95
3 changed files with 17 additions and 14 deletions

@@ -6,10 +6,10 @@ import { formatDate } from "@/utils/date"
 const columns: TableProps<MeteringEntry>["columns"] = [
   {
-    title: "id",
-    dataIndex: "id",
-    key: "id",
-    width: "13%"
+    title: '序号',
+    key: 'index',
+    width: 100,
+    render: (_text, _record, index) => index + 1, // index is 0-based; +1 so numbering starts at 1
   },
   {
     title: "问题",
@@ -84,7 +84,7 @@ const columns: TableProps<MeteringEntry>["columns"] = [
     dataIndex: "date",
     key: "date",
     render: (date) => {
-      return <div>{formatDate(date)}</div>
+      return <div>{formatDate(date ?? new Date())}</div>
     }
   },
   {
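Note on the new column above: antd column render callbacks receive (value, record, index), so a sequence-number column can ignore the record entirely and render the 1-based row position. A minimal sketch, assuming antd's Table types; the MeteringEntry interface here is an illustrative stand-in, not the project's real type:

// Illustrative only: MeteringEntry is a stand-in shape.
import type { TableProps } from "antd"

interface MeteringEntry {
  id: string
  queryContent?: string
  date?: Date
}

const indexColumn: NonNullable<TableProps<MeteringEntry>["columns"]>[number] = {
  title: "序号",
  key: "index",
  width: 100,
  // antd calls render(value, record, index); index is 0-based, so +1 gives 1, 2, 3, ...
  render: (_text, _record, index) => index + 1
}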

@@ -193,7 +193,8 @@ export const useMessageOption = () => {
     let generateMessageId = generateID()
     const meter: MeteringEntry = {
       id: generateMessageId,
-      queryContent: message
+      queryContent: message,
+      date: new Date()
     } as MeteringEntry
     if (!isRegenerate) {
@@ -488,18 +489,20 @@ export const useMessageOption = () => {
           // Save metering entry
           const { cot, content } = responseResolver(fullText)
-          setMeteringEntries([...meteringEntries, {
+          setMeteringEntries([ {
             ...meter,
-            modelInputTokenCount: generationInfo?.prompt_eval_count ?? 0,
-            modelOutputTokenCount: generationInfo?.eval_count ?? 0,
-            model: generationInfo?.model ?? "",
+            modelInputTokenCount: prompt.length,
+            modelOutputTokenCount: fullText.length,
+            model: ollama.modelName,
             relatedDataCount: iodData?.length ?? 0,
-            timeTaken: timetaken,
+            timeTaken: new Date().getTime() - meter.date.getTime(),
             date: chatStartTime,
             cot,
             responseContent: content,
             modelResponseContent: fullText,
-          }])
+          },
+          ...meteringEntries,
+          ])
         } catch (e) {
           const errorSave = await saveMessageOnError({
             e,
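The reworked save step above changes three things: token counts are now approximated by character lengths (prompt.length / fullText.length) instead of the provider's generationInfo counters, timeTaken is measured from the entry's own date field (set when the query is created in the earlier hunk), and the new entry is spread in front of meteringEntries so the list reads newest-first. A minimal standalone sketch of that update, with illustrative types and values; the real hook's state setter and MeteringEntry fields may differ:

// Illustrative sketch only; field names mirror the hunk above, values are made up.
type MeteringEntry = {
  id: string
  date: Date
  modelInputTokenCount?: number
  modelOutputTokenCount?: number
  timeTaken?: number
}

let meteringEntries: MeteringEntry[] = []
const setMeteringEntries = (next: MeteringEntry[]) => {
  // stand-in for the React useState setter used by the hook
  meteringEntries = next
}

const meter: MeteringEntry = { id: "example-id", date: new Date() }
const prompt = "example prompt"
const fullText = "example model response"

setMeteringEntries([
  {
    ...meter,
    // character lengths as a rough stand-in for real token counts
    modelInputTokenCount: prompt.length,
    modelOutputTokenCount: fullText.length,
    // elapsed milliseconds since the entry was created
    timeTaken: new Date().getTime() - meter.date.getTime()
  },
  // prepend so the newest query appears first
  ...meteringEntries
])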

@@ -72,7 +72,7 @@ export const pageAssistModel = async ({
       configuration: {
         apiKey: providerInfo.apiKey || "temp",
         baseURL: providerInfo.baseUrl || ""
-      }
+      },
     }) as any
   }
@@ -85,7 +85,7 @@ export const pageAssistModel = async ({
       configuration: {
         apiKey: providerInfo.apiKey || "temp",
         baseURL: providerInfo.baseUrl || ""
-      }
+      },
     }) as any
   }
   return new ChatOllama({