Skip to content

Commit f93a8c7

Browse files
committed
fix usage
1 parent 3a043e2 commit f93a8c7

File tree

3 files changed

+30
-22
lines changed

3 files changed

+30
-22
lines changed

src/main/presenter/llmProviderPresenter/providers/anthropicProvider.ts

Lines changed: 13 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@ import { presenter } from '@/presenter'
1414
import { HttpsProxyAgent } from 'https-proxy-agent'
1515
import { proxyConfig } from '../../proxyConfig'
1616
import { getModelConfig } from '../modelConfigs'
17+
import { Usage } from '@anthropic-ai/sdk/resources'
1718

1819
export class AnthropicProvider extends BaseLLMProvider {
1920
private anthropic!: Anthropic
@@ -710,19 +711,12 @@ ${context}
710711
let currentToolId = ''
711712
let currentToolName = ''
712713
let currentToolInputs: Record<string, unknown> = {}
713-
714+
let usageMetadata: Usage | undefined
714715
// 处理流中的各种事件
715716
for await (const chunk of stream) {
716717
// 处理使用统计
717718
if (chunk.type === 'message_start' && chunk.message.usage) {
718-
yield {
719-
type: 'usage',
720-
usage: {
721-
prompt_tokens: chunk.message.usage.input_tokens,
722-
completion_tokens: chunk.message.usage.output_tokens,
723-
total_tokens: chunk.message.usage.input_tokens + chunk.message.usage.output_tokens
724-
}
725-
}
719+
usageMetadata = chunk.message.usage
726720
}
727721

728722
// 处理工具调用开始
@@ -904,7 +898,16 @@ ${context}
904898
continue
905899
}
906900
}
907-
901+
if (usageMetadata) {
902+
yield {
903+
type: 'usage',
904+
usage: {
905+
prompt_tokens: usageMetadata.input_tokens,
906+
completion_tokens: usageMetadata.output_tokens,
907+
total_tokens: usageMetadata.input_tokens + usageMetadata.output_tokens
908+
}
909+
}
910+
}
908911
// 发送停止事件
909912
yield {
910913
type: 'stop',

src/main/presenter/llmProviderPresenter/providers/geminiProvider.ts

Lines changed: 14 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,8 @@ import {
1313
GenerativeModel,
1414
Part,
1515
Content,
16-
GenerationConfig
16+
GenerationConfig,
17+
UsageMetadata
1718
} from '@google/generative-ai'
1819
import { ConfigPresenter } from '../../configPresenter'
1920
import { presenter } from '@/presenter'
@@ -808,19 +809,12 @@ export class GeminiProvider extends BaseLLMProvider {
808809
let buffer = ''
809810
let isInThinkTag = false
810811
let toolUseDetected = false
811-
812+
let usageMetadata: UsageMetadata | undefined
812813
// 流处理循环
813814
for await (const chunk of result.stream) {
814815
// 处理用量统计
815816
if (chunk.usageMetadata) {
816-
yield {
817-
type: 'usage',
818-
usage: {
819-
prompt_tokens: chunk.usageMetadata.promptTokenCount,
820-
completion_tokens: chunk.usageMetadata.candidatesTokenCount,
821-
total_tokens: chunk.usageMetadata.totalTokenCount
822-
}
823-
}
817+
usageMetadata = chunk.usageMetadata
824818
}
825819

826820
// 检查是否包含函数调用
@@ -948,7 +942,16 @@ export class GeminiProvider extends BaseLLMProvider {
948942
// 内容已经发送,清空buffer避免重复
949943
buffer = ''
950944
}
951-
945+
if (usageMetadata) {
946+
yield {
947+
type: 'usage',
948+
usage: {
949+
prompt_tokens: usageMetadata.promptTokenCount,
950+
completion_tokens: usageMetadata.candidatesTokenCount,
951+
total_tokens: usageMetadata.totalTokenCount
952+
}
953+
}
954+
}
952955
// 处理剩余缓冲区内容
953956
if (buffer) {
954957
if (isInThinkTag) {

src/main/presenter/llmProviderPresenter/providers/ollamaProvider.ts

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -495,7 +495,6 @@ export class OllamaProvider extends BaseLLMProvider {
495495
completion_tokens: chunk.eval_count || 0,
496496
total_tokens: (chunk.prompt_eval_count || 0) + (chunk.eval_count || 0)
497497
}
498-
yield { type: 'usage', usage }
499498
}
500499

501500
// 处理原生工具调用
@@ -643,6 +642,9 @@ export class OllamaProvider extends BaseLLMProvider {
643642

644643
continue
645644
}
645+
if (usage) {
646+
yield { type: 'usage', usage }
647+
}
646648

647649
// --- 思考标签处理 ---
648650
if (thinkState === 'inside') {

0 commit comments

Comments (0)