Skip to content
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions src/main/presenter/deepchatAgentPresenter/accumulator.ts
Original file line number Diff line number Diff line change
Expand Up @@ -168,6 +168,7 @@ export function accumulate(state: StreamState, event: LLMCoreStreamEvent): void
state.metadata.outputTokens = event.usage.completion_tokens
state.metadata.totalTokens = event.usage.total_tokens
state.metadata.cachedInputTokens = event.usage.cached_tokens
state.metadata.cacheWriteInputTokens = event.usage.cache_write_tokens
break
}
case 'stop': {
Expand Down
3 changes: 2 additions & 1 deletion src/main/presenter/deepchatAgentPresenter/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1405,7 +1405,8 @@ export class DeepChatAgentPresenter implements IAgentImplementation {
maxTokens: generationSettings.maxTokens,
thinkingBudget: generationSettings.thinkingBudget,
reasoningEffort: generationSettings.reasoningEffort,
verbosity: generationSettings.verbosity
verbosity: generationSettings.verbosity,
conversationId: sessionId
}

const traceEnabled = this.configPresenter.getSetting<boolean>('traceDebugEnabled') === true
Expand Down
3 changes: 3 additions & 0 deletions src/main/presenter/deepchatAgentPresenter/process.ts
Original file line number Diff line number Diff line change
Expand Up @@ -308,5 +308,8 @@ function buildUsageSnapshot(state: StreamState): Record<string, number> {
if (typeof state.metadata.cachedInputTokens === 'number') {
usage.cachedInputTokens = state.metadata.cachedInputTokens
}
if (typeof state.metadata.cacheWriteInputTokens === 'number') {
usage.cacheWriteInputTokens = state.metadata.cacheWriteInputTokens
}
return usage
}
50 changes: 50 additions & 0 deletions src/main/presenter/llmProviderPresenter/promptCacheCapabilities.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
/**
 * Prompt-cache handling strategy resolved per provider/model pair by
 * `resolvePromptCacheMode` below.
 *
 * Variant names suggest the following (NOTE(review): confirm against the
 * request-building code that consumes this type):
 * - 'disabled'           — no prompt caching for this provider/model.
 * - 'openai_implicit'    — presumably the provider caches automatically,
 *                          with no request changes needed.
 * - 'anthropic_auto'     — presumably caching is applied without explicit
 *                          cache breakpoints.
 * - 'anthropic_explicit' — presumably the caller must mark cache
 *                          breakpoints (e.g. cache_control) on the request.
 */
export type PromptCacheMode =
  | 'disabled'
  | 'openai_implicit'
  | 'anthropic_auto'
  | 'anthropic_explicit'

/**
 * Canonicalizes a provider/model identifier for comparison: trims
 * surrounding whitespace and lower-cases it. A missing value yields ''.
 */
function normalizeId(value: string | undefined): string {
  // `== null` mirrors the original optional chaining: both null and
  // undefined fall through to the empty string.
  if (value == null) {
    return ''
  }
  return value.trim().toLowerCase()
}

/**
 * Detects Anthropic Claude models by a case-sensitive substring match on
 * the (already normalized, lower-case) model id.
 */
function isClaudeModel(modelId: string): boolean {
  return /claude/.test(modelId)
}

/**
 * Maps a provider/model pair to the prompt-cache strategy the request
 * pipeline should use.
 *
 * @param providerId - Provider identifier; compared case-insensitively.
 * @param modelId    - Model identifier; compared case-insensitively.
 * @returns The resolved {@link PromptCacheMode}; 'disabled' for any
 *          provider/model combination not explicitly recognized.
 */
export function resolvePromptCacheMode(providerId: string, modelId: string): PromptCacheMode {
  const provider = normalizeId(providerId)
  const model = normalizeId(modelId)

  switch (provider) {
    case 'openai':
      // OpenAI gets implicit caching regardless of model.
      return 'openai_implicit'

    case 'anthropic':
      return isClaudeModel(model) ? 'anthropic_auto' : 'disabled'

    case 'zenmux':
      // Only Claude models routed under the 'anthropic/' prefix qualify.
      return model.startsWith('anthropic/') && isClaudeModel(model)
        ? 'anthropic_explicit'
        : 'disabled'

    case 'aws-bedrock':
      // Bedrock model ids look like 'anthropic.claude-...'; the plain
      // 'claude' check also covers other id shapes.
      return model.includes('anthropic.claude') || isClaudeModel(model)
        ? 'anthropic_explicit'
        : 'disabled'

    case 'openrouter':
      // OpenRouter qualifies on either the vendor prefix or the model name.
      return model.startsWith('anthropic/') || isClaudeModel(model)
        ? 'anthropic_explicit'
        : 'disabled'

    default:
      return 'disabled'
  }
}
Loading
Loading