From 1d7028cbddc34412db0daf011f56a3296f78150a Mon Sep 17 00:00:00 2001
From: yyhhyyyyyy
Date: Thu, 14 Aug 2025 22:56:55 +0800
Subject: [PATCH] fix: resolve OpenRouter model sync timeout and welcome view UI issues

---
 src/main/presenter/llmProviderPresenter/baseProvider.ts | 9 +++++++++
 .../providers/openAICompatibleProvider.ts | 4 ++--
 .../providers/openAIResponsesProvider.ts | 4 ++--
 src/renderer/src/views/WelcomeView.vue | 6 +++---
 4 files changed, 16 insertions(+), 7 deletions(-)

diff --git a/src/main/presenter/llmProviderPresenter/baseProvider.ts b/src/main/presenter/llmProviderPresenter/baseProvider.ts
index 5cb4befc0..4d6534759 100644
--- a/src/main/presenter/llmProviderPresenter/baseProvider.ts
+++ b/src/main/presenter/llmProviderPresenter/baseProvider.ts
@@ -30,6 +30,7 @@ import { CONFIG_EVENTS } from '@/events'
 export abstract class BaseLLMProvider {
   // 单轮会话中最大工具调用次数限制
   protected static readonly MAX_TOOL_CALLS = 50
+  protected static readonly DEFAULT_MODEL_FETCH_TIMEOUT = 12000 // 提升到12秒作为通用默认值
   protected provider: LLM_PROVIDER
   protected models: MODEL_META[] = []
 
@@ -59,6 +60,14 @@ export abstract class BaseLLMProvider {
     return BaseLLMProvider.MAX_TOOL_CALLS
   }
 
+  /**
+   * 获取模型获取超时时间配置
+   * @returns 超时时间(毫秒)
+   */
+  protected getModelFetchTimeout(): number {
+    return BaseLLMProvider.DEFAULT_MODEL_FETCH_TIMEOUT
+  }
+
   /**
    * 从配置中加载缓存的模型数据
    * 在构造函数中调用,避免每次都需要重新获取模型列表
diff --git a/src/main/presenter/llmProviderPresenter/providers/openAICompatibleProvider.ts b/src/main/presenter/llmProviderPresenter/providers/openAICompatibleProvider.ts
index dadb89264..cc6948351 100644
--- a/src/main/presenter/llmProviderPresenter/providers/openAICompatibleProvider.ts
+++ b/src/main/presenter/llmProviderPresenter/providers/openAICompatibleProvider.ts
@@ -1198,8 +1198,8 @@ export class OpenAICompatibleProvider extends BaseLLMProvider {
   public async check(): Promise<{ isOk: boolean; errorMsg: string | null }> {
     try {
       if (!this.isNoModelsApi) {
-        // Use a reasonable timeout
-        const models = await this.fetchOpenAIModels({ timeout: 5000 }) // Increased timeout slightly
+        // Use unified timeout configuration from base class
+        const models = await this.fetchOpenAIModels({ timeout: this.getModelFetchTimeout() })
         this.models = models // Store fetched models
       }
       // Potentially add a simple API call test here if needed, e.g., list models even for no-API list to check key/endpoint
diff --git a/src/main/presenter/llmProviderPresenter/providers/openAIResponsesProvider.ts b/src/main/presenter/llmProviderPresenter/providers/openAIResponsesProvider.ts
index 9734f8540..1cd017b7c 100644
--- a/src/main/presenter/llmProviderPresenter/providers/openAIResponsesProvider.ts
+++ b/src/main/presenter/llmProviderPresenter/providers/openAIResponsesProvider.ts
@@ -1070,8 +1070,8 @@ export class OpenAIResponsesProvider extends BaseLLMProvider {
   public async check(): Promise<{ isOk: boolean; errorMsg: string | null }> {
     try {
       if (!this.isNoModelsApi) {
-        // Use a reasonable timeout
-        const models = await this.fetchOpenAIModels({ timeout: 5000 }) // Increased timeout slightly
+        // Use unified timeout configuration from base class
+        const models = await this.fetchOpenAIModels({ timeout: this.getModelFetchTimeout() })
         this.models = models // Store fetched models
       }
       // Potentially add a simple API call test here if needed, e.g., list models even for no-API list to check key/endpoint
diff --git a/src/renderer/src/views/WelcomeView.vue b/src/renderer/src/views/WelcomeView.vue
index f10daeca8..a790242ba 100644
--- a/src/renderer/src/views/WelcomeView.vue
+++ b/src/renderer/src/views/WelcomeView.vue
@@ -211,9 +211,9 @@ onMounted(() => {
 
 const handleModelEnabledChange = async (model: MODEL_META, enabled: boolean) => {
   try {
-    await settingsStore.updateModelStatus(selectedProvider.value, model.id, !enabled)
+    await settingsStore.updateModelStatus(selectedProvider.value, model.id, enabled)
   } catch (error) {
-    console.error('Failed to disable model:', error)
+    console.error('Failed to update model status:', error)
   }
   console.log('handleModelEnabledChange', model, enabled)
 }
@@ -339,7 +339,7 @@ const isFirstStep = computed(() => currentStep.value === 0)