From 8ec985d8b0ce220bb8513352baf59de4dcb9dc99 Mon Sep 17 00:00:00 2001
From: yyhhyyyyyy
Date: Mon, 18 Aug 2025 16:16:52 +0800
Subject: [PATCH 1/2] fix: extend reasoningEffort support to all reasoning
models
---
.../configPresenter/modelDefaultSettings.ts | 65 +++++++++++++++----
.../providers/openAICompatibleProvider.ts | 14 ++--
.../providers/openAIResponsesProvider.ts | 39 +++++------
3 files changed, 81 insertions(+), 37 deletions(-)
diff --git a/src/main/presenter/configPresenter/modelDefaultSettings.ts b/src/main/presenter/configPresenter/modelDefaultSettings.ts
index 039b7c425..02fdd8e81 100644
--- a/src/main/presenter/configPresenter/modelDefaultSettings.ts
+++ b/src/main/presenter/configPresenter/modelDefaultSettings.ts
@@ -701,7 +701,8 @@ export const defaultModelsSettings: DefaultModelSetting[] = [
match: ['o4-mini-high'],
vision: false,
functionCall: true,
- reasoning: true
+ reasoning: true,
+ reasoningEffort: 'medium'
},
{
id: 'o3',
@@ -712,7 +713,44 @@ export const defaultModelsSettings: DefaultModelSetting[] = [
match: ['o3'],
vision: false,
functionCall: true,
- reasoning: true
+ reasoning: true,
+ reasoningEffort: 'medium'
+ },
+ {
+ id: 'o3-pro',
+ name: 'OpenAI o3 Pro',
+ temperature: 0.7,
+ maxTokens: 100_000,
+ contextLength: 200_000,
+ match: ['o3-pro'],
+ vision: true,
+ functionCall: true,
+ reasoning: true,
+ reasoningEffort: 'high'
+ },
+ {
+ id: 'o3-mini',
+ name: 'OpenAI o3 Mini',
+ temperature: 0.7,
+ maxTokens: 100_000,
+ contextLength: 200_000,
+ match: ['o3-mini'],
+ vision: false,
+ functionCall: true,
+ reasoning: true,
+ reasoningEffort: 'medium'
+ },
+ {
+ id: 'o3-preview',
+ name: 'OpenAI o3 Preview',
+ temperature: 0.7,
+ maxTokens: 100_000,
+ contextLength: 200_000,
+ match: ['o3-preview'],
+ vision: true,
+ functionCall: true,
+ reasoning: true,
+ reasoningEffort: 'medium'
},
{
id: 'o4-mini',
@@ -723,7 +761,8 @@ export const defaultModelsSettings: DefaultModelSetting[] = [
match: ['o4-mini'],
vision: false,
functionCall: true,
- reasoning: true
+ reasoning: true,
+ reasoningEffort: 'medium'
},
{
id: 'gpt-4.1-nano',
@@ -832,7 +871,8 @@ export const defaultModelsSettings: DefaultModelSetting[] = [
match: ['o1-preview'],
vision: true,
functionCall: false,
- reasoning: true
+ reasoning: true,
+ reasoningEffort: 'medium'
},
{
id: 'o1-mini',
@@ -843,18 +883,20 @@ export const defaultModelsSettings: DefaultModelSetting[] = [
match: ['o1-mini'],
vision: true,
functionCall: false,
- reasoning: true
+ reasoning: true,
+ reasoningEffort: 'medium'
},
{
- id: 'o1-mini',
- name: 'OpenAI o1 Mini',
+ id: 'o1-pro',
+ name: 'OpenAI o1 Pro',
temperature: 0.7,
- maxTokens: 65536,
+ maxTokens: 100_000,
contextLength: 128000,
- match: ['o1-mini'],
+ match: ['o1-pro'],
vision: true,
functionCall: false,
- reasoning: true
+ reasoning: true,
+ reasoningEffort: 'high'
},
{
id: 'o1',
@@ -865,7 +907,8 @@ export const defaultModelsSettings: DefaultModelSetting[] = [
match: ['o1'],
vision: true,
functionCall: false,
- reasoning: true
+ reasoning: true,
+ reasoningEffort: 'medium'
},
{
id: 'gpt-5-chat',
diff --git a/src/main/presenter/llmProviderPresenter/providers/openAICompatibleProvider.ts b/src/main/presenter/llmProviderPresenter/providers/openAICompatibleProvider.ts
index 54c806a13..2c8ea99b5 100644
--- a/src/main/presenter/llmProviderPresenter/providers/openAICompatibleProvider.ts
+++ b/src/main/presenter/llmProviderPresenter/providers/openAICompatibleProvider.ts
@@ -563,13 +563,13 @@ export class OpenAICompatibleProvider extends BaseLLMProvider {
}
}
- if (modelId.startsWith('gpt-5')) {
- if (modelConfig.reasoningEffort) {
- ;(requestParams as any).reasoning_effort = modelConfig.reasoningEffort
- }
- if (modelConfig.verbosity) {
- ;(requestParams as any).verbosity = modelConfig.verbosity
- }
+ if (modelConfig.reasoningEffort) {
+ ;(requestParams as any).reasoning_effort = modelConfig.reasoningEffort
+ }
+
+ // verbosity 仅支持 GPT-5 系列模型
+ if (modelId.startsWith('gpt-5') && modelConfig.verbosity) {
+ ;(requestParams as any).verbosity = modelConfig.verbosity
}
// 移除推理模型的温度参数
diff --git a/src/main/presenter/llmProviderPresenter/providers/openAIResponsesProvider.ts b/src/main/presenter/llmProviderPresenter/providers/openAIResponsesProvider.ts
index 1cd017b7c..897b28572 100644
--- a/src/main/presenter/llmProviderPresenter/providers/openAIResponsesProvider.ts
+++ b/src/main/presenter/llmProviderPresenter/providers/openAIResponsesProvider.ts
@@ -228,17 +228,18 @@ export class OpenAIResponsesProvider extends BaseLLMProvider {
stream: false
}
- if (modelId.startsWith('gpt-5')) {
- const modelConfig = this.configPresenter.getModelConfig(modelId, this.provider.id)
- if (modelConfig.reasoningEffort) {
- ;(requestParams as any).reasoning = {
- effort: modelConfig.reasoningEffort
- }
+ const modelConfig = this.configPresenter.getModelConfig(modelId, this.provider.id)
+
+ if (modelConfig.reasoningEffort) {
+ ;(requestParams as any).reasoning = {
+ effort: modelConfig.reasoningEffort
}
- if (modelConfig.verbosity) {
- ;(requestParams as any).text = {
- verbosity: modelConfig.verbosity
- }
+ }
+
+ // verbosity 仅支持 GPT-5 系列模型
+ if (modelId.startsWith('gpt-5') && modelConfig.verbosity) {
+ ;(requestParams as any).text = {
+ verbosity: modelConfig.verbosity
}
}
@@ -572,16 +573,16 @@ export class OpenAIResponsesProvider extends BaseLLMProvider {
requestParams.tools = apiTools
}
- if (modelId.startsWith('gpt-5')) {
- if (modelConfig.reasoningEffort) {
- ;(requestParams as any).reasoning = {
- effort: modelConfig.reasoningEffort
- }
+ if (modelConfig.reasoningEffort) {
+ ;(requestParams as any).reasoning = {
+ effort: modelConfig.reasoningEffort
}
- if (modelConfig.verbosity) {
- ;(requestParams as any).text = {
- verbosity: modelConfig.verbosity
- }
+ }
+
+ // verbosity 仅支持 GPT-5 系列模型
+ if (modelId.startsWith('gpt-5') && modelConfig.verbosity) {
+ ;(requestParams as any).text = {
+ verbosity: modelConfig.verbosity
}
}
From ab008964e0c89eab0975b6d5804fad91e433c085 Mon Sep 17 00:00:00 2001
From: yyhhyyyyyy
Date: Mon, 18 Aug 2025 16:51:36 +0800
Subject: [PATCH 2/2] feat: use backend config to determine reasoning effort
support
---
.../src/components/settings/ModelConfigDialog.vue | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/src/renderer/src/components/settings/ModelConfigDialog.vue b/src/renderer/src/components/settings/ModelConfigDialog.vue
index 8ab18d652..2f3935426 100644
--- a/src/renderer/src/components/settings/ModelConfigDialog.vue
+++ b/src/renderer/src/components/settings/ModelConfigDialog.vue
@@ -52,8 +52,8 @@
-
-
+
+
-
+