Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions AGENTS.md
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ cat ~/.config/DankMaterialShell/plugin_settings.json | jq .aiAssistant
- OpenAI (gpt-5.2 models)
- Anthropic (claude-4.5 models)
- Google Gemini (gemini-2.5-flash, gemini-3-flash-preview)
- Inception / Mercury 2: OpenAI-compatible chat completions plus [API parameters](https://docs.inceptionlabs.ai/get-started/api-parameters) (`reasoning_effort`, `reasoning_summary`, `reasoning_summary_wait`); [streaming](https://docs.inceptionlabs.ai/capabilities/streaming)
- Custom (OpenAI-compatible endpoints)

**Custom Provider Notes**:
Expand Down
30 changes: 30 additions & 0 deletions AIApiAdapters.js
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,8 @@ function buildRequest(provider, payload, apiKey) {
return anthropicRequest(payload, apiKey);
case "gemini":
return geminiRequest(payload, apiKey);
case "inception":
return inceptionRequest(payload, apiKey);
case "custom":
return customRequest(payload, apiKey);
default:
Expand All @@ -73,6 +75,34 @@ function openaiRequest(payload, apiKey) {
return { url, headers, body: JSON.stringify(body) };
}

function inceptionRequest(payload, apiKey) {
    // Mercury 2 params: https://docs.inceptionlabs.ai/get-started/api-parameters
    // Builds an OpenAI-compatible chat-completions request for the Inception API.
    // Per the docs, max_tokens is capped at 50000 (8192 when unset/invalid) and
    // temperature is clamped into [0.5, 1.0] (0.75 when unset).
    const url = openaiChatCompletionsUrl(payload.baseUrl || "https://api.inceptionlabs.ai/v1");
    const headers = ["-H", "Content-Type: application/json", "-H", "Authorization: Bearer " + apiKey];

    // Cap the caller-requested token budget; fall back to 8192 for missing/non-positive values.
    const requestedMax = payload.max_tokens;
    const maxTokens = (typeof requestedMax === "number" && requestedMax > 0)
        ? Math.min(50000, requestedMax)
        : 8192;

    // Clamp temperature into the provider-supported range.
    const rawTemp = (typeof payload.temperature === "number") ? payload.temperature : 0.75;
    const temperature = Math.min(1.0, Math.max(0.5, rawTemp));

    const body = {
        model: payload.model,
        messages: payload.messages,
        max_tokens: maxTokens,
        temperature: temperature,
        stream: true
    };

    // Only forward a reasoning_effort the API recognizes; silently drop anything else.
    const effort = String(payload.inceptionReasoningEffort || "medium").toLowerCase();
    if (["instant", "low", "medium", "high"].indexOf(effort) >= 0)
        body.reasoning_effort = effort;

    // reasoning_summary defaults to on unless explicitly disabled;
    // reasoning_summary_wait is opt-in only.
    body.reasoning_summary = payload.inceptionReasoningSummary !== false;
    if (payload.inceptionReasoningSummaryWait === true)
        body.reasoning_summary_wait = true;

    return { url, headers, body: JSON.stringify(body) };
}

function anthropicRequest(payload, apiKey) {
const url = (payload.baseUrl || "https://api.anthropic.com") + "/v1/messages";
const headers = [
Expand Down
49 changes: 45 additions & 4 deletions AIAssistantService.qml
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,9 @@ Item {
property string sessionApiKey: "" // In-memory key
property string apiKeyEnvVar: ""
property bool useMonospace: false
property string inceptionReasoningEffort: "medium"
property bool inceptionReasoningSummary: true
property bool inceptionReasoningSummaryWait: false

readonly property bool debugEnabled: (Quickshell.env("DMS_LOG_LEVEL") || "").toLowerCase() === "debug"

Expand All @@ -56,6 +59,20 @@ Item {

function defaultsForProvider(id) {
switch (id) {
case "inception":
return {
baseUrl: "https://api.inceptionlabs.ai/v1",
model: "mercury-2",
apiKey: "",
saveApiKey: false,
apiKeyEnvVar: "",
temperature: 0.75,
maxTokens: 8192,
timeout: 30,
inceptionReasoningEffort: "medium",
inceptionReasoningSummary: true,
inceptionReasoningSummaryWait: false
};
case "anthropic":
return {
baseUrl: "https://api.anthropic.com",
Expand Down Expand Up @@ -106,7 +123,7 @@ Item {
function normalizedProfile(id, raw) {
const defaults = defaultsForProvider(id);
const p = raw || {};
return {
const profile = {
baseUrl: String(p.baseUrl || defaults.baseUrl).trim(),
model: String(p.model || defaults.model).trim(),
apiKey: String(p.apiKey || "").trim(),
Expand All @@ -116,20 +133,29 @@ Item {
maxTokens: (typeof p.maxTokens === "number") ? p.maxTokens : defaults.maxTokens,
timeout: (typeof p.timeout === "number") ? p.timeout : defaults.timeout
};
if (id === "inception") {
const efforts = ["instant", "low", "medium", "high"];
let eff = String(p.inceptionReasoningEffort || defaults.inceptionReasoningEffort || "medium").toLowerCase();
profile.inceptionReasoningEffort = efforts.indexOf(eff) >= 0 ? eff : "medium";
profile.inceptionReasoningSummary = (typeof p.inceptionReasoningSummary === "boolean") ? p.inceptionReasoningSummary : (defaults.inceptionReasoningSummary !== false);
profile.inceptionReasoningSummaryWait = !!p.inceptionReasoningSummaryWait;
}
return profile;
}

function mergedProviders(rawProviders) {
const base = {
openai: normalizedProfile("openai", null),
anthropic: normalizedProfile("anthropic", null),
gemini: normalizedProfile("gemini", null),
inception: normalizedProfile("inception", null),
custom: normalizedProfile("custom", null)
};

if (!rawProviders || typeof rawProviders !== "object")
return base;

const ids = ["openai", "anthropic", "gemini", "custom"];
const ids = ["openai", "anthropic", "gemini", "inception", "custom"];
for (let i = 0; i < ids.length; i++) {
const id = ids[i];
if (rawProviders[id] && typeof rawProviders[id] === "object") {
Expand All @@ -154,7 +180,7 @@ Item {
function loadSettings() {
suppressConfigChange = true
const selectedProvider = String(PluginService.loadPluginData(pluginId, "provider", "openai")).trim() || "openai"
const providerId = ["openai", "anthropic", "gemini", "custom"].includes(selectedProvider) ? selectedProvider : "openai"
const providerId = ["openai", "anthropic", "gemini", "inception", "custom"].includes(selectedProvider) ? selectedProvider : "openai"
const rawProviders = PluginService.loadPluginData(pluginId, "providers", null)
let nextProviders = mergedProviders(rawProviders)

Expand Down Expand Up @@ -186,6 +212,11 @@ Item {
apiKey = active.apiKey
saveApiKey = active.saveApiKey
apiKeyEnvVar = active.apiKeyEnvVar
if (provider === "inception") {
inceptionReasoningEffort = active.inceptionReasoningEffort || "medium";
inceptionReasoningSummary = active.inceptionReasoningSummary !== false;
inceptionReasoningSummaryWait = !!active.inceptionReasoningSummaryWait;
}
useMonospace = PluginService.loadPluginData(pluginId, "useMonospace", false)
suppressConfigChange = false

Expand Down Expand Up @@ -348,6 +379,8 @@ Item {
return Quickshell.env("DMS_ANTHROPIC_API_KEY") || "";
case "gemini":
return Quickshell.env("DMS_GEMINI_API_KEY") || "";
case "inception":
return Quickshell.env("DMS_INCEPTION_API_KEY") || "";
case "custom":
return Quickshell.env("DMS_CUSTOM_API_KEY") || "";
default:
Expand All @@ -361,6 +394,8 @@ Item {
return Quickshell.env("ANTHROPIC_API_KEY") || "";
case "gemini":
return Quickshell.env("GEMINI_API_KEY") || "";
case "inception":
return Quickshell.env("INCEPTION_API_KEY") || "";
case "custom":
return "";
default:
Expand Down Expand Up @@ -563,7 +598,7 @@ Item {
}

msgs.push({ role: "user", content: latestText });
return {
const payload = {
provider: provider,
baseUrl: baseUrl,
model: model,
Expand All @@ -573,6 +608,12 @@ Item {
stream: true,
timeout: timeout
};
if (provider === "inception") {
payload.inceptionReasoningEffort = inceptionReasoningEffort;
payload.inceptionReasoningSummary = inceptionReasoningSummary;
payload.inceptionReasoningSummaryWait = inceptionReasoningSummaryWait;
}
return payload;
}

function buildCurlCommand(payload) {
Expand Down
124 changes: 116 additions & 8 deletions AIAssistantSettings.qml
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,9 @@ Item {
property real temperature: 0.7
property int maxTokens: 4096
property bool useMonospace: false
property string inceptionReasoningEffort: "medium"
property bool inceptionReasoningSummary: true
property bool inceptionReasoningSummaryWait: false

function save(key, value) {
PluginService.savePluginData(pluginId, key, value)
Expand All @@ -36,6 +39,20 @@ Item {

function defaultsForProvider(id) {
switch (id) {
case "inception":
return {
baseUrl: "https://api.inceptionlabs.ai/v1",
model: "mercury-2",
apiKey: "",
saveApiKey: false,
apiKeyEnvVar: "",
temperature: 0.75,
maxTokens: 8192,
timeout: 30,
inceptionReasoningEffort: "medium",
inceptionReasoningSummary: true,
inceptionReasoningSummaryWait: false
};
case "anthropic":
return {
baseUrl: "https://api.anthropic.com",
Expand Down Expand Up @@ -86,7 +103,7 @@ Item {
function normalizedProfile(id, raw) {
const d = defaultsForProvider(id)
const p = raw || {}
return {
const profile = {
baseUrl: String(p.baseUrl || d.baseUrl).trim(),
model: String(p.model || d.model).trim(),
apiKey: String(p.apiKey || "").trim(),
Expand All @@ -96,19 +113,28 @@ Item {
maxTokens: (typeof p.maxTokens === "number") ? p.maxTokens : d.maxTokens,
timeout: (typeof p.timeout === "number") ? p.timeout : d.timeout
}
if (id === "inception") {
const efforts = ["instant", "low", "medium", "high"]
let eff = String(p.inceptionReasoningEffort || d.inceptionReasoningEffort || "medium").toLowerCase()
profile.inceptionReasoningEffort = efforts.indexOf(eff) >= 0 ? eff : "medium"
profile.inceptionReasoningSummary = (typeof p.inceptionReasoningSummary === "boolean") ? p.inceptionReasoningSummary : (d.inceptionReasoningSummary !== false)
profile.inceptionReasoningSummaryWait = !!p.inceptionReasoningSummaryWait
}
return profile
}

function mergedProviders(rawProviders) {
const next = {
openai: normalizedProfile("openai", null),
anthropic: normalizedProfile("anthropic", null),
gemini: normalizedProfile("gemini", null),
inception: normalizedProfile("inception", null),
custom: normalizedProfile("custom", null)
}
if (!rawProviders || typeof rawProviders !== "object")
return next

const ids = ["openai", "anthropic", "gemini", "custom"]
const ids = ["openai", "anthropic", "gemini", "inception", "custom"]
for (let i = 0; i < ids.length; i++) {
const id = ids[i]
if (rawProviders[id] && typeof rawProviders[id] === "object") {
Expand All @@ -132,6 +158,11 @@ Item {
apiKeyEnvVar = active.apiKeyEnvVar
temperature = active.temperature
maxTokens = active.maxTokens
if (provider === "inception") {
inceptionReasoningEffort = active.inceptionReasoningEffort || "medium"
inceptionReasoningSummary = active.inceptionReasoningSummary !== false
inceptionReasoningSummaryWait = !!active.inceptionReasoningSummaryWait
}
}

function setProvider(providerId) {
Expand Down Expand Up @@ -164,7 +195,7 @@ Item {

function load() {
const selectedProvider = String(PluginService.loadPluginData(pluginId, "provider", "openai")).trim() || "openai"
provider = ["openai", "anthropic", "gemini", "custom"].includes(selectedProvider) ? selectedProvider : "openai"
provider = ["openai", "anthropic", "gemini", "inception", "custom"].includes(selectedProvider) ? selectedProvider : "openai"

const rawProviders = PluginService.loadPluginData(pluginId, "providers", null)
let nextProviders = mergedProviders(rawProviders)
Expand Down Expand Up @@ -302,7 +333,7 @@ Item {
}
DankDropdown {
width: parent.width
options: ["openai", "anthropic", "gemini", "custom"]
options: ["openai", "anthropic", "gemini", "inception", "custom"]
currentValue: root.provider
onValueChanged: value => setProvider(value)
}
Expand Down Expand Up @@ -332,6 +363,82 @@ Item {
placeholderText: "gpt-5.2"
onEditingFinished: saveActiveField("model", text.trim())
}

StyledText {
width: parent.width
visible: root.provider === "inception"
text: I18n.tr("Mercury 2: temperature 0.5–1.0, max_tokens 1–50000 (see Inception API parameters).")
font.pixelSize: Theme.fontSizeSmall
color: Theme.surfaceVariantText
wrapMode: Text.WordWrap
}

StyledText {
text: I18n.tr("Reasoning effort")
font.pixelSize: Theme.fontSizeSmall
color: Theme.surfaceVariantText
visible: root.provider === "inception"
}
DankDropdown {
width: parent.width
visible: root.provider === "inception"
options: ["instant", "low", "medium", "high"]
currentValue: root.inceptionReasoningEffort
onValueChanged: value => saveActiveField("inceptionReasoningEffort", value)
}

RowLayout {
width: parent.width
spacing: Theme.spacingM
visible: root.provider === "inception"
Column {
Layout.fillWidth: true
spacing: Theme.spacingXS
StyledText {
text: I18n.tr("Reasoning summary")
font.pixelSize: Theme.fontSizeMedium
color: Theme.surfaceText
}
StyledText {
text: I18n.tr("Return a summary of the model's reasoning.")
font.pixelSize: Theme.fontSizeSmall
color: Theme.surfaceVariantText
wrapMode: Text.WordWrap
width: parent.width
}
}
DankToggle {
checked: root.inceptionReasoningSummary
onToggled: checked => saveActiveField("inceptionReasoningSummary", checked)
}
}

RowLayout {
width: parent.width
spacing: Theme.spacingM
visible: root.provider === "inception"
Column {
Layout.fillWidth: true
spacing: Theme.spacingXS
StyledText {
text: I18n.tr("Wait for reasoning summary")
font.pixelSize: Theme.fontSizeMedium
color: Theme.surfaceText
}
StyledText {
text: I18n.tr("Delay final response until the reasoning summary is ready.")
font.pixelSize: Theme.fontSizeSmall
color: Theme.surfaceVariantText
wrapMode: Text.WordWrap
width: parent.width
}
}
DankToggle {
checked: root.inceptionReasoningSummaryWait
onToggled: checked => saveActiveField("inceptionReasoningSummaryWait", checked)
}
}

}
}
}
Expand Down Expand Up @@ -473,7 +580,7 @@ Item {
width: parent.width - parent.spacing - Theme.iconSize

StyledText {
text: I18n.tr("Temperature: %1").arg(root.temperature.toFixed(1))
text: I18n.tr("Temperature: %1").arg(root.temperature.toFixed(2))
font.pixelSize: Theme.fontSizeLarge
font.weight: Font.Medium
color: Theme.surfaceText
Expand All @@ -493,10 +600,11 @@ Item {
width: parent.width
height: 32
minimum: 0
maximum: 20
value: Math.round(root.temperature * 10)
maximum: 200
step: 1
value: Math.round(root.temperature * 100)
showValue: false
onSliderValueChanged: newValue => saveActiveField("temperature", newValue / 10)
onSliderValueChanged: newValue => saveActiveField("temperature", newValue / 100)
}
}
}
Expand Down
Loading