Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions app/api/config/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ const DANGER_CONFIG = {
hideBalanceQuery: serverConfig.hideBalanceQuery,
disableFastLink: serverConfig.disableFastLink,
customModels: serverConfig.customModels,
visionModels: serverConfig.visionModels,
defaultModel: serverConfig.defaultModel,
};

Expand Down
7 changes: 5 additions & 2 deletions app/client/platforms/anthropic.ts
Original file line number Diff line number Diff line change
Expand Up @@ -84,10 +84,13 @@ export class ClaudeApi implements LLMApi {
return res?.content?.[0]?.text;
}
async chat(options: ChatOptions): Promise<void> {
const visionModel = isVisionModel(options.config.model);

const accessStore = useAccessStore.getState();

const visionModel = isVisionModel(
options.config.model,
accessStore.visionModels,
);

const shouldStream = !!options.config.stream;

const modelConfig = {
Expand Down
7 changes: 6 additions & 1 deletion app/client/platforms/google.ts
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,12 @@ export class GeminiProApi implements LLMApi {
}
const messages = _messages.map((v) => {
let parts: any[] = [{ text: getMessageTextContent(v) }];
if (isVisionModel(options.config.model)) {
if (
isVisionModel(
options.config.model,
useAccessStore.getState().visionModels,
)
) {
const images = getMessageImages(v);
if (images.length > 0) {
multimodal = true;
Expand Down
23 changes: 14 additions & 9 deletions app/client/platforms/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -194,8 +194,10 @@ export class ChatGPTApi implements LLMApi {

let requestPayload: RequestPayload | DalleRequestPayload;

const accessStore = useAccessStore.getState();

const isDalle3 = _isDalle3(options.config.model);
const isO1 = options.config.model.startsWith("o1");
const isOseries = options.config.model.match(/^o\d/) !== null;
if (isDalle3) {
const prompt = getMessageTextContent(
options.messages.slice(-1)?.pop() as any,
Expand All @@ -211,13 +213,16 @@ export class ChatGPTApi implements LLMApi {
style: options.config?.style ?? "vivid",
};
} else {
const visionModel = isVisionModel(options.config.model);
const visionModel = isVisionModel(
options.config.model,
accessStore.visionModels,
);
const messages: ChatOptions["messages"] = [];
for (const v of options.messages) {
const content = visionModel
? await preProcessImageContent(v.content)
: getMessageTextContent(v);
if (!(isO1 && v.role === "system"))
if (!(isOseries && v.role === "system"))
messages.push({ role: v.role, content });
}

Expand All @@ -226,16 +231,16 @@ export class ChatGPTApi implements LLMApi {
messages,
stream: options.config.stream,
model: modelConfig.model,
temperature: !isO1 ? modelConfig.temperature : 1,
presence_penalty: !isO1 ? modelConfig.presence_penalty : 0,
frequency_penalty: !isO1 ? modelConfig.frequency_penalty : 0,
top_p: !isO1 ? modelConfig.top_p : 1,
temperature: !isOseries ? modelConfig.temperature : 1,
presence_penalty: !isOseries ? modelConfig.presence_penalty : 0,
frequency_penalty: !isOseries ? modelConfig.frequency_penalty : 0,
top_p: !isOseries ? modelConfig.top_p : 1,
// max_tokens: Math.max(modelConfig.max_tokens, 1024),
// Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
};

// O 系列模型使用 max_completion_tokens 控制 token 数 (https://platform.openai.com/docs/guides/reasoning#controlling-costs)
if (isO1) {
if (isOseries) {
requestPayload["max_completion_tokens"] = modelConfig.max_tokens;
}

Expand Down Expand Up @@ -359,7 +364,7 @@ export class ChatGPTApi implements LLMApi {
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
isDalle3 || isO1 ? REQUEST_TIMEOUT_MS * 4 : REQUEST_TIMEOUT_MS, // dalle3 using b64_json is slow.
isDalle3 || isOseries ? REQUEST_TIMEOUT_MS * 4 : REQUEST_TIMEOUT_MS, // dalle3 using b64_json is slow.
);

const res = await fetch(chatPath, chatPayload);
Expand Down
6 changes: 5 additions & 1 deletion app/client/platforms/tencent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,11 @@ export class HunyuanApi implements LLMApi {
}

async chat(options: ChatOptions) {
const visionModel = isVisionModel(options.config.model);
const accessStore = useAccessStore.getState();
const visionModel = isVisionModel(
options.config.model,
accessStore.visionModels,
);
const messages = options.messages.map((v, index) => ({
// "Messages 中 system 角色必须位于列表的最开始"
role: index !== 0 && v.role === "system" ? "user" : v.role,
Expand Down
78 changes: 47 additions & 31 deletions app/components/chat.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,7 @@ import {
} from "../constant";
import { Avatar } from "./emoji";
import { ContextPrompts, MaskAvatar, MaskConfig } from "./mask";
import { useSyncStore } from "../store/sync";
import { useMaskStore } from "../store/mask";
import { ChatCommandPrefix, useChatCommand, useCommand } from "../command";
import { prettyObject } from "../utils/format";
Expand Down Expand Up @@ -490,6 +491,7 @@ export function ChatActions(props: {
const currentProviderName =
session.mask.modelConfig?.providerName || ServiceProvider.OpenAI;
const allModels = useAllModels();
const customVisionModels = useAccessStore().visionModels;
const models = useMemo(() => {
const filteredModels = allModels.filter((m) => m.available);
const defaultModel = filteredModels.find((m) => m.isDefault);
Expand Down Expand Up @@ -529,7 +531,7 @@ export function ChatActions(props: {
const isMobileScreen = useMobileScreen();

useEffect(() => {
const show = isVisionModel(currentModel);
const show = isVisionModel(currentModel, customVisionModels);
setShowUploadImage(show);
if (!show) {
props.setAttachImages([]);
Expand Down Expand Up @@ -947,6 +949,8 @@ function _Chat() {
const fontSize = config.fontSize;
const fontFamily = config.fontFamily;

const syncStore = useSyncStore();

const [showExport, setShowExport] = useState(false);

const inputRef = useRef<HTMLTextAreaElement>(null);
Expand Down Expand Up @@ -1163,6 +1167,7 @@ function _Chat() {

const onDelete = (msgId: string) => {
deleteMessage(msgId);
syncStore.upload();
};

const onResend = (message: ChatMessage) => {
Expand Down Expand Up @@ -1394,42 +1399,51 @@ function _Chat() {
submit: (text) => {
doSubmit(text);
},
code: (text) => {
if (accessStore.disableFastLink) return;
console.log("[Command] got code from url: ", text);
showConfirm(Locale.URLCommand.Code + `code = ${text}`).then((res) => {
if (res) {
accessStore.update((access) => (access.accessCode = text));
}
});
},
// code: (text) => {
// if (accessStore.disableFastLink) return;
// console.log("[Command] got code from url: ", text);
// showConfirm(Locale.URLCommand.Code + `code = ${text}`).then((res) => {
// if (res) {
// accessStore.update((access) => (access.accessCode = text));
// }
// });
// },
settings: (text) => {
if (accessStore.disableFastLink) return;

try {
const payload = JSON.parse(text) as {
key?: string;
url?: string;
code?: string;
username?: string;
password?: string;
};

console.log("[Command] got settings from url: ", payload);

if (payload.key || payload.url) {
showConfirm(
Locale.URLCommand.Settings +
`\n${JSON.stringify(payload, null, 4)}`,
).then((res) => {
if (!res) return;
if (payload.key) {
accessStore.update(
(access) => (access.openaiApiKey = payload.key!),
);
}
if (payload.url) {
accessStore.update((access) => (access.openaiUrl = payload.url!));
}
accessStore.update((access) => (access.useCustomConfig = true));
});
if (payload.code) {
accessStore.update((access) => (access.accessCode = payload.code!));
if (accessStore.isAuthorized()) {
context.pop();
const copiedHello = Object.assign({}, BOT_HELLO);
context.push(copiedHello);
setUserInput(" ");
}
}

if (payload.username) {
syncStore.update(
(config) => (config.webdav.username = payload.username!),
);
}

if (payload.password) {
syncStore.update(
(config) => (config.webdav.password = payload.password!),
);
}

if (payload.username && payload.password) {
syncStore.sync();
}
} catch {
console.error("[Command] failed to get settings from url: ", text);
Expand Down Expand Up @@ -1457,10 +1471,12 @@ function _Chat() {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);

const customVisionModels = useAccessStore().visionModels;

const handlePaste = useCallback(
async (event: React.ClipboardEvent<HTMLTextAreaElement>) => {
const currentModel = chatStore.currentSession().mask.modelConfig.model;
if (!isVisionModel(currentModel)) {
if (!isVisionModel(currentModel, customVisionModels)) {
return;
}
const items = (event.clipboardData || window.clipboardData).items;
Expand Down Expand Up @@ -1497,7 +1513,7 @@ function _Chat() {
}
}
},
[attachImages, chatStore],
[attachImages, chatStore, customVisionModels],
);

async function uploadImage() {
Expand Down Expand Up @@ -1545,7 +1561,7 @@ function _Chat() {
setAttachImages(images);
}

// 快捷键 shortcut keys
// 快捷键 shortcut keys
const [showShortcutKeyModal, setShowShortcutKeyModal] = useState(false);

useEffect(() => {
Expand Down
3 changes: 3 additions & 0 deletions app/components/home.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ import { AuthPage } from "./auth";
import { getClientConfig } from "../config/client";
import { type ClientApi, getClientApi } from "../client/api";
import { useAccessStore } from "../store";
import { useSyncStore } from "../store/sync";
import clsx from "clsx";

export function Loading(props: { noLogo?: boolean }) {
Expand Down Expand Up @@ -239,6 +240,8 @@ export function Home() {
return <Loading />;
}

useSyncStore.getState().download();

return (
<ErrorBoundary>
<Router>
Expand Down
35 changes: 34 additions & 1 deletion app/components/settings.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -528,6 +528,36 @@ function SyncItems() {
setShowSyncConfigModal(true);
}}
/>
{couldSync && (
<IconButton
icon={<UploadIcon />}
text={Locale.UI.Overwrite}
onClick={async () => {
try {
await syncStore.upload();
showToast(Locale.Settings.Sync.Success);
} catch (e) {
showToast(Locale.Settings.Sync.Fail);
console.error("[Sync]", e);
}
}}
/>
)}
{couldSync && (
<IconButton
icon={<DownloadIcon />}
text={Locale.UI.Overwrite}
onClick={async () => {
try {
await syncStore.download();
showToast(Locale.Settings.Sync.Success);
} catch (e) {
showToast(Locale.Settings.Sync.Fail);
console.error("[Sync]", e);
}
}}
/>
)}
{couldSync && (
<IconButton
icon={<ResetIcon />}
Expand Down Expand Up @@ -1393,7 +1423,10 @@ export function Settings() {
<IconButton
aria={Locale.UI.Close}
icon={<CloseIcon />}
onClick={() => navigate(Path.Home)}
onClick={() => {
navigate(Path.Home);
useSyncStore.getState().sync();
}}
bordered
/>
</div>
Expand Down
7 changes: 6 additions & 1 deletion app/config/server.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ declare global {
ENABLE_BALANCE_QUERY?: string; // allow user to query balance or not
DISABLE_FAST_LINK?: string; // disallow parse settings from url or not
CUSTOM_MODELS?: string; // to control custom models
VISION_MODELS?: string; // to control vision models
DEFAULT_MODEL?: string; // to control default model in every new chat window

// stability only
Expand Down Expand Up @@ -123,13 +124,16 @@ export const getServerSideConfig = () => {

const disableGPT4 = !!process.env.DISABLE_GPT4;
let customModels = process.env.CUSTOM_MODELS ?? "";
let visionModels = process.env.VISION_MODELS ?? "";
let defaultModel = process.env.DEFAULT_MODEL ?? "";

if (disableGPT4) {
if (customModels) customModels += ",";
customModels += DEFAULT_MODELS.filter(
(m) =>
(m.name.startsWith("gpt-4") || m.name.startsWith("chatgpt-4o") || m.name.startsWith("o1")) &&
(m.name.startsWith("gpt-4") ||
m.name.startsWith("chatgpt-4o") ||
m.name.startsWith("o1")) &&
!m.name.startsWith("gpt-4o-mini"),
)
.map((m) => "-" + m.name)
Expand Down Expand Up @@ -247,6 +251,7 @@ export const getServerSideConfig = () => {
hideBalanceQuery: !process.env.ENABLE_BALANCE_QUERY,
disableFastLink: !!process.env.DISABLE_FAST_LINK,
customModels,
visionModels,
defaultModel,
allowedWebDavEndpoints,
};
Expand Down
1 change: 1 addition & 0 deletions app/locales/cn.ts
Original file line number Diff line number Diff line change
Expand Up @@ -757,6 +757,7 @@ const cn = {
Export: "导出",
Import: "导入",
Sync: "同步",
Overwrite: "覆盖",
Config: "配置",
},
Exporter: {
Expand Down
1 change: 1 addition & 0 deletions app/locales/en.ts
Original file line number Diff line number Diff line change
Expand Up @@ -762,6 +762,7 @@ const en: LocaleType = {
Edit: "Edit",
Export: "Export",
Import: "Import",
Overwrite: "Overwrite",
Sync: "Sync",
Config: "Config",
},
Expand Down
1 change: 1 addition & 0 deletions app/locales/fr.ts
Original file line number Diff line number Diff line change
Expand Up @@ -589,6 +589,7 @@ const fr: PartialLocaleType = {
Edit: "Modifier",
Export: "Exporter",
Import: "Importer",
Overwrite: "Remplacer",
Sync: "Synchroniser",
Config: "Configurer",
},
Expand Down
1 change: 1 addition & 0 deletions app/locales/it.ts
Original file line number Diff line number Diff line change
Expand Up @@ -590,6 +590,7 @@ const it: PartialLocaleType = {
Edit: "Modifica",
Export: "Esporta",
Import: "Importa",
Overwrite: "Sostituisci",
Sync: "Sincronizza",
Config: "Configura",
},
Expand Down
Loading