diff --git a/app/components/chat.tsx b/app/components/chat.tsx
index 6691403e6..0edbd260c 100644
--- a/app/components/chat.tsx
+++ b/app/components/chat.tsx
@@ -13,7 +13,6 @@ import SendWhiteIcon from "../icons/send-white.svg";
import BrainIcon from "../icons/brain.svg";
import RenameIcon from "../icons/rename.svg";
import EditIcon from "../icons/rename.svg";
-import ExportIcon from "../icons/share.svg";
import ReturnIcon from "../icons/return.svg";
import CopyIcon from "../icons/copy.svg";
import SpeakIcon from "../icons/speak.svg";
@@ -1283,6 +1282,7 @@ function _Chat() {
});
};
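+   // useAppConfig exposes the persisted omeToken pushed in by the embedding OmeOffice app (see the message handler below)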
+ const appstore = useAppConfig();
const accessStore = useAccessStore();
const [speechStatus, setSpeechStatus] = useState(false);
const [speechLoading, setSpeechLoading] = useState(false);
@@ -1340,7 +1340,10 @@ function _Chat() {
) {
const copiedHello = Object.assign({}, BOT_HELLO);
if (!accessStore.isAuthorized()) {
-        copiedHello.content = Locale.Error.Unauthorized;
+        // leave the greeting untouched when the embedding app has already supplied an omeToken
+        if (isEmpty(appstore.omeToken)) {
+          copiedHello.content = Locale.Error.Unauthorized;
+        }
}
context.push(copiedHello);
}
@@ -1569,6 +1572,7 @@ function _Chat() {
const imagesData: string[] = [];
for (let i = 0; i < files.length; i++) {
const file = event.target.files[i];
+ console.log("file", file);
uploadImageRemote(file)
.then((dataUrl) => {
imagesData.push(dataUrl);
@@ -1735,7 +1739,7 @@ function _Chat() {
/>
)}
-          <div className="window-action-button">
+          {/* <div className="window-action-button">
             <IconButton
               icon={<ExportIcon />}
               bordered
@@ -1744,7 +1748,7 @@ function _Chat() {
setShowExport(true);
}}
/>
-          </div>
+          </div> */}
{showMaxIcon && (
{
- window.parent.postMessage("omemetis is ready", "*");
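+      // pick the handshake channel: a React Native host exposes window.ReactNativeWebView, otherwise signal the parent frame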
+ if (window.ReactNativeWebView) {
+ try {
+ const message = {
+ data: "omemetis is ready",
+ url: location.origin,
+ };
+ window.ReactNativeWebView.postMessage(JSON.stringify(message));
+ } catch {}
+ } else {
+ window.parent.postMessage("omemetis is ready", "*");
+ }
const handleMessage = (event: any) => {
- if (!event.origin.includes("omeoffice")) {
-          return; // ignore messages from untrusted origins
- }
+ const data = event.data;
- if (!isEmpty(event?.data?.omeToken))
- appConfig.setOmeToken(event.data.omeToken);
+ if (isEmpty(data) || (typeof data === "string" && data === "")) return;
+
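+      // the native host delivers a JSON string, while a browser parent posts a structured message event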
+ if (window.ReactNativeWebView) {
+ try {
+ const params = JSON.parse(data);
+
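+          // only accept a token explicitly tagged as coming from the OmeOfficeApp host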
+ if (!isEmpty(params?.ometoken) && params?.from === "OmeOfficeApp") {
+ appConfig.setOmeToken(params?.ometoken ?? "");
+ }
+ } catch {}
+ } else {
+ if (
+ !event.origin.includes("omeoffice") &&
+ !event.origin.includes("localhost")
+ ) {
+          return; // ignore messages from untrusted origins
+ }
+
+ if (!isEmpty(event?.data?.ometoken))
+ appConfig.setOmeToken(event.data.ometoken);
+ }
};
window.addEventListener("message", handleMessage);
diff --git a/app/components/settings.tsx b/app/components/settings.tsx
index 68ebcf084..c3ceee54c 100644
--- a/app/components/settings.tsx
+++ b/app/components/settings.tsx
@@ -49,8 +49,7 @@ import Locale, {
changeLang,
getLang,
} from "../locales";
-import { copyToClipboard, clientUpdate, semverCompare } from "../utils";
-import Link from "next/link";
+import { copyToClipboard, semverCompare } from "../utils";
import {
Anthropic,
Azure,
@@ -67,7 +66,6 @@ import {
RELEASE_URL,
STORAGE_KEY,
ServiceProvider,
- SlotID,
UPDATE_URL,
Stability,
Iflytek,
@@ -1510,7 +1508,7 @@ export function Settings() {
- checkUpdate(true)}
/>
)}
-
+ */}
-
+ */}
)}
- }
text={shouldNarrow ? undefined : Locale.Discovery.Name}
className={styles["sidebar-bar-button"]}
onClick={() => setshowDiscoverySelector(true)}
shadow
- />
+ /> */}
{showDiscoverySelector && (
- */}
>
}
secondaryAction={
diff --git a/app/masks/cn.ts b/app/masks/cn.ts
index 64842f6e8..95df359e3 100644
--- a/app/masks/cn.ts
+++ b/app/masks/cn.ts
@@ -33,7 +33,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -59,7 +59,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -85,7 +85,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -111,7 +111,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -137,7 +137,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -163,7 +163,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -189,7 +189,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -215,7 +215,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -247,7 +247,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 0.5,
max_tokens: 2000,
presence_penalty: 0,
@@ -273,7 +273,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -306,7 +306,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -339,7 +339,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -397,7 +397,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -429,7 +429,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-4",
+ model: "gpt-4o-mini",
temperature: 0.5,
max_tokens: 2000,
presence_penalty: 0,
diff --git a/app/masks/en.ts b/app/masks/en.ts
index ed130351f..f90c33dfc 100644
--- a/app/masks/en.ts
+++ b/app/masks/en.ts
@@ -14,7 +14,7 @@ export const EN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-4",
+ model: "gpt-4o-mini",
temperature: 0.3,
max_tokens: 2000,
presence_penalty: 0,
@@ -60,7 +60,7 @@ export const EN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-4",
+ model: "gpt-4o-mini",
temperature: 0.5,
max_tokens: 2000,
presence_penalty: 0,
@@ -86,7 +86,7 @@ export const EN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 0.5,
max_tokens: 2000,
presence_penalty: 0,
@@ -118,7 +118,7 @@ export const EN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-4",
+ model: "gpt-4o-mini",
temperature: 0.5,
max_tokens: 2000,
presence_penalty: 0,
diff --git a/app/masks/tw.ts b/app/masks/tw.ts
index 33e76c222..10d495dcf 100644
--- a/app/masks/tw.ts
+++ b/app/masks/tw.ts
@@ -33,7 +33,7 @@ export const TW_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -59,7 +59,7 @@ export const TW_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -85,7 +85,7 @@ export const TW_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -111,7 +111,7 @@ export const TW_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -137,7 +137,7 @@ export const TW_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -163,7 +163,7 @@ export const TW_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -189,7 +189,7 @@ export const TW_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -215,7 +215,7 @@ export const TW_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -247,7 +247,7 @@ export const TW_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 0.5,
max_tokens: 2000,
presence_penalty: 0,
@@ -273,7 +273,7 @@ export const TW_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -306,7 +306,7 @@ export const TW_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -339,7 +339,7 @@ export const TW_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -397,7 +397,7 @@ export const TW_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-3.5-turbo",
+ model: "gpt-4o-mini",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -429,7 +429,7 @@ export const TW_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
- model: "gpt-4",
+ model: "gpt-4o-mini",
temperature: 0.5,
max_tokens: 2000,
presence_penalty: 0,
diff --git a/app/utils/chat.ts b/app/utils/chat.ts
index efc496f2c..ca6ee31d8 100644
--- a/app/utils/chat.ts
+++ b/app/utils/chat.ts
@@ -138,7 +138,7 @@ export function uploadImage(file: Blob): Promise<string> {
})
.then((res) => res.json())
.then((res) => {
- // console.log("res", res);
+ console.log("upload res", res);
if (res?.code == 0 && res?.data) {
return res?.data;
}