commit 1cce87acaa

app/client/platforms/openai.ts

@@ -110,6 +110,16 @@ export class ChatGPTApi implements LLMApi {
       // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
     };
 
+    // add max_tokens to vision model
+    if (visionModel) {
+      Object.defineProperty(requestPayload, "max_tokens", {
+        enumerable: true,
+        configurable: true,
+        writable: true,
+        value: modelConfig.max_tokens,
+      });
+    }
+
     console.log("[Request] openai payload: ", requestPayload);
 
     const shouldStream = !!options.config.stream;
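Note: with all three descriptors set to true, the `Object.defineProperty` call above behaves like a plain property assignment; a minimal standalone sketch of the same branch (the `RequestPayload` shape here is illustrative, not the repo's actual type):

// Sketch only: conditionally attach max_tokens for vision models.
interface RequestPayload {
  model: string;
  messages: unknown[];
  max_tokens?: number; // omitted entirely for non-vision models
}

function attachMaxTokens(
  payload: RequestPayload,
  isVision: boolean,
  maxTokens: number,
): RequestPayload {
  if (isVision) {
    // equivalent to the enumerable/configurable/writable defineProperty call
    payload.max_tokens = maxTokens;
  }
  return payload;
}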
app/components/chat.tsx

@@ -6,6 +6,7 @@ import React, {
   useMemo,
   useCallback,
   Fragment,
+  RefObject,
 } from "react";
 
 import SendWhiteIcon from "../icons/send-white.svg";
@@ -382,11 +383,13 @@ function ChatAction(props: {
   );
 }
 
-function useScrollToBottom() {
+function useScrollToBottom(
+  scrollRef: RefObject<HTMLDivElement>,
+  detach: boolean = false,
+) {
   // for auto-scroll
-  const scrollRef = useRef<HTMLDivElement>(null);
-  const [autoScroll, setAutoScroll] = useState(true);
 
+  const [autoScroll, setAutoScroll] = useState(true);
   function scrollDomToBottom() {
     const dom = scrollRef.current;
     if (dom) {
@@ -399,7 +402,7 @@ function useScrollToBottom() {
 
   // auto scroll
   useEffect(() => {
-    if (autoScroll) {
+    if (autoScroll && !detach) {
       scrollDomToBottom();
     }
   });
@@ -658,7 +661,17 @@ function _Chat() {
   const [userInput, setUserInput] = useState("");
   const [isLoading, setIsLoading] = useState(false);
   const { submitKey, shouldSubmit } = useSubmitHandler();
-  const { scrollRef, setAutoScroll, scrollDomToBottom } = useScrollToBottom();
+  const scrollRef = useRef<HTMLDivElement>(null);
+  const isScrolledToBottom = scrollRef?.current
+    ? Math.abs(
+        scrollRef.current.scrollHeight -
+          (scrollRef.current.scrollTop + scrollRef.current.clientHeight),
+      ) <= 1
+    : false;
+  const { setAutoScroll, scrollDomToBottom } = useScrollToBottom(
+    scrollRef,
+    isScrolledToBottom,
+  );
   const [hitBottom, setHitBottom] = useState(true);
   const isMobileScreen = useMobileScreen();
   const navigate = useNavigate();
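Note: the second argument passed to `useScrollToBottom` above (`isScrolledToBottom`, consumed as `detach`) comes from a within-1px bottom-of-scroll check; the same math as a standalone helper (the name and extraction are illustrative only):

// Sketch: true when the container is scrolled to (within 1px of) its bottom.
function isAtBottom(el: HTMLElement): boolean {
  const distanceFromBottom =
    el.scrollHeight - (el.scrollTop + el.clientHeight);
  return Math.abs(distanceFromBottom) <= 1;
}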
@@ -1003,7 +1016,6 @@ function _Chat() {
     setHitBottom(isHitBottom);
-    setAutoScroll(isHitBottom);
   };
 
   function scrollToBottom() {
     setMsgRenderIndex(renderMessages.length - CHAT_PAGE_SIZE);
     scrollDomToBottom();
@@ -1089,6 +1101,47 @@ function _Chat() {
     // eslint-disable-next-line react-hooks/exhaustive-deps
   }, []);
 
+  const handlePaste = useCallback(
+    async (event: React.ClipboardEvent<HTMLTextAreaElement>) => {
+      const currentModel = chatStore.currentSession().mask.modelConfig.model;
+      if(!isVisionModel(currentModel)){return;}
+      const items = (event.clipboardData || window.clipboardData).items;
+      for (const item of items) {
+        if (item.kind === "file" && item.type.startsWith("image/")) {
+          event.preventDefault();
+          const file = item.getAsFile();
+          if (file) {
+            const images: string[] = [];
+            images.push(...attachImages);
+            images.push(
+              ...(await new Promise<string[]>((res, rej) => {
+                setUploading(true);
+                const imagesData: string[] = [];
+                compressImage(file, 256 * 1024)
+                  .then((dataUrl) => {
+                    imagesData.push(dataUrl);
+                    setUploading(false);
+                    res(imagesData);
+                  })
+                  .catch((e) => {
+                    setUploading(false);
+                    rej(e);
+                  });
+              })),
+            );
+            const imagesLength = images.length;
+
+            if (imagesLength > 3) {
+              images.splice(3, imagesLength - 3);
+            }
+            setAttachImages(images);
+          }
+        }
+      }
+    },
+    [attachImages, chatStore],
+  );
+
   async function uploadImage() {
     const images: string[] = [];
     images.push(...attachImages);
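Note: `handlePaste` wraps the repo's `compressImage` helper in a Promise, toggles the uploading flag, and caps attachments at three images. A stripped-down sketch of just the clipboard-to-data-URL flow using standard DOM APIs (a plain `FileReader` stands in for `compressImage` here):

// Sketch: collect pasted image files as data URLs, at most three.
async function imagesFromPaste(event: ClipboardEvent): Promise<string[]> {
  const urls: string[] = [];
  const items = Array.from(event.clipboardData?.items ?? []);
  for (const item of items) {
    if (item.kind === "file" && item.type.startsWith("image/")) {
      const file = item.getAsFile();
      if (!file) continue;
      urls.push(
        await new Promise<string>((resolve, reject) => {
          const reader = new FileReader();
          reader.onload = () => resolve(reader.result as string);
          reader.onerror = () => reject(reader.error);
          // the real handler calls compressImage(file, 256 * 1024) instead
          reader.readAsDataURL(file);
        }),
      );
    }
  }
  return urls.slice(0, 3); // mirror the handler's three-image cap
}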
@@ -1437,6 +1490,7 @@ function _Chat() {
             onKeyDown={onInputKeyDown}
             onFocus={scrollToBottom}
             onClick={scrollToBottom}
+            onPaste={handlePaste}
             rows={inputRows}
             autoFocus={autoFocus}
             style={{
app/utils.ts
@@ -9,8 +9,9 @@ export function trimTopic(topic: string) {
   // This will remove the specified punctuation from the end of the string
   // and also trim quotes from both the start and end if they exist.
   return topic
-    .replace(/^["“”]+|["“”]+$/g, "")
-    .replace(/[,。!?”“"、,.!?]*$/, "");
+    // fix for gemini
+    .replace(/^["“”*]+|["“”*]+$/g, "")
+    .replace(/[,。!?”“"、,.!?*]*$/, "");
 }
 
 export async function copyToClipboard(text: string) {
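Note: the `*` added to both character classes handles Gemini titles that come back wrapped in Markdown emphasis. Illustrative inputs run through a standalone copy of the two `replace` calls:

// Sketch of the updated trimTopic behaviour.
const trim = (topic: string) =>
  topic
    .replace(/^["“”*]+|["“”*]+$/g, "")
    .replace(/[,。!?”“"、,.!?*]*$/, "");

trim("**Weekly Report**"); // -> "Weekly Report" (bold markers stripped)
trim("“Trip Plan”,");      // -> "Trip Plan" (quotes and trailing comma stripped)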
@@ -292,8 +293,8 @@ export function getMessageImages(message: RequestMessage): string[] {
 
 export function isVisionModel(model: string) {
   return (
-    model.startsWith("gpt-4-vision") ||
-    model.startsWith("gemini-pro-vision") ||
-    !DEFAULT_MODELS.find((m) => m.name == model)
+    // model.startsWith("gpt-4-vision") ||
+    // model.startsWith("gemini-pro-vision") ||
+    model.includes("vision")
   );
 }
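Note: the rewritten check replaces the prefix allow-list and the `!DEFAULT_MODELS` fallback with a plain substring match, so only model names that literally contain "vision" qualify. Illustrative cases:

// Sketch: behaviour of the simplified check.
const isVision = (model: string) => model.includes("vision");

isVision("gpt-4-vision-preview"); // true
isVision("gemini-pro-vision");    // true
isVision("my-custom-model");      // false (the old !DEFAULT_MODELS fallback would have said true)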