
Commit 1cce87a

Merge pull request #4181 from ChatGPTNextWeb/main
merge main
2 parents: 78c4084 + 99fb9dc

File tree

3 files changed: +76 -11 lines changed


app/client/platforms/openai.ts

Lines changed: 10 additions & 0 deletions

@@ -110,6 +110,16 @@ export class ChatGPTApi implements LLMApi {
       // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
     };
 
+    // add max_tokens to vision model
+    if (visionModel) {
+      Object.defineProperty(requestPayload, "max_tokens", {
+        enumerable: true,
+        configurable: true,
+        writable: true,
+        value: modelConfig.max_tokens,
+      });
+    }
+
     console.log("[Request] openai payload: ", requestPayload);
 
     const shouldStream = !!options.config.stream;
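Note (illustrative, not part of the commit): because `enumerable`, `configurable`, and `writable` are all `true` and `max_tokens` does not already exist on `requestPayload`, the `Object.defineProperty` call above behaves like a plain conditional property assignment. A minimal TypeScript sketch of the same effect, using a simplified payload shape:

// Sketch only; the payload type here is simplified, not the app's real one.
interface SketchPayload {
  model: string;
  max_tokens?: number; // attached only when targeting a vision model
}

function attachVisionMaxTokens(
  payload: SketchPayload,
  visionModel: boolean,
  maxTokens: number,
): SketchPayload {
  if (visionModel) {
    // Equivalent to the committed Object.defineProperty call, since all
    // descriptor flags are true and the property is being newly created.
    payload.max_tokens = maxTokens;
  }
  return payload;
}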

app/components/chat.tsx

Lines changed: 60 additions & 6 deletions

@@ -6,6 +6,7 @@ import React, {
   useMemo,
   useCallback,
   Fragment,
+  RefObject,
 } from "react";
 
 import SendWhiteIcon from "../icons/send-white.svg";
@@ -382,11 +383,13 @@ function ChatAction(props: {
   );
 }
 
-function useScrollToBottom() {
+function useScrollToBottom(
+  scrollRef: RefObject<HTMLDivElement>,
+  detach: boolean = false,
+) {
   // for auto-scroll
-  const scrollRef = useRef<HTMLDivElement>(null);
-  const [autoScroll, setAutoScroll] = useState(true);
 
+  const [autoScroll, setAutoScroll] = useState(true);
   function scrollDomToBottom() {
     const dom = scrollRef.current;
     if (dom) {

@@ -399,7 +402,7 @@ function useScrollToBottom() {
 
   // auto scroll
   useEffect(() => {
-    if (autoScroll) {
+    if (autoScroll && !detach) {
       scrollDomToBottom();
     }
   });
@@ -658,7 +661,17 @@ function _Chat() {
   const [userInput, setUserInput] = useState("");
   const [isLoading, setIsLoading] = useState(false);
   const { submitKey, shouldSubmit } = useSubmitHandler();
-  const { scrollRef, setAutoScroll, scrollDomToBottom } = useScrollToBottom();
+  const scrollRef = useRef<HTMLDivElement>(null);
+  const isScrolledToBottom = scrollRef?.current
+    ? Math.abs(
+        scrollRef.current.scrollHeight -
+          (scrollRef.current.scrollTop + scrollRef.current.clientHeight),
+      ) <= 1
+    : false;
+  const { setAutoScroll, scrollDomToBottom } = useScrollToBottom(
+    scrollRef,
+    isScrolledToBottom,
+  );
   const [hitBottom, setHitBottom] = useState(true);
   const isMobileScreen = useMobileScreen();
   const navigate = useNavigate();
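Note (illustrative, not part of the commit): `isScrolledToBottom` treats the chat body as being at the bottom when the remaining scroll distance is within 1px, and that value is passed to `useScrollToBottom` as `detach`, so the auto-scroll effect skips `scrollDomToBottom()` while the view is already at the bottom. A standalone sketch of the same 1px check:

// Sketch only: true when an element is scrolled to within 1px of its bottom.
function isAtBottom(el: HTMLElement): boolean {
  const remaining = el.scrollHeight - (el.scrollTop + el.clientHeight);
  return Math.abs(remaining) <= 1;
}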
@@ -1003,7 +1016,6 @@ function _Chat() {
     setHitBottom(isHitBottom);
     setAutoScroll(isHitBottom);
   };
-
   function scrollToBottom() {
     setMsgRenderIndex(renderMessages.length - CHAT_PAGE_SIZE);
     scrollDomToBottom();
@@ -1088,6 +1100,47 @@ function _Chat() {
     };
     // eslint-disable-next-line react-hooks/exhaustive-deps
   }, []);
+
+  const handlePaste = useCallback(
+    async (event: React.ClipboardEvent<HTMLTextAreaElement>) => {
+      const currentModel = chatStore.currentSession().mask.modelConfig.model;
+      if(!isVisionModel(currentModel)){return;}
+      const items = (event.clipboardData || window.clipboardData).items;
+      for (const item of items) {
+        if (item.kind === "file" && item.type.startsWith("image/")) {
+          event.preventDefault();
+          const file = item.getAsFile();
+          if (file) {
+            const images: string[] = [];
+            images.push(...attachImages);
+            images.push(
+              ...(await new Promise<string[]>((res, rej) => {
+                setUploading(true);
+                const imagesData: string[] = [];
+                compressImage(file, 256 * 1024)
+                  .then((dataUrl) => {
+                    imagesData.push(dataUrl);
+                    setUploading(false);
+                    res(imagesData);
+                  })
+                  .catch((e) => {
+                    setUploading(false);
+                    rej(e);
+                  });
+              })),
+            );
+            const imagesLength = images.length;
+
+            if (imagesLength > 3) {
+              images.splice(3, imagesLength - 3);
+            }
+            setAttachImages(images);
+          }
+        }
+      }
+    },
+    [attachImages, chatStore],
+  );
 
   async function uploadImage() {
     const images: string[] = [];
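Note (illustrative, not part of the commit): `handlePaste` only acts for vision-capable models, converts each pasted image file into a data URL via `compressImage(file, 256 * 1024)`, and keeps at most 3 attached images. A simplified sketch of just the clipboard-filtering step, without the app's upload state or compression:

// Sketch only: collect pasted image files from a clipboard event.
function getPastedImageFiles(event: ClipboardEvent): File[] {
  const files: File[] = [];
  const items = event.clipboardData ? event.clipboardData.items : null;
  if (!items) return files;
  for (let i = 0; i < items.length; i++) {
    const item = items[i];
    if (item.kind === "file" && item.type.startsWith("image/")) {
      const file = item.getAsFile();
      if (file) files.push(file);
    }
  }
  return files;
}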
@@ -1437,6 +1490,7 @@ function _Chat() {
             onKeyDown={onInputKeyDown}
             onFocus={scrollToBottom}
             onClick={scrollToBottom}
+            onPaste={handlePaste}
             rows={inputRows}
             autoFocus={autoFocus}
             style={{

app/utils.ts

Lines changed: 6 additions & 5 deletions

@@ -9,8 +9,9 @@ export function trimTopic(topic: string) {
   // This will remove the specified punctuation from the end of the string
   // and also trim quotes from both the start and end if they exist.
   return topic
-    .replace(/^["]+|["]+$/g, "")
-    .replace(/[",.!?]*$/, "");
+    // fix for gemini
+    .replace(/^["*]+|["*]+$/g, "")
+    .replace(/[",.!?*]*$/, "");
 }
 
 export async function copyToClipboard(text: string) {

@@ -292,8 +293,8 @@ export function getMessageImages(message: RequestMessage): string[] {
 
 export function isVisionModel(model: string) {
   return (
-    model.startsWith("gpt-4-vision") ||
-    model.startsWith("gemini-pro-vision") ||
-    !DEFAULT_MODELS.find((m) => m.name == model)
+    // model.startsWith("gpt-4-vision") ||
+    // model.startsWith("gemini-pro-vision") ||
+    model.includes("vision")
   );
 }
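Note (illustrative, not part of the commit): with these changes `trimTopic` also strips leading and trailing asterisks (the `// fix for gemini` comment suggests Gemini-generated titles arrive wrapped in Markdown bold), and `isVisionModel` now matches any model name containing "vision" instead of the previous checks. Expected behavior, as a rough sketch:

// Sketch only: expected results with the patched helpers.
trimTopic('"**New Topic**"');          // => "New Topic"
isVisionModel("gpt-4-vision-preview"); // => true
isVisionModel("gemini-pro-vision");    // => true
isVisionModel("gpt-3.5-turbo");        // => false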

0 commit comments
