
Commit 5e4a0e9

Merge pull request ChatGPTNextWeb#2028 from Yidadaa/bugfix-0618
fix: ChatGPTNextWeb#1771 should not lose chat context when sumindex > n - count
2 parents fd85d35 + 2d3c2cb commit 5e4a0e9
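
The core of the fix is the switch from Math.max to Math.min when choosing where the recent-message window starts. Below is a small worked example with illustrative numbers (not taken from the commit) for the case the title describes, sumindex > n - count: the summarize index has moved past the start of the short-term history window.

```ts
// Illustrative numbers only; the names mirror variables in app/store/chat.ts.
const n = 50; // total messages in the session
const historyMessageCount = 8; // modelConfig.historyMessageCount ("count")
const lastSummarizeIndex = 46; // session.lastSummarizeIndex ("sumindex")

const shortTermMemoryMessageIndex = Math.max(0, n - historyMessageCount); // 42
const longTermMemoryMessageIndex = lastSummarizeIndex; // 46

// Before the fix: the window started at the larger index, so only
// messages 46..49 were sent verbatim.
const mostRecentIndex = Math.max(
  shortTermMemoryMessageIndex,
  longTermMemoryMessageIndex,
); // 46

// After the fix: the window starts at the smaller index, so the whole
// configured history window (messages 42..49) is kept, no matter how far
// summarization has advanced.
const memoryStartIndex = Math.min(
  shortTermMemoryMessageIndex,
  longTermMemoryMessageIndex,
); // 42

console.log({ mostRecentIndex, memoryStartIndex }); // { mostRecentIndex: 46, memoryStartIndex: 42 }
```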

1 file changed: +7 -5 lines
app/store/chat.ts (+7 -5)
@@ -370,28 +370,30 @@ export const useChatStore = create<ChatStore>()(
       context.push(memoryPrompt);
     }
 
-    // get short term and unmemoried long term memory
+    // get short term and unmemorized long term memory
     const shortTermMemoryMessageIndex = Math.max(
       0,
       n - modelConfig.historyMessageCount,
     );
     const longTermMemoryMessageIndex = session.lastSummarizeIndex;
-    const mostRecentIndex = Math.max(
+
+    // try to concat history messages
+    const memoryStartIndex = Math.min(
       shortTermMemoryMessageIndex,
       longTermMemoryMessageIndex,
     );
-    const threshold = modelConfig.compressMessageLengthThreshold * 2;
+    const threshold = modelConfig.max_tokens;
 
     // get recent messages as many as possible
     const reversedRecentMessages = [];
     for (
       let i = n - 1, count = 0;
-      i >= mostRecentIndex && count < threshold;
+      i >= memoryStartIndex && count < threshold;
       i -= 1
     ) {
       const msg = messages[i];
       if (!msg || msg.isError) continue;
-      count += msg.content.length;
+      count += estimateTokenLength(msg.content);
       reversedRecentMessages.push(msg);
     }

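The second change is how the window is budgeted: the loop now counts estimated tokens via estimateTokenLength against modelConfig.max_tokens, instead of raw character length against compressMessageLengthThreshold * 2. Below is a minimal sketch of how such an estimator and the budgeted loop could look, assuming a simple character-class heuristic; the actual estimateTokenLength helper in this repository may be implemented differently.

```ts
// Hypothetical stand-in for the estimateTokenLength helper used in the diff;
// the repository's real implementation may differ. The heuristic assumes
// ASCII text averages roughly four characters per token, while CJK and other
// non-ASCII characters count as about one token each.
function estimateTokenLength(input: string): number {
  let tokens = 0;
  for (const ch of input) {
    tokens += ch.charCodeAt(0) < 128 ? 0.25 : 1;
  }
  return Math.ceil(tokens);
}

// Usage mirroring the loop in the diff: walk messages from newest to oldest
// and stop once the estimated token budget is spent.
const threshold = 4096; // stands in for modelConfig.max_tokens
const contents = ["short reply", "一段较长的中文消息", "another message"];
let count = 0;
const reversedRecentMessages: string[] = [];
for (let i = contents.length - 1; i >= 0 && count < threshold; i -= 1) {
  count += estimateTokenLength(contents[i]);
  reversedRecentMessages.push(contents[i]);
}
```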