Merge pull request #187 from Webifi/main

Fix message edits not being saved to chatStore, a few other tweaks
This commit is contained in:
Niek van der Maas 2023-06-19 06:32:16 +02:00 committed by GitHub
commit b3bb4ddcba
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 49 additions and 28 deletions

View File

@@ -3,7 +3,7 @@
import { mergeProfileFields, prepareSummaryPrompt } from './Profiles.svelte' import { mergeProfileFields, prepareSummaryPrompt } from './Profiles.svelte'
import { countMessageTokens, countPromptTokens, getModelMaxTokens } from './Stats.svelte' import { countMessageTokens, countPromptTokens, getModelMaxTokens } from './Stats.svelte'
import type { Chat, ChatCompletionOpts, ChatSettings, Message, Model, Request, RequestImageGeneration } from './Types.svelte' import type { Chat, ChatCompletionOpts, ChatSettings, Message, Model, Request, RequestImageGeneration } from './Types.svelte'
import { deleteMessage, getChatSettingValueNullDefault, insertMessages, getApiKey, addError, currentChatMessages, getMessages, updateMessages } from './Storage.svelte' import { deleteMessage, getChatSettingValueNullDefault, insertMessages, getApiKey, addError, currentChatMessages, getMessages, updateMessages, deleteSummaryMessage } from './Storage.svelte'
import { scrollToBottom, scrollToMessage } from './Util.svelte' import { scrollToBottom, scrollToMessage } from './Util.svelte'
import { getRequestSettingList, defaultModel } from './Settings.svelte' import { getRequestSettingList, defaultModel } from './Settings.svelte'
import { EventStreamContentType, fetchEventSource } from '@microsoft/fetch-event-source' import { EventStreamContentType, fetchEventSource } from '@microsoft/fetch-event-source'
@@ -418,7 +418,9 @@ export class ChatRequest {
const summarizedIds = rw.map(m => m.uuid) const summarizedIds = rw.map(m => m.uuid)
const summaryIds = [summaryResponse.uuid] const summaryIds = [summaryResponse.uuid]
let loopCount = 0 let loopCount = 0
let networkRetry = 2 // number of retries on network error
while (continueCounter-- > 0) { while (continueCounter-- > 0) {
let error = false
try { try {
const summary = await _this.sendRequest(top.concat(rw).concat([summaryRequest]).concat(loopCount > 0 ? [summaryResponse] : []), { const summary = await _this.sendRequest(top.concat(rw).concat([summaryRequest]).concat(loopCount > 0 ? [summaryResponse] : []), {
summaryRequest: true, summaryRequest: true,
@@ -430,7 +432,7 @@ export class ChatRequest {
if (opts.streaming) scrollToMessage(summaryResponse.uuid, 150, true, true) if (opts.streaming) scrollToMessage(summaryResponse.uuid, 150, true, true)
} }
} as ChatCompletionOpts, { } as ChatCompletionOpts, {
temperature: 0.1, // make summary more deterministic temperature: chatSettings.summaryTemperature, // make summary more deterministic
top_p: 1, top_p: 1,
presence_penalty: 0, presence_penalty: 0,
frequency_penalty: 0, frequency_penalty: 0,
@@ -445,30 +447,35 @@ export class ChatRequest {
deleteMessage(chatId, srid) deleteMessage(chatId, srid)
return summary return summary
} }
// Looks like we got our summarized messages.
// Mark the new summaries as such
// Need more?
if (summaryResponse.finish_reason === 'length' && continueCounter > 0) {
// Our summary was truncated
// Try to get more of it
delete summaryResponse.finish_reason
_this.updatingMessage = 'Summarizing more...'
let _recount = countPromptTokens(top.concat(rw).concat([summaryRequest]).concat([summaryResponse]), model)
while (rw.length && (_recount + maxSummaryTokens >= maxTokens)) {
rw.shift()
_recount = countPromptTokens(top.concat(rw).concat([summaryRequest]).concat([summaryResponse]), model)
}
loopCount++
continue
} else {
// We're done
continueCounter = 0
}
} catch (e) { } catch (e) {
_this.updating = false if (e.message?.includes('network error') && networkRetry > 0) {
_this.updatingMessage = '' networkRetry--
deleteMessage(chatId, srid) error = true
throw e } else {
_this.updating = false
_this.updatingMessage = ''
deleteSummaryMessage(chatId, srid)
throw e
}
}
// Looks like we got our summarized messages.
// Mark the new summaries as such
// Need more?
if ((error || summaryResponse.finish_reason === 'length') && continueCounter > 0) {
// Our summary was truncated
// Try to get more of it
delete summaryResponse.finish_reason
_this.updatingMessage = 'Summarizing more...'
let _recount = countPromptTokens(top.concat(rw).concat([summaryRequest]).concat([summaryResponse]), model)
while (rw.length && (_recount + maxSummaryTokens >= maxTokens)) {
rw.shift()
_recount = countPromptTokens(top.concat(rw).concat([summaryRequest]).concat([summaryResponse]), model)
}
loopCount++
continue
} else {
// We're done
continueCounter = 0
} }
} }
summaryResponse.summary = summarizedIds summaryResponse.summary = summarizedIds

View File

@@ -70,6 +70,7 @@
const doChange = () => { const doChange = () => {
if (message.content !== original) { if (message.content !== original) {
dispatch('change', message) dispatch('change', message)
updateMessages(chatId)
} }
} }

View File

@@ -78,6 +78,7 @@ const defaults:ChatSettings = {
summaryThreshold: 3000, summaryThreshold: 3000,
summarySize: 1000, summarySize: 1000,
summaryExtend: 0, summaryExtend: 0,
summaryTemperature: 0.1,
pinTop: 0, pinTop: 0,
pinBottom: 6, pinBottom: 6,
summaryPrompt: '', summaryPrompt: '',
@@ -247,6 +248,16 @@ const summarySettings: ChatSetting[] = [
], ],
hide: (chatId) => getChatSettings(chatId).continuousChat !== 'summary' hide: (chatId) => getChatSettings(chatId).continuousChat !== 'summary'
}, },
{
key: 'summaryTemperature',
name: 'Summary Temperature',
title: 'What sampling temperature to use, between 0 and 2, when generating summary. Lower values, like 0, will be more deterministic.',
min: 0,
max: 2,
step: 0.1,
type: 'number',
hide: (chatId) => getChatSettings(chatId).continuousChat !== 'summary'
},
{ {
key: 'pinTop', key: 'pinTop',
name: 'Keep First Prompts', name: 'Keep First Prompts',

View File

@@ -235,18 +235,19 @@
}, 10) }, 10)
} }
let setMessagesTimer: any const setMessagesTimers: any = {}
export const setMessages = (chatId: number, messages: Message[]) => { export const setMessages = (chatId: number, messages: Message[]) => {
if (get(currentChatId) === chatId) { if (get(currentChatId) === chatId) {
// update current message cache right away // update current message cache right away
currentChatMessages.set(messages) currentChatMessages.set(messages)
clearTimeout(setMessagesTimer) clearTimeout(setMessagesTimers[chatId])
// delay expensive all chats update for a bit // delay expensive all chats update for a bit
setMessagesTimer = setTimeout(() => { setMessagesTimers[chatId] = setTimeout(() => {
getChat(chatId).messages = messages getChat(chatId).messages = messages
saveChatStore() saveChatStore()
}, 200) }, 200)
} else { } else {
clearTimeout(setMessagesTimers[chatId])
getChat(chatId).messages = messages getChat(chatId).messages = messages
saveChatStore() saveChatStore()
} }

View File

@@ -86,6 +86,7 @@
summaryThreshold: number; summaryThreshold: number;
summarySize: number; summarySize: number;
summaryExtend: number; summaryExtend: number;
summaryTemperature: number;
pinTop: number; pinTop: number;
pinBottom: number; pinBottom: number;
summaryPrompt: string; summaryPrompt: string;