Merge pull request #187 from Webifi/main
Fix message edits not being saved to chatStore, a few other tweaks
Commit: b3bb4ddcba
@@ -3,7 +3,7 @@
 import { mergeProfileFields, prepareSummaryPrompt } from './Profiles.svelte'
 import { countMessageTokens, countPromptTokens, getModelMaxTokens } from './Stats.svelte'
 import type { Chat, ChatCompletionOpts, ChatSettings, Message, Model, Request, RequestImageGeneration } from './Types.svelte'
-import { deleteMessage, getChatSettingValueNullDefault, insertMessages, getApiKey, addError, currentChatMessages, getMessages, updateMessages } from './Storage.svelte'
+import { deleteMessage, getChatSettingValueNullDefault, insertMessages, getApiKey, addError, currentChatMessages, getMessages, updateMessages, deleteSummaryMessage } from './Storage.svelte'
 import { scrollToBottom, scrollToMessage } from './Util.svelte'
 import { getRequestSettingList, defaultModel } from './Settings.svelte'
 import { EventStreamContentType, fetchEventSource } from '@microsoft/fetch-event-source'

@@ -418,7 +418,9 @@ export class ChatRequest {
     const summarizedIds = rw.map(m => m.uuid)
     const summaryIds = [summaryResponse.uuid]
     let loopCount = 0
+    let networkRetry = 2 // number of retries on network error
     while (continueCounter-- > 0) {
+      let error = false
       try {
         const summary = await _this.sendRequest(top.concat(rw).concat([summaryRequest]).concat(loopCount > 0 ? [summaryResponse] : []), {
           summaryRequest: true,

@@ -430,7 +432,7 @@ export class ChatRequest {
             if (opts.streaming) scrollToMessage(summaryResponse.uuid, 150, true, true)
           }
         } as ChatCompletionOpts, {
-          temperature: 0.1, // make summary more deterministic
+          temperature: chatSettings.summaryTemperature, // make summary more deterministic
           top_p: 1,
           presence_penalty: 0,
           frequency_penalty: 0,

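The hard-coded 0.1 summary temperature is now read from chat settings. A minimal sketch of threading that setting into the sampling overrides sent with the summary request, assuming a chatSettings object shaped like the defaults added in this PR (the buildSummaryOverrides helper is illustrative, not project code):

// Sketch: derive the sampling overrides for a summary request from chat settings.
// Field names mirror the diff above; the helper itself is hypothetical.
type SamplingOverrides = {
  temperature: number
  top_p: number
  presence_penalty: number
  frequency_penalty: number
}

const buildSummaryOverrides = (chatSettings: { summaryTemperature: number }): SamplingOverrides => ({
  temperature: chatSettings.summaryTemperature, // defaults to 0.1; lower values keep summaries more deterministic
  top_p: 1,
  presence_penalty: 0,
  frequency_penalty: 0
})
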
@@ -445,10 +447,21 @@ export class ChatRequest {
           deleteMessage(chatId, srid)
           return summary
         }
+      } catch (e) {
+        if (e.message?.includes('network error') && networkRetry > 0) {
+          networkRetry--
+          error = true
+        } else {
+          _this.updating = false
+          _this.updatingMessage = ''
+          deleteSummaryMessage(chatId, srid)
+          throw e
+        }
+      }
       // Looks like we got our summarized messages.
       // Mark the new summaries as such
       // Need more?
-      if (summaryResponse.finish_reason === 'length' && continueCounter > 0) {
+      if ((error || summaryResponse.finish_reason === 'length') && continueCounter > 0) {
         // Our summary was truncated
         // Try to get more of it
         delete summaryResponse.finish_reason

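The new catch block retries the summary request a limited number of times on network errors instead of failing outright: networkRetry bounds the retries, error re-enters the while (continueCounter-- > 0) loop, and any other failure still resets the updating state, removes the pending summary message, and rethrows. The same bounded-retry shape in isolation, with a placeholder request function (requestSummary is not code from this repository):

// Sketch of the retry pattern above, decoupled from the chat request plumbing.
async function summarizeWithRetry (requestSummary: () => Promise<string>): Promise<string> {
  let networkRetry = 2 // number of retries on network error
  let continueCounter = 5 // in the real code this also drives continuation of truncated summaries
  let result = ''
  while (continueCounter-- > 0) {
    let error = false
    try {
      result = await requestSummary()
    } catch (e: any) {
      if (e.message?.includes('network error') && networkRetry > 0) {
        networkRetry-- // burn one retry and loop again
        error = true
      } else {
        throw e // non-network error, or retries exhausted
      }
    }
    if (!error) break // got a summary; stop looping
  }
  return result
}
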
@@ -464,12 +477,6 @@ export class ChatRequest {
         // We're done
         continueCounter = 0
       }
-      } catch (e) {
-        _this.updating = false
-        _this.updatingMessage = ''
-        deleteMessage(chatId, srid)
-        throw e
-      }
     }
     summaryResponse.summary = summarizedIds
     // Disable the messages we summarized so they still show in history

@@ -70,6 +70,7 @@
   const doChange = () => {
     if (message.content !== original) {
       dispatch('change', message)
+      updateMessages(chatId)
     }
   }

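This one-line addition is the headline fix: dispatching change only updates the in-memory message, so an edit never reached the chat store and was lost on reload. Calling updateMessages(chatId) writes the edit through. A rough sketch of the handler in isolation, with the component wiring passed in explicitly (the parameter list here is illustrative):

// Sketch: persist an edited message in addition to notifying listeners.
const doChange = (
  chatId: number,
  message: { content: string },
  original: string,
  dispatch: (name: string, detail: unknown) => void,
  updateMessages: (chatId: number) => void
) => {
  if (message.content !== original) {
    dispatch('change', message) // in-memory / UI update, as before
    updateMessages(chatId) // write-through so the edit survives a reload
  }
}
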
@@ -78,6 +78,7 @@ const defaults:ChatSettings = {
   summaryThreshold: 3000,
   summarySize: 1000,
   summaryExtend: 0,
+  summaryTemperature: 0.1,
   pinTop: 0,
   pinBottom: 6,
   summaryPrompt: '',

@@ -247,6 +248,16 @@ const summarySettings: ChatSetting[] = [
     ],
     hide: (chatId) => getChatSettings(chatId).continuousChat !== 'summary'
   },
+  {
+    key: 'summaryTemperature',
+    name: 'Summary Temperature',
+    title: 'What sampling temperature to use, between 0 and 2, when generating summary. Lower values, like 0, will be more deterministic.',
+    min: 0,
+    max: 2,
+    step: 0.1,
+    type: 'number',
+    hide: (chatId) => getChatSettings(chatId).continuousChat !== 'summary'
+  },
   {
     key: 'pinTop',
     name: 'Keep First Prompts',

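The new entry exposes Summary Temperature in the settings UI, limited to 0-2 in 0.1 steps and hidden unless continuous chat is in 'summary' mode. A small hedged sketch of reading the value back with a guard against out-of-range input (clampSummaryTemperature is a hypothetical helper; the settings shape mirrors the summaryTemperature default added in this PR):

// Sketch: clamp a per-chat summary temperature to the 0-2 range the UI advertises.
const clampSummaryTemperature = (settings: { summaryTemperature?: number }): number => {
  const value = settings.summaryTemperature ?? 0.1 // fall back to the new default
  return Math.min(2, Math.max(0, value))
}
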
@@ -235,18 +235,19 @@
   }, 10)
 }
 
-let setMessagesTimer: any
+const setMessagesTimers: any = {}
 export const setMessages = (chatId: number, messages: Message[]) => {
   if (get(currentChatId) === chatId) {
     // update current message cache right away
     currentChatMessages.set(messages)
-    clearTimeout(setMessagesTimer)
+    clearTimeout(setMessagesTimers[chatId])
     // delay expensive all chats update for a bit
-    setMessagesTimer = setTimeout(() => {
+    setMessagesTimers[chatId] = setTimeout(() => {
       getChat(chatId).messages = messages
       saveChatStore()
     }, 200)
   } else {
+    clearTimeout(setMessagesTimers[chatId])
     getChat(chatId).messages = messages
     saveChatStore()
   }

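Replacing the single setMessagesTimer with a setMessagesTimers map gives each chat its own debounce timer, so quick successive edits in one chat no longer cancel the pending save of another, and the else branch now clears any pending timer before saving immediately. The per-key debounce pattern on its own, with generic names rather than the project's (a sketch, not the repository's Storage code):

// Sketch: debounce a save per key (here, per chat id) instead of globally.
const pendingTimers: Record<number, ReturnType<typeof setTimeout>> = {}

function debouncePerKey (key: number, fn: () => void, delayMs = 200): void {
  clearTimeout(pendingTimers[key]) // cancel only this key's pending call
  pendingTimers[key] = setTimeout(() => {
    delete pendingTimers[key]
    fn()
  }, delayMs)
}

// Usage sketch: debouncePerKey(chatId, () => saveChatStore(), 200)
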
@@ -86,6 +86,7 @@
   summaryThreshold: number;
   summarySize: number;
   summaryExtend: number;
+  summaryTemperature: number;
   pinTop: number;
   pinBottom: number;
   summaryPrompt: string;