Merge pull request #187 from Webifi/main
Fix message edits not being saved to chatStore, a few other tweaks
commit b3bb4ddcba
@@ -3,7 +3,7 @@
     import { mergeProfileFields, prepareSummaryPrompt } from './Profiles.svelte'
     import { countMessageTokens, countPromptTokens, getModelMaxTokens } from './Stats.svelte'
     import type { Chat, ChatCompletionOpts, ChatSettings, Message, Model, Request, RequestImageGeneration } from './Types.svelte'
-    import { deleteMessage, getChatSettingValueNullDefault, insertMessages, getApiKey, addError, currentChatMessages, getMessages, updateMessages } from './Storage.svelte'
+    import { deleteMessage, getChatSettingValueNullDefault, insertMessages, getApiKey, addError, currentChatMessages, getMessages, updateMessages, deleteSummaryMessage } from './Storage.svelte'
     import { scrollToBottom, scrollToMessage } from './Util.svelte'
     import { getRequestSettingList, defaultModel } from './Settings.svelte'
     import { EventStreamContentType, fetchEventSource } from '@microsoft/fetch-event-source'
@@ -418,7 +418,9 @@ export class ChatRequest {
           const summarizedIds = rw.map(m => m.uuid)
           const summaryIds = [summaryResponse.uuid]
           let loopCount = 0
+          let networkRetry = 2 // number of retries on network error
           while (continueCounter-- > 0) {
+            let error = false
             try {
               const summary = await _this.sendRequest(top.concat(rw).concat([summaryRequest]).concat(loopCount > 0 ? [summaryResponse] : []), {
                 summaryRequest: true,
@@ -430,7 +432,7 @@ export class ChatRequest {
                   if (opts.streaming) scrollToMessage(summaryResponse.uuid, 150, true, true)
                 }
               } as ChatCompletionOpts, {
-                temperature: 0.1, // make summary more deterministic
+                temperature: chatSettings.summaryTemperature, // make summary more deterministic
                 top_p: 1,
                 presence_penalty: 0,
                 frequency_penalty: 0,
@@ -445,10 +447,21 @@ export class ChatRequest {
                 deleteMessage(chatId, srid)
                 return summary
               }
+            } catch (e) {
+              if (e.message?.includes('network error') && networkRetry > 0) {
+                networkRetry--
+                error = true
+              } else {
+                _this.updating = false
+                _this.updatingMessage = ''
+                deleteSummaryMessage(chatId, srid)
+                throw e
+              }
+            }
             // Looks like we got our summarized messages.
             // Mark the new summaries as such
             // Need more?
-              if (summaryResponse.finish_reason === 'length' && continueCounter > 0) {
+            if ((error || summaryResponse.finish_reason === 'length') && continueCounter > 0) {
               // Our summary was truncated
               // Try to get more of it
               delete summaryResponse.finish_reason
@@ -464,12 +477,6 @@ export class ChatRequest {
               // We're done
               continueCounter = 0
             }
-            } catch (e) {
-              _this.updating = false
-              _this.updatingMessage = ''
-              deleteMessage(chatId, srid)
-              throw e
-            }
           }
           summaryResponse.summary = summarizedIds
           // Disable the messages we summarized so they still show in history
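Note: the catch block added above retries the summary request on transient network failures (up to networkRetry times) before cleaning up the placeholder summary message and rethrowing. A minimal sketch of that retry pattern in isolation — sendSummary and cleanup are illustrative stand-ins, not the project's actual API:

// Sketch only: retry a request a few times on network errors, then
// clean up and rethrow, mirroring the catch block in the hunk above.
async function requestWithNetworkRetry<T> (
  sendSummary: () => Promise<T>, // stand-in for _this.sendRequest(...)
  cleanup: () => void, // stand-in for deleteSummaryMessage(chatId, srid)
  retries: number = 2
): Promise<T> {
  while (true) {
    try {
      return await sendSummary()
    } catch (e: any) {
      if (e?.message?.includes('network error') && retries-- > 0) {
        continue // transient network failure: try again
      }
      cleanup() // remove the placeholder summary message
      throw e // out of retries, or not a network error
    }
  }
}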
@@ -70,6 +70,7 @@
   const doChange = () => {
     if (message.content !== original) {
       dispatch('change', message)
+      updateMessages(chatId)
     }
   }
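Note: this one-line addition is the fix named in the PR title. The edit component appears to mutate message.content in place, so dispatching the change event alone never wrote the edited chat back to the store; the explicit updateMessages(chatId) call persists it. Roughly, with hypothetical names and updateMessages passed in rather than imported:

// Illustrative sketch only: persist an in-place edit when the content actually changed.
const persistEdit = (chatId: number, message: { content: string }, original: string, updateMessages: (id: number) => void) => {
  if (message.content === original) return // content unchanged, nothing to save
  updateMessages(chatId) // write the edited messages back to the chat store
}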
@@ -78,6 +78,7 @@ const defaults:ChatSettings = {
   summaryThreshold: 3000,
   summarySize: 1000,
   summaryExtend: 0,
+  summaryTemperature: 0.1,
   pinTop: 0,
   pinBottom: 6,
   summaryPrompt: '',
@@ -247,6 +248,16 @@ const summarySettings: ChatSetting[] = [
         ],
         hide: (chatId) => getChatSettings(chatId).continuousChat !== 'summary'
       },
+      {
+        key: 'summaryTemperature',
+        name: 'Summary Temperature',
+        title: 'What sampling temperature to use, between 0 and 2, when generating summary. Lower values, like 0, will be more deterministic.',
+        min: 0,
+        max: 2,
+        step: 0.1,
+        type: 'number',
+        hide: (chatId) => getChatSettings(chatId).continuousChat !== 'summary'
+      },
       {
         key: 'pinTop',
         name: 'Keep First Prompts',
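Note: the new setting surfaces the summaryTemperature default added above (0.1) as a 0–2 numeric option, and the earlier ChatRequest hunk passes it through as the summary request's temperature. A rough sketch of that flow; clampTemperature is illustrative only, mirroring the declared min/max:

// Sketch only: build sampling overrides for a summary request from per-chat settings.
const clampTemperature = (v: number): number => Math.min(2, Math.max(0, v))

const summaryRequestOverrides = (chatSettings: { summaryTemperature: number }) => ({
  temperature: clampTemperature(chatSettings.summaryTemperature), // 0.1 by default
  top_p: 1,
  presence_penalty: 0,
  frequency_penalty: 0
})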
@@ -235,18 +235,19 @@
     }, 10)
   }

-  let setMessagesTimer: any
+  const setMessagesTimers: any = {}
   export const setMessages = (chatId: number, messages: Message[]) => {
     if (get(currentChatId) === chatId) {
       // update current message cache right away
       currentChatMessages.set(messages)
-      clearTimeout(setMessagesTimer)
+      clearTimeout(setMessagesTimers[chatId])
       // delay expensive all chats update for a bit
-      setMessagesTimer = setTimeout(() => {
+      setMessagesTimers[chatId] = setTimeout(() => {
         getChat(chatId).messages = messages
         saveChatStore()
       }, 200)
     } else {
+      clearTimeout(setMessagesTimers[chatId])
       getChat(chatId).messages = messages
       saveChatStore()
     }
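Note: with a single shared setMessagesTimer, a pending debounced save for one chat could be cleared by a later setMessages call for another chat, which fits the lost-edit bug in the PR title. The change keys the timers by chat id, and for non-current chats also clears any pending timer before saving immediately. The per-chat debounce pattern in isolation, with illustrative names (persist stands in for "getChat(chatId).messages = messages; saveChatStore()"):

// Sketch only: per-chat debounced persistence, as adopted above.
const saveTimers: Record<number, ReturnType<typeof setTimeout>> = {}

const scheduleSave = (chatId: number, persist: () => void, immediate = false, delay = 200) => {
  clearTimeout(saveTimers[chatId]) // clear only this chat's pending save
  if (immediate) {
    persist() // non-current chat: write straight through
  } else {
    saveTimers[chatId] = setTimeout(persist, delay) // current chat: debounce the expensive full-store save
  }
}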
@@ -86,6 +86,7 @@
     summaryThreshold: number;
     summarySize: number;
     summaryExtend: number;
+    summaryTemperature: number;
     pinTop: number;
     pinBottom: number;
     summaryPrompt: string;