Merge pull request #176 from Webifi/main
Update pricing again, fix memory leak in image support

commit f47113e699
@@ -7,7 +7,7 @@
   const dbCheck = _hasIndexedDb && window.indexedDB.open('test')
   if (_hasIndexedDb) dbCheck.onerror = () => { _hasIndexedDb = false }

-  const imageCache: Record<string, ChatImage> = {}
+  let imageCache: Record<string, ChatImage> = {}

   class ChatImageStore extends Dexie {
     images!: Table<ChatImage>
@@ -46,6 +46,13 @@
     }
   }

+  export const clearAllImages = async (): Promise<void> => {
+    imageCache = {}
+    if (_hasIndexedDb) {
+      imageDb.images.clear()
+    }
+  }
+
   export const setImage = async (chatId:number, image:ChatImage): Promise<ChatImage> => {
     image.id = image.id || uuidv4()
     let current: ChatImage
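The leak fix above has two parts: imageCache is now declared with let so the whole cache object can be swapped for a fresh one, and the new clearAllImages does exactly that before clearing the Dexie-backed images table (guarded by _hasIndexedDb in the real code). A minimal self-contained sketch of the same pattern follows; the database name, the ChatImage shape, and the simplified setImage signature are assumptions for illustration, not the project's actual definitions.

import Dexie, { type Table } from 'dexie'

// Hypothetical shape; the real ChatImage type lives elsewhere in the project.
interface ChatImage {
  id: string
  b64image: string
}

// Assumed database name and schema, for illustration only.
class ImageStoreSketch extends Dexie {
  images!: Table<ChatImage, string>
  constructor () {
    super('image-store-sketch')
    this.version(1).stores({ images: 'id' }) // primary key is the image id
  }
}

const imageDb = new ImageStoreSketch()

// let, not const, so clearAllImages can replace the whole object at once.
let imageCache: Record<string, ChatImage> = {}

export const setImage = async (image: ChatImage): Promise<void> => {
  imageCache[image.id] = image     // this is what used to accumulate without bound
  await imageDb.images.put(image)  // persisted copy
}

export const clearAllImages = async (): Promise<void> => {
  imageCache = {}               // releases every cached image for garbage collection
  await imageDb.images.clear()  // and removes the persisted copies
}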
@@ -14,13 +14,8 @@ const modelDetails : Record<string, ModelDetail> = {
         completion: 0.00006, // $0.06 per 1000 tokens completion
         max: 8192 // 8k max token buffer
       },
-      'gpt-3.5-turbo-0613': {
-        prompt: 0.0000015, // $0.0015 per 1000 tokens prompt
-        completion: 0.000002, // $0.002 per 1000 tokens completion
-        max: 4096 // 4k max token buffer
-      },
       'gpt-3.5': {
-        prompt: 0.000002, // $0.002 per 1000 tokens prompt
+        prompt: 0.0000015, // $0.0015 per 1000 tokens prompt
         completion: 0.000002, // $0.002 per 1000 tokens completion
         max: 4096 // 4k max token buffer
       },
@@ -67,7 +62,7 @@ export const supportedModels : Record<string, ModelDetail> = {
       'gpt-3.5-turbo': modelDetails['gpt-3.5'],
       'gpt-3.5-turbo-16k': modelDetails['gpt-3.5-turbo-16k'],
       'gpt-3.5-turbo-0301': modelDetails['gpt-3.5'],
-      'gpt-3.5-turbo-0613': modelDetails['gpt-3.5-turbo-0613']
+      'gpt-3.5-turbo-0613': modelDetails['gpt-3.5']
 }

 const lookupList = {
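With the consolidation above, every current gpt-3.5-turbo variant resolves to the single 'gpt-3.5' detail record: $0.0015 per 1K prompt tokens, $0.002 per 1K completion tokens, and a 4,096-token buffer. The dedicated 'gpt-3.5-turbo-0613' record could be dropped because its pricing already matched, while 'gpt-3.5' itself had its prompt price corrected from $0.002 to $0.0015 per 1K tokens. A quick sanity check of what a request costs at those per-token rates; the helper below is illustrative and not part of the codebase:

// Per-token prices from the 'gpt-3.5' entry above.
const PROMPT_PRICE = 0.0000015     // $0.0015 per 1000 prompt tokens
const COMPLETION_PRICE = 0.000002  // $0.002 per 1000 completion tokens

// Illustrative helper; the app derives usage cost from these fields elsewhere.
const requestCost = (promptTokens: number, completionTokens: number): number =>
  promptTokens * PROMPT_PRICE + completionTokens * COMPLETION_PRICE

// 1,000 prompt tokens plus 500 completion tokens comes to $0.0025.
console.log(requestCost(1000, 500).toFixed(4)) // "0.0025"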
@@ -6,7 +6,7 @@
   import { v4 as uuidv4 } from 'uuid'
   import { getProfile, getProfiles, isStaticProfile, newNameForProfile, restartProfile } from './Profiles.svelte'
   import { errorNotice } from './Util.svelte'
-  import { deleteImage, setImage } from './ImageStore.svelte'
+  import { clearAllImages, deleteImage, setImage } from './ImageStore.svelte'

   // TODO: move chatsStorage to indexedDB with localStorage as a fallback for private browsing.
   //       Enough long chats will overflow localStorage.
@@ -161,10 +161,8 @@
   }

   export const clearChats = () => {
-    const chats = get(chatsStorage)
-    chats.forEach(c => deleteChat(c.id)) // make sure images are removed
-    // TODO: add a clear images option to make this faster
-    // chatsStorage.set([])
+    chatsStorage.set([])
+    clearAllImages()
   }
   export const saveChatStore = () => {
     const chats = get(chatsStorage)
@@ -293,6 +291,12 @@
     chatsStorage.set(chats)
   }

+  const clearImages = (chatId: number, messages: Message[]) => {
+    messages.forEach(m => {
+      if (m.image) deleteImage(chatId, m.image.id)
+    })
+  }
+
   export const truncateFromMessage = (chatId: number, uuid: string) => {
     const chats = get(chatsStorage)
     const chat = chats.find((chat) => chat.id === chatId) as Chat
@@ -303,13 +307,15 @@
     if (index < 0) {
       throw new Error(`Unable to find message with ID: ${uuid}`)
     }
-    chat.messages.splice(index + 1) // remove every item after
+    const truncated = chat.messages.splice(index + 1) // remove every item after
+    clearImages(chatId, truncated)
     chatsStorage.set(chats)
   }

   export const clearMessages = (chatId: number) => {
     const chats = get(chatsStorage)
     const chat = chats.find((chat) => chat.id === chatId) as Chat
+    clearImages(chatId, chat.messages)
     chat.messages = []
     chatsStorage.set(chats)
   }
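The truncateFromMessage change relies on Array.prototype.splice returning the elements it removes, so the truncated tail can be handed straight to clearImages and its images deleted instead of leaked. In isolation:

const messages = ['m0', 'm1', 'm2', 'm3']

// splice(start) with no delete count removes everything from start onward
// and returns the removed elements.
const truncated = messages.splice(2)

console.log(messages)  // [ 'm0', 'm1' ]
console.log(truncated) // [ 'm2', 'm3' ]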
@@ -317,9 +323,7 @@
   export const deleteChat = (chatId: number) => {
     const chats = get(chatsStorage)
     const chat = getChat(chatId)
-    chat?.messages?.forEach(m => {
-      if (m.image) deleteImage(chatId, m.image.id)
-    })
+    clearImages(chatId, chat?.messages || [])
     chatsStorage.set(chats.filter((chat) => chat.id !== chatId))
   }
