From 868035c3a0a7f66b81819d21a1f5eedc15771285 Mon Sep 17 00:00:00 2001
From: Webifi
Date: Thu, 29 Jun 2023 18:28:06 -0500
Subject: [PATCH] Try to catch intermittent stuck in updating issue

---
 src/lib/ChatCompletionResponse.svelte | 14 +++++++++-----
 src/lib/ChatRequest.svelte            |  6 ++++++
 src/lib/Storage.svelte                | 12 ++++++------
 3 files changed, 21 insertions(+), 11 deletions(-)

diff --git a/src/lib/ChatCompletionResponse.svelte b/src/lib/ChatCompletionResponse.svelte
index dbc3eff..03c1c31 100644
--- a/src/lib/ChatCompletionResponse.svelte
+++ b/src/lib/ChatCompletionResponse.svelte
@@ -104,9 +104,9 @@ export class ChatCompletionResponse {
           completion_tokens: 0,
           total_tokens: 0
         } as Usage
-        message.usage.completion_tokens += response.usage.completion_tokens
-        message.usage.prompt_tokens = response.usage.prompt_tokens + (this.offsetTotals?.prompt_tokens || 0)
-        message.usage.total_tokens = response.usage.total_tokens + (this.offsetTotals?.total_tokens || 0)
+        message.usage.completion_tokens += response?.usage?.completion_tokens || 0
+        message.usage.prompt_tokens = (response?.usage?.prompt_tokens || 0) + (this.offsetTotals?.prompt_tokens || 0)
+        message.usage.total_tokens = (response?.usage?.total_tokens || 0) + (this.offsetTotals?.total_tokens || 0)
       } else {
         message.content = choice.message.content
         message.usage = response.usage
@@ -124,7 +124,7 @@ export class ChatCompletionResponse {
   updateFromAsyncResponse (response: Response) {
     let completionTokenCount = 0
     this.setModel(response.model)
-    if (!response.choices) {
+    if (!response.choices || response?.error) {
      return this.updateFromError(response?.error?.message || 'unexpected streaming response from API')
     }
     response.choices?.forEach((choice, i) => {
@@ -174,7 +174,11 @@ export class ChatCompletionResponse {
     setTimeout(() => this.finish(), 250) // give others a chance to signal the finish first
   }
 
-  updateFromClose (): void {
+  updateFromClose (force: boolean = false): void {
+    if (!this.finished && !this.error && !this.messages?.find(m => m.content)) {
+      if (!force) return setTimeout(() => this.updateFromClose(true), 250) as any
+      return this.updateFromError('Unexpected connection termination')
+    }
     setTimeout(() => this.finish(), 250) // give others a chance to signal the finish first
   }
 
diff --git a/src/lib/ChatRequest.svelte b/src/lib/ChatRequest.svelte
index cc53183..2ab3008 100644
--- a/src/lib/ChatRequest.svelte
+++ b/src/lib/ChatRequest.svelte
@@ -202,6 +202,8 @@ export class ChatRequest {
       _this.controller = new AbortController()
       const signal = _this.controller.signal
       const abortListener = (e:Event) => {
+        _this.updating = false
+        _this.updatingMessage = ''
         chatResponse.updateFromError('User aborted request.')
         signal.removeEventListener('abort', abortListener)
       }
@@ -245,6 +247,8 @@ export class ChatRequest {
             }
           },
           onclose () {
+            _this.updating = false
+            _this.updatingMessage = ''
             chatResponse.updateFromClose()
           },
           onerror (err) {
@@ -260,6 +264,8 @@ export class ChatRequest {
             }
           }
         }).catch(err => {
+          _this.updating = false
+          _this.updatingMessage = ''
          chatResponse.updateFromError(err.message)
         })
       } else {
diff --git a/src/lib/Storage.svelte b/src/lib/Storage.svelte
index fa08d21..a5d3960 100644
--- a/src/lib/Storage.svelte
+++ b/src/lib/Storage.svelte
@@ -200,9 +200,9 @@
       }
       chat.usage[model] = total
     }
-    total.completion_tokens += usage.completion_tokens
-    total.prompt_tokens += usage.prompt_tokens
-    total.total_tokens += usage.total_tokens
+    total.completion_tokens += usage?.completion_tokens || 0
+    total.prompt_tokens += usage?.prompt_tokens || 0
+    total.total_tokens += usage?.total_tokens || 0
     chatsStorage.set(chats)
   }
 
@@ -218,9 +218,9 @@
       }
      chat.usage[model] = total
     }
-    total.completion_tokens -= usage.completion_tokens
-    total.prompt_tokens -= usage.prompt_tokens
-    total.total_tokens -= usage.total_tokens
+    total.completion_tokens -= usage?.completion_tokens || 0
+    total.prompt_tokens -= usage?.prompt_tokens || 0
+    total.total_tokens -= usage?.total_tokens || 0
     chatsStorage.set(chats)
   }
 