Try to catch intermittent "stuck in updating" issue

Webifi 2023-06-29 18:28:06 -05:00
parent eacb81d1b7
commit 868035c3a0
3 changed files with 21 additions and 11 deletions

View File

@@ -104,9 +104,9 @@ export class ChatCompletionResponse {
           completion_tokens: 0,
           total_tokens: 0
         } as Usage
-        message.usage.completion_tokens += response.usage.completion_tokens
-        message.usage.prompt_tokens = response.usage.prompt_tokens + (this.offsetTotals?.prompt_tokens || 0)
-        message.usage.total_tokens = response.usage.total_tokens + (this.offsetTotals?.total_tokens || 0)
+        message.usage.completion_tokens += response?.usage?.completion_tokens || 0
+        message.usage.prompt_tokens = (response?.usage?.prompt_tokens || 0) + (this.offsetTotals?.prompt_tokens || 0)
+        message.usage.total_tokens = (response?.usage?.total_tokens || 0) + (this.offsetTotals?.total_tokens || 0)
       } else {
         message.content = choice.message.content
         message.usage = response.usage
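
The unguarded reads on the left would throw a TypeError whenever a streaming chunk arrived without a usage object, which is one plausible way the UI could get stuck in its updating state. A minimal sketch of the guarded accumulation, with a hypothetical Usage type and addUsage helper (not the project's API):

```typescript
// Sketch only: Usage mirrors the OpenAI-style usage object assumed above
type Usage = { completion_tokens: number, prompt_tokens: number, total_tokens: number }

const addUsage = (total: Usage, usage?: Partial<Usage>): void => {
  // `?.` plus `|| 0` means a chunk with missing or partial usage adds
  // nothing, instead of throwing or writing NaN into the running totals
  total.completion_tokens += usage?.completion_tokens || 0
  total.prompt_tokens += usage?.prompt_tokens || 0
  total.total_tokens += usage?.total_tokens || 0
}
```
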
@@ -124,7 +124,7 @@ export class ChatCompletionResponse {
   updateFromAsyncResponse (response: Response) {
     let completionTokenCount = 0
     this.setModel(response.model)
-    if (!response.choices) {
+    if (!response.choices || response?.error) {
       return this.updateFromError(response?.error?.message || 'unexpected streaming response from API')
     }
     response.choices?.forEach((choice, i) => {
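
The old guard only caught a missing choices array; an API error chunk can arrive with an error envelope instead, and the widened condition now routes it to the error path too. A hypothetical illustration (the payload shape is an assumption based on OpenAI-style error responses, not confirmed by this diff):

```typescript
// Hypothetical chunk: no usable choices, just an error envelope
const chunk: { choices?: unknown[], error?: { message: string } } = {
  error: { message: 'The server had an error while processing your request' }
}

if (!chunk.choices || chunk?.error) {
  // uses the server's message when present, else the generic fallback text
  console.error(chunk?.error?.message || 'unexpected streaming response from API')
}
```
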
@@ -174,7 +174,11 @@ export class ChatCompletionResponse {
     setTimeout(() => this.finish(), 250) // give others a chance to signal the finish first
   }

-  updateFromClose (): void {
+  updateFromClose (force: boolean = false): void {
+    if (!this.finished && !this.error && !this.messages?.find(m => m.content)) {
+      if (!force) return setTimeout(() => this.updateFromClose(true), 250) as any
+      return this.updateFromError('Unexpected connection termination')
+    }
     setTimeout(() => this.finish(), 250) // give others a chance to signal the finish first
   }
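
This hunk is the heart of the fix: when the stream closes with nothing finished, no error recorded, and no message content received, the close handler no longer finishes silently. It waits 250 ms and re-checks once, so a late finish or error event can still win the race, and only then reports the hang. A minimal self-contained sketch of that control flow (class and field names here are assumptions standing in for the real ChatCompletionResponse state):

```typescript
// Sketch of the retry-then-fail close handling, not the project's class
class StreamCloseSketch {
  finished = false
  error?: Error
  messages: { content?: string }[] = []

  updateFromClose (force: boolean = false): void {
    if (!this.finished && !this.error && !this.messages?.find(m => m.content)) {
      // First pass: defer 250ms so a late finish/error can still arrive
      if (!force) { setTimeout(() => this.updateFromClose(true), 250); return }
      // Second pass: still nothing, so surface the hang instead of spinning
      return this.updateFromError('Unexpected connection termination')
    }
    setTimeout(() => this.finish(), 250) // give others a chance to signal the finish first
  }

  private updateFromError (msg: string): void { this.error = new Error(msg) }
  private finish (): void { this.finished = true }
}
```
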

View File

@@ -202,6 +202,8 @@ export class ChatRequest {
       _this.controller = new AbortController()
       const signal = _this.controller.signal
       const abortListener = (e:Event) => {
+        _this.updating = false
+        _this.updatingMessage = ''
         chatResponse.updateFromError('User aborted request.')
         signal.removeEventListener('abort', abortListener)
       }
@@ -245,6 +247,8 @@
           }
         },
         onclose () {
+          _this.updating = false
+          _this.updatingMessage = ''
           chatResponse.updateFromClose()
         },
         onerror (err) {
@@ -260,6 +264,8 @@
           }
         }
       }).catch(err => {
+        _this.updating = false
+        _this.updatingMessage = ''
         chatResponse.updateFromError(err.message)
       })
     } else {
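
Taken together, the three hunks in this file clear the same two flags on every terminal path of the streaming request: user abort, stream close, and a thrown error. A hypothetical consolidation that makes the invariant explicit (the diff inlines the assignments in each handler instead of factoring them out):

```typescript
// Hypothetical helper; `updating`/`updatingMessage` are the flags the
// chat UI presumably watches to decide whether a request is in flight
const clearUpdatingState = (req: { updating: boolean, updatingMessage: string }): void => {
  req.updating = false     // no request in flight any more
  req.updatingMessage = '' // drop any partial status text
}
```
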

View File

@@ -200,9 +200,9 @@
       }
       chat.usage[model] = total
     }
-    total.completion_tokens += usage.completion_tokens
-    total.prompt_tokens += usage.prompt_tokens
-    total.total_tokens += usage.total_tokens
+    total.completion_tokens += usage?.completion_tokens || 0
+    total.prompt_tokens += usage?.prompt_tokens || 0
+    total.total_tokens += usage?.total_tokens || 0
     chatsStorage.set(chats)
   }
@@ -218,9 +218,9 @@
       }
       chat.usage[model] = total
     }
-    total.completion_tokens -= usage.completion_tokens
-    total.prompt_tokens -= usage.prompt_tokens
-    total.total_tokens -= usage.total_tokens
+    total.completion_tokens -= usage?.completion_tokens || 0
+    total.prompt_tokens -= usage?.prompt_tokens || 0
+    total.total_tokens -= usage?.total_tokens || 0
     chatsStorage.set(chats)
   }
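
These last two hunks mirror each other: the same `?.`/`|| 0` guard is applied where per-model totals are increased and where they are decreased again (presumably when a message's recorded usage is backed out). A hypothetical signed variant, reusing the Usage shape sketched earlier, that shows the symmetry:

```typescript
// Hypothetical signed update: sign = 1 records usage, -1 backs it out
const applyUsage = (total: Usage, usage: Partial<Usage> | undefined, sign: 1 | -1): void => {
  total.completion_tokens += sign * (usage?.completion_tokens || 0)
  total.prompt_tokens += sign * (usage?.prompt_tokens || 0)
  total.total_tokens += sign * (usage?.total_tokens || 0)
}
```
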