Merge pull request #206 from Webifi/main

Sticky tool drawer, try to track down streaming responses getting stuck
Niek van der Maas 2023-06-30 10:22:09 +02:00 committed by GitHub
commit d120109924
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 31 additions and 11 deletions

View File

@@ -537,6 +537,12 @@ aside.menu.main-menu .menu-expanse {
   min-width: 60px;
   min-height: 1.3em;
 }
+.button-pack {
+  display: block;
+  position: sticky;
+  top: calc(var(--sectionPaddingTop) + 10px);
+  padding-bottom: 0.1px;
+}
 .button-pack .button {
   display: block;
   margin: 4px;
@@ -597,23 +603,27 @@ aside.menu.main-menu .menu-expanse {
   width: var(--chatToolDrawerSize);
   visibility: visible;
   max-height: 300%;
+  overflow: unset;
 }
 .user-message:hover .tool-drawer,
 .user-message.editing .tool-drawer {
   width: var(--chatToolDrawerSize);
   visibility: visible;
   max-height: 300%;
+  overflow: unset;
 }
 .assistant-message:hover .tool-drawer-mask,
 .assistant-message.editing .tool-drawer {
   width: var(--chatToolDrawerSize);
   visibility: visible;
+  overflow: unset;
 }
 .user-message:hover .tool-drawer-mask,
 .user-message.editing .tool-drawer {
   width: var(--chatToolDrawerSize);
   visibility: visible;
+  overflow: unset;
 }
 .assistant-message:hover, .assistant-message.editing {
   border-top-right-radius: 0px !important;

View File

@@ -104,9 +104,9 @@ export class ChatCompletionResponse {
           completion_tokens: 0,
           total_tokens: 0
         } as Usage
-        message.usage.completion_tokens += response.usage.completion_tokens
-        message.usage.prompt_tokens = response.usage.prompt_tokens + (this.offsetTotals?.prompt_tokens || 0)
-        message.usage.total_tokens = response.usage.total_tokens + (this.offsetTotals?.total_tokens || 0)
+        message.usage.completion_tokens += response?.usage?.completion_tokens || 0
+        message.usage.prompt_tokens = (response?.usage?.prompt_tokens || 0) + (this.offsetTotals?.prompt_tokens || 0)
+        message.usage.total_tokens = (response?.usage?.total_tokens || 0) + (this.offsetTotals?.total_tokens || 0)
       } else {
         message.content = choice.message.content
         message.usage = response.usage
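Note on the hunk above: streamed chunks from the completions API do not always include a `usage` object, so every read is now wrapped in optional chaining with a `|| 0` fallback. A minimal sketch of the same null-safe accumulation, with a hypothetical standalone helper in place of the class method:

```typescript
// Token counts reported by the API; mirrors the `Usage` shape used in the diff.
interface Usage {
  prompt_tokens: number
  completion_tokens: number
  total_tokens: number
}

// Hypothetical helper: accumulate a possibly absent usage report into a running
// total, plus an optional offset carried over from an earlier response.
function accumulateUsage (target: Usage, incoming?: Partial<Usage>, offset?: Partial<Usage>): void {
  target.completion_tokens += incoming?.completion_tokens || 0
  target.prompt_tokens = (incoming?.prompt_tokens || 0) + (offset?.prompt_tokens || 0)
  target.total_tokens = (incoming?.total_tokens || 0) + (offset?.total_tokens || 0)
}
```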
@@ -124,7 +124,7 @@ export class ChatCompletionResponse {
   updateFromAsyncResponse (response: Response) {
     let completionTokenCount = 0
     this.setModel(response.model)
-    if (!response.choices) {
+    if (!response.choices || response?.error) {
       return this.updateFromError(response?.error?.message || 'unexpected streaming response from API')
     }
     response.choices?.forEach((choice, i) => {
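The guard above now also treats a chunk that carries an `error` payload as fatal, instead of only checking for missing `choices`. A sketch of the same check against an assumed chunk shape (the `StreamChunk` type below is illustrative, not the project's actual type):

```typescript
// Assumed shape of a streamed chunk: either delta-bearing choices or an error payload.
type StreamChunk = {
  choices?: Array<{ delta?: { content?: string } }>
  error?: { message?: string }
}

// A chunk is fatal when it has no choices or explicitly reports an error,
// even if choices happen to be present alongside the error.
function isFatalChunk (chunk: StreamChunk): boolean {
  return !chunk.choices || !!chunk.error
}
```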
@@ -174,7 +174,11 @@
     setTimeout(() => this.finish(), 250) // give others a chance to signal the finish first
   }

-  updateFromClose (): void {
+  updateFromClose (force: boolean = false): void {
+    if (!this.finished && !this.error && !this.messages?.find(m => m.content)) {
+      if (!force) return setTimeout(() => this.updateFromClose(true), 250) as any
+      return this.updateFromError('Unexpected connection termination')
+    }
     setTimeout(() => this.finish(), 250) // give others a chance to signal the finish first
   }
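`updateFromClose` is where the stuck-stream hunt lands: a connection can close before any content arrives, so the first close schedules a second, forced pass 250 ms later, and only that pass reports the termination as an error. A minimal sketch of the retry-once pattern, with `hasContent`, `fail`, and `finish` standing in for the real class members:

```typescript
// Deferred double-check on stream close: give a late delta one grace period
// before declaring the connection dead. All parameter names are stand-ins.
function onStreamClose (
  state: { finished: boolean, error?: Error },
  hasContent: () => boolean,
  fail: (msg: string) => void,
  finish: () => void,
  force = false
): void {
  if (!state.finished && !state.error && !hasContent()) {
    if (!force) {
      // First pass: re-check in 250 ms in case the final delta is still in flight.
      setTimeout(() => onStreamClose(state, hasContent, fail, finish, true), 250)
      return
    }
    // Second, forced pass: still nothing, so surface the failure.
    fail('Unexpected connection termination')
    return
  }
  setTimeout(finish, 250) // give others a chance to signal the finish first
}
```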

View File

@@ -202,6 +202,8 @@ export class ChatRequest {
       _this.controller = new AbortController()
       const signal = _this.controller.signal
       const abortListener = (e:Event) => {
+        _this.updating = false
+        _this.updatingMessage = ''
         chatResponse.updateFromError('User aborted request.')
         signal.removeEventListener('abort', abortListener)
       }
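The abort listener now clears the in-progress flags before reporting the error, and it still detaches itself so it only runs once. A self-contained sketch of that one-shot listener wiring, where the console call stands in for the real cleanup:

```typescript
// One-shot abort listener: runs once, then removes itself from the signal.
const controller = new AbortController()
const { signal } = controller

const abortListener = (e: Event) => {
  console.warn('request aborted:', e.type) // real code resets the updating flags and reports the error
  signal.removeEventListener('abort', abortListener) // detach; the handler is only needed once
}
signal.addEventListener('abort', abortListener)

// Later, controller.abort() triggers the listener exactly once.
```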
@@ -245,6 +247,8 @@
             }
           },
           onclose () {
+            _this.updating = false
+            _this.updatingMessage = ''
             chatResponse.updateFromClose()
           },
           onerror (err) {
onerror (err) {
@@ -260,6 +264,8 @@
           }
         }
       }).catch(err => {
+        _this.updating = false
+        _this.updatingMessage = ''
         chatResponse.updateFromError(err.message)
       })
     } else {
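Taken together, the three hunks in this file clear `updating` and `updatingMessage` on every terminal path: user abort, stream close, and a rejected fetch. That is the likely fix for responses appearing stuck, since any path that skipped the reset left the UI in a permanent streaming state. A sketch of factoring the repeated reset into one hypothetical helper:

```typescript
// The slice of ChatRequest state involved; assumed shape for illustration.
interface UpdatingState {
  updating: boolean
  updatingMessage: string
}

// Hypothetical helper: one reset, called from abort, onclose, and catch alike,
// so no terminal path can leave the UI stuck mid-stream.
function resetUpdatingState (req: UpdatingState): void {
  req.updating = false
  req.updatingMessage = ''
}
```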

View File

@@ -200,9 +200,9 @@
       }
       chat.usage[model] = total
     }
-    total.completion_tokens += usage.completion_tokens
-    total.prompt_tokens += usage.prompt_tokens
-    total.total_tokens += usage.total_tokens
+    total.completion_tokens += usage?.completion_tokens || 0
+    total.prompt_tokens += usage?.prompt_tokens || 0
+    total.total_tokens += usage?.total_tokens || 0
     chatsStorage.set(chats)
   }
@@ -218,9 +218,9 @@
       }
       chat.usage[model] = total
     }
-    total.completion_tokens -= usage.completion_tokens
-    total.prompt_tokens -= usage.prompt_tokens
-    total.total_tokens -= usage.total_tokens
+    total.completion_tokens -= usage?.completion_tokens || 0
+    total.prompt_tokens -= usage?.prompt_tokens || 0
+    total.total_tokens -= usage?.total_tokens || 0
     chatsStorage.set(chats)
   }
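Both counters in this file get the same defensive treatment as the response class: `usage` fields are read through optional chaining with a `|| 0` fallback, once when adding a message's tokens to the per-model total and once when subtracting them again. A sketch that folds the two symmetric hunks into one hypothetical signed helper:

```typescript
// Same token-count shape the diff manipulates.
type Usage = {
  prompt_tokens: number
  completion_tokens: number
  total_tokens: number
}

// Hypothetical helper: sign = 1 records a message's usage, sign = -1 backs it out.
// Missing or partial usage reports contribute zero instead of producing NaN.
function applyUsage (total: Usage, usage: Partial<Usage> | undefined, sign: 1 | -1): void {
  total.completion_tokens += sign * (usage?.completion_tokens || 0)
  total.prompt_tokens += sign * (usage?.prompt_tokens || 0)
  total.total_tokens += sign * (usage?.total_tokens || 0)
}
```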