Make sure max_tokens is always an integer.
commit d8c797e619
parent 6ff9585d6a
@@ -166,6 +166,7 @@ export class ChatRequest {
     if (key === 'max_tokens') {
       if (opts.maxTokens) value = opts.maxTokens // only as large as requested
       if (value > maxAllowed || value < 1) value = null // if over max model, do not define max
+      if (value) value = Math.floor(value)
     }
     if (key === 'n') {
       if (opts.streaming || opts.summaryRequest) {
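For context, the added line floors the requested token budget so the API never receives a fractional max_tokens value. The sketch below restates that normalization as a standalone helper; normalizeMaxTokens is a hypothetical name, not part of ChatRequest, and maxAllowed / the requested value are assumed to mean the model's cap and the caller's budget, as in the diff above.

// Hedged sketch: standalone version of the max_tokens normalization shown in the diff.
// The function name and parameter shapes are assumptions for illustration only.
function normalizeMaxTokens(requested: number | undefined, maxAllowed: number): number | null {
  // Use the caller's requested budget when present; otherwise there is nothing to send.
  let value = requested ?? null
  if (value === null) return null
  // If the request falls outside the model's valid range, drop the limit entirely.
  if (value > maxAllowed || value < 1) return null
  // Always send an integer to the API, e.g. 512.7 becomes 512.
  return Math.floor(value)
}

// Example usage (assumed values): a fractional request is floored, an oversized one is dropped.
console.log(normalizeMaxTokens(512.7, 4096)) // 512
console.log(normalizeMaxTokens(10000, 4096)) // null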