Don't allow temperature or top_p to be set too low

This commit is contained in:
Webifi 2023-07-22 17:21:01 -05:00
parent 7aadca3c5c
commit ca19bab19d
1 changed file with 4 additions and 4 deletions

View File

@ -56,11 +56,11 @@ export const runPetalsCompletionRequest = async (
const rMessages = request.messages || [] as Message[]
// make sure top_p and temperature are set the way we need
let temperature = request.temperature || 0
if (isNaN(temperature as any) || temperature === 1) temperature = 1
if (temperature === 0) temperature = 0.0001
if (isNaN(temperature as any)) temperature = 1
if (!temperature || temperature <= 0) temperature = 0.01
let topP = request.top_p
if (isNaN(topP as any) || topP === 1) topP = 1
if (topP === 0) topP = 0.0001
if (topP === undefined || isNaN(topP as any)) topP = 1
if (!topP || topP <= 0) topP = 0.01
// build the message array
const inputArray = (rMessages).reduce((a, m) => {
const c = getRoleTag(m.role, model, chatRequest.chat) + m.content