From ca19bab19dae52f7966a7993fcc9d2130b766ec4 Mon Sep 17 00:00:00 2001
From: Webifi
Date: Sat, 22 Jul 2023 17:21:01 -0500
Subject: [PATCH] Don't allow too low of temp or top_p

---
 src/lib/ChatRequestPetals.svelte | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/lib/ChatRequestPetals.svelte b/src/lib/ChatRequestPetals.svelte
index 356ae12..9d72ebe 100644
--- a/src/lib/ChatRequestPetals.svelte
+++ b/src/lib/ChatRequestPetals.svelte
@@ -56,11 +56,11 @@ export const runPetalsCompletionRequest = async (
   const rMessages = request.messages || [] as Message[]
   // make sure top_p and temperature are set the way we need
   let temperature = request.temperature || 0
-  if (isNaN(temperature as any) || temperature === 1) temperature = 1
-  if (temperature === 0) temperature = 0.0001
+  if (isNaN(temperature as any)) temperature = 1
+  if (!temperature || temperature <= 0) temperature = 0.01
   let topP = request.top_p
-  if (isNaN(topP as any) || topP === 1) topP = 1
-  if (topP === 0) topP = 0.0001
+  if (topP === undefined || isNaN(topP as any)) topP = 1
+  if (!topP || topP <= 0) topP = 0.01
   // build the message array
   const inputArray = (rMessages).reduce((a, m) => {
     const c = getRoleTag(m.role, model, chatRequest.chat) + m.content
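
For reference, the sanitization the new hunk performs can be read as a standalone function. The TypeScript sketch below mirrors the patched lines; the SamplerRequest shape and the sanitizeSampler name are hypothetical and not part of the patch, but the branching matches the diff: a NaN value falls back to 1, an unset top_p defaults to 1, and anything at or below zero (including an unset temperature, which starts at 0) is raised to 0.01 instead of the previous 0.0001.

// Minimal standalone sketch of the new clamping logic (assumed names, not from the patch).
interface SamplerRequest { temperature?: number, top_p?: number }

function sanitizeSampler (request: SamplerRequest): { temperature: number, topP: number } {
  // Temperature: missing values start at 0, NaN falls back to 1,
  // and anything at or below zero is raised to the 0.01 floor.
  let temperature = request.temperature || 0
  if (isNaN(temperature as any)) temperature = 1
  if (!temperature || temperature <= 0) temperature = 0.01
  // top_p: missing or NaN values default to 1, non-positive values are clamped to 0.01.
  let topP = request.top_p
  if (topP === undefined || isNaN(topP as any)) topP = 1
  if (!topP || topP <= 0) topP = 0.01
  return { temperature, topP }
}

console.log(sanitizeSampler({}))                           // { temperature: 0.01, topP: 1 }
console.log(sanitizeSampler({ temperature: 0, top_p: 0 })) // { temperature: 0.01, topP: 0.01 }
console.log(sanitizeSampler({ temperature: 0.7, top_p: 0.9 })) // { temperature: 0.7, topP: 0.9 }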