From c05fb7379d2591d5d8873445d0c3d6893a1ebaf1 Mon Sep 17 00:00:00 2001
From: Niek van der Maas
Date: Mon, 6 Mar 2023 19:11:00 +0100
Subject: [PATCH] Add type for request too

---
 src/lib/Chat.svelte  | 37 +++++++++++++++++++------------------
 src/lib/Types.svelte | 15 +++++++++++++++
 2 files changed, 34 insertions(+), 18 deletions(-)

diff --git a/src/lib/Chat.svelte b/src/lib/Chat.svelte
index b62968c..5bac56f 100644
--- a/src/lib/Chat.svelte
+++ b/src/lib/Chat.svelte
@@ -2,7 +2,7 @@
   //import { fetchEventSource } from "@microsoft/fetch-event-source";
 
   import { apiKeyStorage, chatsStorage, addMessage, clearMessages } from "./Storage.svelte";
-  import type { Response, Message } from "./Types.svelte";
+  import type { Request, Response, Message } from "./Types.svelte";
   import { afterUpdate, onMount } from "svelte";
   import SvelteMarkdown from "svelte-markdown";
 
@@ -64,6 +64,23 @@
     let response: Response;
 
     try {
+      const request: Request = {
+        model: "gpt-3.5-turbo",
+        // Submit only the role and content of the messages, provide the previous messages as well for context
+        messages: messages
+          .map((message): Message => {
+            const { role, content } = message;
+            return { role, content };
+          })
+          // Skip system messages
+          .filter((message) => message.role !== "system"),
+        // temperature: 1
+        // top_p: 1
+        // n: 1
+        //stream: false,
+        // stop: null
+        //max_tokens: 4096,
+      };
       response = await (
         await fetch("https://api.openai.com/v1/chat/completions", {
           method: "POST",
@@ -71,23 +88,7 @@
             Authorization: `Bearer ${$apiKeyStorage}`,
             "Content-Type": "application/json",
           },
-          body: JSON.stringify({
-            model: "gpt-3.5-turbo",
-            // Submit only the role and content of the messages, provide the previous messages as well for context
-            messages: messages
-              .map((message): Message => {
-                const { role, content } = message;
-                return { role, content };
-              })
-              // Skip system messages
-              .filter((message) => message.role !== "system"),
-            // temperature: 1
-            // top_p: 1
-            // n: 1
-            //stream: false,
-            // stop: null
-            //max_tokens: 4096,
-          }),
+          body: JSON.stringify(request),
         })
       ).json();
     } catch (e) {
diff --git a/src/lib/Types.svelte b/src/lib/Types.svelte
index 256043c..f07d3fc 100644
--- a/src/lib/Types.svelte
+++ b/src/lib/Types.svelte
@@ -17,6 +17,21 @@
     total_tokens: number;
   };
 
+  export type Request = {
+    model: "gpt-3.5-turbo" | "gpt-3.5-turbo-0301";
+    messages: Message[];
+    temperature?: number;
+    top_p?: number;
+    n?: number;
+    stream?: boolean;
+    stop?: string | null;
+    max_tokens?: number;
+    presence_penalty?: number;
+    frequency_penalty?: number;
+    logit_bias?: Record<string, number>;
+    user?: string;
+  };
+
   type ResponseOK = {
     status: "ok";
     id: string;
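
For reference, a minimal standalone sketch (not part of the patch) of how the new Request type might be used against the same endpoint. The sendRequest helper, the apiKey parameter, and the role union on Message are assumptions for illustration, mirroring how Chat.svelte builds the request:

// Illustrative only: mirrors the Request/Message shapes added in Types.svelte.
// The role union on Message is assumed from its usage in Chat.svelte.
type Message = { role: "system" | "user" | "assistant"; content: string };

type Request = {
  model: "gpt-3.5-turbo" | "gpt-3.5-turbo-0301";
  messages: Message[];
  temperature?: number;
  max_tokens?: number;
};

// Hypothetical helper: strips messages down to role/content, skips system
// messages, and posts the typed request body to the chat completions endpoint.
async function sendRequest(apiKey: string, messages: Message[]): Promise<unknown> {
  const request: Request = {
    model: "gpt-3.5-turbo",
    messages: messages
      .map(({ role, content }): Message => ({ role, content }))
      .filter((message) => message.role !== "system"),
  };
  const response = await fetch("https://api.openai.com/v1/chat/completions", {
    method: "POST",
    headers: {
      Authorization: `Bearer ${apiKey}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify(request),
  });
  return response.json();
}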