GPT-4-1106-preview + 4000 Max Tokens

DirkSchlossmacher 2023-11-09 16:00:01 +01:00
parent 4373e3ed8d
commit a953fab34b


@@ -46,7 +46,7 @@ export const DEFAULT_CONFIG = {
   models: DEFAULT_MODELS as any as LLMModel[],
   modelConfig: {
-    model: "gpt-4" as ModelType,
+    model: "GPT-4-1106-preview" as ModelType,
     temperature: 0.5,
     top_p: 1,
     max_tokens: 4000,
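
For context, a minimal self-contained sketch of the effective model settings after this change; `ChatModelConfig` and `defaultModelConfig` below are local stand-ins for illustration, not the app's actual exports, and only the field values visible in the diff are taken from the commit.

```ts
// Sketch of the model settings after this commit.
// `ChatModelConfig` is an assumed stand-in type; only the field values
// shown in the diff above come from the commit itself.
type ChatModelConfig = {
  model: string;
  temperature: number;
  top_p: number;
  max_tokens: number;
};

const defaultModelConfig: ChatModelConfig = {
  model: "GPT-4-1106-preview", // switched from "gpt-4"
  temperature: 0.5,
  top_p: 1,
  max_tokens: 4000, // 4000-token completion cap, per the commit title
};
```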