mishig HF staff committed on
Commit
ac63724
1 Parent(s): e4a4cf3

typo maxTokens vs max_tokens

Browse files
src/lib/components/InferencePlayground/inferencePlaygroundUtils.ts CHANGED
@@ -24,7 +24,7 @@ export async function handleStreamingResponse(
24
  model: model.id,
25
  messages,
26
  temperature: conversation.config.temperature,
27
- max_tokens: conversation.config.maxTokens,
28
  },
29
  { signal: abortController.signal, use_cache: false }
30
  )) {
@@ -50,7 +50,7 @@ export async function handleNonStreamingResponse(
50
  model: model.id,
51
  messages,
52
  temperature: conversation.config.temperature,
53
- max_tokens: conversation.config.maxTokens,
54
  },
55
  { use_cache: false }
56
  );
 
24
  model: model.id,
25
  messages,
26
  temperature: conversation.config.temperature,
27
+ max_tokens: conversation.config.max_tokens,
28
  },
29
  { signal: abortController.signal, use_cache: false }
30
  )) {
 
50
  model: model.id,
51
  messages,
52
  temperature: conversation.config.temperature,
53
+ max_tokens: conversation.config.max_tokens,
54
  },
55
  { use_cache: false }
56
  );