davidberenstein1957 committed
Commit
1853f75
1 Parent(s): a4aa9e7

fix: limit max new tokens

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -7,7 +7,7 @@ from distilabel.steps.tasks.argillalabeller import ArgillaLabeller
 llm = InferenceEndpointsLLM(
     model_id="meta-llama/Meta-Llama-3.1-8B-Instruct",
     tokenizer_id="meta-llama/Meta-Llama-3.1-8B-Instruct",
-    generation_kwargs={"max_new_tokens": 1000 * 4},
+    generation_kwargs={"max_new_tokens": 1000},
 )
 task = ArgillaLabeller(llm=llm)
 task.load()
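
For reference, a sketch of how the affected section of app.py reads after this commit. The ArgillaLabeller import is taken from the hunk header; the InferenceEndpointsLLM import path is an assumption, since it is not shown in the diff.

from distilabel.llms import InferenceEndpointsLLM  # assumed import path, not shown in the diff
from distilabel.steps.tasks.argillalabeller import ArgillaLabeller

# Llama 3.1 8B Instruct served via Hugging Face Inference Endpoints,
# now capped at 1000 new tokens per generation instead of 4000.
llm = InferenceEndpointsLLM(
    model_id="meta-llama/Meta-Llama-3.1-8B-Instruct",
    tokenizer_id="meta-llama/Meta-Llama-3.1-8B-Instruct",
    generation_kwargs={"max_new_tokens": 1000},
)
task = ArgillaLabeller(llm=llm)
task.load()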