davidberenstein1957 HF staff committed on
Commit
62ab45c
1 Parent(s): 78f9744

chore: update max new tokens

Browse files
Files changed (1) hide show
  1. app.py +3 -1
app.py CHANGED
@@ -29,7 +29,8 @@ if not os.path.exists(file_path):
29
  llm = LlamaCppLLM(
30
  model_path=file_path,
31
  n_gpu_layers=-1,
32
- n_ctx=1024 * 128,
 
33
  )
34
  task = ArgillaLabeller(llm=llm)
35
  task.load()
@@ -126,6 +127,7 @@ result = client.predict(
126
  interface = gr.Interface(
127
  fn=process_records_gradio,
128
  inputs=[
 
129
  gr.Code(label="Records (JSON)", language="json", lines=5),
130
  gr.Code(label="Example Records (JSON, optional)", language="json", lines=5),
131
  gr.Code(label="Fields (JSON, optional)", language="json"),
 
29
  llm = LlamaCppLLM(
30
  model_path=file_path,
31
  n_gpu_layers=-1,
32
+ # n_ctx=1024 * 128,
33
+ generation_kwargs={"max_new_tokens": 1024 * 128},
34
  )
35
  task = ArgillaLabeller(llm=llm)
36
  task.load()
 
127
  interface = gr.Interface(
128
  fn=process_records_gradio,
129
  inputs=[
130
+ gr.Number(label="Number of Records"),
131
  gr.Code(label="Records (JSON)", language="json", lines=5),
132
  gr.Code(label="Example Records (JSON, optional)", language="json", lines=5),
133
  gr.Code(label="Fields (JSON, optional)", language="json"),