numb3r3 committed on
Commit
2a41ea3
·
verified ·
1 Parent(s): 9cb0bd3

fix: tokenizer in prompt building

Browse files
Files changed (1) hide show
  1. README.md +1 -1
README.md CHANGED
@@ -177,7 +177,7 @@ html = "<html><body><h1>Hello, world!</h1></body></html>"
177
 
178
  html = clean_html(html)
179
 
180
- input_prompt = create_prompt(html)
181
  inputs = tokenizer.encode(input_prompt, return_tensors="pt").to(device)
182
  outputs = model.generate(
183
  inputs, max_new_tokens=1024, temperature=0, do_sample=False, repetition_penalty=1.08
 
177
 
178
  html = clean_html(html)
179
 
180
+ input_prompt = create_prompt(html, tokenizer=tokenizer)
181
  inputs = tokenizer.encode(input_prompt, return_tensors="pt").to(device)
182
  outputs = model.generate(
183
  inputs, max_new_tokens=1024, temperature=0, do_sample=False, repetition_penalty=1.08