example doesn't work with typo
README.md CHANGED
@@ -108,14 +108,14 @@ from transformers import AutoTokenizer, pipeline, logging
 from auto_gptq import AutoGPTQForCausalLM, BaseQuantizeConfig
 
 model_name_or_path = "TheBloke/Wizard-Vicuna-7B-Uncensored-GPTQ"
-model_basename = "
+model_basename = "model"
 
 use_triton = False
 
 tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True)
 
 model = AutoGPTQForCausalLM.from_quantized(model_name_or_path,
-        model_basename=model_basename
+        model_basename=model_basename,
         use_safetensors=True,
         trust_remote_code=True,
         device="cuda:0",
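For reference, here is a minimal sketch of how the corrected snippet would run end to end. The hunk above is truncated mid-call, so the remaining `from_quantized` arguments (`use_triton`, `quantize_config=None`), the prompt, and the generation settings below are assumptions added for illustration, not part of this diff.

```python
# Minimal sketch completing the truncated hunk above.
# Assumptions (not in the diff): use_triton / quantize_config arguments,
# the prompt, and the generation settings.
from transformers import AutoTokenizer
from auto_gptq import AutoGPTQForCausalLM

model_name_or_path = "TheBloke/Wizard-Vicuna-7B-Uncensored-GPTQ"
model_basename = "model"   # basename of the quantized weights file in the repo
use_triton = False

tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True)

model = AutoGPTQForCausalLM.from_quantized(model_name_or_path,
        model_basename=model_basename,   # the missing comma here was the typo this commit fixes
        use_safetensors=True,
        trust_remote_code=True,
        device="cuda:0",
        use_triton=use_triton,
        quantize_config=None)

prompt = "Tell me about AI"   # illustrative prompt
input_ids = tokenizer(prompt, return_tensors="pt").input_ids.cuda()
output = model.generate(inputs=input_ids, do_sample=True, temperature=0.7, max_new_tokens=128)
print(tokenizer.decode(output[0]))
```

The two corrections are what make the README example runnable: `model_basename` now matches the quantized weights file actually shipped in the repository, and the comma after `model_basename=model_basename` removes the syntax error in the `from_quantized` call.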