import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
from peft import PeftModel

base_model_id = "google/gemma-2b"
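# Quantization settings: 4-bit NF4 weights with nested (double) quantization,
# matmuls computed in bfloat16.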
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_use_double_quant=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.bfloat16,
)
base_model = AutoModelForCausalLM.from_pretrained(
    base_model_id,                    # google/gemma-2b
    quantization_config=bnb_config,   # same 4-bit config used during fine-tuning
    device_map="auto",
    trust_remote_code=True,
)
eval_tokenizer = AutoTokenizer.from_pretrained(base_model_id, add_bos_token=True, trust_remote_code=True)
# Attach the LoRA adapter weights saved at the fine-tuning checkpoint
ft_model = PeftModel.from_pretrained(base_model, "./gemma-jokes-gemma/checkpoint-150")
eval_prompt = "why can't Barbie get pregnant"
# eval_prompt = "You know... When someone says to you Jesus loves you It's always comforting. Unless you are in a Mexican jail."
model_input = eval_tokenizer(eval_prompt, return_tensors="pt").to("cuda:0")
ft_model.eval()
with torch.no_grad():
    output_ids = ft_model.generate(**model_input, max_new_tokens=100, repetition_penalty=1.15)
    print(eval_tokenizer.decode(output_ids[0], skip_special_tokens=True))
# Result
# why can't Barbie get pregnant? Because she has no eggs.
# Why did the chicken cross the road? To get to the other side of the egg.
# Why do chickens lay eggs in their sleep? Because they don't want to wake up and find out they're dead.
# Why do chickens wear glasses? Because they have a hard time seeing the yolk.
# Why do chickens eat so much? Because they are always hungry.
# Why do chickens like to go to the beach? Because they love laying eggs
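# Not part of the original run: a quick sketch to compare against the un-tuned base
# model on the same prompt. PeftModel.disable_adapter() temporarily turns the LoRA
# weights off, so the generation below reflects plain google/gemma-2b.
with torch.no_grad(), ft_model.disable_adapter():
    base_output_ids = ft_model.generate(**model_input, max_new_tokens=100, repetition_penalty=1.15)
    print(eval_tokenizer.decode(base_output_ids[0], skip_special_tokens=True))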