from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the tokenizer and model.
# Note: "PKU-Alignment/beaver-7b-v1.0-reward" is published as a reward model for
# scoring responses; text generation needs a language-modeling head, which is
# why AutoModelForCausalLM (rather than the bare AutoModel) is used here.
tokenizer = AutoTokenizer.from_pretrained("PKU-Alignment/beaver-7b-v1.0-reward")
model = AutoModelForCausalLM.from_pretrained("PKU-Alignment/beaver-7b-v1.0-reward")

def generate_response(prompt):
    # Tokenize the prompt and return PyTorch tensors.
    input_ids = tokenizer.encode(prompt, return_tensors="pt")
    # Generate a single continuation of at most 64 tokens (prompt included).
    output = model.generate(input_ids, max_length=64, num_return_sequences=1)
    # Decode the generated token IDs back into text.
    response = tokenizer.decode(output[0], skip_special_tokens=True)
    # Return the decoded text.
    return response

prompt = "What is the capital of France?"
response = generate_response(prompt)
print(response)
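
The checkpoint referenced above is distributed as a reward model, so its usual role is to score a prompt-response pair rather than to generate text. The sketch below is a hedged illustration of that scoring use only: it assumes PKU-Alignment's safe-rlhf package exposes an AutoModelForScore class whose output carries per-sequence scores in end_scores, and it assumes a conversation-style input format; none of these details appear in the snippet above, so the model card is the authoritative reference.

# Minimal sketch of reward scoring, assuming the safe-rlhf package provides
# AutoModelForScore and that its output exposes per-sequence scores via
# `end_scores` (assumptions, not taken from the snippet above).
import torch
from transformers import AutoTokenizer
from safe_rlhf.models import AutoModelForScore  # assumed import path

reward_tokenizer = AutoTokenizer.from_pretrained("PKU-Alignment/beaver-7b-v1.0-reward")
reward_model = AutoModelForScore.from_pretrained(
    "PKU-Alignment/beaver-7b-v1.0-reward",
    torch_dtype=torch.bfloat16,
)

# Assumed conversation-style format for the Beaver reward models.
text = (
    "BEGINNING OF CONVERSATION: USER: What is the capital of France? "
    "ASSISTANT: The capital of France is Paris."
)
inputs = reward_tokenizer(text, return_tensors="pt")
with torch.no_grad():
    outputs = reward_model(**inputs)

# A higher score means the reward model judges the response more favorably.
print(outputs.end_scores)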