# Use a pipeline as a high-level helper
from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="mistralai/Mixtral-8x7B-Instruct-v0.1")
pipe(messages)
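# The call above returns a list of generation dicts. A minimal sketch of
# reading the reply (assumption: with chat-style input, recent transformers
# versions return the full message list under "generated_text"; the exact
# output shape can vary by version):
result = pipe(messages, max_new_tokens=128)
print(result[0]["generated_text"])
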
# Load model directly
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("mistralai/Mixtral-8x7B-Instruct-v0.1")
# Note: Mixtral-8x7B is a large mixture-of-experts model; in practice you will
# likely need device_map="auto" (requires accelerate) and substantial GPU memory.
model = AutoModelForCausalLM.from_pretrained("mistralai/Mixtral-8x7B-Instruct-v0.1")
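# Loading the weights alone does not generate text. A minimal sketch of the
# follow-on generation step; the prompt, max_new_tokens, and prompt-stripping
# slice below are illustrative assumptions, not prescribed by the model card.
inputs = tokenizer.apply_chat_template(
    [{"role": "user", "content": "Who are you?"}],
    add_generation_prompt=True,
    return_tensors="pt",
).to(model.device)

outputs = model.generate(inputs, max_new_tokens=128)
# Decode only the newly generated tokens, skipping the prompt.
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))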