# Download + save + use a Hugging Face model
import os

from transformers import AutoModelForSequenceClassification, AutoTokenizer, TrainingArguments

# Assuming you are using a pre-trained model from the Hugging Face model hub.
model_name = "unsloth/Qwen2.5-Coder-14B-bnb-4bit"  # Replace with your desired model name
# num_labels=2: attaches a 2-way classification head (randomly initialized for a base model).
model = AutoModelForSequenceClassification.from_pretrained(model_name, num_labels=2)
tokenizer = AutoTokenizer.from_pretrained(model_name)

# ... (Your training code here if you are training the model) ...

model.save_pretrained("lora_model")  # Local saving
tokenizer.save_pretrained("lora_model")

# SECURITY: never hardcode access tokens in source files. Read the token from the
# environment (e.g. `export HF_TOKEN=hf_...` or a Colab secret) instead.
hf_token = os.environ.get("HF_TOKEN")
model.push_to_hub("sdyy/tgreb", token=hf_token)  # Online saving
tokenizer.push_to_hub("sdyy/tgreb", token=hf_token)  # Online saving
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the saved model.
# NOTE(review): the model above was saved as AutoModelForSequenceClassification but is
# reloaded here as a causal LM — confirm which architecture is actually intended.
model = AutoModelForCausalLM.from_pretrained("lora_model")
tokenizer = AutoTokenizer.from_pretrained("lora_model")

# Use the model for inference ...
def generate_text(prompt, max_length=128):
    """Generate a text continuation of *prompt* with the loaded causal LM.

    Args:
        prompt: Input text to continue.
        max_length: Maximum total length of the output, in tokens
            (includes the prompt tokens).

    Returns:
        The decoded generated text, with special tokens stripped.
    """
    # Tokenize and move the tensors to the same device as the model.
    encoded = tokenizer(prompt, return_tensors="pt").to(model.device)
    output_ids = model.generate(**encoded, max_length=max_length)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)
# Example usage
prompt = "ما هو برج ايفل؟"  # Arabic: "What is the Eiffel Tower?"
generated_text = generate_text(prompt)
print(generated_text)
%%capture
!pip install unsloth "xformers==0.0.28.post2"
Also get the latest nightly Unsloth!
!pip uninstall unsloth -y && pip install --upgrade --no-cache-dir "unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git"
!pip install torch
!pip install git+https://github.com/huggingface/transformers.git
!pip install git+https://github.com/huggingface/accelerate.git
!pip install git+https://github.com/huggingface/bitsandbytes.git
!pip install huggingface-hub
!pip install -U bitsandbytes