---
base_model: Qwen/Qwen2-7B
license: apache-2.0
datasets:
- macadeliccc/opus_samantha
- teknium/OpenHermes-2.5
- HuggingFaceH4/ultrachat_200k
- cognitivecomputations/samantha-data
- cognitivecomputations/uncensored-ultrachat
import torch  # fix: torch was used below (torch.load) but never imported

from peft import AutoPeftModelForCausalLM

# Hugging Face Hub repo id (or local directory) holding the LoRA adapter.
path_to_adapter = "macadeliccc/Samantha-Qwen-2-7B-lora"

# Load the base model with the LoRA adapter applied.
# .eval() switches off dropout/other training-mode behavior for inference.
model = AutoPeftModelForCausalLM.from_pretrained(
    path_to_adapter,
    device_map="auto",        # let accelerate place layers across available devices
    trust_remote_code=True,   # allow custom modeling code shipped with the repo
).eval()

# Load the extra (non-LoRA) weights shipped alongside the adapter.
# NOTE(review): torch.load unpickles arbitrary code — only load checkpoints
# from a trusted source. map_location="cpu" avoids allocating the extra
# weights on GPU before load_state_dict moves them where they belong.
vpm_resampler_embedtokens_weight = torch.load(
    f"{path_to_adapter}/vpm_resampler_embedtokens.pt",
    map_location="cpu",
)

# strict=False: the checkpoint covers only a subset of the model's
# parameters, so missing/unexpected keys are reported in `msg`, not raised.
msg = model.load_state_dict(vpm_resampler_embedtokens_weight, strict=False)