fsaudm committed
Commit 9a6dece
1 Parent(s): 8e79ce9

Update README.md

Files changed (1): README.md (+3 −3)
README.md CHANGED
@@ -43,7 +43,7 @@ from transformers import pipeline
 messages = [
     {"role": "user", "content": "Who are you?"},
 ]
-pipe = pipeline("text-generation", model="fsaudm/Meta-Llama-3.1-8B-Instruct-BF4")
+pipe = pipeline("text-generation", model="fsaudm/Meta-Llama-3.1-8B-Instruct-NF4")
 pipe(messages)
 ```
 
@@ -54,8 +54,8 @@ Load model directly
 # Load model directly
 from transformers import AutoTokenizer, AutoModelForCausalLM
 
-tokenizer = AutoTokenizer.from_pretrained("fsaudm/Meta-Llama-3.1-8B-Instruct-BF4")
-model = AutoModelForCausalLM.from_pretrained("fsaudm/Meta-Llama-3.1-8B-Instruct-BF4")
+tokenizer = AutoTokenizer.from_pretrained("fsaudm/Meta-Llama-3.1-8B-Instruct-NF4")
+model = AutoModelForCausalLM.from_pretrained("fsaudm/Meta-Llama-3.1-8B-Instruct-NF4")
 ```
 
 The base model information can be found in the original [meta-llama/Meta-Llama-3.1-8B-Instruct](https://huggingface.co/meta-llama/Meta-Llama-3.1-8B-Instruct)
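
For readers following the corrected model IDs above: the `-NF4` suffix refers to 4-bit NormalFloat quantization. The sketch below shows one common way such a checkpoint is produced from the base model with `transformers` and `bitsandbytes`; the specific settings (compute dtype, device mapping) are illustrative assumptions, not taken from this commit.

```python
# Illustrative only: one way to build an NF4-quantized Llama 3.1 8B Instruct.
# The exact configuration behind fsaudm/Meta-Llama-3.1-8B-Instruct-NF4 is not
# shown in this commit, so the values below are assumptions.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                      # store weights in 4 bits
    bnb_4bit_quant_type="nf4",              # NormalFloat4, matching the "-NF4" suffix
    bnb_4bit_compute_dtype=torch.bfloat16,  # assumed compute dtype
)

base_id = "meta-llama/Meta-Llama-3.1-8B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(base_id)
model = AutoModelForCausalLM.from_pretrained(
    base_id,
    quantization_config=bnb_config,
    device_map="auto",
)
```

Loading the published `fsaudm/Meta-Llama-3.1-8B-Instruct-NF4` repository directly, as the README snippets do, presumably skips this step, since the 4-bit weights are already serialized in that checkpoint.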