Update app.py
app.py
CHANGED
@@ -15,7 +15,10 @@ else:
 
 # Load the model and tokenizer
 tokenizer = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3.1-70B")
-model = AutoModelForCausalLM.from_pretrained("meta-llama/Meta-Llama-3.1-70B")
+model = AutoModelForCausalLM.from_pretrained(
+    "meta-llama/Meta-Llama-3.1-70B",
+    rope_scaling={'type': 'llama3', 'factor': 8.0}  # Adjust the type and factor as needed
+)
 
 # Create a text generation pipeline
 pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
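For context, here is a minimal self-contained sketch of the loading code this hunk modifies. The imports, prompt, and generation settings are assumptions about the rest of app.py, not part of the commit, and whether this exact rope_scaling dict is accepted depends on the installed transformers version:

from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

MODEL_ID = "meta-llama/Meta-Llama-3.1-70B"

# Load the tokenizer and the model, passing the rope_scaling override from the
# commit above (assumed values; adjust to match your transformers version).
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    rope_scaling={"type": "llama3", "factor": 8.0},
)

# Build a text-generation pipeline and run a short prompt (assumed usage).
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
print(pipe("Hello!", max_new_tokens=32)[0]["generated_text"])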