Commit cb15a9e · add comment
dvruette committed
1 Parent(s): 015750a
main.py CHANGED
@@ -18,6 +18,7 @@ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 # comment in/out the models you want to use
 # RAM requirements: ~16GB x #models (+ ~4GB overhead)
 # VRAM requirements: ~16GB
+# if using int8: ~8GB VRAM x #models, low RAM requirements
 MODEL_CONFIGS = {
     "Llama-2-7b-chat-hf": {
         "identifier": "meta-llama/Llama-2-7b-chat-hf",