Update app.py
app.py CHANGED
@@ -7,10 +7,7 @@ from transformers import (AutoModelForCausalLM, AutoTokenizer, LlamaForCausalLM,
 
 app = FastAPI()
 
-
-    print("CUDA is available. GPU will be used.")
-else:
-    print("CUDA is not available. CPU will be used.")
+
 # Load the model and tokenizer
 model_name_or_path = "TheBloke/Wizard-Vicuna-7B-Uncensored-GPT/"
 # Dictionary to store conversation threads and their context
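For context, here is a minimal sketch of how this part of app.py might fit together after the change, assuming torch is imported earlier in the file and that the model and tokenizer are loaded with the standard from_pretrained() calls; the conversations dictionary name and the device_map="auto" argument are illustrative assumptions and do not appear in the diff.

```python
import torch
from fastapi import FastAPI
from transformers import AutoModelForCausalLM, AutoTokenizer

app = FastAPI()

# Load the model and tokenizer
model_name_or_path = "TheBloke/Wizard-Vicuna-7B-Uncensored-GPT/"
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path)
model = AutoModelForCausalLM.from_pretrained(
    model_name_or_path,
    device_map="auto",  # assumption: let accelerate place weights on GPU when one is available
)

# Dictionary to store conversation threads and their context
conversations = {}  # hypothetical name; the diff only shows the comment
```

With device placement handled at load time, an explicit CUDA-availability print is not strictly needed, which is consistent with those print statements being dropped in this commit.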