explorewithai committed
Commit c89af47 • Parent(s): c7bf96d
Update app.py
app.py CHANGED
@@ -12,7 +12,7 @@ device = 0 if torch.cuda.is_available() else -1
 # Define the function to generate responses
 @spaces.GPU(duration = 190)
 def generate_response(user_input, history):
-    pipe = pipeline("text-generation", model="explorewithai/ChatFrame-Uncensored-Instruct-Small")
+    pipe = pipeline("text-generation", model="explorewithai/ChatFrame-Uncensored-Instruct-Small", device = device)
     messages = [
         {"role": "user", "content": user_input},
     ]
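The one-line change passes the device detected at the top of app.py into pipeline(), so the model is loaded onto GPU 0 when CUDA is available and falls back to the CPU (-1) otherwise. Below is a minimal sketch of how app.py plausibly fits together after this commit; the imports, the generation and return logic, and the Gradio ChatInterface wiring are assumptions, since only the hunk above appears in the diff.

import torch
import spaces
import gradio as gr
from transformers import pipeline

# -1 selects the CPU; 0 selects the first CUDA GPU (matches the hunk context above)
device = 0 if torch.cuda.is_available() else -1

# Define the function to generate responses
@spaces.GPU(duration = 190)
def generate_response(user_input, history):
    # The committed change: pass the detected device so the model runs on GPU when available
    pipe = pipeline(
        "text-generation",
        model="explorewithai/ChatFrame-Uncensored-Instruct-Small",
        device=device,
    )
    messages = [
        {"role": "user", "content": user_input},
    ]
    # Assumption: generate a reply and return its text (this part is not shown in the diff)
    outputs = pipe(messages, max_new_tokens=256)
    return outputs[0]["generated_text"][-1]["content"]

# Assumption: the Space exposes the function through a Gradio chat interface
demo = gr.ChatInterface(fn=generate_response)
demo.launch()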