william4416 committed on
Commit 98b4c9a · verified · 1 parent: f1adaab

Update app.py

Files changed (1): app.py (+6 -6)
app.py CHANGED

@@ -4,11 +4,11 @@ import torch
 import json
 
 title = "AI ChatBot"
-description = "A State-of-the-Art Large-scale Pretrained Response generation model (DialoGPT)"
+description = "A State-of-the-Art Large-scale Pretrained Response generation model (GEMMA)"
 examples = [["How are you?"]]
 
-tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
-model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")
+tokenizer = AutoTokenizer.from_pretrained("your-gemma-model-name")
+model = AutoModelForCausalLM.from_pretrained("your-gemma-model-name")
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 model.to(device)
 
@@ -62,7 +62,7 @@ gr.Interface(
     title=title,
     description=description,
     examples=examples,
-    inputs=["text", "state"],  # Changed input from "state" to "text"
-    outputs=["chatbot", "state"],  # Changed output to match the tuple return type
+    inputs=["text"],
+    outputs=["text", "state"],
     theme="finlaymacklon/boxy_violet"
-).launch()
+).launch()
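
The first hunk swaps the DialoGPT checkpoint for a Gemma one, but leaves the checkpoint name as the placeholder "your-gemma-model-name". The snippet below is a minimal sketch of how that loading block could be exercised end to end; it assumes the publicly available "google/gemma-2b-it" checkpoint purely as an example (the commit does not pin a model id), and Gemma checkpoints on the Hub are gated, so a Hugging Face access token may be required.

# Hedged sketch: loading a Gemma checkpoint the way the updated app.py does.
# "google/gemma-2b-it" is an assumed example id; the commit itself keeps the
# placeholder "your-gemma-model-name".
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "google/gemma-2b-it"  # assumption; substitute the real checkpoint

tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Same device handling as in the diff: prefer GPU when available.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)

# Minimal single-turn generation to confirm the checkpoint responds.
inputs = tokenizer("How are you?", return_tensors="pt").to(device)
output_ids = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))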
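
The second hunk rewires the gr.Interface call. The commit does not show the predict function, so the sketch below assumes a trivial echo-style function in its place; it also pairs "state" on both the input and output side, since Gradio session state generally needs to appear in both to persist across turns, whereas the commit lists it only under outputs.

# Hedged sketch of the interface wiring; predict() is a stand-in for the
# app's real chat function, which would call the Gemma model.
import gradio as gr

title = "AI ChatBot"
description = "A State-of-the-Art Large-scale Pretrained Response generation model (GEMMA)"

def predict(message, history):
    # Placeholder logic only: append the message to the session history and echo it.
    history = (history or []) + [message]
    reply = f"(echo) {message}"
    return reply, history

gr.Interface(
    fn=predict,
    inputs=["text", "state"],   # "state" paired on both sides, unlike the commit
    outputs=["text", "state"],
    title=title,
    description=description,
    theme="finlaymacklon/boxy_violet",
).launch()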