coeuslearning committed on
Commit
6370d36
1 Parent(s): fb8b8c3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -36
app.py CHANGED
@@ -21,7 +21,7 @@ LICENSE = """
21
  """
22
 
23
  if not torch.cuda.is_available():
24
- DESCRIPTION += "\n<p>Running on CPU 🥶 This demo does not work on CPU.</p>"
25
 
26
 
27
  if torch.cuda.is_available():
@@ -79,41 +79,6 @@ chat_interface = gr.ChatInterface(
79
  fn=generate,
80
  additional_inputs=[
81
  gr.Textbox(label="System prompt", lines=6),
82
- gr.Slider(
83
- label="Max new tokens",
84
- minimum=1,
85
- maximum=MAX_MAX_NEW_TOKENS,
86
- step=1,
87
- value=DEFAULT_MAX_NEW_TOKENS,
88
- ),
89
- gr.Slider(
90
- label="Temperature",
91
- minimum=0.1,
92
- maximum=4.0,
93
- step=0.1,
94
- value=0.6,
95
- ),
96
- gr.Slider(
97
- label="Top-p (nucleus sampling)",
98
- minimum=0.05,
99
- maximum=1.0,
100
- step=0.05,
101
- value=0.9,
102
- ),
103
- gr.Slider(
104
- label="Top-k",
105
- minimum=1,
106
- maximum=1000,
107
- step=1,
108
- value=50,
109
- ),
110
- gr.Slider(
111
- label="Repetition penalty",
112
- minimum=1.0,
113
- maximum=2.0,
114
- step=0.05,
115
- value=1.2,
116
- ),
117
  ],
118
  stop_btn=None,
119
  examples=[
 
21
  """
22
 
23
  if not torch.cuda.is_available():
24
+ DESCRIPTION += "\n<p>Running on CPU. Please enable GPU</p>"
25
 
26
 
27
  if torch.cuda.is_available():
 
79
  fn=generate,
80
  additional_inputs=[
81
  gr.Textbox(label="System prompt", lines=6),
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
82
  ],
83
  stop_btn=None,
84
  examples=[