0x7o committed
Commit 0ad4d13 · verified · 1 parent: f5249eb

Update app.py

Files changed (1)
  1. app.py +4 -3
app.py CHANGED
@@ -9,12 +9,13 @@ if torch.cuda.is_available():
 pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

 @spaces.GPU
-def predict(text):
-    return pipe(text)
+def predict(prompt, temperature, max_length):
+    return pipe(prompt, temperature=temperature, max_length=max_length)[0]["generated_text"]

 demo = gr.Interface(
     fn=predict,
-    inputs=["text"],
+    title="mGPT-13B Demo",
+    inputs=["text", gr.Slider(minimum=0.01, maximum=1.0, value=0.7), gr.Slider(minimum=1, maximum=1024, value=50)],
     outputs=["text"],
 )
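For context, here is a minimal sketch of what the full app.py might look like after this commit. Only the lines in the hunk above are taken from the diff; the imports, model loading, CUDA check, and launch call are assumptions for illustration, and the model id is inferred from the "mGPT-13B Demo" title, so the actual file may differ.

# Sketch of app.py after this commit. Everything outside the diff hunk
# (imports, model loading, demo.launch) is assumed and may not match the real file.
import gradio as gr
import spaces
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

model_id = "ai-forever/mGPT-13B"  # assumption, inferred from the "mGPT-13B Demo" title
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

if torch.cuda.is_available():
    model = model.to("cuda")  # assumed body of the CUDA check shown in the hunk header

pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

@spaces.GPU
def predict(prompt, temperature, max_length):
    # Pass the slider values through as generation kwargs and return only the
    # generated string rather than the pipeline's list-of-dicts output.
    return pipe(prompt, temperature=temperature, max_length=max_length)[0]["generated_text"]

demo = gr.Interface(
    fn=predict,
    title="mGPT-13B Demo",
    inputs=["text", gr.Slider(minimum=0.01, maximum=1.0, value=0.7), gr.Slider(minimum=1, maximum=1024, value=50)],
    outputs=["text"],
)

demo.launch()

With this change, the two sliders map positionally to the temperature and max_length parameters of predict, and the function now returns just the generated text instead of the raw pipeline output.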