Update app.py
app.py CHANGED

@@ -21,7 +21,7 @@ pipe = pipeline(
 )
 
 # Original model setup
-repo_id = "
+repo_id = "parler-tts/parler-tts-mini-multilingual"
 model = ParlerTTSForConditionalGeneration.from_pretrained(repo_id).to(device)
 text_tokenizer = AutoTokenizer.from_pretrained(repo_id)
 description_tokenizer = AutoTokenizer.from_pretrained("google/flan-t5-large")

@@ -213,7 +213,7 @@ with gr.Blocks(css=css) as block:
     gr.HTML(
         """<p><a href="https://github.com/huggingface/parler-tts">Parler-TTS</a> is a training and inference library for
         high-fidelity text-to-speech (TTS) models.</p>
-        <p>This multilingual model supports French, Spanish, Italian, Portuguese, Polish, German, Dutch, and English. It generates high-quality speech with features that can be controlled using a simple text prompt.</p>
+        <p>This <a href="https://huggingface.co/parler-tts/parler-tts-mini-multilingual">multilingual model</a> supports French, Spanish, Italian, Portuguese, Polish, German, Dutch, and English. It generates high-quality speech with features that can be controlled using a simple text prompt.</p>
         <p>By default, Parler-TTS generates 🎲 random voice characteristics. To ensure 🎯 <b>speaker consistency</b> across generations, try to use consistent descriptions in your prompts.</p>"""
     )
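For reference, the swapped-in checkpoint plugs into the standard Parler-TTS inference pattern that the unchanged context lines (text_tokenizer, description_tokenizer, model) already follow. Below is a minimal sketch of that pattern outside the Space's Gradio wrapper, assuming the documented parler_tts API; the prompt and description strings are placeholders, not values taken from app.py.

import torch
import soundfile as sf
from transformers import AutoTokenizer
from parler_tts import ParlerTTSForConditionalGeneration

device = "cuda:0" if torch.cuda.is_available() else "cpu"

# Same setup as in the diff above: the multilingual checkpoint plus a
# flan-t5-large tokenizer for the voice description.
repo_id = "parler-tts/parler-tts-mini-multilingual"
model = ParlerTTSForConditionalGeneration.from_pretrained(repo_id).to(device)
text_tokenizer = AutoTokenizer.from_pretrained(repo_id)
description_tokenizer = AutoTokenizer.from_pretrained("google/flan-t5-large")

# Placeholder inputs: the description controls the voice characteristics,
# the prompt is the text to be spoken (in any of the supported languages).
description = "A female speaker delivers her words expressively, with clear audio quality."
prompt = "Bonjour, comment allez-vous aujourd'hui ?"

input_ids = description_tokenizer(description, return_tensors="pt").input_ids.to(device)
prompt_input_ids = text_tokenizer(prompt, return_tensors="pt").input_ids.to(device)

# Generate the waveform and write it to disk at the model's native sampling rate.
generation = model.generate(input_ids=input_ids, prompt_input_ids=prompt_input_ids)
audio = generation.cpu().numpy().squeeze()
sf.write("parler_tts_out.wav", audio, model.config.sampling_rate)

Keeping the description string identical across calls is what the updated gr.HTML text means by using consistent descriptions to get speaker consistency between generations.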