anyantudre committed
Commit ebf5733 · 1 parent a9c04c6
Update src/translation.py
Files changed: src/translation.py (+1, -1)
src/translation.py
CHANGED
@@ -94,7 +94,7 @@ def translate_facebook(s:str, src_iso:str, dest_iso:str)-> str:
 
     # Inference
     encoded = tokenizer(s, return_tensors="pt")
-    translated_tokens = model.generate(**encoded, forced_bos_token_id=tokenizer.convert_tokens_to_ids
+    translated_tokens = model.generate(**encoded, forced_bos_token_id=tokenizer.convert_tokens_to_ids(f"{dest_iso}_Latn"), max_length=120)
     translation = tokenizer.batch_decode(translated_tokens, skip_special_tokens=True)[0]
 
     return translation
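
For context, below is a minimal sketch of how the updated generate call could fit into a complete translate_facebook helper. The commit only changes the model.generate(...) line; the checkpoint name (facebook/nllb-200-distilled-600M), the src_lang handling, and the surrounding model/tokenizer setup are assumptions for illustration, not part of the diff.

# Sketch only: assumes an NLLB-style checkpoint whose language codes look like
# "eng_Latn" / "fra_Latn", matching the f"{dest_iso}_Latn" pattern in the diff.
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

MODEL_NAME = "facebook/nllb-200-distilled-600M"  # assumed checkpoint, not from the commit

def translate_facebook(s: str, src_iso: str, dest_iso: str) -> str:
    # NLLB language codes combine an ISO 639-3 code with a script tag.
    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, src_lang=f"{src_iso}_Latn")
    model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)

    # Inference
    encoded = tokenizer(s, return_tensors="pt")
    translated_tokens = model.generate(
        **encoded,
        # Force the decoder to start with the target-language token;
        # this is how NLLB selects the output language.
        forced_bos_token_id=tokenizer.convert_tokens_to_ids(f"{dest_iso}_Latn"),
        max_length=120,
    )
    translation = tokenizer.batch_decode(translated_tokens, skip_special_tokens=True)[0]

    return translation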