# -*- coding: utf-8 -*-
"""TurjumanDemo

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1VVJ7uPEYD8Q1pR-IINWWAQVpqyP1XnzD
"""

# Install dependencies and clone the demo Space
!pip install gradio turjuman transformers
!git clone https://huggingface.co/spaces/ahmedoumar/TurjumanDemo

# Import our modules
import logging
import os

import gradio as gr
from transformers import AutoTokenizer
from turjuman import turjuman

# Configure logging; the logger and cache directory are passed to the turjuman constructor below
logging.basicConfig(
    format="%(asctime)s | %(levelname)s | %(name)s | %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
    level=os.environ.get("LOGLEVEL", "INFO").upper(),
)
logger = logging.getLogger("turjuman.translate")

# Local directory where downloaded model files are cached
cache_dir = "/content/mycache"

# Get the turjuman object and its tokenizer
turj = turjuman.turjuman(logger, cache_dir)
tokenizer = AutoTokenizer.from_pretrained('UBC-NLP/AraT5-base-title-generation')
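# The AraT5 tokenizer is used below only to decode the outputs of turj.translate() back into plain text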

# The translate function
def translate(sent):
    # Beam-search decoding options passed through to turjuman
    beam_options = {
        "search_method": "beam",
        "seq_length": 300,
        "num_beams": 5,
        "no_repeat_ngram_size": 2,
        "max_outputs": 1,
    }
    targets = turj.translate(sent, **beam_options)
    # Decode each output returned by turjuman and concatenate into a single string
    ans = ""
    for target in targets:
        target = tokenizer.decode(target, skip_special_tokens=True, clean_up_tokenization_spaces=True)
        ans += target
    return ans

# Quick sanity check: translate a Russian input sentence ("Hello, friend")
print(translate('Здравствуй, друг'))

# Build a simple text-in/text-out Gradio interface around the translate function
gr.Interface(fn=translate, inputs=['text'], outputs=['text']).launch(width=1000, height=1000)
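# Note: when running in Colab, passing share=True to launch() also creates a
# temporary public link so the demo can be opened outside the notebook.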