Lowin's picture
init
0888f68
|
raw
history blame
870 Bytes
---
language:
  - zh
license:
  - apache-2.0
---
import jieba_fast
from transformers import BertTokenizer
from transformers import BigBirdModel
class JiebaTokenizer(BertTokenizer):
    """BertTokenizer that pre-segments Chinese text with jieba before WordPiece.

    Words produced by the pre-tokenizer that already exist in the vocabulary
    are emitted as single whole-word tokens; any out-of-vocabulary word falls
    back to BERT's standard WordPiece sub-word splitting.
    """

    def __init__(
        self, pre_tokenizer=lambda x: jieba_fast.cut(x, HMM=False), *args, **kwargs
    ):
        # pre_tokenizer: callable mapping a string to an iterable of words.
        # Defaults to jieba_fast segmentation with the HMM new-word model off,
        # so splits are purely dictionary-based and deterministic.
        super().__init__(*args, **kwargs)
        self.pre_tokenizer = pre_tokenizer

    def _tokenize(self, text, *args, **kwargs):
        """Segment `text` with the pre-tokenizer, then WordPiece-split OOV words."""
        split_tokens = []
        # Use a distinct loop variable instead of rebinding `text`, so the
        # original input stays available and the code reads unambiguously.
        for word in self.pre_tokenizer(text):
            if word in self.vocab:
                # Whole word exists in the vocab — keep it as one token.
                split_tokens.append(word)
            else:
                # Fall back to BERT's own sub-word tokenization.
                split_tokens.extend(super()._tokenize(word))
        return split_tokens
# Both the encoder and the tokenizer load from the same pretrained checkpoint.
_CHECKPOINT = 'Lowin/chinese-bigbird-base'

model = BigBirdModel.from_pretrained(_CHECKPOINT)
tokenizer = JiebaTokenizer.from_pretrained(_CHECKPOINT)

https://github.com/LowinLi/chinese-bigbird