from functools import lru_cache

from transformers import BertTokenizer, BertConfig, BertForSequenceClassification

# Default HF Hub repos: tokenizer comes from the IndoBERT base checkpoint,
# weights/config from the SmSA-fine-tuned sentiment classifier built on it.
DEFAULT_TOKENIZER_NAME = 'indobenchmark/indobert-base-p1'
DEFAULT_MODEL_NAME = 'daphinokio/indobert-smsa-fine-tuned'


@lru_cache()
def initialize_model(model_name: str = DEFAULT_MODEL_NAME,
                     tokenizer_name: str = DEFAULT_TOKENIZER_NAME):
    """Load (and memoize) the sequence-classification model and its tokenizer.

    The ``lru_cache`` decorator ensures each (model_name, tokenizer_name)
    pair is downloaded/instantiated only once per process; repeated calls
    return the same objects.

    Args:
        model_name: Hugging Face Hub repo id for the fine-tuned model
            weights and config.
        tokenizer_name: Hugging Face Hub repo id for the tokenizer
            (may differ from the model repo when the fine-tune did not
            republish its tokenizer).

    Returns:
        Tuple of ``(model, tokenizer)`` where ``model`` is a
        ``BertForSequenceClassification`` and ``tokenizer`` a
        ``BertTokenizer``.
    """
    tokenizer = BertTokenizer.from_pretrained(tokenizer_name)
    config = BertConfig.from_pretrained(model_name)
    model = BertForSequenceClassification.from_pretrained(model_name, config=config)
    return model, tokenizer


# NOTE(review): module-level side effect — importing this module triggers
# a (possibly network-bound) model download. Kept as-is because callers
# may rely on `from <module> import model, tokenizer`.
model, tokenizer = initialize_model()