Update functions.py
Browse files- functions.py +3 -3
functions.py
CHANGED
@@ -112,8 +112,9 @@ def load_models():
|
|
112 |
sum_pipe = pipeline("summarization",model="facebook/bart-large-cnn", tokenizer="facebook/bart-large-cnn",clean_up_tokenization_spaces=True)
|
113 |
ner_pipe = pipeline("ner", model=ner_model, tokenizer=ner_tokenizer, grouped_entities=True)
|
114 |
cross_encoder = CrossEncoder('cross-encoder/mmarco-mMiniLMv2-L12-H384-v1') #cross-encoder/ms-marco-MiniLM-L-12-v2
|
|
|
115 |
|
116 |
-
return sent_pipe, sum_pipe, ner_pipe, cross_encoder, kg_model, kg_tokenizer, emb_tokenizer
|
117 |
|
118 |
@st.experimental_singleton(suppress_st_warning=True)
|
119 |
def load_asr_model(asr_model_name):
|
@@ -752,8 +753,7 @@ def save_network_html(kb, filename="network.html"):
|
|
752 |
)
|
753 |
net.set_edge_smooth('dynamic')
|
754 |
net.show(filename)
|
755 |
-
|
756 |
|
757 |
nlp = get_spacy()
|
758 |
|
759 |
-
sent_pipe, sum_pipe, ner_pipe, cross_encoder, kg_model, kg_tokenizer, emb_tokenizer = load_models()
|
|
|
112 |
sum_pipe = pipeline("summarization",model="facebook/bart-large-cnn", tokenizer="facebook/bart-large-cnn",clean_up_tokenization_spaces=True)
|
113 |
ner_pipe = pipeline("ner", model=ner_model, tokenizer=ner_tokenizer, grouped_entities=True)
|
114 |
cross_encoder = CrossEncoder('cross-encoder/mmarco-mMiniLMv2-L12-H384-v1') #cross-encoder/ms-marco-MiniLM-L-12-v2
|
115 |
+
sbert = SentenceTransformer('all-MiniLM-L6-v2')
|
116 |
|
117 |
+
return sent_pipe, sum_pipe, ner_pipe, cross_encoder, kg_model, kg_tokenizer, emb_tokenizer, sbert
|
118 |
|
119 |
@st.experimental_singleton(suppress_st_warning=True)
|
120 |
def load_asr_model(asr_model_name):
|
|
|
753 |
)
|
754 |
net.set_edge_smooth('dynamic')
|
755 |
net.show(filename)
|
|
|
756 |
|
757 |
nlp = get_spacy()
|
758 |
|
759 |
+
sent_pipe, sum_pipe, ner_pipe, cross_encoder, kg_model, kg_tokenizer, emb_tokenizer, sbert = load_models()
|