Upload folder using huggingface_hub
- config.json +9 -0
- newsagency_ner.py +1 -1
config.json
CHANGED
@@ -5,6 +5,15 @@
   ],
   "attention_probs_dropout_prob": 0.1,
   "classifier_dropout": null,
+  "custom_pipelines": {
+    "newsagency-ner": {
+      "impl": "newsagency_ner.NewsAgencyModelPipeline",
+      "pt": [
+        "AutoModelForTokenClassification"
+      ],
+      "tf": []
+    }
+  },
   "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
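The added "custom_pipelines" entry registers newsagency-ner as a task that transformers can resolve to the NewsAgencyModelPipeline class in newsagency_ner.py when the model is loaded with trust_remote_code=True. A minimal usage sketch (the repository id below is a placeholder, not part of this commit):

from transformers import pipeline

# Sketch only: "<model-repo-id>" is a placeholder for the repository
# that this config.json belongs to.
ner = pipeline(
    "newsagency-ner",          # task name declared in "custom_pipelines"
    model="<model-repo-id>",
    trust_remote_code=True,    # needed so newsagency_ner.py is fetched and executed
)

print(ner("Reuters reported from London on Tuesday."))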
newsagency_ner.py
CHANGED
@@ -210,7 +210,7 @@ class NewsAgencyModelPipeline(Pipeline):
         :return:
         """
         tokens_result, text_sentence, text = outputs
-        import pdb;pdb.set_trace()
+        # import pdb;pdb.set_trace()
         # Get raw logits and convert to numpy array
         logits = tokens_result["logits"].detach().cpu().numpy()
 
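The only code change is commenting out the leftover pdb breakpoint in postprocess; a live pdb.set_trace() would halt every inference call, which matters once the pipeline is loaded remotely via trust_remote_code. For orientation, a generic sketch (not taken from this repo) of how token-classification logits at this point are typically turned into labels:

import numpy as np

def logits_to_labels(logits: np.ndarray, id2label: dict) -> list:
    # logits: (batch, seq_len, num_labels) from a token-classification head
    pred_ids = np.argmax(logits, axis=-1)   # best label id per token
    return [[id2label[int(i)] for i in row] for row in pred_ids]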