MutazYoune committed on
Commit
7c4c06c
1 Parent(s): 582d0cd

Upload RobertaForTokenClassification

Browse files
Files changed (2) hide show
  1. config.json +35 -91
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "./fine_tuned_deid_roberta_i2b2",
3
  "architectures": [
4
  "RobertaForTokenClassification"
5
  ],
@@ -12,100 +12,44 @@
12
  "hidden_dropout_prob": 0.1,
13
  "hidden_size": 1024,
14
  "id2label": {
15
- "0": "B-AGE",
16
- "1": "B-DATE",
17
- "2": "B-EMAIL",
18
- "3": "B-HOSP",
19
- "4": "B-ID",
20
- "5": "B-LOC",
21
- "6": "B-OTHERPHI",
22
- "7": "B-PATIENT",
23
- "8": "B-PATORG",
24
- "9": "B-PHONE",
25
- "10": "B-STAFF",
26
- "11": "I-AGE",
27
- "12": "I-DATE",
28
- "13": "I-EMAIL",
29
- "14": "I-HOSP",
30
- "15": "I-ID",
31
- "16": "I-LOC",
32
- "17": "I-OTHERPHI",
33
- "18": "I-PATIENT",
34
- "19": "I-PATORG",
35
- "20": "I-PHONE",
36
- "21": "I-STAFF",
37
- "22": "L-AGE",
38
- "23": "L-DATE",
39
- "24": "L-EMAIL",
40
- "25": "L-HOSP",
41
- "26": "L-ID",
42
- "27": "L-LOC",
43
- "28": "L-OTHERPHI",
44
- "29": "L-PATIENT",
45
- "30": "L-PATORG",
46
- "31": "L-PHONE",
47
- "32": "L-STAFF",
48
- "33": "O",
49
- "34": "U-AGE",
50
- "35": "U-DATE",
51
- "36": "U-EMAIL",
52
- "37": "U-HOSP",
53
- "38": "U-ID",
54
- "39": "U-LOC",
55
- "40": "U-OTHERPHI",
56
- "41": "U-PATIENT",
57
- "42": "U-PATORG",
58
- "43": "U-PHONE",
59
- "44": "U-STAFF"
60
  },
61
  "initializer_range": 0.02,
62
  "intermediate_size": 4096,
63
  "label2id": {
64
- "B-AGE": 0,
65
- "B-DATE": 1,
66
- "B-EMAIL": 2,
67
- "B-HOSP": 3,
68
- "B-ID": 4,
69
- "B-LOC": 5,
70
- "B-OTHERPHI": 6,
71
- "B-PATIENT": 7,
72
- "B-PATORG": 8,
73
- "B-PHONE": 9,
74
- "B-STAFF": 10,
75
- "I-AGE": 11,
76
- "I-DATE": 12,
77
- "I-EMAIL": 13,
78
- "I-HOSP": 14,
79
- "I-ID": 15,
80
- "I-LOC": 16,
81
- "I-OTHERPHI": 17,
82
- "I-PATIENT": 18,
83
- "I-PATORG": 19,
84
- "I-PHONE": 20,
85
- "I-STAFF": 21,
86
- "L-AGE": 22,
87
- "L-DATE": 23,
88
- "L-EMAIL": 24,
89
- "L-HOSP": 25,
90
- "L-ID": 26,
91
- "L-LOC": 27,
92
- "L-OTHERPHI": 28,
93
- "L-PATIENT": 29,
94
- "L-PATORG": 30,
95
- "L-PHONE": 31,
96
- "L-STAFF": 32,
97
- "O": 33,
98
- "U-AGE": 34,
99
- "U-DATE": 35,
100
- "U-EMAIL": 36,
101
- "U-HOSP": 37,
102
- "U-ID": 38,
103
- "U-LOC": 39,
104
- "U-OTHERPHI": 40,
105
- "U-PATIENT": 41,
106
- "U-PATORG": 42,
107
- "U-PHONE": 43,
108
- "U-STAFF": 44
109
  },
110
  "layer_norm_eps": 1e-05,
111
  "max_position_embeddings": 514,
 
1
  {
2
+ "_name_or_path": "./trained_clinicalbert_model",
3
  "architectures": [
4
  "RobertaForTokenClassification"
5
  ],
 
12
  "hidden_dropout_prob": 0.1,
13
  "hidden_size": 1024,
14
  "id2label": {
15
+ "0": "O",
16
+ "1": "B-DOCTOR_NAME",
17
+ "2": "B-HOSPITAL_NAME",
18
+ "3": "B-PATIENT_NAME",
19
+ "4": "B-PATIENT_ID",
20
+ "5": "B-OTHER_NAME",
21
+ "6": "B-PHONE_NUMBER",
22
+ "7": "B-BIRTHDAY",
23
+ "8": "B-LOCATION",
24
+ "9": "I-DOCTOR_NAME",
25
+ "10": "I-HOSPITAL_NAME",
26
+ "11": "I-PATIENT_NAME",
27
+ "12": "I-PATIENT_ID",
28
+ "13": "I-OTHER_NAME",
29
+ "14": "I-PHONE_NUMBER",
30
+ "15": "I-BIRTHDAY",
31
+ "16": "I-LOCATION"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
32
  },
33
  "initializer_range": 0.02,
34
  "intermediate_size": 4096,
35
  "label2id": {
36
+ "B-BIRTHDAY": 7,
37
+ "B-DOCTOR_NAME": 1,
38
+ "B-HOSPITAL_NAME": 2,
39
+ "B-LOCATION": 8,
40
+ "B-OTHER_NAME": 5,
41
+ "B-PATIENT_ID": 4,
42
+ "B-PATIENT_NAME": 3,
43
+ "B-PHONE_NUMBER": 6,
44
+ "I-BIRTHDAY": 15,
45
+ "I-DOCTOR_NAME": 9,
46
+ "I-HOSPITAL_NAME": 10,
47
+ "I-LOCATION": 16,
48
+ "I-OTHER_NAME": 13,
49
+ "I-PATIENT_ID": 12,
50
+ "I-PATIENT_NAME": 11,
51
+ "I-PHONE_NUMBER": 14,
52
+ "O": 0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
53
  },
54
  "layer_norm_eps": 1e-05,
55
  "max_position_embeddings": 514,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:0fe31b811744cd3cf69d7a5a4daef4143cbe57b37ae4b2d76bab1c055c9e079f
3
- size 1417473100
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:387053a2c52475b2a9bcc0648ac2fec6e25a3e8cdd5cd1924d249cce802bce34
3
+ size 1417358292