Update README.md

README.md (CHANGED)
@@ -1,11 +1,5 @@
 ---
 license: apache-2.0
-metrics:
-- precision
-- recall
-- f1
-- accuracy
-
 ---
 # BPMN element detection
 
@@ -60,10 +54,6 @@ It achieves the following results on the evaluation set with arrows:
 
 | Class | Precision | Recall | F1 |
 |:-----------------:|:---------:|:--------:|:-------:|
-| background | 0 | 0 | 0 |
-| sequenceFlow | 0.9292 | 0.9605 | 0.9446 |
-| dataAssociation | 0.8472 | 0.8095 | 0.8279 |
-| messageFlow | 0.8589 | 0.7910 | 0.8235 |
 | task | 0.9518 | 0.9875 | 0.9693 |
 | exclusiveGateway | 0.9548 | 0.9427 | 0.9487 |
 | event | 0.9515 | 0.9235 | 0.9373 |
@@ -73,9 +63,11 @@ It achieves the following results on the evaluation set with arrows:
 | lane | 0.9178 | 0.67 | 0.7746 |
 | dataObject | 0.9333 | 0.9565 | 0.9448 |
 | dataStore | 1.0 | 0.64 | 0.7805 |
-| subProcess | 1.0 | 0.1429 | 0.25 |
 | eventBasedGateway | 0.7273 | 0.7273 | 0.7273 |
 | timerEvent | 0.8571 | 0.75 | 0.8 |
+| sequenceFlow | 0.9292 | 0.9605 | 0.9446 |
+| dataAssociation | 0.8472 | 0.8095 | 0.8279 |
+| messageFlow | 0.8589 | 0.7910 | 0.8235 |
 
 
 ## Model description
@@ -103,57 +95,57 @@ The following hyperparameters were used during training:
 - lr_scheduler_type: linear
 - num_epochs: 50
 
-### Training results
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+### Example of Training results
+| Epoch | Avg Loss | Test Loss | Classifier Loss | Box Reg Loss | Objectness Loss | RPN Box Reg Loss | Precision | Recall | F1 Score |
+|:-----:|:--------:|:---------:|:---------------:|:------------:|:---------------:|:----------------:|:---------:|:------:|:--------:|
+| 1 | 3.9451 | 2.0591 | 2.4416 | 0.5426 | 0.6502 | 0.3107 | 0.2763 | 0.0393 | 0.0689 |
+| 2 | 2.7259 | 1.5387 | 1.6724 | 0.6697 | 0.1868 | 0.1969 | 0.5754 | 0.3358 | 0.4241 |
+| 3 | 2.2004 | 1.1307 | 1.3860 | 0.5330 | 0.1216 | 0.1598 | 0.8657 | 0.6841 | 0.7643 |
+| 4 | 1.8611 | 1.0110 | 1.1775 | 0.4172 | 0.1099 | 0.1565 | 0.7708 | 0.7790 | 0.7749 |
+| 5 | 1.7461 | 0.9593 | 1.1202 | 0.3820 | 0.0971 | 0.1468 | 0.8542 | 0.8046 | 0.8287 |
+| 6 | 1.5859 | 0.8956 | 0.9986 | 0.3590 | 0.0872 | 0.1412 | 0.8884 | 0.8002 | 0.8420 |
+| 7 | 1.5621 | 0.9073 | 1.0214 | 0.3351 | 0.0776 | 0.1280 | 0.9435 | 0.8034 | 0.8678 |
+| 8 | 1.5194 | 0.8695 | 0.9881 | 0.3261 | 0.0738 | 0.1314 | 0.9048 | 0.8246 | 0.8628 |
+| 9 | 1.5449 | 0.9014 | 1.0105 | 0.3229 | 0.0769 | 0.1346 | 0.9478 | 0.8046 | 0.8704 |
+| 10 | 1.5805 | 0.8134 | 1.0333 | 0.3338 | 0.0703 | 0.1431 | 0.8920 | 0.8920 | 0.8920 |
+| 11 | 1.3838 | 0.8097 | 0.8743 | 0.3065 | 0.0653 | 0.1376 | 0.9634 | 0.8371 | 0.8958 |
+| 12 | 1.3582 | 0.7362 | 0.8751 | 0.2909 | 0.0617 | 0.1306 | 0.9457 | 0.8596 | 0.9006 |
+| 13 | 1.3126 | 0.7149 | 0.8347 | 0.2921 | 0.0593 | 0.1264 | 0.9152 | 0.9295 | 0.9223 |
+| 14 | 1.3532 | 0.7775 | 0.9079 | 0.2783 | 0.0543 | 0.1128 | 0.9639 | 0.8508 | 0.9038 |
+| 15 | 1.3188 | 0.6738 | 0.8986 | 0.2720 | 0.0434 | 0.1048 | 0.8856 | 0.9419 | 0.9129 |
+| 16 | 1.2512 | 0.7478 | 0.7840 | 0.2784 | 0.0621 | 0.1268 | 0.9181 | 0.9101 | 0.9141 |
+| 17 | 1.2909 | 0.6556 | 0.8425 | 0.2778 | 0.0547 | 0.1159 | 0.9012 | 0.9282 | 0.9145 |
+| 18 | 1.2526 | 0.7003 | 0.8442 | 0.2607 | 0.0443 | 0.1034 | 0.9169 | 0.9020 | 0.9094 |
+| 19 | 1.1980 | 0.7136 | 0.8062 | 0.2528 | 0.0361 | 0.1029 | 0.9520 | 0.9157 | 0.9335 |
+| 20 | 1.1821 | 0.6308 | 0.7895 | 0.2517 | 0.0378 | 0.1030 | 0.9023 | 0.9513 | 0.9262 |
+| 21 | 1.0843 | 0.6883 | 0.7168 | 0.2402 | 0.0316 | 0.0957 | 0.9348 | 0.9032 | 0.9187 |
+| 22 | 1.1058 | 0.6192 | 0.7367 | 0.2336 | 0.0374 | 0.0981 | 0.9321 | 0.9513 | 0.9416 |
+| 23 | 1.0699 | 0.5962 | 0.7119 | 0.2340 | 0.0306 | 0.0935 | 0.9353 | 0.9476 | 0.9414 |
+| 24 | 1.0616 | 0.6674 | 0.7031 | 0.2367 | 0.0311 | 0.0908 | 0.9418 | 0.9301 | 0.9359 |
+| 25 | 1.0784 | 0.6158 | 0.7275 | 0.2311 | 0.0295 | 0.0904 | 0.9176 | 0.9320 | 0.9247 |
+| 26 | 1.0618 | 0.6483 | 0.7121 | 0.2283 | 0.0297 | 0.0916 | 0.9411 | 0.9182 | 0.9295 |
+| 27 | 1.0530 | 0.5958 | 0.7139 | 0.2236 | 0.0279 | 0.0876 | 0.9477 | 0.9395 | 0.9436 |
+| 28 | 1.0452 | 0.5964 | 0.7097 | 0.2223 | 0.0283 | 0.0849 | 0.9465 | 0.9494 | 0.9480 |
+| 29 | 1.0966 | 0.6288 | 0.7795 | 0.2176 | 0.0203 | 0.0792 | 0.9558 | 0.9320 | 0.9437 |
+| 30 | 1.0506 | 0.5956 | 0.7312 | 0.2142 | 0.0195 | 0.0856 | 0.9370 | 0.9370 | 0.9370 |
+| 31 | 1.0030 | 0.6099 | 0.6777 | 0.2163 | 0.0204 | 0.0886 | 0.9506 | 0.9251 | 0.9377 |
+| 32 | 0.9748 | 0.5976 | 0.6610 | 0.2098 | 0.0201 | 0.0839 | 0.9527 | 0.9313 | 0.9419 |
+| 33 | 0.9540 | 0.5907 | 0.6402 | 0.2059 | 0.0216 | 0.0863 | 0.9536 | 0.9238 | 0.9385 |
+| 34 | 0.9730 | 0.5809 | 0.6500 | 0.2076 | 0.0281 | 0.0873 | 0.9407 | 0.9413 | 0.9410 |
+| 35 | 0.9894 | 0.5837 | 0.6831 | 0.2066 | 0.0202 | 0.0794 | 0.9451 | 0.9345 | 0.9397 |
+| 36 | 0.9042 | 0.5534 | 0.5873 | 0.2096 | 0.0214 | 0.0860 | 0.9460 | 0.9519 | 0.9490 |
+| 37 | 0.9546 | 0.5562 | 0.6400 | 0.2112 | 0.0216 | 0.0818 | 0.9260 | 0.9457 | 0.9358 |
+| 38 | 0.9806 | 0.5792 | 0.6800 | 0.2031 | 0.0175 | 0.0800 | 0.9476 | 0.9363 | 0.9419 |
+| 39 | 0.9294 | 0.5703 | 0.6247 | 0.2016 | 0.0204 | 0.0826 | 0.9401 | 0.9501 | 0.9450 |
+| 40 | 0.9786 | 0.5880 | 0.6733 | 0.2010 | 0.0268 | 0.0775 | 0.9375 | 0.9170 | 0.9271 |
+| 41 | 1.0026 | 0.5875 | 0.7073 | 0.2033 | 0.0179 | 0.0742 | 0.9476 | 0.9251 | 0.9362 |
+| 42 | 0.9567 | 0.5724 | 0.6677 | 0.1992 | 0.0164 | 0.0734 | 0.9468 | 0.9332 | 0.9400 |
+| 43 | 0.8747 | 0.5709 | 0.5794 | 0.1980 | 0.0159 | 0.0814 | 0.9557 | 0.9432 | 0.9494 |
+| 44 | 1.0310 | 0.5497 | 0.7392 | 0.1956 | 0.0254 | 0.0709 | 0.9589 | 0.9313 | 0.9449 |
+| 45 | 0.9526 | 0.5580 | 0.6598 | 0.1982 | 0.0185 | 0.0762 | 0.9401 | 0.9413 | 0.9407 |
+| 46 | 0.8753 | 0.5548 | 0.5940 | 0.1939 | 0.0176 | 0.0698 | 0.9468 | 0.9438 | 0.9453 |
+| 47 | 0.9328 | 0.5735 | 0.6493 | 0.1953 | 0.0163 | 0.0720 | 0.9534 | 0.9320 | 0.9426 |
+| 48 | 0.9019 | 0.5605 | 0.6071 | 0.2002 | 0.0182 | 0.0765 | 0.9496 | 0.9413 | 0.9455 |
+| 49 | 0.8335 | 0.5637 | 0.5459 | 0.1918 | 0.0175 | 0.0783 | 0.9588 | 0.9307 | 0.9446 |
+| 50 | 0.9043 | 0.5617 | 0.6179 | 0.1933 | 0.0154 | 0.0776 | 0.9597 | 0.9370 | 0.9482 |
+
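
The F1 column in the per-class table above is consistent with the usual harmonic mean of precision and recall, F1 = 2PR / (P + R). A minimal check in Python (values copied from the table; the `f1` helper is illustrative and not part of this repository):

```python
def f1(precision: float, recall: float) -> float:
    """Harmonic mean of precision and recall."""
    return 2 * precision * recall / (precision + recall)

# Values copied from the per-class table above.
print(round(f1(0.9292, 0.9605), 4))  # 0.9446 -> matches the sequenceFlow row
print(round(f1(0.8472, 0.8095), 4))  # 0.8279 -> matches the dataAssociation row
```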
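The per-component loss columns in the added training table (Classifier, Box Reg, Objectness, RPN Box Reg) match the loss dictionary that torchvision's Faster R-CNN detectors return in training mode, and the card lists a linear LR schedule over 50 epochs. The sketch below is a hypothetical reconstruction of such a loop under those assumptions; the detector variant, optimizer, learning rate, class count, and `train_loader` are illustrative and are not taken from this repository:

```python
import torch
import torchvision

NUM_CLASSES = 14  # hypothetical: background + the BPMN element classes; not stated in the card
NUM_EPOCHS = 50   # from the card

# Assumption: a torchvision Faster R-CNN detector (the card does not name the architecture).
model = torchvision.models.detection.fasterrcnn_resnet50_fpn(weights=None, num_classes=NUM_CLASSES)

# Hypothetical optimizer settings; only the scheduler type and epoch count appear in the card.
optimizer = torch.optim.SGD(model.parameters(), lr=0.005, momentum=0.9)

# "linear" schedule: scale the learning rate linearly down to zero over the training run.
scheduler = torch.optim.lr_scheduler.LambdaLR(optimizer, lambda epoch: 1.0 - epoch / NUM_EPOCHS)

def train_one_epoch(train_loader):
    model.train()
    for images, targets in train_loader:    # targets: list of dicts with "boxes" and "labels"
        loss_dict = model(images, targets)  # keys: loss_classifier, loss_box_reg,
                                            #       loss_objectness, loss_rpn_box_reg
        loss = sum(loss_dict.values())      # summed loss that gets backpropagated
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

# for epoch in range(NUM_EPOCHS):
#     train_one_epoch(train_loader)  # train_loader: a DataLoader yielding (images, targets)
#     scheduler.step()
```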
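One more reading aid for the added table: the Avg Loss column appears to be the sum of the four per-component training losses rather than an independent quantity; the rows agree with this to within rounding. For example, epoch 1:

```python
# Epoch 1 row from the table above (classifier, box reg, objectness, RPN box reg losses).
components = [2.4416, 0.5426, 0.6502, 0.3107]
print(round(sum(components), 4))  # 3.9451 -> equals the reported Avg Loss for epoch 1
```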