rickysambora55 committed
Commit: 0f345e3
Parent: 8149867

model update

runs/detect/train-70/args.yaml ADDED
@@ -0,0 +1,98 @@
+ task: detect
+ mode: train
+ model: /content/runs/detect/train/weights/last.pt
+ data: /content/datasets/Crowd-Detection-7//data.yaml
+ epochs: 100
+ patience: 50
+ batch: 32
+ imgsz: 640
+ save: true
+ save_period: -1
+ cache: true
+ device: null
+ workers: 16
+ project: null
+ name: null
+ exist_ok: false
+ pretrained: true
+ optimizer: auto
+ verbose: true
+ seed: 0
+ deterministic: true
+ single_cls: false
+ rect: false
+ cos_lr: true
+ close_mosaic: 10
+ resume: false
+ amp: true
+ fraction: 1.0
+ profile: false
+ freeze: 10
+ overlap_mask: true
+ mask_ratio: 4
+ dropout: 0.0
+ val: true
+ split: val
+ save_json: false
+ save_hybrid: false
+ conf: null
+ iou: 0.7
+ max_det: 300
+ half: false
+ dnn: false
+ plots: true
+ source: null
+ show: false
+ save_txt: false
+ save_conf: false
+ save_crop: false
+ show_labels: true
+ show_conf: true
+ vid_stride: 1
+ stream_buffer: false
+ line_width: null
+ visualize: false
+ augment: false
+ agnostic_nms: false
+ classes: null
+ retina_masks: false
+ boxes: true
+ format: torchscript
+ keras: false
+ optimize: false
+ int8: false
+ dynamic: false
+ simplify: false
+ opset: null
+ workspace: 4
+ nms: false
+ lr0: 0.01
+ lrf: 0.01
+ momentum: 0.937
+ weight_decay: 0.0005
+ warmup_epochs: 3.0
+ warmup_momentum: 0.8
+ warmup_bias_lr: 0.0
+ box: 7.5
+ cls: 0.5
+ dfl: 1.5
+ pose: 12.0
+ kobj: 1.0
+ label_smoothing: 0.0
+ nbs: 64
+ hsv_h: 0.015
+ hsv_s: 0.7
+ hsv_v: 0.4
+ degrees: 0.0
+ translate: 0.1
+ scale: 0.5
+ shear: 0.0
+ perspective: 0.0
+ flipud: 0.0
+ fliplr: 0.5
+ mosaic: 1.0
+ mixup: 0.0
+ copy_paste: 0.0
+ cfg: null
+ tracker: botsort.yaml
+ save_dir: runs/detect/train
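
For reference, a minimal sketch of how a run with the args.yaml above could be launched through the Ultralytics Python API. This is an illustration only: the paths and settings are copied from the config in this commit, but the exact command actually used is not recorded in the diff.

from ultralytics import YOLO

# Sketch: fine-tune from the previous run's checkpoint, mirroring key fields of args.yaml.
model = YOLO("/content/runs/detect/train/weights/last.pt")
model.train(
    data="/content/datasets/Crowd-Detection-7//data.yaml",
    epochs=100,
    patience=50,
    batch=32,
    imgsz=640,
    cache=True,
    workers=16,
    cos_lr=True,
    freeze=10,   # freeze the first 10 layers, as in the config
    seed=0,
)
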
runs/detect/train-70/events.out.tfevents.1716859362.19783cab3b86.1944.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d54fe02bacfdb728dbb3fa9e70a7fe99c19febcb6cd8bac19028e4820f4a8491
+ size 2298351
runs/detect/train-70/events.out.tfevents.1716870390.2b2fadae5779.2867.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f978e4baf77b21a4142f28bb2ce7b390b6d71a5d082b06eb137f6713df3086b1
+ size 2183723
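
The two events.out.tfevents files are TensorBoard logs tracked via Git LFS. As a sketch (assuming the LFS objects have been pulled locally), the curves can be inspected by pointing TensorBoard at the run directory:

import subprocess

# Launch TensorBoard on the committed run directory; blocks until interrupted.
subprocess.run(["tensorboard", "--logdir", "runs/detect/train-70"], check=True)
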
runs/detect/train-70/labels.jpg ADDED
runs/detect/train-70/labels_correlogram.jpg ADDED
runs/detect/train-70/results.csv ADDED
@@ -0,0 +1,71 @@
+ epoch, train/box_loss, train/cls_loss, train/dfl_loss, metrics/precision(B), metrics/recall(B), metrics/mAP50(B), metrics/mAP50-95(B), val/box_loss, val/cls_loss, val/dfl_loss, lr/pg0, lr/pg1, lr/pg2
+ 1, 1.4335, 1.5766, 1.1566, 0.85784, 0.75756, 0.83502, 0.49836, 1.3464, 0.98022, 1.1587, 0.0033238, 0.0033238, 0.0033238
+ 2, 1.352, 0.94795, 1.0645, 0.90854, 0.79751, 0.87897, 0.54487, 1.2742, 0.7818, 1.0735, 0.0066555, 0.0066555, 0.0066555
+ 3, 1.3552, 0.86902, 1.0577, 0.8996, 0.79936, 0.8749, 0.54398, 1.2795, 0.79179, 1.0775, 0.0099807, 0.0099807, 0.0099807
+ 4, 1.341, 0.81318, 1.0537, 0.91585, 0.80929, 0.89009, 0.55762, 1.2542, 0.7617, 1.0681, 0.009978, 0.009978, 0.009978
+ 5, 1.3011, 0.76871, 1.038, 0.90308, 0.82636, 0.88905, 0.56811, 1.2331, 0.74178, 1.062, 0.009978, 0.009978, 0.009978
+ 6, 1.2874, 0.74276, 1.0295, 0.92116, 0.81945, 0.89395, 0.57055, 1.232, 0.6811, 1.0559, 0.009961, 0.009961, 0.009961
+ 7, 1.2718, 0.71703, 1.0227, 0.9228, 0.83076, 0.90131, 0.58191, 1.2123, 0.64966, 1.0505, 0.0099391, 0.0099391, 0.0099391
+ 8, 1.2546, 0.6995, 1.0177, 0.93101, 0.83183, 0.90625, 0.58492, 1.2074, 0.63196, 1.0463, 0.0099123, 0.0099123, 0.0099123
+ 9, 1.2511, 0.68853, 1.0134, 0.92611, 0.83444, 0.90522, 0.58513, 1.1969, 0.63898, 1.0437, 0.0098808, 0.0098808, 0.0098808
+ 10, 1.2405, 0.68043, 1.0102, 0.92886, 0.83678, 0.90829, 0.59223, 1.1872, 0.61315, 1.031, 0.0098445, 0.0098445, 0.0098445
+ 11, 1.2332, 0.68096, 1.007, 0.93163, 0.84244, 0.91086, 0.59711, 1.1779, 0.60133, 1.036, 0.0098035, 0.0098035, 0.0098035
+ 12, 1.2222, 0.66248, 1.0042, 0.93834, 0.84085, 0.91504, 0.60376, 1.1766, 0.59292, 1.0312, 0.0097577, 0.0097577, 0.0097577
+ 13, 1.2215, 0.65821, 1.001, 0.92985, 0.84721, 0.91345, 0.604, 1.1683, 0.59794, 1.0248, 0.0097074, 0.0097074, 0.0097074
+ 14, 1.2176, 0.65427, 0.99964, 0.9321, 0.84801, 0.91408, 0.60588, 1.1647, 0.58937, 1.0265, 0.0096524, 0.0096524, 0.0096524
+ 15, 1.2151, 0.64883, 0.99873, 0.9318, 0.8517, 0.91688, 0.60565, 1.1629, 0.58423, 1.0249, 0.0095929, 0.0095929, 0.0095929
+ 16, 1.2103, 0.64316, 0.99902, 0.93073, 0.84943, 0.91722, 0.60672, 1.1664, 0.58206, 1.0271, 0.0095289, 0.0095289, 0.0095289
+ 17, 1.2062, 0.63888, 0.99602, 0.92929, 0.85518, 0.91741, 0.60617, 1.1702, 0.5773, 1.0251, 0.0094605, 0.0094605, 0.0094605
+ 18, 1.1999, 0.64226, 0.99198, 0.93651, 0.85035, 0.91817, 0.60921, 1.1609, 0.57825, 1.0226, 0.0093877, 0.0093877, 0.0093877
+ 19, 1.1925, 0.62746, 0.98806, 0.93778, 0.84562, 0.91744, 0.60841, 1.1594, 0.57257, 1.02, 0.0093107, 0.0093107, 0.0093107
+ 20, 1.1965, 0.62692, 0.98806, 0.9356, 0.85597, 0.92008, 0.61337, 1.154, 0.563, 1.0164, 0.0092294, 0.0092294, 0.0092294
+ 21, 1.1868, 0.62074, 0.9875, 0.93494, 0.85759, 0.92054, 0.61165, 1.1562, 0.56269, 1.0179, 0.009144, 0.009144, 0.009144
+ 22, 1.1818, 0.6189, 0.98639, 0.93504, 0.85685, 0.92101, 0.6129, 1.1508, 0.5605, 1.0164, 0.0090546, 0.0090546, 0.0090546
+ 23, 1.1904, 0.62386, 0.98785, 0.9407, 0.84997, 0.91875, 0.61211, 1.1541, 0.55777, 1.0165, 0.0089613, 0.0089613, 0.0089613
+ 24, 1.1782, 0.61503, 0.98395, 0.93677, 0.85674, 0.92294, 0.61878, 1.1415, 0.55298, 1.0066, 0.008864, 0.008864, 0.008864
+ 25, 1.1771, 0.63213, 0.98145, 0.94044, 0.85491, 0.92355, 0.6188, 1.1445, 0.54717, 1.008, 0.008763, 0.008763, 0.008763
+ 26, 1.172, 0.60827, 0.97814, 0.9394, 0.85931, 0.92427, 0.61735, 1.1395, 0.54684, 1.0091, 0.0086584, 0.0086584, 0.0086584
+ 27, 1.1746, 0.61041, 0.98049, 0.94758, 0.85514, 0.92486, 0.62072, 1.1411, 0.54249, 1.0094, 0.0085502, 0.0085502, 0.0085502
+ 28, 1.1675, 0.60553, 0.97908, 0.94497, 0.85358, 0.92414, 0.62053, 1.1411, 0.54318, 1.0118, 0.0084385, 0.0084385, 0.0084385
+ 29, 1.1661, 0.60246, 0.97773, 0.9438, 0.85632, 0.92369, 0.61954, 1.1422, 0.54275, 1.0109, 0.0083235, 0.0083235, 0.0083235
+ 30, 1.1609, 0.60121, 0.97517, 0.94236, 0.86286, 0.92576, 0.62281, 1.1409, 0.53693, 1.0065, 0.0082052, 0.0082052, 0.0082052
+ 31, 1.1601, 0.59629, 0.97636, 0.94574, 0.85535, 0.92544, 0.62537, 1.1353, 0.53387, 1.0062, 0.0080839, 0.0080839, 0.0080839
+ 32, 1.1627, 0.59603, 0.97512, 0.94365, 0.85862, 0.92518, 0.62409, 1.1331, 0.53255, 1.0048, 0.0079595, 0.0079595, 0.0079595
+ 33, 1.1565, 0.59495, 0.97627, 0.94131, 0.86355, 0.92729, 0.62469, 1.134, 0.52987, 1.0069, 0.0078323, 0.0078323, 0.0078323
+ 34, 1.1611, 0.59372, 0.97357, 0.95521, 0.85223, 0.92556, 0.62388, 1.1331, 0.52801, 1.0046, 0.0077023, 0.0077023, 0.0077023
+ 35, 1.1538, 0.58989, 0.97267, 0.94214, 0.86393, 0.92721, 0.62549, 1.1302, 0.52361, 1.0032, 0.0075698, 0.0075698, 0.0075698
+ 36, 1.1525, 0.58747, 0.97047, 0.9457, 0.86075, 0.92779, 0.62681, 1.1313, 0.52577, 1.0032, 0.0074347, 0.0074347, 0.0074347
+ 37, 1.1516, 0.58625, 0.97222, 0.94259, 0.86522, 0.92771, 0.62578, 1.1287, 0.52203, 1.0029, 0.0072973, 0.0072973, 0.0072973
+ 38, 1.1424, 0.58142, 0.969, 0.95049, 0.85881, 0.92797, 0.62649, 1.1324, 0.52281, 1.005, 0.0071576, 0.0071576, 0.0071576
+ 39, 1.1501, 0.58541, 0.97249, 0.94517, 0.86419, 0.92847, 0.62655, 1.13, 0.5188, 1.0019, 0.0070159, 0.0070159, 0.0070159
+ 40, 1.1404, 0.57918, 0.96848, 0.94571, 0.86207, 0.92806, 0.62707, 1.1298, 0.51892, 1.003, 0.0068722, 0.0068722, 0.0068722
+ 41, 1.1316, 0.57738, 0.96498, 0.94812, 0.8628, 0.92955, 0.62867, 1.1276, 0.51714, 1.001, 0.0067268, 0.0067268, 0.0067268
+ 42, 1.1347, 0.58061, 0.96364, 0.94342, 0.86808, 0.92879, 0.6291, 1.1279, 0.51426, 0.99912, 0.0065796, 0.0065796, 0.0065796
+ 43, 1.1332, 0.57473, 0.96393, 0.95069, 0.86437, 0.92924, 0.62904, 1.1269, 0.51563, 0.99778, 0.006431, 0.006431, 0.006431
+ 44, 1.126, 0.56951, 0.96115, 0.94791, 0.86876, 0.92883, 0.62959, 1.1269, 0.51281, 0.99749, 0.006281, 0.006281, 0.006281
+ 45, 1.1286, 0.56862, 0.95925, 0.94568, 0.8682, 0.92923, 0.62836, 1.1263, 0.51285, 0.99688, 0.0061298, 0.0061298, 0.0061298
+ 46, 1.1278, 0.56771, 0.9595, 0.94716, 0.86849, 0.92953, 0.63075, 1.1213, 0.51083, 0.99413, 0.0059775, 0.0059775, 0.0059775
+ 47, 1.1244, 0.56841, 0.95758, 0.94744, 0.86537, 0.92989, 0.62939, 1.1232, 0.51149, 0.9941, 0.0058244, 0.0058244, 0.0058244
+ 48, 1.12, 0.56341, 0.95829, 0.9484, 0.86613, 0.92988, 0.63187, 1.1213, 0.50879, 0.99483, 0.0056704, 0.0056704, 0.0056704
+ 49, 1.1183, 0.56216, 0.95611, 0.94862, 0.86737, 0.93031, 0.63203, 1.1187, 0.50866, 0.99419, 0.0055158, 0.0055158, 0.0055158
+ 50, 1.1159, 0.5638, 0.95683, 0.94578, 0.86836, 0.93055, 0.63174, 1.1199, 0.50638, 0.99464, 0.0053608, 0.0053608, 0.0053608
+ 51, 1.1176, 0.56107, 0.9554, 0.9515, 0.86563, 0.92995, 0.63178, 1.1198, 0.50415, 0.99385, 0.0052055, 0.0052055, 0.0052055
+ 52, 1.1151, 0.55869, 0.95673, 0.94931, 0.86434, 0.93012, 0.63132, 1.1197, 0.50428, 0.99344, 0.00505, 0.00505, 0.00505
+ 53, 1.1161, 0.55847, 0.95628, 0.94674, 0.86941, 0.93092, 0.63356, 1.1195, 0.50349, 0.99489, 0.0048945, 0.0048945, 0.0048945
+ 54, 1.109, 0.56331, 0.95079, 0.94735, 0.86871, 0.93195, 0.63289, 1.1179, 0.50324, 0.99326, 0.0047392, 0.0047392, 0.0047392
+ 55, 1.1039, 0.55042, 0.94942, 0.94747, 0.86905, 0.93146, 0.63282, 1.1199, 0.50229, 0.99443, 0.0045842, 0.0045842, 0.0045842
+ 56, 1.1085, 0.55164, 0.95149, 0.94925, 0.86622, 0.93075, 0.63238, 1.1198, 0.50251, 0.99438, 0.0044296, 0.0044296, 0.0044296
+ 57, 1.1023, 0.54556, 0.95052, 0.95218, 0.86569, 0.93174, 0.63291, 1.1199, 0.50172, 0.99409, 0.0042756, 0.0042756, 0.0042756
+ 58, 1.0959, 0.5471, 0.95114, 0.95041, 0.86614, 0.93159, 0.63276, 1.1192, 0.5004, 0.9943, 0.0041225, 0.0041225, 0.0041225
+ 59, 1.106, 0.55099, 0.95371, 0.94786, 0.86888, 0.93141, 0.63354, 1.1198, 0.50021, 0.99515, 0.0039702, 0.0039702, 0.0039702
+ 60, 1.0961, 0.54304, 0.94927, 0.94843, 0.86825, 0.9313, 0.63414, 1.1191, 0.49955, 0.9938, 0.003819, 0.003819, 0.003819
+ 61, 1.0964, 0.56394, 0.9476, 0.94978, 0.8695, 0.93173, 0.63285, 1.1189, 0.49927, 0.99254, 0.003669, 0.003669, 0.003669
+ 62, 1.0895, 0.53814, 0.94549, 0.94806, 0.86988, 0.93111, 0.63351, 1.12, 0.49882, 0.99287, 0.0035204, 0.0035204, 0.0035204
+ 63, 1.0928, 0.54269, 0.94735, 0.94551, 0.87147, 0.93098, 0.6332, 1.1204, 0.49862, 0.99284, 0.0033732, 0.0033732, 0.0033732
+ 64, 1.0857, 0.53606, 0.94614, 0.94794, 0.871, 0.93069, 0.63324, 1.1198, 0.49849, 0.99273, 0.0032278, 0.0032278, 0.0032278
+ 65, 1.0878, 0.53651, 0.94499, 0.94954, 0.87017, 0.93131, 0.63441, 1.12, 0.49777, 0.993, 0.0030841, 0.0030841, 0.0030841
+ 66, 1.0822, 0.53586, 0.9428, 0.94972, 0.87109, 0.93175, 0.63451, 1.1206, 0.49838, 0.9932, 0.0029424, 0.0029424, 0.0029424
+ 67, 1.0803, 0.53196, 0.94372, 0.95092, 0.87023, 0.93203, 0.63397, 1.1206, 0.49724, 0.99272, 0.0028027, 0.0028027, 0.0028027
+ 68, 1.0839, 0.53463, 0.94223, 0.9495, 0.87117, 0.93201, 0.63354, 1.1202, 0.49644, 0.99241, 0.0026653, 0.0026653, 0.0026653
+ 69, 1.0792, 0.53133, 0.9439, 0.95051, 0.87047, 0.93231, 0.63352, 1.1208, 0.49641, 0.99251, 0.0025302, 0.0025302, 0.0025302
+ 70, 1.0803, 0.52985, 0.94157, 0.95178, 0.86941, 0.93243, 0.63375, 1.1201, 0.49591, 0.99226, 0.0023977, 0.0023977, 0.0023977
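
results.csv holds the per-epoch metrics above (mAP50-95 reaches roughly 0.634 by epoch 70). A minimal sketch for loading and plotting it with pandas; note that Ultralytics pads the CSV column names with spaces, so they are stripped first.

import pandas as pd
import matplotlib.pyplot as plt

df = pd.read_csv("runs/detect/train-70/results.csv")
df.columns = [c.strip() for c in df.columns]  # header cells are space-padded

plt.plot(df["epoch"], df["metrics/mAP50(B)"], label="mAP50")
plt.plot(df["epoch"], df["metrics/mAP50-95(B)"], label="mAP50-95")
plt.xlabel("epoch")
plt.ylabel("metric")
plt.legend()
plt.savefig("train-70_map.png")
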
runs/detect/train-70/train_batch0.jpg ADDED
runs/detect/train-70/train_batch1.jpg ADDED
runs/detect/train-70/train_batch2.jpg ADDED
runs/detect/train-70/weights/best.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:69a96785abe1cb3d61258d44fe2d6a9e5d0705b9d76ff2bbd2a469cfaa9b0ae4
+ size 19420046
runs/detect/train-70/weights/last.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:92fa410cfda0b089c07714145283e1547bcb8fe015c66247823d59cc4657104d
+ size 19420558
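
best.pt and last.pt are Git LFS pointers to the trained checkpoints. A minimal sketch (assuming the LFS objects have been pulled; "crowd.jpg" is a placeholder for any input image) of loading best.pt for inference with the Ultralytics API:

from ultralytics import YOLO

model = YOLO("runs/detect/train-70/weights/best.pt")
results = model.predict("crowd.jpg", imgsz=640)  # "crowd.jpg" is a hypothetical path
for r in results:
    print(r.boxes.xyxy, r.boxes.conf)  # detected boxes and confidences
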
runs/detect/train/args.yaml CHANGED
@@ -1,6 +1,6 @@
  task: detect
  mode: train
- model: yolov8n.pt
+ model: /content/runs/detect/train/weights/last.pt
  data: /content/datasets/Crowd-Detection-7//data.yaml
  epochs: 100
  patience: 50
@@ -72,7 +72,7 @@ momentum: 0.937
  weight_decay: 0.0005
  warmup_epochs: 3.0
  warmup_momentum: 0.8
- warmup_bias_lr: 0.1
+ warmup_bias_lr: 0.0
  box: 7.5
  cls: 0.5
  dfl: 1.5
runs/detect/train/events.out.tfevents.1716870390.2b2fadae5779.2867.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f978e4baf77b21a4142f28bb2ce7b390b6d71a5d082b06eb137f6713df3086b1
+ size 2183723
runs/detect/train/results.csv CHANGED
@@ -35,3 +35,37 @@
  34, 1.1611, 0.59372, 0.97357, 0.95521, 0.85223, 0.92556, 0.62388, 1.1331, 0.52801, 1.0046, 0.0077023, 0.0077023, 0.0077023
  35, 1.1538, 0.58989, 0.97267, 0.94214, 0.86393, 0.92721, 0.62549, 1.1302, 0.52361, 1.0032, 0.0075698, 0.0075698, 0.0075698
  36, 1.1525, 0.58747, 0.97047, 0.9457, 0.86075, 0.92779, 0.62681, 1.1313, 0.52577, 1.0032, 0.0074347, 0.0074347, 0.0074347
+ 37, 1.1516, 0.58625, 0.97222, 0.94259, 0.86522, 0.92771, 0.62578, 1.1287, 0.52203, 1.0029, 0.0072973, 0.0072973, 0.0072973
+ 38, 1.1424, 0.58142, 0.969, 0.95049, 0.85881, 0.92797, 0.62649, 1.1324, 0.52281, 1.005, 0.0071576, 0.0071576, 0.0071576
+ 39, 1.1501, 0.58541, 0.97249, 0.94517, 0.86419, 0.92847, 0.62655, 1.13, 0.5188, 1.0019, 0.0070159, 0.0070159, 0.0070159
+ 40, 1.1404, 0.57918, 0.96848, 0.94571, 0.86207, 0.92806, 0.62707, 1.1298, 0.51892, 1.003, 0.0068722, 0.0068722, 0.0068722
+ 41, 1.1316, 0.57738, 0.96498, 0.94812, 0.8628, 0.92955, 0.62867, 1.1276, 0.51714, 1.001, 0.0067268, 0.0067268, 0.0067268
+ 42, 1.1347, 0.58061, 0.96364, 0.94342, 0.86808, 0.92879, 0.6291, 1.1279, 0.51426, 0.99912, 0.0065796, 0.0065796, 0.0065796
+ 43, 1.1332, 0.57473, 0.96393, 0.95069, 0.86437, 0.92924, 0.62904, 1.1269, 0.51563, 0.99778, 0.006431, 0.006431, 0.006431
+ 44, 1.126, 0.56951, 0.96115, 0.94791, 0.86876, 0.92883, 0.62959, 1.1269, 0.51281, 0.99749, 0.006281, 0.006281, 0.006281
+ 45, 1.1286, 0.56862, 0.95925, 0.94568, 0.8682, 0.92923, 0.62836, 1.1263, 0.51285, 0.99688, 0.0061298, 0.0061298, 0.0061298
+ 46, 1.1278, 0.56771, 0.9595, 0.94716, 0.86849, 0.92953, 0.63075, 1.1213, 0.51083, 0.99413, 0.0059775, 0.0059775, 0.0059775
+ 47, 1.1244, 0.56841, 0.95758, 0.94744, 0.86537, 0.92989, 0.62939, 1.1232, 0.51149, 0.9941, 0.0058244, 0.0058244, 0.0058244
+ 48, 1.12, 0.56341, 0.95829, 0.9484, 0.86613, 0.92988, 0.63187, 1.1213, 0.50879, 0.99483, 0.0056704, 0.0056704, 0.0056704
+ 49, 1.1183, 0.56216, 0.95611, 0.94862, 0.86737, 0.93031, 0.63203, 1.1187, 0.50866, 0.99419, 0.0055158, 0.0055158, 0.0055158
+ 50, 1.1159, 0.5638, 0.95683, 0.94578, 0.86836, 0.93055, 0.63174, 1.1199, 0.50638, 0.99464, 0.0053608, 0.0053608, 0.0053608
+ 51, 1.1176, 0.56107, 0.9554, 0.9515, 0.86563, 0.92995, 0.63178, 1.1198, 0.50415, 0.99385, 0.0052055, 0.0052055, 0.0052055
+ 52, 1.1151, 0.55869, 0.95673, 0.94931, 0.86434, 0.93012, 0.63132, 1.1197, 0.50428, 0.99344, 0.00505, 0.00505, 0.00505
+ 53, 1.1161, 0.55847, 0.95628, 0.94674, 0.86941, 0.93092, 0.63356, 1.1195, 0.50349, 0.99489, 0.0048945, 0.0048945, 0.0048945
+ 54, 1.109, 0.56331, 0.95079, 0.94735, 0.86871, 0.93195, 0.63289, 1.1179, 0.50324, 0.99326, 0.0047392, 0.0047392, 0.0047392
+ 55, 1.1039, 0.55042, 0.94942, 0.94747, 0.86905, 0.93146, 0.63282, 1.1199, 0.50229, 0.99443, 0.0045842, 0.0045842, 0.0045842
+ 56, 1.1085, 0.55164, 0.95149, 0.94925, 0.86622, 0.93075, 0.63238, 1.1198, 0.50251, 0.99438, 0.0044296, 0.0044296, 0.0044296
+ 57, 1.1023, 0.54556, 0.95052, 0.95218, 0.86569, 0.93174, 0.63291, 1.1199, 0.50172, 0.99409, 0.0042756, 0.0042756, 0.0042756
+ 58, 1.0959, 0.5471, 0.95114, 0.95041, 0.86614, 0.93159, 0.63276, 1.1192, 0.5004, 0.9943, 0.0041225, 0.0041225, 0.0041225
+ 59, 1.106, 0.55099, 0.95371, 0.94786, 0.86888, 0.93141, 0.63354, 1.1198, 0.50021, 0.99515, 0.0039702, 0.0039702, 0.0039702
+ 60, 1.0961, 0.54304, 0.94927, 0.94843, 0.86825, 0.9313, 0.63414, 1.1191, 0.49955, 0.9938, 0.003819, 0.003819, 0.003819
+ 61, 1.0964, 0.56394, 0.9476, 0.94978, 0.8695, 0.93173, 0.63285, 1.1189, 0.49927, 0.99254, 0.003669, 0.003669, 0.003669
+ 62, 1.0895, 0.53814, 0.94549, 0.94806, 0.86988, 0.93111, 0.63351, 1.12, 0.49882, 0.99287, 0.0035204, 0.0035204, 0.0035204
+ 63, 1.0928, 0.54269, 0.94735, 0.94551, 0.87147, 0.93098, 0.6332, 1.1204, 0.49862, 0.99284, 0.0033732, 0.0033732, 0.0033732
+ 64, 1.0857, 0.53606, 0.94614, 0.94794, 0.871, 0.93069, 0.63324, 1.1198, 0.49849, 0.99273, 0.0032278, 0.0032278, 0.0032278
+ 65, 1.0878, 0.53651, 0.94499, 0.94954, 0.87017, 0.93131, 0.63441, 1.12, 0.49777, 0.993, 0.0030841, 0.0030841, 0.0030841
+ 66, 1.0822, 0.53586, 0.9428, 0.94972, 0.87109, 0.93175, 0.63451, 1.1206, 0.49838, 0.9932, 0.0029424, 0.0029424, 0.0029424
+ 67, 1.0803, 0.53196, 0.94372, 0.95092, 0.87023, 0.93203, 0.63397, 1.1206, 0.49724, 0.99272, 0.0028027, 0.0028027, 0.0028027
+ 68, 1.0839, 0.53463, 0.94223, 0.9495, 0.87117, 0.93201, 0.63354, 1.1202, 0.49644, 0.99241, 0.0026653, 0.0026653, 0.0026653
+ 69, 1.0792, 0.53133, 0.9439, 0.95051, 0.87047, 0.93231, 0.63352, 1.1208, 0.49641, 0.99251, 0.0025302, 0.0025302, 0.0025302
+ 70, 1.0803, 0.52985, 0.94157, 0.95178, 0.86941, 0.93243, 0.63375, 1.1201, 0.49591, 0.99226, 0.0023977, 0.0023977, 0.0023977
runs/detect/train/weights/best.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4700a325a8677fed898ca59719a5fc4a6751f4c20664e03b5f4a62fd28ad2fcb
- size 19416398
+ oid sha256:69a96785abe1cb3d61258d44fe2d6a9e5d0705b9d76ff2bbd2a469cfaa9b0ae4
+ size 19420046
runs/detect/train/weights/last.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:20f3f95f2bef2093b701d0aedbdc3312fda3db36e15f134b431bfb98fb5e1de3
- size 19416398
+ oid sha256:92fa410cfda0b089c07714145283e1547bcb8fe015c66247823d59cc4657104d
+ size 19420558