Upload 30 files
- .gitattributes +1 -0
- train/F1_curve.png +0 -0
- train/PR_curve.png +0 -0
- train/P_curve.png +0 -0
- train/R_curve.png +0 -0
- train/args.yaml +107 -0
- train/confusion_matrix.png +0 -0
- train/confusion_matrix_normalized.png +0 -0
- train/labels.jpg +0 -0
- train/labels_correlogram.jpg +0 -0
- train/results.csv +80 -0
- train/results.png +0 -0
- train/train_batch0.jpg +0 -0
- train/train_batch1.jpg +0 -0
- train/train_batch2.jpg +0 -0
- train/val_batch0_labels.jpg +0 -0
- train/val_batch0_pred.jpg +0 -0
- train/val_batch1_labels.jpg +0 -0
- train/val_batch1_pred.jpg +0 -0
- train/val_batch2_labels.jpg +0 -0
- train/val_batch2_pred.jpg +0 -0
- train/weights/best.onnx +3 -0
- train/weights/best.pt +3 -0
- train/weights/best.torchscript +3 -0
- train/weights/best_ncnn_model/metadata.yaml +14 -0
- train/weights/best_ncnn_model/model.ncnn.bin +3 -0
- train/weights/best_ncnn_model/model.ncnn.param +347 -0
- train/weights/best_ncnn_model/model_ncnn.py +26 -0
- train/weights/last.pt +3 -0
- yolo11m_urchin_trained.pt +3 -0
- yolo11m_urchin_weights.pth +3 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+train/weights/best.torchscript filter=lfs diff=lfs merge=lfs -text
train/F1_curve.png
ADDED
train/PR_curve.png
ADDED
train/P_curve.png
ADDED
train/R_curve.png
ADDED
train/args.yaml
ADDED
@@ -0,0 +1,107 @@
task: detect
mode: train
model: yolo11m.pt
data: O:\OTHER\AI_DATASETS\yolo\datasets\urchin_datasetv2\split_dataset\data.yaml
epochs: 100
time: null
patience: 10
batch: 32
imgsz: 640
save: true
save_period: 10
cache: false
device: cuda
workers: 8
project: training_logs
name: train
exist_ok: false
pretrained: true
optimizer: AdamW
verbose: true
seed: 0
deterministic: true
single_cls: false
rect: false
cos_lr: true
close_mosaic: 10
resume: false
amp: true
fraction: 1.0
profile: false
freeze: null
multi_scale: false
overlap_mask: true
mask_ratio: 4
dropout: 0.0
val: true
split: val
save_json: false
save_hybrid: false
conf: null
iou: 0.7
max_det: 300
half: false
dnn: false
plots: true
source: null
vid_stride: 1
stream_buffer: false
visualize: false
augment: true
agnostic_nms: false
classes: null
retina_masks: false
embed: null
show: false
save_frames: false
save_txt: false
save_conf: false
save_crop: false
show_labels: true
show_conf: true
show_boxes: true
line_width: null
format: torchscript
keras: false
optimize: false
int8: false
dynamic: false
simplify: true
opset: null
workspace: 4
nms: false
lr0: 0.001
lrf: 0.0001
momentum: 0.937
weight_decay: 0.0005
warmup_epochs: 3.0
warmup_momentum: 0.8
warmup_bias_lr: 0.1
box: 7.5
cls: 0.5
dfl: 1.5
pose: 12.0
kobj: 1.0
label_smoothing: 0.0
nbs: 64
hsv_h: 0.015
hsv_s: 0.7
hsv_v: 0.4
degrees: 0.0
translate: 0.1
scale: 0.5
shear: 0.0
perspective: 0.0
flipud: 0.0
fliplr: 0.5
bgr: 0.0
mosaic: true
mixup: true
copy_paste: 0.0
copy_paste_mode: flip
auto_augment: randaugment
erasing: 0.4
crop_fraction: 1.0
cfg: null
tracker: botsort.yaml
save_dir: training_logs\train
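Editor's note: a run with this configuration can be reproduced through the Ultralytics Python API. This is a minimal sketch, assuming ultralytics 8.3.x is installed and the data.yaml path above is reachable; only the key hyperparameters from args.yaml are passed explicitly.

from ultralytics import YOLO

# Minimal sketch: reproduce the run described by train/args.yaml (assumes ultralytics 8.3.x).
model = YOLO("yolo11m.pt")  # start from the pretrained YOLO11m checkpoint
model.train(
    data=r"O:\OTHER\AI_DATASETS\yolo\datasets\urchin_datasetv2\split_dataset\data.yaml",
    epochs=100, patience=10, batch=32, imgsz=640,
    optimizer="AdamW", lr0=0.001, lrf=0.0001, cos_lr=True,
    save_period=10, device="cuda", workers=8,
    project="training_logs", name="train", seed=0, deterministic=True,
)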
train/confusion_matrix.png
ADDED
train/confusion_matrix_normalized.png
ADDED
train/labels.jpg
ADDED
train/labels_correlogram.jpg
ADDED
train/results.csv
ADDED
@@ -0,0 +1,80 @@
epoch,time,train/box_loss,train/cls_loss,train/dfl_loss,metrics/precision(B),metrics/recall(B),metrics/mAP50(B),metrics/mAP50-95(B),val/box_loss,val/cls_loss,val/dfl_loss,lr/pg0,lr/pg1,lr/pg2
1,39.9058,1.96447,2.04901,2.1708,0.414,0.40805,0.31957,0.10862,2.21667,5.01382,3.39114,0.0678919,0.000324324,0.000324324
2,67.1991,1.74033,1.59953,1.92945,0.30972,0.32759,0.2833,0.12004,2.32532,34.4803,3.43695,0.0348917,0.000657495,0.000657495
3,91.6917,1.72275,1.57313,1.91896,0.16158,0.14655,0.08722,0.03155,2.33368,16.8685,3.24926,0.00189091,0.000990013,0.000990013
4,114.254,1.72195,1.56371,1.91584,0.27254,0.28448,0.21746,0.08835,2.34932,16.8955,3.0468,0.000997781,0.000997781,0.000997781
5,137.415,1.67082,1.53995,1.89658,0.29312,0.34793,0.23886,0.09054,2.43788,26.6153,3.07938,0.000996058,0.000996058,0.000996058
6,160.86,1.6562,1.47434,1.85547,0.54615,0.51006,0.50823,0.22441,1.9897,23.4631,2.47451,0.000993845,0.000993845,0.000993845
7,185.109,1.6259,1.43232,1.84162,0.43991,0.40661,0.38107,0.16274,1.9519,13.5073,2.27658,0.000991145,0.000991145,0.000991145
8,208.517,1.57969,1.38325,1.7967,0.63088,0.49569,0.54052,0.27282,1.67106,9.40404,2.13012,0.00098796,0.00098796,0.00098796
9,233.464,1.59354,1.35296,1.7884,0.68891,0.54598,0.6284,0.33017,1.57252,5.26121,1.9309,0.000984293,0.000984293,0.000984293
10,257.826,1.56466,1.31983,1.77991,0.54173,0.5273,0.52801,0.26752,1.67775,5.09949,2.17611,0.000980149,0.000980149,0.000980149
11,280.124,1.55283,1.30161,1.77377,0.66308,0.56897,0.59743,0.29105,1.7102,3.30935,2.14128,0.000975531,0.000975531,0.000975531
12,304.769,1.52272,1.26399,1.74914,0.8104,0.66092,0.75381,0.40461,1.51554,2.57184,1.88051,0.000970443,0.000970443,0.000970443
13,329.088,1.51704,1.28675,1.75951,0.79333,0.65634,0.75504,0.40896,1.47537,3.44177,1.82203,0.000964892,0.000964892,0.000964892
14,353.074,1.48176,1.25374,1.73477,0.7314,0.65517,0.70579,0.40558,1.44893,2.11268,1.83402,0.000958881,0.000958881,0.000958881
15,375.618,1.52342,1.25091,1.75882,0.61545,0.58477,0.61672,0.33028,1.5932,4.15626,1.99363,0.000952418,0.000952418,0.000952418
16,398.361,1.52664,1.22967,1.72988,0.55508,0.53592,0.51486,0.26773,1.43942,7.42985,1.79353,0.000945509,0.000945509,0.000945509
17,420.541,1.49225,1.23058,1.72652,0.72637,0.64368,0.70288,0.38483,1.51916,2.33918,1.83336,0.00093816,0.00093816,0.00093816
18,443.151,1.45472,1.21049,1.69743,0.77287,0.70404,0.75642,0.43283,1.44472,2.25712,1.77893,0.000930378,0.000930378,0.000930378
19,467.225,1.50237,1.21969,1.70716,0.72698,0.60063,0.69026,0.37535,1.52963,2.73989,1.86944,0.000922172,0.000922172,0.000922172
20,489.734,1.48454,1.21078,1.6933,0.70677,0.6408,0.70956,0.40976,1.44679,1.42927,1.76232,0.000913549,0.000913549,0.000913549
21,512.169,1.45448,1.1704,1.68381,0.73584,0.67672,0.74166,0.40092,1.4832,3.54705,1.79574,0.000904518,0.000904518,0.000904518
22,536.224,1.46802,1.15474,1.69537,0.76454,0.71378,0.79908,0.45841,1.38044,1.59393,1.72696,0.000895088,0.000895088,0.000895088
23,560.448,1.46285,1.17513,1.70038,0.64633,0.66169,0.68082,0.38573,1.44508,2.21655,1.83624,0.000885268,0.000885268,0.000885268
24,583.067,1.44587,1.16773,1.68865,0.78023,0.67529,0.75824,0.43377,1.45043,3.806,1.78812,0.000875068,0.000875068,0.000875068
25,605.32,1.43293,1.12945,1.66684,0.76685,0.68822,0.74096,0.40162,1.49645,1.75937,1.8389,0.000864498,0.000864498,0.000864498
26,628.044,1.42075,1.12186,1.66553,0.75773,0.69109,0.75108,0.42257,1.43882,2.71837,1.78106,0.000853568,0.000853568,0.000853568
27,650.295,1.42926,1.10539,1.65859,0.826,0.76149,0.83142,0.48473,1.36593,1.51234,1.69526,0.000842289,0.000842289,0.000842289
28,674.701,1.38955,1.09276,1.63989,0.67198,0.66092,0.70387,0.39836,1.48629,1.28995,1.84815,0.000830673,0.000830673,0.000830673
29,696.823,1.40544,1.08543,1.6538,0.83969,0.73752,0.8156,0.4774,1.34559,1.13182,1.69417,0.00081873,0.00081873,0.00081873
30,719.13,1.41483,1.09865,1.65289,0.8046,0.71408,0.80223,0.46882,1.37373,1.40703,1.72666,0.000806473,0.000806473,0.000806473
31,741.468,1.38731,1.07804,1.64148,0.8165,0.72989,0.81992,0.48381,1.36251,1.4009,1.6928,0.000793913,0.000793913,0.000793913
32,766.026,1.3916,1.07766,1.64858,0.80326,0.6954,0.78244,0.44625,1.43441,1.26048,1.76821,0.000781064,0.000781064,0.000781064
33,788.321,1.39942,1.09333,1.64461,0.74669,0.73694,0.77688,0.45036,1.42942,1.57985,1.78914,0.000767937,0.000767937,0.000767937
34,810.581,1.39987,1.06059,1.64607,0.75988,0.71838,0.77853,0.44198,1.46233,1.2851,1.79255,0.000754545,0.000754545,0.000754545
35,832.645,1.35177,1.03882,1.60848,0.83711,0.74575,0.82053,0.49192,1.37613,0.9911,1.70091,0.000740903,0.000740903,0.000740903
36,856.475,1.37815,1.0522,1.61917,0.80562,0.74433,0.82085,0.4829,1.35738,1.3395,1.69744,0.000727023,0.000727023,0.000727023
37,878.546,1.36488,1.04066,1.61008,0.81594,0.72989,0.82308,0.49819,1.34468,1.13598,1.67069,0.000712918,0.000712918,0.000712918
38,902.245,1.34865,1.0027,1.6096,0.80822,0.70846,0.78857,0.4734,1.36055,1.05431,1.71895,0.000698604,0.000698604,0.000698604
39,924.486,1.36404,1.03412,1.61081,0.80739,0.76724,0.83247,0.50535,1.29845,1.11193,1.6314,0.000684094,0.000684094,0.000684094
40,948.418,1.3458,1.01798,1.60797,0.74989,0.73233,0.78275,0.46484,1.36468,1.17855,1.71995,0.000669402,0.000669402,0.000669402
41,970.652,1.34451,1.01644,1.59692,0.81849,0.70546,0.79222,0.47251,1.3181,1.70239,1.68005,0.000654543,0.000654543,0.000654543
42,994.409,1.32783,1.0066,1.60113,0.81324,0.75431,0.83093,0.50058,1.29745,0.933,1.63626,0.000639532,0.000639532,0.000639532
43,1016.66,1.3231,0.98219,1.59417,0.84719,0.75575,0.85394,0.51782,1.32427,0.9926,1.6502,0.000624383,0.000624383,0.000624383
44,1040.38,1.35276,1.01175,1.60215,0.84117,0.73809,0.83855,0.50611,1.29447,1.06355,1.62121,0.000609111,0.000609111,0.000609111
45,1062.45,1.34611,0.99188,1.58613,0.85368,0.76281,0.84268,0.51284,1.29287,1.04725,1.62132,0.000593731,0.000593731,0.000593731
46,1084.68,1.31412,0.98479,1.57659,0.76562,0.72746,0.7833,0.46618,1.34743,1.31482,1.68308,0.000578259,0.000578259,0.000578259
47,1106.67,1.31068,0.97449,1.57949,0.83399,0.74347,0.83234,0.51448,1.25981,0.9516,1.62093,0.00056271,0.00056271,0.00056271
48,1128.74,1.28513,0.96098,1.57653,0.79831,0.72414,0.80073,0.48595,1.3094,1.23365,1.67091,0.000547099,0.000547099,0.000547099
49,1150.83,1.29665,0.95738,1.57558,0.83509,0.78736,0.85553,0.5203,1.27712,0.84125,1.61417,0.000531442,0.000531442,0.000531442
50,1174.76,1.31226,0.95245,1.57992,0.82585,0.78161,0.84728,0.50805,1.30726,1.02443,1.64954,0.000515754,0.000515754,0.000515754
51,1196.69,1.28493,0.9407,1.55904,0.84192,0.74425,0.83636,0.50542,1.30188,0.98141,1.63755,0.00050005,0.00050005,0.00050005
52,1220.6,1.30912,0.96064,1.57441,0.83468,0.75718,0.84753,0.51344,1.29736,0.94088,1.62136,0.000484346,0.000484346,0.000484346
53,1242.64,1.30637,0.94137,1.56739,0.81394,0.76437,0.84356,0.50481,1.30779,1.0349,1.63968,0.000468658,0.000468658,0.000468658
54,1264.67,1.28637,0.93961,1.5621,0.85176,0.77599,0.85744,0.52737,1.26362,0.86099,1.59402,0.000453001,0.000453001,0.000453001
55,1288.47,1.27894,0.94315,1.56452,0.86362,0.76868,0.85477,0.53063,1.27552,0.78173,1.59822,0.00043739,0.00043739,0.00043739
56,1312.39,1.28484,0.93466,1.56139,0.83645,0.78161,0.85452,0.52469,1.27457,0.93494,1.6098,0.000421841,0.000421841,0.000421841
57,1334.57,1.27414,0.93468,1.55607,0.8464,0.76799,0.84786,0.52075,1.28469,0.82972,1.63059,0.000406369,0.000406369,0.000406369
58,1357.12,1.28052,0.92945,1.55642,0.83595,0.76144,0.8401,0.51078,1.30154,0.88763,1.65412,0.000390989,0.000390989,0.000390989
59,1379.27,1.27683,0.91666,1.54411,0.79544,0.78216,0.82764,0.50411,1.28114,1.10257,1.61236,0.000375717,0.000375717,0.000375717
60,1401.45,1.26371,0.91776,1.53082,0.83257,0.80732,0.85484,0.53513,1.24639,0.78569,1.58031,0.000360568,0.000360568,0.000360568
61,1425.33,1.24402,0.89296,1.53082,0.84849,0.7773,0.85084,0.52413,1.28492,0.80698,1.60103,0.000345557,0.000345557,0.000345557
62,1449.54,1.22845,0.87747,1.5318,0.82098,0.77011,0.84439,0.52312,1.26833,0.90105,1.60149,0.000330698,0.000330698,0.000330698
63,1471.79,1.26996,0.90257,1.5361,0.83796,0.81034,0.86758,0.53862,1.24787,0.86926,1.59071,0.000316006,0.000316006,0.000316006
64,1495.68,1.24784,0.89818,1.53197,0.85963,0.77431,0.85738,0.51866,1.2557,0.86068,1.60125,0.000301496,0.000301496,0.000301496
65,1517.73,1.25266,0.8989,1.52358,0.85829,0.78879,0.86353,0.52885,1.26889,1.05233,1.59662,0.000287182,0.000287182,0.000287182
66,1540.09,1.23536,0.87119,1.51083,0.84604,0.75287,0.8467,0.52545,1.24718,0.87181,1.59416,0.000273077,0.000273077,0.000273077
67,1561.89,1.21,0.86507,1.50726,0.83774,0.77586,0.86501,0.53886,1.24159,0.79614,1.5652,0.000259197,0.000259197,0.000259197
68,1584.12,1.21962,0.86517,1.50502,0.84221,0.77874,0.85842,0.52221,1.25969,0.82132,1.58557,0.000245555,0.000245555,0.000245555
69,1605.89,1.21052,0.85572,1.50393,0.82981,0.80564,0.874,0.54413,1.22541,0.75729,1.5525,0.000232163,0.000232163,0.000232163
70,1629.65,1.20918,0.84421,1.49898,0.85547,0.77443,0.86652,0.5334,1.25691,0.81601,1.58293,0.000219036,0.000219036,0.000219036
71,1651.76,1.21473,0.86137,1.51054,0.85869,0.79454,0.86915,0.544,1.22514,0.8142,1.54975,0.000206187,0.000206187,0.000206187
72,1676.09,1.22435,0.8574,1.5019,0.83473,0.79099,0.85885,0.5358,1.2335,0.91976,1.56059,0.000193627,0.000193627,0.000193627
73,1698.28,1.22346,0.85705,1.50283,0.8432,0.78879,0.86445,0.53083,1.26666,0.84932,1.58876,0.00018137,0.00018137,0.00018137
74,1720.67,1.19055,0.84102,1.5012,0.83299,0.80261,0.86586,0.53769,1.23297,0.74864,1.5637,0.000169427,0.000169427,0.000169427
75,1742.89,1.19284,0.83217,1.49252,0.83324,0.80408,0.8653,0.53308,1.24107,0.77889,1.57116,0.000157811,0.000157811,0.000157811
76,1765.23,1.19949,0.83713,1.50137,0.84052,0.79598,0.86361,0.53249,1.24612,0.80912,1.57262,0.000146532,0.000146532,0.000146532
77,1787.06,1.16587,0.80832,1.484,0.86353,0.7931,0.86918,0.54116,1.22938,0.7419,1.56947,0.000135602,0.000135602,0.000135602
78,1809.7,1.18405,0.82636,1.47507,0.86018,0.79454,0.87347,0.54331,1.22376,0.75735,1.55413,0.000125032,0.000125032,0.000125032
79,1831.65,1.17872,0.82119,1.48386,0.85486,0.79885,0.86958,0.54415,1.21595,0.76882,1.54784,0.000114832,0.000114832,0.000114832
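Editor's note: the per-epoch metrics above are plain CSV and easy to chart. A minimal sketch, assuming pandas and matplotlib are installed and the script runs from the repository root:

import pandas as pd
import matplotlib.pyplot as plt

# Minimal sketch: plot validation mAP over epochs from train/results.csv.
df = pd.read_csv("train/results.csv")
df.columns = df.columns.str.strip()  # some Ultralytics versions pad column names with spaces

plt.plot(df["epoch"], df["metrics/mAP50(B)"], label="mAP@0.5")
plt.plot(df["epoch"], df["metrics/mAP50-95(B)"], label="mAP@0.5:0.95")
plt.xlabel("epoch")
plt.ylabel("mAP")
plt.legend()
plt.savefig("map_curve.png")  # hypothetical output filename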
train/results.png
ADDED
train/train_batch0.jpg
ADDED
train/train_batch1.jpg
ADDED
train/train_batch2.jpg
ADDED
train/val_batch0_labels.jpg
ADDED
train/val_batch0_pred.jpg
ADDED
train/val_batch1_labels.jpg
ADDED
train/val_batch1_pred.jpg
ADDED
train/val_batch2_labels.jpg
ADDED
train/val_batch2_pred.jpg
ADDED
train/weights/best.onnx
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5668ee97fa2c06ddffe7198dcbd5b3a072889736ae6f71cd906985fcc14f849d
size 80401757
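Editor's note: the file above is a Git LFS pointer; the actual ONNX weights are fetched with git lfs pull. A minimal inference sketch, assuming onnxruntime is installed and the real file is present:

import numpy as np
import onnxruntime as ort

# Minimal sketch: run the exported ONNX model on a dummy input.
session = ort.InferenceSession("train/weights/best.onnx", providers=["CPUExecutionProvider"])
input_name = session.get_inputs()[0].name          # expected input shape (1, 3, 640, 640)
dummy = np.random.rand(1, 3, 640, 640).astype(np.float32)
outputs = session.run(None, {input_name: dummy})
print([o.shape for o in outputs])                  # raw predictions, e.g. (1, 5, 8400) for one class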
train/weights/best.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5966e0a2b3b089494c46b178129114b8cb67df1c294134e48f0bdd65b9b8f3b8
size 40517285
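Editor's note: best.pt is the checkpoint normally used for inference. A minimal sketch, assuming git lfs pull has been run; "urchins.jpg" is a hypothetical test image:

from ultralytics import YOLO

# Minimal sketch: run the best checkpoint on an image and read back the detections.
model = YOLO("train/weights/best.pt")
results = model.predict("urchins.jpg", conf=0.25, imgsz=640)
for r in results:
    print(r.boxes.xyxy, r.boxes.conf)  # detected urchin boxes and confidences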
train/weights/best.torchscript
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3ed5a7d3931eb475ebd562c4f80db9708fa6d552833da66131811348d226bfc6
size 80867482
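Editor's note: the TorchScript export can be loaded without the ultralytics package. A minimal sketch, assuming the real file has been fetched with git lfs pull; the output is raw, un-postprocessed predictions:

import torch

# Minimal sketch: load the TorchScript export and run it on a dummy 640x640 tensor.
ts_model = torch.jit.load("train/weights/best.torchscript")
ts_model.eval()
with torch.no_grad():
    preds = ts_model(torch.zeros(1, 3, 640, 640))
print(preds.shape if isinstance(preds, torch.Tensor) else [p.shape for p in preds])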
train/weights/best_ncnn_model/metadata.yaml
ADDED
@@ -0,0 +1,14 @@
description: Ultralytics YOLO11m model trained on O:\OTHER\AI_DATASETS\yolo\datasets\urchin_datasetv2\split_dataset\data.yaml
author: Ultralytics
date: '2024-10-21T14:05:16.912634'
version: 8.3.17
license: AGPL-3.0 License (https://ultralytics.com/license)
docs: https://docs.ultralytics.com
stride: 32
task: detect
batch: 1
imgsz:
- 640
- 640
names:
  0: urchin
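Editor's note: this metadata file carries the class map and input size for the NCNN export. A minimal sketch for reading it, assuming PyYAML is installed:

import yaml

# Minimal sketch: read the exported model's metadata.
with open("train/weights/best_ncnn_model/metadata.yaml") as f:
    meta = yaml.safe_load(f)
print(meta["task"], meta["imgsz"], meta["names"])  # detect, [640, 640], {0: 'urchin'}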
train/weights/best_ncnn_model/model.ncnn.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:57837a4ef07574788b7563b2ebf1888c0c9e8b0042bf1d2b7e36239dd34a8f5b
size 80291664
train/weights/best_ncnn_model/model.ncnn.param
ADDED
@@ -0,0 +1,347 @@
7767517
345 407
Input in0 0 1 in0
Convolution conv_0 1 1 in0 1 0=64 1=3 11=3 12=1 13=2 14=1 2=1 3=2 4=1 5=1 6=1728
Swish silu_109 1 1 1 2
Convolution conv_1 1 1 2 3 0=128 1=3 11=3 12=1 13=2 14=1 2=1 3=2 4=1 5=1 6=73728
Swish silu_110 1 1 3 4
Convolution conv_2 1 1 4 5 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=16384
Swish silu_111 1 1 5 6
Slice split_0 1 2 6 7 8 -23300=2,64,64 1=0
Split splitncnn_0 1 3 8 9 10 11
Convolution conv_3 1 1 11 12 0=32 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=2048
Swish silu_112 1 1 12 13
Split splitncnn_1 1 2 13 14 15
Convolution conv_4 1 1 15 16 0=32 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=9216
Swish silu_113 1 1 16 17
Convolution conv_5 1 1 17 18 0=32 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=9216
Swish silu_114 1 1 18 19
BinaryOp add_0 2 1 14 19 20 0=0
Split splitncnn_2 1 2 20 21 22
Convolution conv_6 1 1 22 23 0=32 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=9216
Swish silu_115 1 1 23 24
Convolution conv_7 1 1 24 25 0=32 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=9216
Swish silu_116 1 1 25 26
BinaryOp add_1 2 1 21 26 27 0=0
Convolution conv_8 1 1 10 28 0=32 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=2048
Swish silu_117 1 1 28 29
Concat cat_0 2 1 27 29 30 0=0
Convolution conv_9 1 1 30 31 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=4096
Swish silu_118 1 1 31 32
Concat cat_1 3 1 7 9 32 33 0=0
Convolution conv_10 1 1 33 34 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=49152
Swish silu_119 1 1 34 35
Convolution conv_11 1 1 35 36 0=256 1=3 11=3 12=1 13=2 14=1 2=1 3=2 4=1 5=1 6=589824
Swish silu_120 1 1 36 37
Convolution conv_12 1 1 37 38 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=65536
Swish silu_121 1 1 38 39
Slice split_1 1 2 39 40 41 -23300=2,128,128 1=0
Split splitncnn_3 1 3 41 42 43 44
Convolution conv_13 1 1 44 45 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=8192
Swish silu_122 1 1 45 46
Split splitncnn_4 1 2 46 47 48
Convolution conv_14 1 1 48 49 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
Swish silu_123 1 1 49 50
Convolution conv_15 1 1 50 51 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
Swish silu_124 1 1 51 52
BinaryOp add_2 2 1 47 52 53 0=0
Split splitncnn_5 1 2 53 54 55
Convolution conv_16 1 1 55 56 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
Swish silu_125 1 1 56 57
Convolution conv_17 1 1 57 58 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
Swish silu_126 1 1 58 59
BinaryOp add_3 2 1 54 59 60 0=0
Convolution conv_18 1 1 43 61 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=8192
Swish silu_127 1 1 61 62
Concat cat_2 2 1 60 62 63 0=0
Convolution conv_19 1 1 63 64 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=16384
Swish silu_128 1 1 64 65
Concat cat_3 3 1 40 42 65 66 0=0
Convolution conv_20 1 1 66 67 0=512 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=196608
Swish silu_129 1 1 67 68
Split splitncnn_6 1 2 68 69 70
Convolution conv_21 1 1 70 71 0=512 1=3 11=3 12=1 13=2 14=1 2=1 3=2 4=1 5=1 6=2359296
Swish silu_130 1 1 71 72
Convolution conv_22 1 1 72 73 0=512 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=262144
Swish silu_131 1 1 73 74
Slice split_2 1 2 74 75 76 -23300=2,256,256 1=0
Split splitncnn_7 1 3 76 77 78 79
Convolution conv_23 1 1 79 80 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=32768
Swish silu_132 1 1 80 81
Split splitncnn_8 1 2 81 82 83
Convolution conv_24 1 1 83 84 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_133 1 1 84 85
Convolution conv_25 1 1 85 86 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_134 1 1 86 87
BinaryOp add_4 2 1 82 87 88 0=0
Split splitncnn_9 1 2 88 89 90
Convolution conv_26 1 1 90 91 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_135 1 1 91 92
Convolution conv_27 1 1 92 93 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_136 1 1 93 94
BinaryOp add_5 2 1 89 94 95 0=0
Convolution conv_28 1 1 78 96 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=32768
Swish silu_137 1 1 96 97
Concat cat_4 2 1 95 97 98 0=0
Convolution conv_29 1 1 98 99 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=65536
Swish silu_138 1 1 99 100
Concat cat_5 3 1 75 77 100 101 0=0
Convolution conv_30 1 1 101 102 0=512 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=393216
Swish silu_139 1 1 102 103
Split splitncnn_10 1 2 103 104 105
Convolution conv_31 1 1 105 106 0=512 1=3 11=3 12=1 13=2 14=1 2=1 3=2 4=1 5=1 6=2359296
Swish silu_140 1 1 106 107
Convolution conv_32 1 1 107 108 0=512 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=262144
Swish silu_141 1 1 108 109
Slice split_3 1 2 109 110 111 -23300=2,256,256 1=0
Split splitncnn_11 1 3 111 112 113 114
Convolution conv_33 1 1 114 115 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=32768
Swish silu_142 1 1 115 116
Split splitncnn_12 1 2 116 117 118
Convolution conv_34 1 1 118 119 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_143 1 1 119 120
Convolution conv_35 1 1 120 121 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_144 1 1 121 122
BinaryOp add_6 2 1 117 122 123 0=0
Split splitncnn_13 1 2 123 124 125
Convolution conv_36 1 1 125 126 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_145 1 1 126 127
Convolution conv_37 1 1 127 128 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_146 1 1 128 129
BinaryOp add_7 2 1 124 129 130 0=0
Convolution conv_38 1 1 113 131 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=32768
Swish silu_147 1 1 131 132
Concat cat_6 2 1 130 132 133 0=0
Convolution conv_39 1 1 133 134 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=65536
Swish silu_148 1 1 134 135
Concat cat_7 3 1 110 112 135 136 0=0
Convolution conv_40 1 1 136 137 0=512 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=393216
Swish silu_149 1 1 137 138
Convolution conv_41 1 1 138 139 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=131072
Swish silu_150 1 1 139 140
Split splitncnn_14 1 2 140 141 142
Pooling maxpool2d_106 1 1 142 143 0=0 1=5 11=5 12=1 13=2 2=1 3=2 5=1
Split splitncnn_15 1 2 143 144 145
Pooling maxpool2d_107 1 1 145 146 0=0 1=5 11=5 12=1 13=2 2=1 3=2 5=1
Split splitncnn_16 1 2 146 147 148
Pooling maxpool2d_108 1 1 148 149 0=0 1=5 11=5 12=1 13=2 2=1 3=2 5=1
Concat cat_8 4 1 141 144 147 149 150 0=0
Convolution conv_42 1 1 150 151 0=512 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=524288
Swish silu_151 1 1 151 152
Convolution conv_43 1 1 152 153 0=512 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=262144
Swish silu_152 1 1 153 154
Slice split_4 1 2 154 155 156 -23300=2,256,256 1=0
Split splitncnn_17 1 2 156 157 158
Convolution conv_44 1 1 158 159 0=512 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=131072
Reshape view_218 1 1 159 160 0=400 1=128 2=4
Slice split_5 1 3 160 161 162 163 -23300=3,32,32,64 1=1
Split splitncnn_18 1 2 163 164 165
Permute transpose_227 1 1 161 166 0=1
MatMul matmul_225 2 1 166 162 167
BinaryOp mul_8 1 1 167 168 0=2 1=1 2=1.767767e-01
Softmax softmax_214 1 1 168 169 0=2 1=1
MatMul matmultransb_0 2 1 165 169 170 0=1
Reshape view_219 1 1 170 171 0=20 1=20 2=256
Reshape reshape_216 1 1 164 172 0=20 1=20 2=256
ConvolutionDepthWise convdw_230 1 1 172 173 0=256 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=2304 7=256
BinaryOp add_9 2 1 171 173 174 0=0
Convolution conv_45 1 1 174 175 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=65536
BinaryOp add_10 2 1 157 175 176 0=0
Split splitncnn_19 1 2 176 177 178
Convolution conv_46 1 1 178 179 0=512 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=131072
Swish silu_153 1 1 179 180
Convolution conv_47 1 1 180 181 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=131072
BinaryOp add_11 2 1 177 181 182 0=0
Concat cat_9 2 1 155 182 183 0=0
Convolution conv_48 1 1 183 184 0=512 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=262144
Swish silu_154 1 1 184 185
Split splitncnn_20 1 2 185 186 187
Interp upsample_211 1 1 187 188 0=1 1=2.000000e+00 2=2.000000e+00 6=0
Concat cat_10 2 1 188 104 189 0=0
Convolution conv_49 1 1 189 190 0=512 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=524288
Swish silu_155 1 1 190 191
Slice split_6 1 2 191 192 193 -23300=2,256,256 1=0
Split splitncnn_21 1 3 193 194 195 196
Convolution conv_50 1 1 196 197 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=32768
Swish silu_156 1 1 197 198
Split splitncnn_22 1 2 198 199 200
Convolution conv_51 1 1 200 201 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_157 1 1 201 202
Convolution conv_52 1 1 202 203 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_158 1 1 203 204
BinaryOp add_12 2 1 199 204 205 0=0
Split splitncnn_23 1 2 205 206 207
Convolution conv_53 1 1 207 208 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_159 1 1 208 209
Convolution conv_54 1 1 209 210 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_160 1 1 210 211
BinaryOp add_13 2 1 206 211 212 0=0
Convolution conv_55 1 1 195 213 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=32768
Swish silu_161 1 1 213 214
Concat cat_11 2 1 212 214 215 0=0
Convolution conv_56 1 1 215 216 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=65536
Swish silu_162 1 1 216 217
Concat cat_12 3 1 192 194 217 218 0=0
Convolution conv_57 1 1 218 219 0=512 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=393216
Swish silu_163 1 1 219 220
Split splitncnn_24 1 2 220 221 222
Interp upsample_212 1 1 222 223 0=1 1=2.000000e+00 2=2.000000e+00 6=0
Concat cat_13 2 1 223 69 224 0=0
Convolution conv_58 1 1 224 225 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=262144
Swish silu_164 1 1 225 226
Slice split_7 1 2 226 227 228 -23300=2,128,128 1=0
Split splitncnn_25 1 3 228 229 230 231
Convolution conv_59 1 1 231 232 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=8192
Swish silu_165 1 1 232 233
Split splitncnn_26 1 2 233 234 235
Convolution conv_60 1 1 235 236 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
Swish silu_166 1 1 236 237
Convolution conv_61 1 1 237 238 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
Swish silu_167 1 1 238 239
BinaryOp add_14 2 1 234 239 240 0=0
Split splitncnn_27 1 2 240 241 242
Convolution conv_62 1 1 242 243 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
Swish silu_168 1 1 243 244
Convolution conv_63 1 1 244 245 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
Swish silu_169 1 1 245 246
BinaryOp add_15 2 1 241 246 247 0=0
Convolution conv_64 1 1 230 248 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=8192
Swish silu_170 1 1 248 249
Concat cat_14 2 1 247 249 250 0=0
Convolution conv_65 1 1 250 251 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=16384
Swish silu_171 1 1 251 252
Concat cat_15 3 1 227 229 252 253 0=0
Convolution conv_66 1 1 253 254 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=98304
Swish silu_172 1 1 254 255
Split splitncnn_28 1 3 255 256 257 258
Convolution conv_67 1 1 257 259 0=256 1=3 11=3 12=1 13=2 14=1 2=1 3=2 4=1 5=1 6=589824
Swish silu_173 1 1 259 260
Concat cat_16 2 1 260 221 261 0=0
Convolution conv_68 1 1 261 262 0=512 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=393216
Swish silu_174 1 1 262 263
Slice split_8 1 2 263 264 265 -23300=2,256,256 1=0
Split splitncnn_29 1 3 265 266 267 268
Convolution conv_69 1 1 268 269 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=32768
Swish silu_175 1 1 269 270
Split splitncnn_30 1 2 270 271 272
Convolution conv_70 1 1 272 273 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_176 1 1 273 274
Convolution conv_71 1 1 274 275 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_177 1 1 275 276
BinaryOp add_16 2 1 271 276 277 0=0
Split splitncnn_31 1 2 277 278 279
Convolution conv_72 1 1 279 280 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_178 1 1 280 281
Convolution conv_73 1 1 281 282 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_179 1 1 282 283
BinaryOp add_17 2 1 278 283 284 0=0
Convolution conv_74 1 1 267 285 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=32768
Swish silu_180 1 1 285 286
Concat cat_17 2 1 284 286 287 0=0
Convolution conv_75 1 1 287 288 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=65536
Swish silu_181 1 1 288 289
Concat cat_18 3 1 264 266 289 290 0=0
Convolution conv_76 1 1 290 291 0=512 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=393216
Swish silu_182 1 1 291 292
Split splitncnn_32 1 3 292 293 294 295
Convolution conv_77 1 1 294 296 0=512 1=3 11=3 12=1 13=2 14=1 2=1 3=2 4=1 5=1 6=2359296
Swish silu_183 1 1 296 297
Concat cat_19 2 1 297 186 298 0=0
Convolution conv_78 1 1 298 299 0=512 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=524288
Swish silu_184 1 1 299 300
Slice split_9 1 2 300 301 302 -23300=2,256,256 1=0
Split splitncnn_33 1 3 302 303 304 305
Convolution conv_79 1 1 305 306 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=32768
Swish silu_185 1 1 306 307
Split splitncnn_34 1 2 307 308 309
Convolution conv_80 1 1 309 310 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_186 1 1 310 311
Convolution conv_81 1 1 311 312 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_187 1 1 312 313
BinaryOp add_18 2 1 308 313 314 0=0
Split splitncnn_35 1 2 314 315 316
Convolution conv_82 1 1 316 317 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_188 1 1 317 318
Convolution conv_83 1 1 318 319 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_189 1 1 319 320
BinaryOp add_19 2 1 315 320 321 0=0
Convolution conv_84 1 1 304 322 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=32768
Swish silu_190 1 1 322 323
Concat cat_20 2 1 321 323 324 0=0
Convolution conv_85 1 1 324 325 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=65536
Swish silu_191 1 1 325 326
Concat cat_21 3 1 301 303 326 327 0=0
Convolution conv_86 1 1 327 328 0=512 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=393216
Swish silu_192 1 1 328 329
Split splitncnn_36 1 2 329 330 331
MemoryData pnnx_213 0 1 332 0=8400
Convolution conv_87 1 1 256 333 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
Swish silu_193 1 1 333 334
Convolution conv_88 1 1 334 335 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
Swish silu_194 1 1 335 336
Convolution conv_89 1 1 336 337 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=4096
ConvolutionDepthWise convdw_231 1 1 258 338 0=256 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=2304 7=256
Swish silu_195 1 1 338 339
Convolution conv_90 1 1 339 340 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=65536
Swish silu_196 1 1 340 341
ConvolutionDepthWise convdw_232 1 1 341 342 0=256 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=2304 7=256
Swish silu_197 1 1 342 343
Convolution conv_91 1 1 343 344 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=65536
Swish silu_198 1 1 344 345
Convolution conv_92 1 1 345 346 0=1 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=256
Concat cat_22 2 1 337 346 347 0=0
Convolution conv_93 1 1 293 348 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=294912
Swish silu_199 1 1 348 349
Convolution conv_94 1 1 349 350 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
Swish silu_200 1 1 350 351
Convolution conv_95 1 1 351 352 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=4096
ConvolutionDepthWise convdw_233 1 1 295 353 0=512 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=4608 7=512
Swish silu_201 1 1 353 354
Convolution conv_96 1 1 354 355 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=131072
Swish silu_202 1 1 355 356
ConvolutionDepthWise convdw_234 1 1 356 357 0=256 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=2304 7=256
Swish silu_203 1 1 357 358
Convolution conv_97 1 1 358 359 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=65536
Swish silu_204 1 1 359 360
Convolution conv_98 1 1 360 361 0=1 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=256
Concat cat_23 2 1 352 361 362 0=0
Convolution conv_99 1 1 330 363 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=294912
Swish silu_205 1 1 363 364
Convolution conv_100 1 1 364 365 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
Swish silu_206 1 1 365 366
Convolution conv_101 1 1 366 367 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=4096
ConvolutionDepthWise convdw_235 1 1 331 368 0=512 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=4608 7=512
Swish silu_207 1 1 368 369
Convolution conv_102 1 1 369 370 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=131072
Swish silu_208 1 1 370 371
ConvolutionDepthWise convdw_236 1 1 371 372 0=256 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=2304 7=256
Swish silu_209 1 1 372 373
Convolution conv_103 1 1 373 374 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=65536
Swish silu_210 1 1 374 375
Convolution conv_104 1 1 375 376 0=1 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=256
Concat cat_24 2 1 367 376 377 0=0
Reshape view_220 1 1 347 378 0=6400 1=65
Reshape view_221 1 1 362 379 0=1600 1=65
Reshape view_222 1 1 377 380 0=400 1=65
Concat cat_25 3 1 378 379 380 381 0=1
Slice split_10 1 2 381 382 383 -23300=2,64,1 1=0
Reshape view_223 1 1 382 384 0=8400 1=16 2=4
Permute transpose_229 1 1 384 385 0=2
Softmax softmax_215 1 1 385 386 0=0 1=1
Convolution conv_105 1 1 386 387 0=1 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=0 6=16
Reshape view_224 1 1 387 388 0=8400 1=4
MemoryData pnnx_fold_anchor_points.1 0 1 389 0=8400 1=2
MemoryData pnnx_fold_anchor_points.1_1 0 1 390 0=8400 1=2
Slice chunk_0 1 2 388 391 392 -23300=2,-233,-233 1=0
BinaryOp sub_20 2 1 389 391 393 0=1
Split splitncnn_37 1 2 393 394 395
BinaryOp add_21 2 1 390 392 396 0=0
Split splitncnn_38 1 2 396 397 398
BinaryOp add_22 2 1 394 397 399 0=0
BinaryOp div_23 1 1 399 400 0=3 1=1 2=2.000000e+00
BinaryOp sub_24 2 1 398 395 401 0=1
Concat cat_26 2 1 400 401 402 0=0
Reshape reshape_217 1 1 332 403 0=8400 1=1
BinaryOp mul_25 2 1 402 403 404 0=2
Sigmoid sigmoid_213 1 1 383 405
Concat cat_27 2 1 404 405 out0 0=0
train/weights/best_ncnn_model/model_ncnn.py
ADDED
@@ -0,0 +1,26 @@
import numpy as np
import ncnn
import torch


def test_inference():
    # Build a fixed dummy input matching the export size (1 x 3 x 640 x 640).
    torch.manual_seed(0)
    in0 = torch.rand(1, 3, 640, 640, dtype=torch.float)
    out = []

    with ncnn.Net() as net:
        # Raw strings so the Windows backslashes are not treated as escape sequences.
        net.load_param(r"training_logs\train\weights\best_ncnn_model\model.ncnn.param")
        net.load_model(r"training_logs\train\weights\best_ncnn_model\model.ncnn.bin")

        with net.create_extractor() as ex:
            ex.input("in0", ncnn.Mat(in0.squeeze(0).numpy()).clone())

            _, out0 = ex.extract("out0")
            out.append(torch.from_numpy(np.array(out0)).unsqueeze(0))

    if len(out) == 1:
        return out[0]
    else:
        return tuple(out)


if __name__ == "__main__":
    print(test_inference())
train/weights/last.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5867096f5ade70db363f7086f74aa3cf1e0a2869fea44092d13e3daf15f309ee
size 40517285
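Editor's note: last.pt is the checkpoint for continuing an interrupted run. A minimal sketch, assuming the original training_logs/train run directory is still present alongside it:

from ultralytics import YOLO

# Minimal sketch: resume training from the most recent checkpoint.
model = YOLO("train/weights/last.pt")
model.train(resume=True)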
yolo11m_urchin_trained.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9e263837f7c43675e1ec2bbfda6319c8a591ba06da96b011f65f615ac76aff9b
size 40628379
yolo11m_urchin_weights.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:268006738237d71f97d585ae94a229854cce5e3f1429b130dcca987367d2879b
size 80629527