akridge commited on
Commit
5b75f22
1 Parent(s): c8f5cc3

Upload 34 files

Browse files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ train/weights/best.torchscript filter=lfs diff=lfs merge=lfs -text
config.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "model_type": "YOLO11n",
3
+ "framework": "PyTorch",
4
+ "repo_name": "yolo11-sea-urchin-detector",
5
+ "repo_url": "https://huggingface.co/akridge/yolo11-sea-urchin-detector",
6
+ "countDownloads": "path:'yolo11n_urchin_trained.pt'"
7
+ }
results.jpg ADDED
train/F1_curve.png ADDED
train/PR_curve.png ADDED
train/P_curve.png ADDED
train/R_curve.png ADDED
train/args.yaml ADDED
@@ -0,0 +1,107 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ task: detect
2
+ mode: train
3
+ model: yolo11n.pt
4
+ data: O:\OTHER\AI_DATASETS\yolo\datasets\urchin_datasetv2\split_dataset\data.yaml
5
+ epochs: 100
6
+ time: null
7
+ patience: 100
8
+ batch: 32
9
+ imgsz: 640
10
+ save: true
11
+ save_period: 10
12
+ cache: false
13
+ device: cuda
14
+ workers: 8
15
+ project: null
16
+ name: train
17
+ exist_ok: false
18
+ pretrained: true
19
+ optimizer: auto
20
+ verbose: true
21
+ seed: 0
22
+ deterministic: true
23
+ single_cls: false
24
+ rect: false
25
+ cos_lr: false
26
+ close_mosaic: 10
27
+ resume: false
28
+ amp: true
29
+ fraction: 1.0
30
+ profile: false
31
+ freeze: null
32
+ multi_scale: false
33
+ overlap_mask: true
34
+ mask_ratio: 4
35
+ dropout: 0.0
36
+ val: true
37
+ split: val
38
+ save_json: false
39
+ save_hybrid: false
40
+ conf: null
41
+ iou: 0.7
42
+ max_det: 300
43
+ half: false
44
+ dnn: false
45
+ plots: true
46
+ source: null
47
+ vid_stride: 1
48
+ stream_buffer: false
49
+ visualize: false
50
+ augment: false
51
+ agnostic_nms: false
52
+ classes: null
53
+ retina_masks: false
54
+ embed: null
55
+ show: false
56
+ save_frames: false
57
+ save_txt: false
58
+ save_conf: false
59
+ save_crop: false
60
+ show_labels: true
61
+ show_conf: true
62
+ show_boxes: true
63
+ line_width: null
64
+ format: torchscript
65
+ keras: false
66
+ optimize: false
67
+ int8: false
68
+ dynamic: false
69
+ simplify: true
70
+ opset: null
71
+ workspace: 4
72
+ nms: false
73
+ lr0: 0.001
74
+ lrf: 0.01
75
+ momentum: 0.937
76
+ weight_decay: 0.0005
77
+ warmup_epochs: 3.0
78
+ warmup_momentum: 0.8
79
+ warmup_bias_lr: 0.1
80
+ box: 7.5
81
+ cls: 0.5
82
+ dfl: 1.5
83
+ pose: 12.0
84
+ kobj: 1.0
85
+ label_smoothing: 0.0
86
+ nbs: 64
87
+ hsv_h: 0.015
88
+ hsv_s: 0.7
89
+ hsv_v: 0.4
90
+ degrees: 0.0
91
+ translate: 0.1
92
+ scale: 0.5
93
+ shear: 0.0
94
+ perspective: 0.0
95
+ flipud: 0.0
96
+ fliplr: 0.5
97
+ bgr: 0.0
98
+ mosaic: 1.0
99
+ mixup: 0.0
100
+ copy_paste: 0.0
101
+ copy_paste_mode: flip
102
+ auto_augment: randaugment
103
+ erasing: 0.4
104
+ crop_fraction: 1.0
105
+ cfg: null
106
+ tracker: botsort.yaml
107
+ save_dir: runs\detect\train
train/confusion_matrix.png ADDED
train/confusion_matrix_normalized.png ADDED
train/labels.jpg ADDED
train/labels_correlogram.jpg ADDED
train/results.csv ADDED
@@ -0,0 +1,101 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ epoch,time,train/box_loss,train/cls_loss,train/dfl_loss,metrics/precision(B),metrics/recall(B),metrics/mAP50(B),metrics/mAP50-95(B),val/box_loss,val/cls_loss,val/dfl_loss,lr/pg0,lr/pg1,lr/pg2
2
+ 1,18.0087,1.57851,2.44934,1.84746,0.70307,0.21983,0.2652,0.11887,1.76134,3.03587,2.53316,0.000648649,0.000648649,0.000648649
3
+ 2,28.6769,1.48675,1.66619,1.67101,0.30734,0.16954,0.14891,0.06056,2.05878,3.04148,2.57005,0.00130229,0.00130229,0.00130229
4
+ 3,39.228,1.50342,1.46834,1.66571,0.26924,0.28879,0.20327,0.06807,2.23061,3.57287,3.15847,0.00194274,0.00194274,0.00194274
5
+ 4,48.756,1.49339,1.32043,1.66576,0.16677,0.21839,0.10518,0.03566,2.66079,4.33873,4.01088,0.0019406,0.0019406,0.0019406
6
+ 5,58.1978,1.49926,1.28882,1.6666,0.20696,0.22126,0.11733,0.03635,2.42514,3.42251,3.26463,0.0019208,0.0019208,0.0019208
7
+ 6,67.6335,1.43649,1.15885,1.60765,0.71515,0.54469,0.63097,0.33288,1.60097,2.02621,1.96242,0.001901,0.001901,0.001901
8
+ 7,77.0978,1.43328,1.11472,1.60348,0.77631,0.60057,0.69472,0.34312,1.75334,1.45547,2.13223,0.0018812,0.0018812,0.0018812
9
+ 8,86.3853,1.40936,1.10194,1.59489,0.18585,0.33764,0.12044,0.04481,2.40069,2.88224,2.97724,0.0018614,0.0018614,0.0018614
10
+ 9,95.6019,1.45399,1.10301,1.6152,0.70654,0.50862,0.57465,0.316,1.68399,1.59752,2.01173,0.0018416,0.0018416,0.0018416
11
+ 10,104.879,1.39388,1.04189,1.56575,0.69311,0.57902,0.63905,0.32772,1.69113,1.52786,1.98959,0.0018218,0.0018218,0.0018218
12
+ 11,113.982,1.38258,1.01319,1.54591,0.74446,0.60632,0.6921,0.37495,1.61396,1.30138,2.03584,0.001802,0.001802,0.001802
13
+ 12,123.173,1.36148,0.99117,1.55218,0.66643,0.68103,0.6792,0.35939,1.47269,1.13058,1.80146,0.0017822,0.0017822,0.0017822
14
+ 13,132.318,1.33444,0.96738,1.51321,0.74328,0.69828,0.74623,0.42392,1.4873,1.00355,1.78067,0.0017624,0.0017624,0.0017624
15
+ 14,141.454,1.3264,0.95115,1.51741,0.77231,0.60776,0.70361,0.38848,1.58937,1.20216,1.89459,0.0017426,0.0017426,0.0017426
16
+ 15,150.572,1.33367,0.92506,1.51952,0.74702,0.70429,0.75876,0.43018,1.44981,1.36427,1.77019,0.0017228,0.0017228,0.0017228
17
+ 16,159.711,1.33458,0.9361,1.52009,0.79633,0.74152,0.79632,0.44726,1.46422,0.97671,1.79903,0.001703,0.001703,0.001703
18
+ 17,169.009,1.33564,0.90869,1.51485,0.81631,0.75862,0.82679,0.4696,1.43843,1.05103,1.72393,0.0016832,0.0016832,0.0016832
19
+ 18,178.246,1.28412,0.89108,1.49408,0.82793,0.71264,0.8006,0.4442,1.43701,0.95523,1.7293,0.0016634,0.0016634,0.0016634
20
+ 19,187.464,1.28701,0.86921,1.4931,0.83612,0.73707,0.82354,0.46959,1.41304,0.96802,1.68182,0.0016436,0.0016436,0.0016436
21
+ 20,196.87,1.30954,0.8977,1.50703,0.83514,0.64224,0.73875,0.43168,1.44469,1.71238,1.72024,0.0016238,0.0016238,0.0016238
22
+ 21,206.066,1.28829,0.85136,1.48794,0.80174,0.75531,0.8216,0.47292,1.45957,0.88306,1.74459,0.001604,0.001604,0.001604
23
+ 22,215.263,1.26816,0.82855,1.47801,0.87358,0.73132,0.83031,0.48843,1.40613,0.8322,1.66489,0.0015842,0.0015842,0.0015842
24
+ 23,224.393,1.26563,0.83278,1.46431,0.83835,0.73851,0.82292,0.46319,1.4283,0.93245,1.71639,0.0015644,0.0015644,0.0015644
25
+ 24,233.49,1.23352,0.83621,1.47764,0.82671,0.74714,0.82897,0.46656,1.45663,0.88323,1.75119,0.0015446,0.0015446,0.0015446
26
+ 25,242.575,1.27675,0.84155,1.47655,0.85771,0.72751,0.80845,0.48153,1.34852,0.89774,1.62704,0.0015248,0.0015248,0.0015248
27
+ 26,251.861,1.26962,0.85174,1.46887,0.81484,0.7227,0.80219,0.44964,1.49611,1.05222,1.72045,0.001505,0.001505,0.001505
28
+ 27,261.046,1.26039,0.83363,1.45365,0.79862,0.70115,0.7683,0.43101,1.44362,1.0756,1.69047,0.0014852,0.0014852,0.0014852
29
+ 28,270.247,1.23176,0.812,1.45681,0.83439,0.75283,0.83321,0.48824,1.39671,0.87402,1.64789,0.0014654,0.0014654,0.0014654
30
+ 29,279.486,1.24705,0.81315,1.44412,0.85197,0.78161,0.85692,0.49504,1.39353,0.82323,1.6556,0.0014456,0.0014456,0.0014456
31
+ 30,288.78,1.21214,0.76577,1.43305,0.86983,0.68103,0.80354,0.47767,1.35875,0.89614,1.63701,0.0014258,0.0014258,0.0014258
32
+ 31,297.938,1.21478,0.78961,1.45384,0.83424,0.75203,0.83028,0.48701,1.3712,0.82356,1.64657,0.001406,0.001406,0.001406
33
+ 32,307.095,1.19187,0.75587,1.4258,0.85148,0.78255,0.86251,0.50338,1.41945,0.81681,1.68101,0.0013862,0.0013862,0.0013862
34
+ 33,316.262,1.20461,0.76063,1.43,0.87687,0.7773,0.86761,0.5073,1.34822,0.80951,1.63719,0.0013664,0.0013664,0.0013664
35
+ 34,325.313,1.22136,0.77185,1.43615,0.87866,0.78033,0.87201,0.53042,1.33272,0.73397,1.59803,0.0013466,0.0013466,0.0013466
36
+ 35,334.312,1.22415,0.76294,1.42664,0.88187,0.76151,0.86111,0.5163,1.30572,0.7679,1.58532,0.0013268,0.0013268,0.0013268
37
+ 36,343.223,1.20012,0.76441,1.44981,0.85396,0.78132,0.85211,0.49677,1.37212,0.82542,1.62502,0.001307,0.001307,0.001307
38
+ 37,352.111,1.18504,0.74126,1.41723,0.89592,0.76679,0.87334,0.50315,1.37316,0.78947,1.64466,0.0012872,0.0012872,0.0012872
39
+ 38,361.055,1.18145,0.73862,1.43024,0.86038,0.79741,0.87002,0.52315,1.33745,0.76658,1.59865,0.0012674,0.0012674,0.0012674
40
+ 39,369.979,1.20491,0.75743,1.43277,0.89337,0.79452,0.87423,0.52227,1.32691,0.75799,1.59203,0.0012476,0.0012476,0.0012476
41
+ 40,379.011,1.19919,0.76606,1.42629,0.85822,0.8046,0.86971,0.51239,1.33807,0.83766,1.61195,0.0012278,0.0012278,0.0012278
42
+ 41,387.987,1.1714,0.73459,1.39192,0.82284,0.80078,0.86155,0.51024,1.3221,0.80162,1.6101,0.001208,0.001208,0.001208
43
+ 42,396.993,1.15496,0.72,1.39963,0.88585,0.76149,0.8665,0.51621,1.31819,0.79478,1.58923,0.0011882,0.0011882,0.0011882
44
+ 43,405.879,1.15589,0.72051,1.40744,0.87341,0.80293,0.88627,0.5276,1.30008,0.74587,1.59072,0.0011684,0.0011684,0.0011684
45
+ 44,414.782,1.15475,0.71808,1.39647,0.84534,0.79315,0.87182,0.51865,1.32603,0.77432,1.59142,0.0011486,0.0011486,0.0011486
46
+ 45,423.69,1.11749,0.70529,1.37211,0.85518,0.80891,0.87089,0.52712,1.31412,0.71649,1.60242,0.0011288,0.0011288,0.0011288
47
+ 46,432.731,1.15758,0.70853,1.40534,0.85844,0.78416,0.85957,0.51918,1.29942,0.73175,1.58805,0.001109,0.001109,0.001109
48
+ 47,441.683,1.15205,0.70456,1.39473,0.83032,0.79454,0.86095,0.50576,1.3436,0.76751,1.61208,0.0010892,0.0010892,0.0010892
49
+ 48,450.625,1.15489,0.71196,1.3947,0.888,0.79739,0.88118,0.52879,1.31004,0.70951,1.58558,0.0010694,0.0010694,0.0010694
50
+ 49,459.532,1.16004,0.70066,1.39395,0.90366,0.80603,0.88656,0.52927,1.32499,0.73298,1.61454,0.0010496,0.0010496,0.0010496
51
+ 50,468.674,1.12451,0.6972,1.37378,0.88496,0.80891,0.88835,0.53037,1.33841,0.72024,1.58109,0.0010298,0.0010298,0.0010298
52
+ 51,477.694,1.13092,0.69826,1.37114,0.87964,0.81466,0.89166,0.53816,1.29777,0.70978,1.56241,0.00101,0.00101,0.00101
53
+ 52,486.714,1.13202,0.68891,1.38571,0.91241,0.76334,0.88793,0.53329,1.30787,0.68498,1.58078,0.0009902,0.0009902,0.0009902
54
+ 53,495.585,1.14829,0.69267,1.37079,0.8676,0.81034,0.88322,0.53904,1.2779,0.70309,1.52988,0.0009704,0.0009704,0.0009704
55
+ 54,504.501,1.11265,0.67412,1.36788,0.88769,0.80631,0.89518,0.53961,1.28471,0.68697,1.54183,0.0009506,0.0009506,0.0009506
56
+ 55,513.41,1.10971,0.6644,1.36832,0.86442,0.81531,0.88368,0.53115,1.30123,0.70865,1.5646,0.0009308,0.0009308,0.0009308
57
+ 56,522.417,1.12478,0.69525,1.36596,0.87107,0.80316,0.88103,0.53139,1.32475,0.68443,1.57693,0.000911,0.000911,0.000911
58
+ 57,531.321,1.12128,0.6858,1.36895,0.88645,0.80755,0.89561,0.54348,1.29064,0.70149,1.56279,0.0008912,0.0008912,0.0008912
59
+ 58,540.281,1.11225,0.66493,1.35338,0.88424,0.82328,0.89206,0.54512,1.29405,0.67281,1.55546,0.0008714,0.0008714,0.0008714
60
+ 59,549.243,1.12317,0.64961,1.3707,0.90466,0.81034,0.89969,0.54178,1.296,0.68092,1.55595,0.0008516,0.0008516,0.0008516
61
+ 60,558.057,1.10956,0.66645,1.34448,0.87257,0.82184,0.90156,0.5492,1.30798,0.68484,1.57208,0.0008318,0.0008318,0.0008318
62
+ 61,566.868,1.0801,0.64496,1.35292,0.86603,0.81733,0.88608,0.54388,1.28578,0.67129,1.56101,0.000812,0.000812,0.000812
63
+ 62,575.756,1.09556,0.65399,1.34063,0.87683,0.81828,0.89486,0.5422,1.31018,0.68329,1.57863,0.0007922,0.0007922,0.0007922
64
+ 63,584.687,1.08098,0.64331,1.33514,0.88919,0.81857,0.89541,0.53876,1.32305,0.69905,1.57895,0.0007724,0.0007724,0.0007724
65
+ 64,593.636,1.0744,0.62799,1.33583,0.86197,0.82548,0.88713,0.53877,1.29449,0.69357,1.58622,0.0007526,0.0007526,0.0007526
66
+ 65,602.532,1.08017,0.63794,1.3324,0.87687,0.82328,0.89512,0.54273,1.26991,0.67261,1.54143,0.0007328,0.0007328,0.0007328
67
+ 66,611.532,1.08568,0.63682,1.32461,0.90453,0.79741,0.88771,0.543,1.27872,0.66633,1.54653,0.000713,0.000713,0.000713
68
+ 67,620.556,1.08803,0.64008,1.34567,0.89697,0.80603,0.89338,0.54439,1.3042,0.66803,1.57031,0.0006932,0.0006932,0.0006932
69
+ 68,629.61,1.07021,0.63241,1.34447,0.86759,0.81903,0.89622,0.55259,1.26938,0.65999,1.5548,0.0006734,0.0006734,0.0006734
70
+ 69,638.625,1.06471,0.62188,1.32169,0.87621,0.80338,0.87964,0.54265,1.27434,0.67419,1.5633,0.0006536,0.0006536,0.0006536
71
+ 70,647.786,1.06133,0.62654,1.32127,0.87543,0.80891,0.89045,0.55088,1.27151,0.66992,1.5413,0.0006338,0.0006338,0.0006338
72
+ 71,656.764,1.04262,0.61259,1.32792,0.85963,0.84914,0.89293,0.55641,1.2757,0.65316,1.55236,0.000614,0.000614,0.000614
73
+ 72,665.717,1.05059,0.61391,1.32244,0.88853,0.82459,0.89797,0.55794,1.26634,0.65468,1.53784,0.0005942,0.0005942,0.0005942
74
+ 73,674.651,1.04741,0.61274,1.31436,0.87464,0.81202,0.88532,0.53623,1.29201,0.68375,1.54849,0.0005744,0.0005744,0.0005744
75
+ 74,683.67,1.0381,0.60949,1.30447,0.85858,0.82615,0.89306,0.54723,1.27596,0.6583,1.5454,0.0005546,0.0005546,0.0005546
76
+ 75,692.673,1.05436,0.61843,1.3065,0.87759,0.82902,0.89526,0.55004,1.26772,0.64665,1.53873,0.0005348,0.0005348,0.0005348
77
+ 76,701.605,1.02521,0.59577,1.30349,0.90262,0.81233,0.90157,0.5582,1.25012,0.63955,1.52344,0.000515,0.000515,0.000515
78
+ 77,710.635,1.0328,0.60114,1.31823,0.88014,0.8204,0.89513,0.54846,1.2727,0.642,1.54586,0.0004952,0.0004952,0.0004952
79
+ 78,719.744,1.02943,0.60148,1.30689,0.85873,0.84716,0.9006,0.55379,1.2554,0.63666,1.53813,0.0004754,0.0004754,0.0004754
80
+ 79,728.689,1.03962,0.60224,1.317,0.89872,0.79885,0.89555,0.55257,1.28129,0.64105,1.55011,0.0004556,0.0004556,0.0004556
81
+ 80,737.7,1.03836,0.60384,1.31311,0.87591,0.83333,0.90251,0.55079,1.29846,0.64041,1.57134,0.0004358,0.0004358,0.0004358
82
+ 81,746.66,1.02534,0.59162,1.29921,0.88864,0.82549,0.89548,0.55443,1.28166,0.64788,1.54675,0.000416,0.000416,0.000416
83
+ 82,755.744,1.01967,0.58088,1.29571,0.88926,0.82471,0.90436,0.55973,1.29134,0.62065,1.5604,0.0003962,0.0003962,0.0003962
84
+ 83,764.798,1.02391,0.59178,1.30464,0.87742,0.83304,0.90432,0.55691,1.27565,0.62943,1.5392,0.0003764,0.0003764,0.0003764
85
+ 84,773.836,1.02328,0.5812,1.30753,0.89393,0.82759,0.90098,0.55469,1.26313,0.62907,1.5305,0.0003566,0.0003566,0.0003566
86
+ 85,782.869,1.01358,0.58473,1.29279,0.86161,0.8498,0.90439,0.56136,1.26475,0.64935,1.53702,0.0003368,0.0003368,0.0003368
87
+ 86,791.965,0.98393,0.57121,1.28357,0.89583,0.81034,0.89578,0.55461,1.27453,0.63692,1.54605,0.000317,0.000317,0.000317
88
+ 87,800.944,0.99825,0.57013,1.28459,0.88379,0.83048,0.90143,0.55464,1.27258,0.63372,1.55326,0.0002972,0.0002972,0.0002972
89
+ 88,810,0.98807,0.56522,1.27838,0.83558,0.86163,0.89748,0.55547,1.25452,0.63512,1.52652,0.0002774,0.0002774,0.0002774
90
+ 89,818.939,0.97548,0.56016,1.26533,0.87616,0.84367,0.89697,0.56146,1.25059,0.62305,1.52675,0.0002576,0.0002576,0.0002576
91
+ 90,827.901,0.9744,0.55607,1.27263,0.88596,0.82603,0.89564,0.5547,1.26498,0.64746,1.53333,0.0002378,0.0002378,0.0002378
92
+ 91,884.942,0.92671,0.52201,1.29192,0.87547,0.81178,0.88805,0.55023,1.27226,0.63147,1.55035,0.000218,0.000218,0.000218
93
+ 92,893.926,0.90089,0.48884,1.26584,0.87392,0.82328,0.89115,0.55426,1.26719,0.63781,1.54612,0.0001982,0.0001982,0.0001982
94
+ 93,902.74,0.89226,0.47465,1.2594,0.8651,0.8319,0.89398,0.55581,1.25007,0.63482,1.53608,0.0001784,0.0001784,0.0001784
95
+ 94,911.678,0.88249,0.4689,1.24724,0.8614,0.83043,0.88979,0.5576,1.25283,0.62962,1.53073,0.0001586,0.0001586,0.0001586
96
+ 95,920.722,0.86839,0.4597,1.23495,0.87535,0.82471,0.88724,0.55608,1.2495,0.63337,1.52602,0.0001388,0.0001388,0.0001388
97
+ 96,929.607,0.858,0.45001,1.23184,0.87742,0.84052,0.89678,0.56137,1.23627,0.62023,1.52651,0.000119,0.000119,0.000119
98
+ 97,938.459,0.87823,0.45806,1.24071,0.86908,0.84483,0.89604,0.56029,1.24568,0.61641,1.52889,9.92e-05,9.92e-05,9.92e-05
99
+ 98,947.329,0.86539,0.45854,1.24316,0.89238,0.83394,0.90198,0.5668,1.24677,0.60989,1.52438,7.94e-05,7.94e-05,7.94e-05
100
+ 99,956.143,0.85282,0.44364,1.23117,0.88983,0.83554,0.90129,0.56341,1.24388,0.6103,1.5297,5.96e-05,5.96e-05,5.96e-05
101
+ 100,965.133,0.86017,0.45087,1.249,0.88482,0.83882,0.90043,0.56651,1.24629,0.60814,1.52904,3.98e-05,3.98e-05,3.98e-05
train/results.png ADDED
train/train_batch0.jpg ADDED
train/train_batch1.jpg ADDED
train/train_batch2.jpg ADDED
train/train_batch3330.jpg ADDED
train/train_batch3331.jpg ADDED
train/train_batch3332.jpg ADDED
train/val_batch0_labels.jpg ADDED
train/val_batch0_pred.jpg ADDED
train/val_batch1_labels.jpg ADDED
train/val_batch1_pred.jpg ADDED
train/val_batch2_labels.jpg ADDED
train/val_batch2_pred.jpg ADDED
train/weights/best.onnx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7ddb6b5be5ed48c1a3ee3f9b2b1c99300642d9cbc53ea660580f7a2a98919e05
3
+ size 10583465
train/weights/best.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f20bd38ee3fb42fa6ec5578904d7d58c417df842d75bb24b0e6c0f17c16c0627
3
+ size 5477331
train/weights/best.torchscript ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cd4a201acfd7895bcb5eb5e83bf1f4a9c39ec603cde5f48a670bea9fd84e5f9f
3
+ size 10930926
train/weights/best_ncnn_model/metadata.yaml ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ description: Ultralytics YOLO11n model trained on O:\OTHER\AI_DATASETS\yolo\datasets\urchin_datasetv2\split_dataset\data.yaml
2
+ author: Ultralytics
3
+ date: '2024-10-21T07:59:55.948712'
4
+ version: 8.3.17
5
+ license: AGPL-3.0 License (https://ultralytics.com/license)
6
+ docs: https://docs.ultralytics.com
7
+ stride: 32
8
+ task: detect
9
+ batch: 1
10
+ imgsz:
11
+ - 640
12
+ - 640
13
+ names:
14
+ 0: urchin
train/weights/best_ncnn_model/model.ncnn.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f30d497a9b0070f1bbeade23abbad0b2f90eed2f298affcfc36782e5ebd38cfa
3
+ size 10497740
train/weights/best_ncnn_model/model.ncnn.param ADDED
@@ -0,0 +1,277 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 7767517
2
+ 275 327
3
+ Input in0 0 1 in0
4
+ Convolution conv_0 1 1 in0 1 0=16 1=3 11=3 12=1 13=2 14=1 2=1 3=2 4=1 5=1 6=432
5
+ Swish silu_84 1 1 1 2
6
+ Convolution conv_1 1 1 2 3 0=32 1=3 11=3 12=1 13=2 14=1 2=1 3=2 4=1 5=1 6=4608
7
+ Swish silu_85 1 1 3 4
8
+ Convolution conv_2 1 1 4 5 0=32 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=1024
9
+ Swish silu_86 1 1 5 6
10
+ Slice split_0 1 2 6 7 8 -23300=2,16,16 1=0
11
+ Split splitncnn_0 1 3 8 9 10 11
12
+ Convolution conv_3 1 1 11 12 0=8 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=1152
13
+ Swish silu_87 1 1 12 13
14
+ Convolution conv_4 1 1 13 14 0=16 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=1152
15
+ Swish silu_88 1 1 14 15
16
+ BinaryOp add_0 2 1 10 15 16 0=0
17
+ Concat cat_0 3 1 7 9 16 17 0=0
18
+ Convolution conv_5 1 1 17 18 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=3072
19
+ Swish silu_89 1 1 18 19
20
+ Convolution conv_6 1 1 19 20 0=64 1=3 11=3 12=1 13=2 14=1 2=1 3=2 4=1 5=1 6=36864
21
+ Swish silu_90 1 1 20 21
22
+ Convolution conv_7 1 1 21 22 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=4096
23
+ Swish silu_91 1 1 22 23
24
+ Slice split_1 1 2 23 24 25 -23300=2,32,32 1=0
25
+ Split splitncnn_1 1 3 25 26 27 28
26
+ Convolution conv_8 1 1 28 29 0=16 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=4608
27
+ Swish silu_92 1 1 29 30
28
+ Convolution conv_9 1 1 30 31 0=32 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=4608
29
+ Swish silu_93 1 1 31 32
30
+ BinaryOp add_1 2 1 27 32 33 0=0
31
+ Concat cat_1 3 1 24 26 33 34 0=0
32
+ Convolution conv_10 1 1 34 35 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=12288
33
+ Swish silu_94 1 1 35 36
34
+ Split splitncnn_2 1 2 36 37 38
35
+ Convolution conv_11 1 1 38 39 0=128 1=3 11=3 12=1 13=2 14=1 2=1 3=2 4=1 5=1 6=147456
36
+ Swish silu_95 1 1 39 40
37
+ Convolution conv_12 1 1 40 41 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=16384
38
+ Swish silu_96 1 1 41 42
39
+ Slice split_2 1 2 42 43 44 -23300=2,64,64 1=0
40
+ Split splitncnn_3 1 3 44 45 46 47
41
+ Convolution conv_13 1 1 47 48 0=32 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=2048
42
+ Swish silu_97 1 1 48 49
43
+ Split splitncnn_4 1 2 49 50 51
44
+ Convolution conv_14 1 1 51 52 0=32 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=9216
45
+ Swish silu_98 1 1 52 53
46
+ Convolution conv_15 1 1 53 54 0=32 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=9216
47
+ Swish silu_99 1 1 54 55
48
+ BinaryOp add_2 2 1 50 55 56 0=0
49
+ Split splitncnn_5 1 2 56 57 58
50
+ Convolution conv_16 1 1 58 59 0=32 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=9216
51
+ Swish silu_100 1 1 59 60
52
+ Convolution conv_17 1 1 60 61 0=32 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=9216
53
+ Swish silu_101 1 1 61 62
54
+ BinaryOp add_3 2 1 57 62 63 0=0
55
+ Convolution conv_18 1 1 46 64 0=32 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=2048
56
+ Swish silu_102 1 1 64 65
57
+ Concat cat_2 2 1 63 65 66 0=0
58
+ Convolution conv_19 1 1 66 67 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=4096
59
+ Swish silu_103 1 1 67 68
60
+ Concat cat_3 3 1 43 45 68 69 0=0
61
+ Convolution conv_20 1 1 69 70 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=24576
62
+ Swish silu_104 1 1 70 71
63
+ Split splitncnn_6 1 2 71 72 73
64
+ Convolution conv_21 1 1 73 74 0=256 1=3 11=3 12=1 13=2 14=1 2=1 3=2 4=1 5=1 6=294912
65
+ Swish silu_105 1 1 74 75
66
+ Convolution conv_22 1 1 75 76 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=65536
67
+ Swish silu_106 1 1 76 77
68
+ Slice split_3 1 2 77 78 79 -23300=2,128,128 1=0
69
+ Split splitncnn_7 1 3 79 80 81 82
70
+ Convolution conv_23 1 1 82 83 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=8192
71
+ Swish silu_107 1 1 83 84
72
+ Split splitncnn_8 1 2 84 85 86
73
+ Convolution conv_24 1 1 86 87 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
74
+ Swish silu_108 1 1 87 88
75
+ Convolution conv_25 1 1 88 89 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
76
+ Swish silu_109 1 1 89 90
77
+ BinaryOp add_4 2 1 85 90 91 0=0
78
+ Split splitncnn_9 1 2 91 92 93
79
+ Convolution conv_26 1 1 93 94 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
80
+ Swish silu_110 1 1 94 95
81
+ Convolution conv_27 1 1 95 96 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
82
+ Swish silu_111 1 1 96 97
83
+ BinaryOp add_5 2 1 92 97 98 0=0
84
+ Convolution conv_28 1 1 81 99 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=8192
85
+ Swish silu_112 1 1 99 100
86
+ Concat cat_4 2 1 98 100 101 0=0
87
+ Convolution conv_29 1 1 101 102 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=16384
88
+ Swish silu_113 1 1 102 103
89
+ Concat cat_5 3 1 78 80 103 104 0=0
90
+ Convolution conv_30 1 1 104 105 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=98304
91
+ Swish silu_114 1 1 105 106
92
+ Convolution conv_31 1 1 106 107 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=32768
93
+ Swish silu_115 1 1 107 108
94
+ Split splitncnn_10 1 2 108 109 110
95
+ Pooling maxpool2d_81 1 1 110 111 0=0 1=5 11=5 12=1 13=2 2=1 3=2 5=1
96
+ Split splitncnn_11 1 2 111 112 113
97
+ Pooling maxpool2d_82 1 1 113 114 0=0 1=5 11=5 12=1 13=2 2=1 3=2 5=1
98
+ Split splitncnn_12 1 2 114 115 116
99
+ Pooling maxpool2d_83 1 1 116 117 0=0 1=5 11=5 12=1 13=2 2=1 3=2 5=1
100
+ Concat cat_6 4 1 109 112 115 117 118 0=0
101
+ Convolution conv_32 1 1 118 119 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=131072
102
+ Swish silu_116 1 1 119 120
103
+ Convolution conv_33 1 1 120 121 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=65536
104
+ Swish silu_117 1 1 121 122
105
+ Slice split_4 1 2 122 123 124 -23300=2,128,128 1=0
106
+ Split splitncnn_13 1 2 124 125 126
107
+ Convolution conv_34 1 1 126 127 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=32768
108
+ Reshape view_168 1 1 127 128 0=400 1=128 2=2
109
+ Slice split_5 1 3 128 129 130 131 -23300=3,32,32,64 1=1
110
+ Split splitncnn_14 1 2 131 132 133
111
+ Permute transpose_177 1 1 129 134 0=1
112
+ MatMul matmul_175 2 1 134 130 135
113
+ BinaryOp mul_6 1 1 135 136 0=2 1=1 2=1.767767e-01
114
+ Softmax softmax_164 1 1 136 137 0=2 1=1
115
+ MatMul matmultransb_0 2 1 133 137 138 0=1
116
+ Reshape view_169 1 1 138 139 0=20 1=20 2=128
117
+ Reshape reshape_166 1 1 132 140 0=20 1=20 2=128
118
+ ConvolutionDepthWise convdw_180 1 1 140 141 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=1152 7=128
119
+ BinaryOp add_7 2 1 139 141 142 0=0
120
+ Convolution conv_35 1 1 142 143 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=16384
121
+ BinaryOp add_8 2 1 125 143 144 0=0
122
+ Split splitncnn_15 1 2 144 145 146
123
+ Convolution conv_36 1 1 146 147 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=32768
124
+ Swish silu_118 1 1 147 148
125
+ Convolution conv_37 1 1 148 149 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=32768
126
+ BinaryOp add_9 2 1 145 149 150 0=0
127
+ Concat cat_7 2 1 123 150 151 0=0
128
+ Convolution conv_38 1 1 151 152 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=65536
129
+ Swish silu_119 1 1 152 153
130
+ Split splitncnn_16 1 2 153 154 155
131
+ Interp upsample_161 1 1 155 156 0=1 1=2.000000e+00 2=2.000000e+00 6=0
132
+ Concat cat_8 2 1 156 72 157 0=0
133
+ Convolution conv_39 1 1 157 158 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=49152
134
+ Swish silu_120 1 1 158 159
135
+ Slice split_6 1 2 159 160 161 -23300=2,64,64 1=0
136
+ Split splitncnn_17 1 3 161 162 163 164
137
+ Convolution conv_40 1 1 164 165 0=32 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=18432
138
+ Swish silu_121 1 1 165 166
139
+ Convolution conv_41 1 1 166 167 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=18432
140
+ Swish silu_122 1 1 167 168
141
+ BinaryOp add_10 2 1 163 168 169 0=0
142
+ Concat cat_9 3 1 160 162 169 170 0=0
143
+ Convolution conv_42 1 1 170 171 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=24576
144
+ Swish silu_123 1 1 171 172
145
+ Split splitncnn_18 1 2 172 173 174
146
+ Interp upsample_162 1 1 174 175 0=1 1=2.000000e+00 2=2.000000e+00 6=0
147
+ Concat cat_10 2 1 175 37 176 0=0
148
+ Convolution conv_43 1 1 176 177 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=16384
149
+ Swish silu_124 1 1 177 178
150
+ Slice split_7 1 2 178 179 180 -23300=2,32,32 1=0
151
+ Split splitncnn_19 1 3 180 181 182 183
152
+ Convolution conv_44 1 1 183 184 0=16 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=4608
153
+ Swish silu_125 1 1 184 185
154
+ Convolution conv_45 1 1 185 186 0=32 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=4608
155
+ Swish silu_126 1 1 186 187
156
+ BinaryOp add_11 2 1 182 187 188 0=0
157
+ Concat cat_11 3 1 179 181 188 189 0=0
158
+ Convolution conv_46 1 1 189 190 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=6144
159
+ Swish silu_127 1 1 190 191
160
+ Split splitncnn_20 1 3 191 192 193 194
161
+ Convolution conv_47 1 1 193 195 0=64 1=3 11=3 12=1 13=2 14=1 2=1 3=2 4=1 5=1 6=36864
162
+ Swish silu_128 1 1 195 196
163
+ Concat cat_12 2 1 196 173 197 0=0
164
+ Convolution conv_48 1 1 197 198 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=24576
165
+ Swish silu_129 1 1 198 199
166
+ Slice split_8 1 2 199 200 201 -23300=2,64,64 1=0
167
+ Split splitncnn_21 1 3 201 202 203 204
168
+ Convolution conv_49 1 1 204 205 0=32 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=18432
169
+ Swish silu_130 1 1 205 206
170
+ Convolution conv_50 1 1 206 207 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=18432
171
+ Swish silu_131 1 1 207 208
172
+ BinaryOp add_12 2 1 203 208 209 0=0
173
+ Concat cat_13 3 1 200 202 209 210 0=0
174
+ Convolution conv_51 1 1 210 211 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=24576
175
+ Swish silu_132 1 1 211 212
176
+ Split splitncnn_22 1 3 212 213 214 215
177
+ Convolution conv_52 1 1 214 216 0=128 1=3 11=3 12=1 13=2 14=1 2=1 3=2 4=1 5=1 6=147456
178
+ Swish silu_133 1 1 216 217
179
+ Concat cat_14 2 1 217 154 218 0=0
180
+ Convolution conv_53 1 1 218 219 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=98304
181
+ Swish silu_134 1 1 219 220
182
+ Slice split_9 1 2 220 221 222 -23300=2,128,128 1=0
183
+ Split splitncnn_23 1 3 222 223 224 225
184
+ Convolution conv_54 1 1 225 226 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=8192
185
+ Swish silu_135 1 1 226 227
186
+ Split splitncnn_24 1 2 227 228 229
187
+ Convolution conv_55 1 1 229 230 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
188
+ Swish silu_136 1 1 230 231
189
+ Convolution conv_56 1 1 231 232 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
190
+ Swish silu_137 1 1 232 233
191
+ BinaryOp add_13 2 1 228 233 234 0=0
192
+ Split splitncnn_25 1 2 234 235 236
193
+ Convolution conv_57 1 1 236 237 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
194
+ Swish silu_138 1 1 237 238
195
+ Convolution conv_58 1 1 238 239 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
196
+ Swish silu_139 1 1 239 240
197
+ BinaryOp add_14 2 1 235 240 241 0=0
198
+ Convolution conv_59 1 1 224 242 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=8192
199
+ Swish silu_140 1 1 242 243
200
+ Concat cat_15 2 1 241 243 244 0=0
201
+ Convolution conv_60 1 1 244 245 0=128 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=16384
202
+ Swish silu_141 1 1 245 246
203
+ Concat cat_16 3 1 221 223 246 247 0=0
204
+ Convolution conv_61 1 1 247 248 0=256 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=98304
205
+ Swish silu_142 1 1 248 249
206
+ Split splitncnn_26 1 2 249 250 251
207
+ MemoryData pnnx_188 0 1 252 0=8400
208
+ Convolution conv_62 1 1 192 253 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
209
+ Swish silu_143 1 1 253 254
210
+ Convolution conv_63 1 1 254 255 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
211
+ Swish silu_144 1 1 255 256
212
+ Convolution conv_64 1 1 256 257 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=4096
213
+ ConvolutionDepthWise convdw_181 1 1 194 258 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=576 7=64
214
+ Swish silu_145 1 1 258 259
215
+ Convolution conv_65 1 1 259 260 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=4096
216
+ Swish silu_146 1 1 260 261
217
+ ConvolutionDepthWise convdw_182 1 1 261 262 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=576 7=64
218
+ Swish silu_147 1 1 262 263
219
+ Convolution conv_66 1 1 263 264 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=4096
220
+ Swish silu_148 1 1 264 265
221
+ Convolution conv_67 1 1 265 266 0=1 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=64
222
+ Concat cat_17 2 1 257 266 267 0=0
223
+ Convolution conv_68 1 1 213 268 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=73728
224
+ Swish silu_149 1 1 268 269
225
+ Convolution conv_69 1 1 269 270 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
226
+ Swish silu_150 1 1 270 271
227
+ Convolution conv_70 1 1 271 272 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=4096
228
+ ConvolutionDepthWise convdw_183 1 1 215 273 0=128 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=1152 7=128
229
+ Swish silu_151 1 1 273 274
230
+ Convolution conv_71 1 1 274 275 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=8192
231
+ Swish silu_152 1 1 275 276
232
+ ConvolutionDepthWise convdw_184 1 1 276 277 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=576 7=64
233
+ Swish silu_153 1 1 277 278
234
+ Convolution conv_72 1 1 278 279 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=4096
235
+ Swish silu_154 1 1 279 280
236
+ Convolution conv_73 1 1 280 281 0=1 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=64
237
+ Concat cat_18 2 1 272 281 282 0=0
238
+ Convolution conv_74 1 1 250 283 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=147456
239
+ Swish silu_155 1 1 283 284
240
+ Convolution conv_75 1 1 284 285 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=36864
241
+ Swish silu_156 1 1 285 286
242
+ Convolution conv_76 1 1 286 287 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=4096
243
+ ConvolutionDepthWise convdw_185 1 1 251 288 0=256 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=2304 7=256
244
+ Swish silu_157 1 1 288 289
245
+ Convolution conv_77 1 1 289 290 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=16384
246
+ Swish silu_158 1 1 290 291
247
+ ConvolutionDepthWise convdw_186 1 1 291 292 0=64 1=3 11=3 12=1 13=1 14=1 2=1 3=1 4=1 5=1 6=576 7=64
248
+ Swish silu_159 1 1 292 293
249
+ Convolution conv_78 1 1 293 294 0=64 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=4096
250
+ Swish silu_160 1 1 294 295
251
+ Convolution conv_79 1 1 295 296 0=1 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=1 6=64
252
+ Concat cat_19 2 1 287 296 297 0=0
253
+ Reshape view_170 1 1 267 298 0=6400 1=65
254
+ Reshape view_171 1 1 282 299 0=1600 1=65
255
+ Reshape view_172 1 1 297 300 0=400 1=65
256
+ Concat cat_20 3 1 298 299 300 301 0=1
257
+ Slice split_10 1 2 301 302 303 -23300=2,64,1 1=0
258
+ Reshape view_173 1 1 302 304 0=8400 1=16 2=4
259
+ Permute transpose_179 1 1 304 305 0=2
260
+ Softmax softmax_165 1 1 305 306 0=0 1=1
261
+ Convolution conv_80 1 1 306 307 0=1 1=1 11=1 12=1 13=1 14=0 2=1 3=1 4=0 5=0 6=16
262
+ Reshape view_174 1 1 307 308 0=8400 1=4
263
+ MemoryData pnnx_fold_anchor_points.1 0 1 309 0=8400 1=2
264
+ MemoryData pnnx_fold_anchor_points.1_1 0 1 310 0=8400 1=2
265
+ Slice chunk_0 1 2 308 311 312 -23300=2,-233,-233 1=0
266
+ BinaryOp sub_15 2 1 309 311 313 0=1
267
+ Split splitncnn_27 1 2 313 314 315
268
+ BinaryOp add_16 2 1 310 312 316 0=0
269
+ Split splitncnn_28 1 2 316 317 318
270
+ BinaryOp add_17 2 1 314 317 319 0=0
271
+ BinaryOp div_18 1 1 319 320 0=3 1=1 2=2.000000e+00
272
+ BinaryOp sub_19 2 1 318 315 321 0=1
273
+ Concat cat_21 2 1 320 321 322 0=0
274
+ Reshape reshape_167 1 1 252 323 0=8400 1=1
275
+ BinaryOp mul_20 2 1 322 323 324 0=2
276
+ Sigmoid sigmoid_163 1 1 303 325
277
+ Concat cat_22 2 1 324 325 out0 0=0
train/weights/best_ncnn_model/model_ncnn.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import numpy as np
import ncnn
import torch


def test_inference():
    """Run one ncnn inference of the exported YOLO11n urchin detector.

    Feeds a fixed-seed random 1x3x640x640 float tensor through the ncnn
    model exported to ``best_ncnn_model`` and returns the "out0" blob as a
    torch tensor (batch dimension restored). Returns a tuple if the model
    ever produces multiple outputs.

    Requires ``model.ncnn.param`` / ``model.ncnn.bin`` to exist at the
    hard-coded relative path below.
    """
    torch.manual_seed(0)  # deterministic dummy input for reproducible smoke tests
    in0 = torch.rand(1, 3, 640, 640, dtype=torch.float)
    out = []

    with ncnn.Net() as net:
        # BUG FIX: the original used plain string literals, so "\t" in
        # "\train" became a TAB and "\b" in "\best..." a BACKSPACE,
        # corrupting the path. Raw strings preserve the backslashes.
        net.load_param(r"runs\detect\train\weights\best_ncnn_model\model.ncnn.param")
        net.load_model(r"runs\detect\train\weights\best_ncnn_model\model.ncnn.bin")

        with net.create_extractor() as ex:
            # ncnn.Mat wraps the HWC-stripped (3,640,640) numpy array;
            # .clone() copies so the Mat owns its memory after `in0` scope.
            ex.input("in0", ncnn.Mat(in0.squeeze(0).numpy()).clone())

            _, out0 = ex.extract("out0")
            out.append(torch.from_numpy(np.array(out0)).unsqueeze(0))

    if len(out) == 1:
        return out[0]
    return tuple(out)


if __name__ == "__main__":
    print(test_inference())
train/weights/last.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:18951498e21fd6bf1d01a3301c006f7564d827bce72265ec1fd414b980adc4a4
3
+ size 5477331
yolo11n_urchin_trained.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:437d501dfda4e90ea5989dae8f9b2cc7c61f68aa042753edc905e27d71b72c84
3
+ size 5540153