[WIP] Upload folder using huggingface_hub (multi-commit 4c76aea3e91bf44420a5c3fb05cc61fc0821d64f4abf66f812fa5d643fa0826d)

#1
.gitattributes CHANGED
@@ -80,3 +80,4 @@ Library/PackageCache/com.unity.xr.oculus@3.3.0/Runtime/x86/OVRPlugin.dll filter=
 Library/ScriptAssemblies/Unity.InputSystem.dll filter=lfs diff=lfs merge=lfs -text
 Library/ScriptAssemblies/Unity.VisualScripting.Core.Editor.dll filter=lfs diff=lfs merge=lfs -text
 Library/SourceAssetDB filter=lfs diff=lfs merge=lfs -text
+ demo_virtual.mp4 filter=lfs diff=lfs merge=lfs -text
DisplayPos.m ADDED
@@ -0,0 +1,10 @@
+ function out = DisplayPos(obj)
+ pos = getJointAndFingerPos(obj);
+ out = ['End Effector Position - X: ' sprintf('%.2f', obj.EndEffectorPose(1)) ...
+        ', Y: ' sprintf('%.2f', obj.EndEffectorPose(2)) ...
+        ', Z: ' sprintf('%.2f', obj.EndEffectorPose(3)) ...
+        '. Twist Angle (in radians): ' sprintf('%.2f', pos(7)) ...
+        ', Finger 1 Strength: ' sprintf('%.2f', pos(8)) ...
+        ', Finger 2 Strength: ' sprintf('%.2f', pos(9)) ...
+        ', Finger 3 Strength: ' sprintf('%.2f', pos(10)) '.'];
+ end
EEGClassification.m ADDED
@@ -0,0 +1,16 @@
+ function step = EEGClassification(net, data, step)
+
+ classNames = {'Left', 'Right', 'Grasp', 'Twist', 'Idle'};
+
+ gt = {'Right', 'Grasp', 'Left', 'Twist', 'Left'};
+ output = forward(net, dlarray(data, 'SSCB'));
+ [~, predictedIndex] = max(output, [], 'all');
+ if strcmp(classNames{predictedIndex}, gt{step})
+     step = step + 1;
+     disp([' Decoding result: ' classNames{predictedIndex} '. Correct. Moving on to the next step.']);
+ else
+     step = 1;
+     disp([' Decoding result: ' classNames{predictedIndex} '. Incorrect. Going back to step 1.']);
+ end
+ pause(5);
+ end
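A minimal usage sketch for this classifier; the 32-channel x 750-sample window, the randn placeholder data, and the idea that EEGNetParams.mat stores a dlnetwork named net are assumptions for illustration only, not stated anywhere in this diff:

% Hypothetical single decoding step -- all names and sizes below are assumptions
load('EEGNetParams.mat', 'net');             % assumed: the .mat file holds a dlnetwork 'net'
window = randn(32, 750, 1, 1);               % assumed: 32 channels x 750 samples, one trial
step = 1;
step = EEGClassification(net, window, step); % advances only if the decoded class matches gt{step}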
EEGInitialization.m ADDED
@@ -0,0 +1,24 @@
+ function [inlet, fs, chan] = EEGInitialization()
+ % Load the LSL library
+ lib = lsl_loadlib();
+
+ % Find a stream of type 'EEG'
+ result = {};
+ maxAttempts = 1; % Set a maximum number of attempts to find the stream
+ attempts = 0;
+
+ while isempty(result) && attempts < maxAttempts
+     result = lsl_resolve_byprop(lib, 'type', 'EEG');
+     pause(1); % Optional: wait for a second before trying again
+     attempts = attempts + 1;
+ end
+
+ if isempty(result)
+     error('Failed to find an EEG stream');
+ end
+
+ % Connect to the first found stream
+ inlet = lsl_inlet(result{1});
+ fs = result{1}.nominal_srate();
+ chan = result{1}.channel_count();
+ end
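Once the inlet is open, samples would typically be pulled in a short acquisition loop; the sketch below uses the liblsl-Matlab pull_chunk call with an assumed 3-second analysis window and buffering scheme, neither of which is specified in this diff:

% Hypothetical acquisition loop -- window length and buffering are assumptions
[inlet, fs, chan] = EEGInitialization();
winLen = round(3 * fs);                      % assumed: 3 s window (e.g. 750 samples at 250 Hz)
buf = zeros(chan, 0);
while size(buf, 2) < winLen
    [chunk, ~] = inlet.pull_chunk();         % liblsl-Matlab returns [channels x new samples]
    buf = [buf, chunk];                      %#ok<AGROW>
    pause(0.05);
end
data = reshape(buf(:, 1:winLen), chan, winLen, 1, 1);   % matches the 'SSCB' layout used above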
EEGNetModel.m ADDED
@@ -0,0 +1,60 @@
+ function layers = EEGNetModel(in_chans, n_classes, varargin)
+ % EEGNetv4 creation function for MATLAB
+
+ % Default parameters
+ p = inputParser;
+ addRequired(p, 'in_chans');
+ addRequired(p, 'n_classes');
+ addRequired(p, 'input_window_samples');
+ addParameter(p, 'pool_mode', 'mean');
+ addParameter(p, 'F1', 8);
+ addParameter(p, 'D', 2);
+ addParameter(p, 'F2', 16);
+ addParameter(p, 'kernel_length', 64);
+ addParameter(p, 'third_kernel_size', [8, 4]);
+ addParameter(p, 'drop_prob', 0.25);
+ parse(p, in_chans, n_classes, varargin{:});
+
+ % Extract parameters from parsed input
+ params = p.Results;
+
+ % EEGNetv4 Layers
+
+ % First set of layers
+ layers = [
+     imageInputLayer([params.in_chans, params.input_window_samples, 1], 'Normalization', 'none')
+     convolution2dLayer([1, params.kernel_length], params.F1, 'Stride', [1, 1], 'Padding', [0, floor(params.kernel_length / 2)])
+     batchNormalizationLayer()
+     convolution2dLayer([params.in_chans, 1], params.F1*params.D, 'Stride', [1, 1], 'Padding', [0, 0])
+     batchNormalizationLayer()
+     reluLayer()
+     averagePooling2dLayer([1, 4], 'Stride', [1, 4])
+     dropoutLayer(params.drop_prob)
+ ];
+
+ % Second set of layers (Depthwise Separable Convolution)
+ layers = [
+     layers
+     convolution2dLayer([1, 16], params.F1*params.D, 'Stride', [1, 1], 'Padding', [0, 8])
+     convolution2dLayer([1, 1], params.F2, 'Stride', [1, 1], 'Padding', [0, 0])
+     batchNormalizationLayer()
+     reluLayer()
+     averagePooling2dLayer([1, 8], 'Stride', [1, 8])
+     dropoutLayer(params.drop_prob)
+ ];
+
+ % Third set of layers
+ layers = [
+     layers
+     convolution2dLayer([1, 23], params.n_classes)
+     softmaxLayer()
+     classificationLayer()
+ ];
+
+ % Convert layers to layerGraph
+ % lgraph = layerGraph(layers);
+
+ % Convert layerGraph to dlnetwork
+ % net = dlnetwork(lgraph);
+ end
+
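For reference, one way the constructor above might be invoked. Note that the [1, 23] kernel of the final convolution hard-codes the feature-map width, so the 750-sample window used here (which reduces to a 1x23 map before that layer) and the 32-channel / 5-class setup are assumptions chosen purely so the sizes line up:

% Hypothetical construction call -- channel count, class count and window length are assumptions
layers = EEGNetModel(32, 5, 750);            % in_chans, n_classes, input_window_samples
analyzeNetwork(layers);                      % optional: confirm the final map is 1x1xn_classes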
EEGNetParams.mat ADDED
Binary file (42.3 kB)
 
EEGNetTraining.m ADDED
@@ -0,0 +1,16 @@
+ function trainedNet = EEGNetTraining(data, labels, layers)
+
+ options = trainingOptions('sgdm', ...
+     'InitialLearnRate', 0.001, ...
+     'MaxEpochs', 100, ...
+     'MiniBatchSize', 1, ...
+     'Shuffle', 'every-epoch', ...
+     'Verbose', false, ...
+     'Plots', 'none');
+
+ % data = dlarray(data, 'SSCB');
+ data = permute(data, [2 3 4 1]);
+ labels = categorical(labels)';
+ trainedNet = trainNetwork(data, labels, layers, options);
+
+ end
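The permute inside EEGNetTraining converts a trials-first array into MATLAB's H x W x C x N image layout, so training data would be stacked roughly as in the sketch below; the trial count, channel count and window length are assumed values used only to make the shapes concrete:

% Hypothetical training call -- all sizes below are assumptions
nTrials = 40;
X = randn(nTrials, 32, 750, 1);              % assumed: [trials x channels x samples x 1]
y = randi(5, 1, nTrials);                    % assumed: integer labels 1..5 for the five classes
layers = EEGNetModel(32, 5, 750);
net = EEGNetTraining(X, y, layers);          % permutes X to [channels x samples x 1 x trials]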
demo_virtual.mp4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:646b8844fbd1a002a02571abab9312d06d5392194dd45b0f64abdeb2a68a3b84
+ size 151099245