yfyeung/icefall-asr-gigaspeech2-th-zipformer-2024-06-20
4 likes · ONNX · License: apache-2.0
Directory: icefall-asr-gigaspeech2-th-zipformer-2024-06-20 / exp (revision 27202e3)
2 contributors · History: 3 commits
This model has 2 files scanned as suspicious.
Latest commit: update (f8ab89b) by yfyeung, 5 months ago
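
The listing below covers the exp/ directory of this repository at the revision shown above. As a minimal sketch (the huggingface_hub package, network access, and the allow_patterns filter are assumptions; a branch name or the full commit hash can also be passed as revision), the directory could be fetched locally like this:

```python
# Sketch: download only the exp/ folder of this repo at the pinned revision.
# Assumes `pip install huggingface_hub`; the pattern below is illustrative.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(
    repo_id="yfyeung/icefall-asr-gigaspeech2-th-zipformer-2024-06-20",
    revision="27202e3",            # commit shown in the header above
    allow_patterns=["exp/*"],      # restrict the download to exp/
)
print(local_dir)
```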
.gitattributes                      Safe          451 Bytes          update · 5 months ago
decoder-epoch-12-avg-5.int8.onnx    Safe          1.31 MB      LFS   update · 5 months ago
decoder-epoch-12-avg-5.onnx         Safe          5.17 MB      LFS   update · 5 months ago
encoder-epoch-12-avg-5.int8.onnx    Suspicious    155 MB       LFS   update · 5 months ago
encoder-epoch-12-avg-5.onnx         Suspicious    592 MB       LFS   update · 5 months ago
epoch-12-avg-5.pt                   Safe          608 MB       LFS   update · 5 months ago
    Detected Pickle imports (3): collections.OrderedDict, torch._utils._rebuild_tensor_v2, torch.FloatStorage
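
The three imports above are the minimal set produced by a plain state_dict-style PyTorch checkpoint, and all of them are on torch.load's weights-only allowlist. A minimal sketch for loading the file without executing arbitrary pickle code (assumes a local copy and a PyTorch version that supports weights_only):

```python
# Sketch: load the averaged checkpoint with the restricted unpickler.
# The filename comes from the listing; device and key layout are assumptions.
import torch

ckpt = torch.load("epoch-12-avg-5.pt", map_location="cpu", weights_only=True)
print(type(ckpt))        # expected to be an OrderedDict of tensors
print(list(ckpt)[:5])    # first few keys
```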
epoch-12.pt                                       2.43 GB      LFS   update · 5 months ago
    Detected Pickle imports (4): pathlib.PosixPath, collections.OrderedDict, torch._utils._rebuild_tensor_v2, torch.FloatStorage
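
Unlike the averaged checkpoint, this larger one also pickles pathlib.PosixPath, which is not on the default weights-only allowlist, so torch.load(..., weights_only=True) will refuse it unless that type is explicitly allow-listed. A minimal sketch, assuming PyTorch 2.4+ where torch.serialization.add_safe_globals is available:

```python
# Sketch: allow-list pathlib.PosixPath so the guarded loader accepts this file.
# On older PyTorch without add_safe_globals, only a fully trusted load is possible.
import pathlib
import torch

torch.serialization.add_safe_globals([pathlib.PosixPath])
ckpt = torch.load("epoch-12.pt", map_location="cpu", weights_only=True)
print(sorted(ckpt.keys()) if isinstance(ckpt, dict) else type(ckpt))
```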
jit_script.pt                                     610 MB       LFS   update · 5 months ago
    Detected Pickle imports (124):
        __torch__.zipformer.FeedforwardModule
        __torch__.zipformer.SelfAttention
        __torch__.zipformer.___torch_mangle_54.Zipformer2EncoderLayer
        __torch__.torch.nn.modules.linear.___torch_mangle_58.Linear
        __torch__.torch.nn.modules.linear.___torch_mangle_30.Linear
        __torch__.torch.nn.modules.linear.___torch_mangle_24.Linear
        __torch__.torch.nn.modules.activation.Tanh
        __torch__.torch.nn.modules.linear.___torch_mangle_70.Linear
        __torch__.torch.nn.modules.linear.___torch_mangle_49.Linear
        __torch__.zipformer.___torch_mangle_42.SelfAttention
        __torch__.torch.nn.modules.linear.___torch_mangle_47.Linear
        __torch__.torch.nn.modules.linear.Identity
        __torch__.torch.nn.modules.linear.___torch_mangle_8.Linear
        __torch__.torch.nn.modules.conv.___torch_mangle_3.Conv2d
        __torch__.torch.nn.modules.linear.___torch_mangle_11.Linear
        __torch__.torch.nn.modules.linear.___torch_mangle_61.Linear
        __torch__.zipformer.___torch_mangle_15.FeedforwardModule
        collections.OrderedDict
        __torch__.zipformer.___torch_mangle_44.FeedforwardModule
        __torch__.torch.nn.modules.linear.___torch_mangle_71.Linear
        __torch__.torch.nn.modules.linear.___torch_mangle_81.Linear
        __torch__.torch.nn.modules.linear.___torch_mangle_19.Linear
        __torch__.torch.nn.modules.linear.___torch_mangle_64.Linear
        __torch__.zipformer.ConvolutionModule
        __torch__.torch.nn.modules.linear.___torch_mangle_51.Linear
        __torch__.torch.nn.modules.linear.___torch_mangle_66.Linear
        __torch__.zipformer.___torch_mangle_27.FeedforwardModule
        __torch__.torch.nn.modules.linear.___torch_mangle_9.Linear
        __torch__.torch.nn.modules.conv.___torch_mangle_4.Conv2d
        __torch__.torch.nn.modules.linear.___torch_mangle_7.Linear
        __torch__.scaling.___torch_mangle_18.ActivationDropoutAndLinear
        __torch__.zipformer.___torch_mangle_75.Zipformer2EncoderLayer
        __torch__.zipformer.___torch_mangle_77.Zipformer2Encoder
        __torch__.torch.nn.modules.sparse.Embedding
        __torch__.torch.nn.modules.container.___torch_mangle_76.ModuleList
        __torch__.scaling.Dropout2
        __torch__.torch.nn.modules.linear.___torch_mangle_16.Linear
        __torch__.torch.nn.modules.linear.___torch_mangle_12.Linear
        __torch__.zipformer.___torch_mangle_57.DownsampledZipformer2Encoder
        __torch__.torch.nn.modules.container.___torch_mangle_79.ModuleList
        __torch__.torch.nn.modules.conv.___torch_mangle_2.Conv2d
        __torch__.zipformer.___torch_mangle_34.ConvolutionModule
        __torch__.torch.nn.modules.container.ModuleList
        __torch__.zipformer.___torch_mangle_74.ConvolutionModule
        __torch__.torch.nn.modules.linear.___torch_mangle_62.Linear
        __torch__.zipformer.CompactRelPositionalEncoding
        __torch__.joiner.Joiner
        __torch__.zipformer.___torch_mangle_13.FeedforwardModule
        __torch__.scaling.ScheduledFloat
        __torch__.torch.nn.modules.linear.___torch_mangle_22.Linear
        __torch__.torch.nn.modules.container.___torch_mangle_55.ModuleList
        __torch__.zipformer.RelPositionMultiheadAttentionWeights
        __torch__.zipformer.___torch_mangle_25.FeedforwardModule
        __torch__.scaling.___torch_mangle_5.ScheduledFloat
        __torch__.zipformer.___torch_mangle_48.FeedforwardModule
        __torch__.scaling.SwooshR
        __torch__.zipformer.___torch_mangle_29.FeedforwardModule
        __torch__.torch.nn.modules.activation.Sigmoid
        __torch__.torch.nn.modules.linear.___torch_mangle_14.Linear
        __torch__.zipformer.___torch_mangle_60.RelPositionMultiheadAttentionWeights
        __torch__.EncoderModel
        __torch__.zipformer.SimpleDownsample
        __torch__.model.AsrModel
        torch._utils._rebuild_tensor_v2
        __torch__.torch.nn.modules.linear.___torch_mangle_82.Linear
        __torch__.zipformer.___torch_mangle_46.FeedforwardModule
        __torch__.torch.nn.modules.linear.Linear
        __torch__.zipformer.___torch_mangle_65.FeedforwardModule
        __torch__.torch.nn.modules.linear.___torch_mangle_41.Linear
        __torch__.scaling.Identity
        __torch__.zipformer.___torch_mangle_37.Zipformer2Encoder
        __torch__.zipformer.___torch_mangle_72.NonlinAttention
        __torch__.torch.nn.modules.linear.___torch_mangle_17.Linear
        __torch__.torch.nn.modules.conv.Conv1d
        __torch__.zipformer.___torch_mangle_20.RelPositionMultiheadAttentionWeights
        __torch__.zipformer.___torch_mangle_50.NonlinAttention
        __torch__.zipformer.___torch_mangle_53.ConvolutionModule
        __torch__.torch.nn.modules.conv.Conv2d
        __torch__.zipformer.Zipformer2Encoder
        __torch__.zipformer.Zipformer2EncoderLayer
        __torch__.subsampling.Conv2dSubsampling
        __torch__.torch.nn.modules.container.Sequential
        __torch__.scaling.SwooshL
        __torch__.torch.nn.modules.linear.___torch_mangle_45.Linear
        __torch__.torch.nn.modules.linear.___torch_mangle_38.Linear
        __torch__.zipformer.___torch_mangle_23.SelfAttention
        __torch__.zipformer.DownsampledZipformer2Encoder
        __torch__.torch.nn.modules.linear.___torch_mangle_68.Linear
        __torch__.torch.nn.modules.linear.___torch_mangle_28.Linear
        __torch__.scaling.ActivationDropoutAndLinear
        __torch__.torch.nn.modules.conv.___torch_mangle_33.Conv1d
        torch.FloatStorage
        __torch__.torch.nn.modules.conv.___torch_mangle_73.Conv1d
        __torch__.zipformer.___torch_mangle_67.FeedforwardModule
        __torch__.torch.nn.modules.linear.___torch_mangle_40.Linear
        __torch__.torch.nn.modules.container.___torch_mangle_36.ModuleList
        __torch__.torch.nn.modules.linear.___torch_mangle_84.Linear
        __torch__.torch.nn.modules.linear.___torch_mangle_10.Linear
        __torch__.zipformer.___torch_mangle_78.DownsampledZipformer2Encoder
        __torch__.scaling.BiasNorm
        __torch__.zipformer.NonlinAttention
        __torch__.torch.nn.modules.linear.___torch_mangle_59.Linear
        __torch__.torch.nn.modules.linear.___torch_mangle_83.Linear
        __torch__.torch.nn.modules.linear.___torch_mangle_43.Linear
        torch.jit._pickle.build_intlist
        __torch__.torch.nn.modules.conv.___torch_mangle_1.Conv2d
        __torch__.torch.nn.modules.conv.___torch_mangle_80.Conv1d
        __torch__.torch.nn.modules.linear.___torch_mangle_21.Linear
        __torch__.torch.nn.modules.conv.___torch_mangle_0.Conv2d
        __torch__.zipformer.___torch_mangle_69.FeedforwardModule
        __torch__.zipformer.BypassModule
        __torch__.subsampling.ConvNeXt
        __torch__.zipformer.Zipformer2
        __torch__.zipformer.___torch_mangle_63.SelfAttention
        __torch__.decoder.Decoder
        __torch__.torch.nn.modules.conv.___torch_mangle_52.Conv1d
        __torch__.torch.nn.modules.linear.___torch_mangle_32.Linear
        __torch__.zipformer.___torch_mangle_35.Zipformer2EncoderLayer
        __torch__.zipformer.___torch_mangle_56.Zipformer2Encoder
        __torch__.zipformer.___torch_mangle_31.NonlinAttention
        __torch__.zipformer.___torch_mangle_6.BypassModule
        __torch__.torch.nn.modules.linear.___torch_mangle_26.Linear
        __torch__.zipformer.SimpleUpsample
        __torch__.zipformer.___torch_mangle_39.RelPositionMultiheadAttentionWeights
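
The __torch__.* and ___torch_mangle_* names above are TorchScript class stubs recorded when the model (zipformer encoder, decoder, and joiner, per the class names) was serialized with torch.jit; they are not ordinary Python imports. A minimal sketch for opening the archive and listing its submodules, assuming a local copy and a CPU-only environment:

```python
# Sketch: load the TorchScript export and show its top-level structure.
import torch

model = torch.jit.load("jit_script.pt", map_location="cpu")
model.eval()
for name, _ in model.named_children():
    print(name)  # submodule names, e.g. the encoder/decoder/joiner seen above
```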
joiner-epoch-12-avg-5.int8.onnx     Safe          1.03 MB      LFS   update · 5 months ago
joiner-epoch-12-avg-5.onnx          Safe          4.1 MB       LFS   update · 5 months ago
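
The encoder/decoder/joiner pairs (float32 plus int8-quantized) appear to follow the transducer layout produced by icefall's ONNX export and are typically consumed by a runtime such as sherpa-onnx. For a quick local sanity check, each file can be opened with onnxruntime to inspect its input and output signatures. A minimal sketch, assuming onnxruntime is installed and the int8 files have been downloaded into the working directory:

```python
# Sketch: open the quantized encoder/decoder/joiner and print their I/O names.
# Only the filenames come from the listing; everything else is an assumption.
import onnxruntime as ort

for path in (
    "encoder-epoch-12-avg-5.int8.onnx",
    "decoder-epoch-12-avg-5.int8.onnx",
    "joiner-epoch-12-avg-5.int8.onnx",
):
    sess = ort.InferenceSession(path, providers=["CPUExecutionProvider"])
    inputs = [(i.name, i.shape) for i in sess.get_inputs()]
    outputs = [(o.name, o.shape) for o in sess.get_outputs()]
    print(path, inputs, outputs)
```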