efficient_sam_vitt_torchscript.pt (41.1 MB), uploaded in verified commit d8dd7fb ("Upload 38 files")

Detected Pickle imports (55)
- "__torch__.efficient_sam.efficient_sam_encoder.PatchEmbed",
- "__torch__.efficient_sam.efficient_sam_decoder.MaskDecoder",
- "__torch__.efficient_sam.mlp.___torch_mangle_23.MLPBlock",
- "__torch__.torch.nn.modules.container.___torch_mangle_19.ModuleList",
- "__torch__.torch.nn.modules.container.___torch_mangle_11.Sequential",
- "__torch__.torch.nn.modules.container.___torch_mangle_16.Sequential",
- "__torch__.efficient_sam.efficient_sam.EfficientSam",
- "__torch__.efficient_sam.efficient_sam_encoder.ImageEncoderViT",
- "__torch__.efficient_sam.two_way_transformer.AttentionForTwoWayAttentionBlock",
- "__torch__.torch.nn.modules.conv.___torch_mangle_3.Conv2d",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_6.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_10.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_2.Linear",
- "torch.FloatStorage",
- "__torch__.efficient_sam.efficient_sam_decoder.PromptEncoder",
- "__torch__.torch.nn.modules.container.___torch_mangle_14.ModuleList",
- "__torch__.torch.nn.modules.linear.___torch_mangle_13.Linear",
- "__torch__.torch.nn.modules.container.___torch_mangle_24.ModuleList",
- "__torch__.efficient_sam.mlp.MLPBlock",
- "__torch__.torch.nn.modules.container.___torch_mangle_20.Sequential",
- "__torch__.efficient_sam.mlp.___torch_mangle_26.MLPBlock",
- "__torch__.efficient_sam.efficient_sam_encoder.Attention",
- "__torch__.torch.nn.modules.conv.ConvTranspose2d",
- "__torch__.efficient_sam.efficient_sam_encoder.Mlp",
- "__torch__.torch.nn.modules.linear.___torch_mangle_8.Linear",
- "__torch__.torch.nn.modules.sparse.___torch_mangle_15.Embedding",
- "__torch__.torch.nn.modules.conv.Conv2d",
- "__torch__.torch.nn.modules.conv.___torch_mangle_4.Conv2d",
- "torch._utils._rebuild_tensor_v2",
- "__torch__.efficient_sam.two_way_transformer.___torch_mangle_9.AttentionForTwoWayAttentionBlock",
- "__torch__.efficient_sam.efficient_sam_encoder.Block",
- "__torch__.torch.nn.modules.linear.Linear",
- "__torch__.torch.nn.modules.linear.Identity",
- "__torch__.torch.nn.modules.container.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_25.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_5.Linear",
- "__torch__.torch.nn.modules.conv.___torch_mangle_17.ConvTranspose2d",
- "__torch__.torch.nn.modules.container.ModuleList",
- "__torch__.torch.nn.modules.linear.___torch_mangle_0.Linear",
- "__torch__.efficient_sam.two_way_transformer.TwoWayTransformer",
- "__torch__.torch.nn.modules.linear.___torch_mangle_22.Linear",
- "__torch__.efficient_sam.efficient_sam_decoder.PositionEmbeddingRandom",
- "__torch__.efficient_sam.efficient_sam_encoder.LayerNorm2d",
- "__torch__.torch.nn.modules.activation.GELU",
- "__torch__.torch.nn.modules.sparse.Embedding",
- "__torch__.torch.nn.modules.container.___torch_mangle_21.ModuleList",
- "__torch__.torch.nn.modules.container.___torch_mangle_12.ModuleList",
- "__torch__.torch.nn.modules.linear.___torch_mangle_7.Linear",
- "torch.jit._pickle.build_intlist",
- "__torch__.torch.nn.modules.normalization.LayerNorm",
- "collections.OrderedDict",
- "__torch__.efficient_sam.two_way_transformer.TwoWayAttentionBlock",
- "__torch__.torch.nn.modules.normalization.GroupNorm",
- "__torch__.torch.nn.modules.container.___torch_mangle_18.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_1.Linear"