sescore2_en.ckpt (2.28 GB, LFS)
Detected Pickle imports (30)
- "transformers.models.roberta.modeling_roberta.RobertaEncoder",
- "transformers.models.roberta.modeling_roberta.RobertaAttention",
- "torch.nn.modules.sparse.Embedding",
- "transformers.models.roberta.modeling_roberta.RobertaSelfOutput",
- "torch.nn.functional.gelu",
- "transformers.models.roberta.modeling_roberta.RobertaModel",
- "train.feedforward.FeedForward",
- "transformers.models.roberta.modeling_roberta.RobertaIntermediate",
- "torch.LongStorage",
- "__builtin__.set",
- "torch.nn.modules.container.ModuleList",
- "torch.float32",
- "transformers.models.roberta.modeling_roberta.RobertaLayer",
- "transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaForMaskedLM",
- "transformers.models.roberta.modeling_roberta.RobertaOutput",
- "collections.OrderedDict",
- "transformers.activations.GELUActivation",
- "transformers.models.roberta.modeling_roberta.RobertaEmbeddings",
- "transformers.models.xlm_roberta.configuration_xlm_roberta.XLMRobertaConfig",
- "torch.nn.modules.activation.Tanh",
- "torch.FloatStorage",
- "torch.nn.modules.dropout.Dropout",
- "transformers.models.roberta.modeling_roberta.RobertaSelfAttention",
- "torch._utils._rebuild_parameter",
- "transformers.models.roberta.modeling_roberta.RobertaLMHead",
- "torch._utils._rebuild_tensor_v2",
- "torch.nn.modules.linear.Linear",
- "torch.nn.modules.container.Sequential",
- "torch.nn.modules.normalization.LayerNorm",
- "__main__.Regression_XLM_Roberta"
sescore2_en_original.ckpt (2.28 GB, LFS)
Detected Pickle imports (30)
- "transformers.models.roberta.modeling_roberta.RobertaEncoder",
- "transformers.models.roberta.modeling_roberta.RobertaAttention",
- "torch.nn.modules.sparse.Embedding",
- "transformers.models.roberta.modeling_roberta.RobertaSelfOutput",
- "torch.nn.functional.gelu",
- "transformers.models.roberta.modeling_roberta.RobertaModel",
- "train.feedforward.FeedForward",
- "transformers.models.roberta.modeling_roberta.RobertaIntermediate",
- "torch.LongStorage",
- "__builtin__.set",
- "torch.nn.modules.container.ModuleList",
- "torch.float32",
- "transformers.models.roberta.modeling_roberta.RobertaLayer",
- "transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaForMaskedLM",
- "transformers.models.roberta.modeling_roberta.RobertaOutput",
- "collections.OrderedDict",
- "transformers.activations.GELUActivation",
- "transformers.models.roberta.modeling_roberta.RobertaEmbeddings",
- "transformers.models.xlm_roberta.configuration_xlm_roberta.XLMRobertaConfig",
- "torch.nn.modules.activation.Tanh",
- "torch.FloatStorage",
- "torch.nn.modules.dropout.Dropout",
- "transformers.models.roberta.modeling_roberta.RobertaSelfAttention",
- "torch._utils._rebuild_parameter",
- "transformers.models.roberta.modeling_roberta.RobertaLMHead",
- "torch._utils._rebuild_tensor_v2",
- "torch.nn.modules.linear.Linear",
- "torch.nn.modules.container.Sequential",
- "torch.nn.modules.normalization.LayerNorm",
- "__main__.Regression_XLM_Roberta"
sescorex_seg.ckpt (2.28 GB, LFS)
Detected Pickle imports (28)
- "torch.nn.modules.sparse.Embedding",
- "train.feedforward.FeedForward",
- "torch.LongStorage",
- "__builtin__.set",
- "torch.nn.modules.container.ModuleList",
- "transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaModel",
- "transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaEmbeddings",
- "torch._C._nn.gelu",
- "collections.OrderedDict",
- "transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaEncoder",
- "transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaPooler",
- "transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaOutput",
- "transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaIntermediate",
- "transformers.activations.GELUActivation",
- "transformers.models.xlm_roberta.configuration_xlm_roberta.XLMRobertaConfig",
- "transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaSelfAttention",
- "torch.nn.modules.activation.Tanh",
- "torch.FloatStorage",
- "torch.nn.modules.dropout.Dropout",
- "transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaSelfOutput",
- "torch._utils._rebuild_parameter",
- "transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaLayer",
- "torch._utils._rebuild_tensor_v2",
- "torch.nn.modules.linear.Linear",
- "torch.nn.modules.normalization.LayerNorm",
- "torch.nn.modules.container.Sequential",
- "transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaAttention",
- "__main__.Regression_XLM_Roberta"
sescorex_sys.ckpt (2.28 GB, LFS)
Detected Pickle imports (30)
- "transformers.models.roberta.modeling_roberta.RobertaEmbeddings",
- "torch.nn.modules.linear.Linear",
- "torch.nn.modules.normalization.LayerNorm",
- "torch.nn.modules.container.Sequential",
- "transformers.models.roberta.modeling_roberta.RobertaSelfOutput",
- "torch._utils._rebuild_parameter",
- "transformers.models.roberta.modeling_roberta.RobertaLayer",
- "transformers.models.roberta.modeling_roberta.RobertaOutput",
- "torch.FloatStorage",
- "transformers.models.roberta.modeling_roberta.RobertaAttention",
- "torch.LongStorage",
- "transformers.activations.GELUActivation",
- "train.feedforward.FeedForward",
- "transformers.models.roberta.modeling_roberta.RobertaModel",
- "torch.nn.modules.sparse.Embedding",
- "torch.nn.modules.activation.Tanh",
- "transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaForMaskedLM",
- "torch._C._nn.gelu",
- "transformers.models.roberta.modeling_roberta.RobertaLMHead",
- "__main__.Regression_XLM_Roberta",
- "transformers.models.xlm_roberta.configuration_xlm_roberta.XLMRobertaConfig",
- "torch.nn.modules.dropout.Dropout",
- "transformers.models.roberta.modeling_roberta.RobertaSelfAttention",
- "torch.nn.modules.container.ModuleList",
- "torch.float32",
- "transformers.models.roberta.modeling_roberta.RobertaEncoder",
- "transformers.models.roberta.modeling_roberta.RobertaIntermediate",
- "collections.OrderedDict",
- "__builtin__.set",
- "torch._utils._rebuild_tensor_v2"