Detected Pickle imports (300 detections, 12 unique symbols)
- "nltk.tokenize.punkt.PunktSentenceTokenizer"
- "nltk.tokenize.punkt.PunktLanguageVars"
- "nltk.tokenize.punkt.PunktParameters"
- "nltk.tokenize.punkt.PunktToken"
- "collections.defaultdict"
- "copy_reg._reconstructor"
- "__builtin__.int"
- "__builtin__.long"
- "__builtin__.object"
- "__builtin__.set"
- "builtins.int"
- "builtins.set"

The scanner reports one entry per pickled model, so the same symbols recur throughout the 300 detections; deduplicated, the file references only the punkt classes, collections.defaultdict, and the Python 2 (__builtin__, copy_reg) and Python 3 (builtins) builtin names.
Size of remote file: 27.9 MB
Xet hash: 37d8bcbc4ee5ec9267f1ab3dd9f1a3f94072f13fe43e0662fb36c4d7e57cc78e
SHA256: b5e4dea19fcac4ab100eb0af2f650e6b4e0f774905f311cd8d62885ca6897e79
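Before loading a pickle of this size, it is worth confirming the download matches the published SHA256 digest. A minimal sketch using hashlib, streaming the file in chunks so the 27.9 MB download is never held in memory at once; the local filename punkt.pickle is a hypothetical placeholder for wherever the file was saved:

```python
import hashlib

EXPECTED_SHA256 = "b5e4dea19fcac4ab100eb0af2f650e6b4e0f774905f311cd8d62885ca6897e79"

def sha256_of(path, chunk_size=1 << 20):
    """Compute the SHA256 digest of a file, reading 1 MiB at a time."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

assert sha256_of("punkt.pickle") == EXPECTED_SHA256  # hypothetical local filename
```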
Xet efficiently stores large files inside Git by intelligently splitting them into unique chunks, accelerating uploads and downloads.