fixed safetensors.py; moved scripts into scripts/
Browse files
_safetensors.py
CHANGED
|
@@ -17,8 +17,6 @@ class WritingSafeTensors:
|
|
| 17 |
self.weight_map[name] = self.file
|
| 18 |
self.file.add(name, tensor)
|
| 19 |
if self.file.size >= self.file_size:
|
| 20 |
-
import pdb; pdb.set_trace()
|
| 21 |
-
'''BUG WHERE ONE TENSOR IS DROPPED'''
|
| 22 |
self.file.undo(name, tensor)
|
| 23 |
ct = len(self.files)
|
| 24 |
if len(self.files) == 1:
|
|
@@ -31,7 +29,7 @@ class WritingSafeTensors:
|
|
| 31 |
self.file = self.File(f'{self.name}-{ct:05}.safetensors', index = ct)
|
| 32 |
self.files[self.file.filename] = self.file
|
| 33 |
self.file.add(name, tensor)
|
| 34 |
-
|
| 35 |
def finalize(self):
|
| 36 |
self.file.finalize()
|
| 37 |
if len(self.files) > 1:
|
|
@@ -43,18 +41,23 @@ class WritingSafeTensors:
|
|
| 43 |
ct += 1
|
| 44 |
file.rename(f'{self.name}-{ct:05}-of-{tot:06}.safetensors')
|
| 45 |
with open(index_name, 'w') as fh:
|
| 46 |
-
json.dump(
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 52 |
name: file.filename
|
| 53 |
for name, file in self.weight_map.items()
|
| 54 |
-
}
|
| 55 |
},
|
|
|
|
| 56 |
indent = '\t',
|
| 57 |
-
|
| 58 |
del self.file
|
| 59 |
del self.files
|
| 60 |
del self.metadata
|
|
|
|
| 17 |
self.weight_map[name] = self.file
|
| 18 |
self.file.add(name, tensor)
|
| 19 |
if self.file.size >= self.file_size:
|
|
|
|
|
|
|
| 20 |
self.file.undo(name, tensor)
|
| 21 |
ct = len(self.files)
|
| 22 |
if len(self.files) == 1:
|
|
|
|
| 29 |
self.file = self.File(f'{self.name}-{ct:05}.safetensors', index = ct)
|
| 30 |
self.files[self.file.filename] = self.file
|
| 31 |
self.file.add(name, tensor)
|
| 32 |
+
self.weight_map[name] = self.file
|
| 33 |
def finalize(self):
|
| 34 |
self.file.finalize()
|
| 35 |
if len(self.files) > 1:
|
|
|
|
| 41 |
ct += 1
|
| 42 |
file.rename(f'{self.name}-{ct:05}-of-{tot:06}.safetensors')
|
| 43 |
with open(index_name, 'w') as fh:
|
| 44 |
+
json.dump(
|
| 45 |
+
{
|
| 46 |
+
'metadata': {
|
| 47 |
+
**{
|
| 48 |
+
k: v if type(v) in [int, float, str, list, tuple, dict] else str(v)
|
| 49 |
+
for k, v in self.metadata.items()
|
| 50 |
+
},
|
| 51 |
+
'total_size': self.total_size,
|
| 52 |
+
},
|
| 53 |
+
'weight_map': {
|
| 54 |
name: file.filename
|
| 55 |
for name, file in self.weight_map.items()
|
| 56 |
+
},
|
| 57 |
},
|
| 58 |
+
fh,
|
| 59 |
indent = '\t',
|
| 60 |
+
)
|
| 61 |
del self.file
|
| 62 |
del self.files
|
| 63 |
del self.metadata
|
scripts/README.md
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
Note that most of this functionality is also available via the
`huggingface-cli` command and standard Hugging Face workflows.
|
compare_safetensors.py → scripts/compare_safetensors.py
RENAMED
|
File without changes
|
compress_weights.bash → scripts/compress_weights.bash
RENAMED
|
File without changes
|
download_weights.bash → scripts/download_weights.bash
RENAMED
|
File without changes
|
scripts/lfs_add.bash
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#!/bin/bash
# Manually add a passed list of files to the local LFS store in a parallel
# manner, bypassing git's filter-process.
#
# Uses moreutils parallel: -i substitutes {} with each argument after --.
# For each file larger than 1024 bytes:
#   1. hash it with sha256 (pv shows progress on the controlling tty),
#   2. move it into the local LFS object store ($GIT_DIR/lfs/objects/xx/yy/<sha>),
#   3. replace the working-tree file with an LFS pointer stub,
#   4. git add the stub; on failure, try to restore the original file.

export GIT_DIR=$(git rev-parse --git-dir)
[ -d "$GIT_DIR" ] || exit 1

parallel -j1 -i bash -vx -c '\
sha=$(pv -c "{}" 2>/dev/tty | sha256sum | cut -d " " -f 1);\
size=$(($(stat -c %s "{}")));\
if (( size > 1024 )); then\
dir="$GIT_DIR"/lfs/objects/${sha:0:2}/${sha:2:2}/;\
mkdir -p "$dir";\
mv "{}" "$dir"/${sha} && {\
echo version https://git-lfs.github.com/spec/v1;\
echo oid sha256:"$sha";\
echo size "$size";\
} > "{}" &&\
git add "{}" || { mv "$dir"/${sha} "{}"; false; };\
fi'\
-- "$@"
|
scripts/verify_index_tensors.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#!/usr/bin/env python3
"""Verify that a safetensors index file matches the shards it references.

Usage: verify_index_tensors.py model.safetensors.index.json

Checks that metadata.total_size equals the summed shard file sizes, and that
the set of tensor names stored in each shard equals the set of names the
index's weight_map assigns to that shard.  Exits 0 on success, 1 on mismatch.
"""
import json
import os
import sys


def compare_shard_keys(index_filename, shard_filename, index_keys, file_keys):
    """Return human-readable mismatch messages between the tensor names the
    index assigns to a shard (*index_keys*) and the names actually present in
    the shard file (*file_keys*).  An empty list means they agree.

    Sorted for deterministic output.
    """
    problems = []
    for in_file in sorted(file_keys - index_keys):
        problems.append(f'{in_file} present in {shard_filename} but not {index_filename}')
    for in_index in sorted(index_keys - file_keys):
        problems.append(f'{in_index} present in {index_filename} but not {shard_filename}')
    return problems


def main():
    # Third-party imports deferred here so compare_shard_keys stays
    # importable with only the standard library.
    import safetensors
    import tqdm

    # Exactly one argument: the index JSON path.
    index_filename, = sys.argv[1:]

    with open(index_filename) as fh:
        index = json.load(fh)

    weight_map = index['weight_map']

    # Each shard appears once regardless of how many tensors it holds.
    filenames = set(weight_map.values())
    total_size = sum(os.stat(fn).st_size for fn in filenames)

    ok = True

    if total_size != index['metadata']['total_size']:
        ok = False
        print(f"Wrong total_size in index. stored={index['metadata']['total_size']} correct={total_size}")

    for filename in tqdm.tqdm(filenames, desc=index_filename, unit='f'):
        f = safetensors.safe_open(filename, framework='pt')
        index_keys = {k for k, fn in weight_map.items() if fn == filename}
        file_keys = set(f.keys())
        for problem in compare_shard_keys(index_filename, filename, index_keys, file_keys):
            ok = False
            print()
            print(problem)
            print()

    if ok:
        print('index content matches safetensors content.')
        sys.exit(0)
    else:
        print('index content MISMATCHES safetensors content.')
        sys.exit(1)


if __name__ == '__main__':
    main()