Marlon Wiprud committed on
Commit ·
9df03e0
1
Parent(s): 18097d1
update path
Browse files- handler.py +7 -1
handler.py
CHANGED
|
@@ -18,6 +18,9 @@ logging.basicConfig(level=logging.INFO)
|
|
| 18 |
hf_logging.set_verbosity_debug()
|
| 19 |
|
| 20 |
|
|
|
|
|
|
|
|
|
|
| 21 |
def list_files(directory, depth, max_depth=5):
|
| 22 |
# Lists all files and directories in the given directory
|
| 23 |
for filename in os.listdir(directory):
|
|
@@ -74,11 +77,14 @@ class EndpointHandler:
|
|
| 74 |
},
|
| 75 |
no_split_module_classes=["CogVLMDecoderLayer"],
|
| 76 |
)
|
|
|
|
|
|
|
| 77 |
self.model = load_checkpoint_and_dispatch(
|
| 78 |
self.model,
|
|
|
|
| 79 |
# "~/.cache/huggingface/hub/models--THUDM--cogvlm-chat-hf/snapshots/8abca878c4257412c4c38eeafaed3fe27a036730",
|
| 80 |
# "/home/ec2-user/.cache/huggingface/hub/models--THUDM--cogvlm-chat-hf/snapshots/8abca878c4257412c4c38eeafaed3fe27a036730", # typical, '~/.cache/huggingface/hub/models--THUDM--cogvlm-chat-hf/snapshots/balabala'
|
| 81 |
-
"~/.cache/huggingface/modules/transformers_modules/THUDM/cogvlm-chat-hf/8abca878c4257412c4c38eeafaed3fe27a036730", # typical, '~/.cache/huggingface/hub/models--THUDM--cogvlm-chat-hf/snapshots/balabala'
|
| 82 |
device_map=device_map,
|
| 83 |
no_split_module_classes=["CogVLMDecoderLayer"],
|
| 84 |
)
|
|
|
|
| 18 |
hf_logging.set_verbosity_debug()
|
| 19 |
|
| 20 |
|
| 21 |
+
# 2023/11/28 06:41:40 ~ loading configuration file generation_config.json from cache at /root/.cache/huggingface/hub/models--THUDM--cogvlm-chat-hf/snapshots/8abca878c4257412c4c38eeafaed3fe27a036730/generation_config.json
|
| 22 |
+
|
| 23 |
+
|
| 24 |
def list_files(directory, depth, max_depth=5):
|
| 25 |
# Lists all files and directories in the given directory
|
| 26 |
for filename in os.listdir(directory):
|
|
|
|
| 77 |
},
|
| 78 |
no_split_module_classes=["CogVLMDecoderLayer"],
|
| 79 |
)
|
| 80 |
+
|
| 81 |
+
# .cache/huggingface/hub/models--THUDM--cogvlm-chat-hf/snapshots/8abca878c4257412c4c38eeafaed3fe27a036730
|
| 82 |
self.model = load_checkpoint_and_dispatch(
|
| 83 |
self.model,
|
| 84 |
+
"/root/.cache/huggingface/hub/models--THUDM--cogvlm-chat-hf/snapshots/8abca878c4257412c4c38eeafaed3fe27a036730",
|
| 85 |
# "~/.cache/huggingface/hub/models--THUDM--cogvlm-chat-hf/snapshots/8abca878c4257412c4c38eeafaed3fe27a036730",
|
| 86 |
# "/home/ec2-user/.cache/huggingface/hub/models--THUDM--cogvlm-chat-hf/snapshots/8abca878c4257412c4c38eeafaed3fe27a036730", # typical, '~/.cache/huggingface/hub/models--THUDM--cogvlm-chat-hf/snapshots/balabala'
|
| 87 |
+
# "~/.cache/huggingface/modules/transformers_modules/THUDM/cogvlm-chat-hf/8abca878c4257412c4c38eeafaed3fe27a036730", # typical, '~/.cache/huggingface/hub/models--THUDM--cogvlm-chat-hf/snapshots/balabala'
|
| 88 |
device_map=device_map,
|
| 89 |
no_split_module_classes=["CogVLMDecoderLayer"],
|
| 90 |
)
|