"""Dev script: instantiate a randomly-initialized LongcatFlash model from a
local `config.json` and save its weights in safetensors format.

Reads `config.json` from `model_dir`, builds `LongcatFlashForCausalLM` with
random weights, prints the parameter count, and writes `model.safetensors`
next to the config. Intended as a one-off developer utility, not a library.
"""
import sys
import os
from safetensors.torch import save_file
import json

# Directory containing config.json and the modeling/configuration modules.
# NOTE(review): hard-coded personal path — adjust for your machine.
model_dir = "/Users/Goekdeniz.Guelmez@computacenter.com/Library/CloudStorage/OneDrive-COMPUTACENTER/Desktop/mlx-lm/dev"
# Make the project-local modules below importable.
sys.path.append(model_dir)

from modeling_longcat_flash import LongcatFlashForCausalLM
from configuration_longcat_flash import LongcatFlashConfig

# JSON is UTF-8 by specification; be explicit so the platform default
# encoding (e.g. cp1252 on Windows) cannot corrupt the read.
config_path = os.path.join(model_dir, "config.json")
with open(config_path, "r", encoding="utf-8") as f:
    config_dict = json.load(f)

config = LongcatFlashConfig(**config_dict)

# Randomly-initialized model (no pretrained weights are loaded here).
small_model = LongcatFlashForCausalLM(config)

param_count = sum(p.numel() for p in small_model.parameters())
print(f"Model has {param_count:,} parameters")

# safetensors' save_file rejects non-contiguous tensors and tensors that
# share storage (e.g. tied embedding/lm_head weights, common in causal LMs).
# Detach from autograd and clone to independent, contiguous storage first.
model_state_dict = {
    name: tensor.detach().clone().contiguous()
    for name, tensor in small_model.state_dict().items()
}

save_file(model_state_dict, os.path.join(model_dir, "model.safetensors"))

print("Model saved in safetensors format")