|
|
import json
import os
import sys

from safetensors.torch import save_file
|
|
|
|
|
|
|
|
def main() -> None:
    """Build a MiniMaxM1 model from its on-disk config and save its
    (randomly initialized) weights to ``model.safetensors``.

    The checkpoint directory may be overridden via the first CLI argument;
    it defaults to the original hard-coded path. The directory is expected
    to contain ``config.json`` plus the ``modeling_minimax_m1`` /
    ``configuration_minimax_m1`` modules.
    """
    # Allow the directory to be passed on the command line; the default
    # preserves the original behavior when run with no arguments.
    model_dir = sys.argv[1] if len(sys.argv) > 1 else (
        "/Users/gokdenizgulmez/Desktop/mlx-lm/MiniMaxM1-Dev"
    )

    # The modeling/configuration modules live inside the checkpoint
    # directory itself, so it must be importable before the imports below.
    sys.path.append(model_dir)
    from modeling_minimax_m1 import MiniMaxM1ForCausalLM
    from configuration_minimax_m1 import MiniMaxM1Config

    config_path = os.path.join(model_dir, "config.json")
    with open(config_path, "r", encoding="utf-8") as f:
        config_dict = json.load(f)

    config = MiniMaxM1Config(**config_dict)
    small_model = MiniMaxM1ForCausalLM(config)

    param_count = sum(p.numel() for p in small_model.parameters())
    print(f"Model has {param_count:,} parameters")

    # safetensors' save_file refuses tensors that share storage (e.g. tied
    # input/output embeddings, common in CausalLM models) or are
    # non-contiguous. Clone each tensor into its own contiguous buffer so
    # the save cannot fail on aliased weights.
    model_state_dict = {
        name: tensor.contiguous().clone()
        for name, tensor in small_model.state_dict().items()
    }
    save_file(model_state_dict, os.path.join(model_dir, "model.safetensors"))
    print("Model saved in safetensors format")


if __name__ == "__main__":
    main()