"""Instantiate a randomly initialized MiniMax causal LM from a config.json
and export its weights in safetensors format.

Expects the model directory to contain config.json plus the custom
modeling_minimax.py / configuration_minimax.py modules. Writes
model.safetensors and config.used.json back into that directory.
"""

import json
import os
import sys

import torch
from safetensors.torch import save_file

# Directory holding the model code and config. Overridable via the
# MINIMAX_MODEL_DIR environment variable so the script is not hard-wired
# to a single developer's machine.
DEFAULT_MODEL_DIR = "/Users/Goekdeniz.Guelmez@computacenter.com/Library/CloudStorage/OneDrive-COMPUTACENTER/Desktop/MiniMax01Text-Dev"


def main(model_dir: str | None = None) -> None:
    """Build the model from config and save its weights plus the config used.

    Args:
        model_dir: Directory containing config.json and the custom model
            modules. Defaults to ``$MINIMAX_MODEL_DIR``, falling back to
            ``DEFAULT_MODEL_DIR``.
    """
    model_dir = model_dir or os.environ.get("MINIMAX_MODEL_DIR", DEFAULT_MODEL_DIR)

    # Make modeling_minimax / configuration_minimax importable; these live
    # inside model_dir, so the imports must come after the path insert.
    if model_dir not in sys.path:
        sys.path.append(model_dir)
    from modeling_minimax import MiniMaxForCausalLM
    from configuration_minimax import MiniMaxConfig

    # Load the configuration dict and build the config object from it.
    config_path = os.path.join(model_dir, "config.json")
    with open(config_path, "r") as f:
        config_dict = json.load(f)
    config = MiniMaxConfig(**config_dict)

    # Report which attention layout the config selects.
    if getattr(config, "linear_attention", False):
        print("Using linear attention layout from config.")
    else:
        print("Using full attention layout from config.")

    # Fixed seed so the random weight initialization is reproducible.
    torch.manual_seed(42)

    small_model = MiniMaxForCausalLM(config)
    small_model.eval()

    # Sanity-check the size of the freshly built model.
    param_count = sum(p.numel() for p in small_model.parameters())
    print(f"Model has {param_count:,} parameters")

    # Persist the exact config dict that was used, for reproducibility.
    used_config_path = os.path.join(model_dir, "config.used.json")
    with open(used_config_path, "w") as f:
        json.dump(config_dict, f, indent=2)

    # NOTE(review): save_file raises RuntimeError if the model ties weights
    # (shared tensors, e.g. input/output embeddings); if that happens,
    # switch to safetensors.torch.save_model, which deduplicates them.
    save_file(small_model.state_dict(), os.path.join(model_dir, "model.safetensors"))
    print("Model saved in safetensors format")
    print(small_model)


if __name__ == "__main__":
    main()