ariG23498 (HF Staff) committed on
Commit ef5e2e5 · verified · 1 Parent(s): c4aba36

Upload MiniMaxAI_MiniMax-M2.1_1.py with huggingface_hub
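As a hedged aside on the commit message: a minimal sketch of how a file upload like this is typically pushed with `huggingface_hub`. The `repo_id` and `repo_type` below are illustrative assumptions, not taken from this commit.

```python
# Minimal sketch, assuming the script sits in the current directory and the
# target repo is "ariG23498/quick-checks" (hypothetical repo_id, not from this commit).
from huggingface_hub import HfApi

api = HfApi()  # picks up HF_TOKEN or cached login credentials by default
api.upload_file(
    path_or_fileobj="MiniMaxAI_MiniMax-M2.1_1.py",
    path_in_repo="MiniMaxAI_MiniMax-M2.1_1.py",
    repo_id="ariG23498/quick-checks",  # hypothetical
    repo_type="dataset",               # assumption; could equally be a model repo
    commit_message="Upload MiniMaxAI_MiniMax-M2.1_1.py with huggingface_hub",
)
```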

Files changed (1):
  1. MiniMaxAI_MiniMax-M2.1_1.py +34 -4
MiniMaxAI_MiniMax-M2.1_1.py CHANGED
@@ -21,8 +21,23 @@
 
 try:
     # Load model directly
-    from transformers import AutoModelForCausalLM
-    model = AutoModelForCausalLM.from_pretrained("MiniMaxAI/MiniMax-M2.1", trust_remote_code=True, dtype="auto")
+    from transformers import AutoTokenizer, AutoModelForCausalLM
+
+    tokenizer = AutoTokenizer.from_pretrained("MiniMaxAI/MiniMax-M2.1", trust_remote_code=True)
+    model = AutoModelForCausalLM.from_pretrained("MiniMaxAI/MiniMax-M2.1", trust_remote_code=True)
+    messages = [
+        {"role": "user", "content": "Who are you?"},
+    ]
+    inputs = tokenizer.apply_chat_template(
+        messages,
+        add_generation_prompt=True,
+        tokenize=True,
+        return_dict=True,
+        return_tensors="pt",
+    ).to(model.device)
+
+    outputs = model.generate(**inputs, max_new_tokens=40)
+    print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
     with open('MiniMaxAI_MiniMax-M2.1_1.txt', 'w', encoding='utf-8') as f:
         f.write('Everything was good in MiniMaxAI_MiniMax-M2.1_1.txt')
 except Exception as e:
@@ -38,8 +53,23 @@ except Exception as e:
     import traceback
     f.write('''```CODE:
 # Load model directly
-from transformers import AutoModelForCausalLM
-model = AutoModelForCausalLM.from_pretrained("MiniMaxAI/MiniMax-M2.1", trust_remote_code=True, dtype="auto")
+from transformers import AutoTokenizer, AutoModelForCausalLM
+
+tokenizer = AutoTokenizer.from_pretrained("MiniMaxAI/MiniMax-M2.1", trust_remote_code=True)
+model = AutoModelForCausalLM.from_pretrained("MiniMaxAI/MiniMax-M2.1", trust_remote_code=True)
+messages = [
+    {"role": "user", "content": "Who are you?"},
+]
+inputs = tokenizer.apply_chat_template(
+    messages,
+    add_generation_prompt=True,
+    tokenize=True,
+    return_dict=True,
+    return_tensors="pt",
+).to(model.device)
+
+outputs = model.generate(**inputs, max_new_tokens=40)
+print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
 ```
 
 ERROR:
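For readers skimming the new snippet: the slice `outputs[0][inputs["input_ids"].shape[-1]:]` drops the prompt tokens, since `generate()` returns the prompt followed by the completion, so only the newly generated reply is decoded. A minimal sketch of the same slicing with toy tensors (no model involved; the token IDs are made up):

```python
import torch

# Toy stand-ins: a 5-token prompt, and a generate()-style result that echoes
# the prompt followed by 3 new tokens (values are arbitrary).
input_ids = torch.tensor([[101, 7592, 2088, 999, 102]])              # shape (1, 5)
outputs = torch.tensor([[101, 7592, 2088, 999, 102, 11, 12, 13]])    # shape (1, 8)

# Same slicing as in the diff: skip the first input_ids.shape[-1] positions,
# keeping only the completion tokens for decoding.
new_tokens = outputs[0][input_ids.shape[-1]:]
print(new_tokens.tolist())  # -> [11, 12, 13]
```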