ariG23498 HF Staff committed on
Commit
982a31b
·
verified ·
1 Parent(s): 3bdd336

Upload deepseek-ai_DeepSeek-R1_0.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. deepseek-ai_DeepSeek-R1_0.py +1 -26
deepseek-ai_DeepSeek-R1_0.py CHANGED
@@ -1,14 +1,8 @@
1
  # /// script
2
  # requires-python = ">=3.12"
3
  # dependencies = [
4
- # "numpy",
5
- # "einops",
6
- # "torch",
7
  # "transformers",
8
- # "diffusers",
9
- # "datasets",
10
- # "accelerate",
11
- # "timm",
12
  # ]
13
  # ///
14
 
@@ -21,25 +15,6 @@ try:
21
  {"role": "user", "content": "Who are you?"},
22
  ]
23
  pipe(messages)
24
-
25
- # Load model directly
26
- from transformers import AutoTokenizer, AutoModelForCausalLM
27
-
28
- tokenizer = AutoTokenizer.from_pretrained("deepseek-ai/DeepSeek-R1", trust_remote_code=True)
29
- model = AutoModelForCausalLM.from_pretrained("deepseek-ai/DeepSeek-R1", trust_remote_code=True)
30
- messages = [
31
- {"role": "user", "content": "Who are you?"},
32
- ]
33
- inputs = tokenizer.apply_chat_template(
34
- messages,
35
- add_generation_prompt=True,
36
- tokenize=True,
37
- return_dict=True,
38
- return_tensors="pt",
39
- ).to(model.device)
40
-
41
- outputs = model.generate(**inputs, max_new_tokens=40)
42
- print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
43
  with open('deepseek-ai_DeepSeek-R1_0.txt', 'w') as f:
44
  f.write('Everything was good in deepseek-ai_DeepSeek-R1_0.txt')
45
  except Exception as e:
 
1
  # /// script
2
  # requires-python = ">=3.12"
3
  # dependencies = [
 
 
 
4
  # "transformers",
5
+ # "torch",
 
 
 
6
  # ]
7
  # ///
8
 
 
15
  {"role": "user", "content": "Who are you?"},
16
  ]
17
  pipe(messages)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
18
  with open('deepseek-ai_DeepSeek-R1_0.txt', 'w') as f:
19
  f.write('Everything was good in deepseek-ai_DeepSeek-R1_0.txt')
20
  except Exception as e: