zipin committed on
Commit
612bacc
·
verified ·
1 Parent(s): 88e355c
Files changed (1) hide show
  1. index.html +41 -8
index.html CHANGED
@@ -5,15 +5,48 @@
5
  <meta name="viewport" content="width=device-width" />
6
  <title>My static Space</title>
7
  <link rel="stylesheet" href="style.css" />
8
- </head>
 
 
9
  <body>
10
- <div class="card">
11
- <h1>Welcome to your static Space!</h1>
12
- <p>You can modify this app directly by editing <i>index.html</i> in the Files and versions tab.</p>
13
- <p>
14
- Also don't forget to check the
15
- <a href="https://huggingface.co/docs/hub/spaces" target="_blank">Spaces documentation</a>.
16
- </p>
 
 
17
  </div>
 
 
 
18
  </body>
19
  </html>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
  <meta name="viewport" content="width=device-width" />
6
  <title>My static Space</title>
7
  <link rel="stylesheet" href="style.css" />
8
+ <script src="https://cdn.tailwindcss.com"></script>
9
+ <script src="https://cdn.jsdelivr.net/npm/feather-icons/dist/feather.min.js"></script>
10
+ </head>
11
  <body>
12
+ <div class="max-w-md mx-auto bg-white rounded-xl shadow-md overflow-hidden md:max-w-2xl m-4">
13
+ <div class="p-8">
14
+ <h1 class="text-2xl font-bold text-gray-800 mb-4">Welcome to your static Space!</h1>
15
+ <p class="text-gray-600 mb-4">You can modify this app directly by editing <i class="font-mono">index.html</i> in the Files and versions tab.</p>
16
+ <div class="flex items-center">
17
+ <i data-feather="book" class="mr-2 text-blue-500"></i>
18
+ <a href="https://huggingface.co/docs/hub/spaces" target="_blank" class="text-blue-500 hover:underline">Spaces documentation</a>
19
+ </div>
20
+ </div>
21
  </div>
22
+ <script>
23
+ feather.replace();
24
+ </script>
25
  </body>
26
  </html>
27
# Example inference snippets for MaziyarPanahi/calme-3.2-instruct-78b.
#
# NOTE(review): this Python was appended *inside index.html* after </html> in
# the commit being viewed — it belongs in its own .py file (or the model
# card), not in an HTML page. Confirm intent with the author.

from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Hoisted: the checkpoint id was repeated as a string literal three times.
MODEL_ID = "MaziyarPanahi/calme-3.2-instruct-78b"

# Hoisted: the same chat history was defined twice, verbatim.
MESSAGES = [
    {"role": "user", "content": "Who are you?"},
]


def run_pipeline() -> None:
    """Use a pipeline as a high-level helper (result intentionally unused,
    matching the original snippet)."""
    pipe = pipeline("text-generation", model=MODEL_ID)
    pipe(MESSAGES)


def run_direct() -> None:
    """Load tokenizer and model directly, generate up to 40 new tokens,
    and print only the newly generated text."""
    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
    model = AutoModelForCausalLM.from_pretrained(MODEL_ID)
    inputs = tokenizer.apply_chat_template(
        MESSAGES,
        add_generation_prompt=True,
        tokenize=True,
        return_dict=True,
        return_tensors="pt",
    ).to(model.device)
    outputs = model.generate(**inputs, max_new_tokens=40)
    # Slice off the prompt tokens so only the model's reply is decoded.
    print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))


# Guarded so importing this module no longer loads a 78B model twice as a
# side effect (the original ran both demos at module top level).
if __name__ == "__main__":
    run_pipeline()
    run_direct()