mourningdove committed on
Commit
78e79c1
·
verified ·
1 Parent(s): ab377f7

Upload folder using huggingface_hub

Browse files
Files changed (1) hide show
  1. notebook.ipynb +3 -6
notebook.ipynb CHANGED
@@ -22,12 +22,9 @@
22
  " {\"role\": \"user\", \"content\": \"Audit this Circom circuit for vulnerabilities: template Test() { signal input a; signal output b; b <-- a * 2; }\"}\n",
23
  "]\n",
24
  "\n",
25
- "chat_template = \"{% for message in messages %}{{'<|im_start|>' + message['role'] + '\\\\n' + message['content'] + '<|im_end|>\\\\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\\\\n' }}{% endif %}\"\n",
26
- "\n",
27
- "prompt = tokenizer.apply_chat_template(messages, chat_template=chat_template, tokenize=False, add_generation_prompt=True)\n",
28
- "\n",
29
- "inputs = tokenizer(prompt, return_tensors=\"pt\")\n",
30
- "outputs = model.generate(**inputs, max_new_tokens=300)\n",
31
  "print(tokenizer.decode(outputs[0], skip_special_tokens=True))\n"
32
  ]
33
  }
 
22
  " {\"role\": \"user\", \"content\": \"Audit this Circom circuit for vulnerabilities: template Test() { signal input a; signal output b; b <-- a * 2; }\"}\n",
23
  "]\n",
24
  "\n",
25
+ "prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)\n",
26
+ "inputs = tokenizer(prompt, return_tensors=\"pt\").to(model.device)\n",
27
+ "outputs = model.generate(**inputs, max_new_tokens=200)\n",
 
 
 
28
  "print(tokenizer.decode(outputs[0], skip_special_tokens=True))\n"
29
  ]
30
  }