{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Token was found\n",
      "<|im_start|>system\n",
      "You are an AI assistant with access to various tools.<|im_end|>\n",
      "<|im_start|>user\n",
      "Hi !<|im_end|>\n",
      "<|im_start|>assistant\n",
      "Hi human, what can help you with ?<|im_end|>\n",
      "<|im_start|>assistant\n",
      "\n"
     ]
    }
   ],
   "source": [
    "import os\n",
    "\n",
    "from transformers import AutoTokenizer\n",
    "\n",
    "# Conversation to be rendered through the model's chat template.\n",
    "messages = [\n",
    "    {\"role\": \"system\", \"content\": \"You are an AI assistant with access to various tools.\"},\n",
    "    {\"role\": \"user\", \"content\": \"Hi !\"},\n",
    "    {\"role\": \"assistant\", \"content\": \"Hi human, what can help you with ?\"},\n",
    "]\n",
    "\n",
    "# Read the Hugging Face access token from the environment.\n",
    "# Never hardcode it, and never print its value (it is a credential).\n",
    "token = os.getenv('HF_TOKEN')\n",
    "if token is None:\n",
    "    raise ValueError('You must set the HF_TOKEN environment variable')\n",
    "print('Token was found')\n",
    "\n",
    "# `use_auth_token` is deprecated (removed in transformers v5); the\n",
    "# supported keyword for authentication is `token`.\n",
    "tokenizer = AutoTokenizer.from_pretrained(\"HuggingFaceTB/SmolLM2-1.7B-Instruct\", token=token)\n",
    "\n",
    "# tokenize=False returns the rendered prompt string instead of token ids;\n",
    "# add_generation_prompt=True appends the assistant header so a model would\n",
    "# continue the conversation from there.\n",
    "rendered_prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)\n",
    "\n",
    "print(rendered_prompt)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.13.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}