# -*- coding: utf-8 -*-
"""Text-generation demo: Hugging Face pipeline + Gradio UI.

Automatically generated by Colab from huggingface.ipynb. Original file:
https://colab.research.google.com/drive/149y_UsrqsIDR7_OwRgNnRfs-KkGhFvtx
"""
import gradio as gr
from transformers import pipeline

MODEL_NAME = "Qwen/Qwen3-4B-Instruct-2507"
TURKISH_SYSTEM_PROMPT = "Sen Türkçe cevap veren, yardımcı bir yapay zeka asistansın."

# Load the chat model once at import time; device_map="auto" places the
# weights on whatever accelerator is available, torch_dtype="auto" picks
# the checkpoint's native precision.
generator = pipeline(
    "text-generation",
    model=MODEL_NAME,
    torch_dtype="auto",
    device_map="auto",
)


def _chat(user_prompt, system_prompt, max_new_tokens=100):
    """Run one system+user chat turn and return only the new text.

    Args:
        user_prompt: The user's message.
        system_prompt: System instruction that sets the assistant persona.
        max_new_tokens: Cap on the number of generated tokens.

    Returns:
        The assistant's reply as a string; the prompt is excluded because
        return_full_text=False.
    """
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": user_prompt},
    ]
    outputs = generator(
        messages,
        max_new_tokens=max_new_tokens,
        do_sample=True,
        temperature=0.7,
        return_full_text=False,
    )
    return outputs[0]["generated_text"]


def generate_text(prompt):
    """Gradio callback: answer *prompt* in Turkish with a 200-token cap."""
    return _chat(prompt, TURKISH_SYSTEM_PROMPT, max_new_tokens=200)


def main():
    """Reproduce the notebook's demo generations, then launch the Gradio UI."""
    # Plain-string call: return_full_text defaults to True here, so the
    # printed output echoes the prompt followed by the completion.
    output = generator("Hi how are you?")
    print(output[0]["generated_text"])

    # English chat turn.
    print(_chat("Tell me a short joke", "You are a helpful AI assistant."))

    # Turkish chat turn.
    print(_chat("Üretken yapay zeka nedir kısaca cevap ver", TURKISH_SYSTEM_PROMPT))

    demo = gr.Interface(
        fn=generate_text,
        inputs=gr.Textbox(label="Give an input"),
        outputs=gr.Textbox(label="Output"),
        title="Text Generation",
    )
    demo.launch()


if __name__ == "__main__":
    main()