File size: 552 Bytes
4d92cf9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import streamlit as st

st.title("💬 DeepSeek-R1 Playground")

model_id = "deepseek-ai/DeepSeek-R1"


@st.cache_resource
def load_generator(repo_id: str):
    """Build the text-generation pipeline once and cache it across reruns.

    Streamlit re-executes this script on every widget interaction; without
    st.cache_resource the multi-GB model would be re-loaded from disk each
    time. The cache is keyed on repo_id, so changing the model id reloads.
    """
    tokenizer = AutoTokenizer.from_pretrained(repo_id)
    model = AutoModelForCausalLM.from_pretrained(repo_id)
    return pipeline("text-generation", model=model, tokenizer=tokenizer)


generator = load_generator(model_id)

prompt = st.text_area("Enter your prompt here:")
if prompt:
    # max_new_tokens bounds only the generated continuation (max_length is
    # deprecated and also counts prompt tokens); do_sample=True is required
    # for temperature to have any effect — greedy decoding ignores it.
    with st.spinner("Generating..."):
        output = generator(
            prompt,
            max_new_tokens=100,
            temperature=0.7,
            do_sample=True,
        )
    st.write("### Output:")
    st.success(output[0]['generated_text'])