Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -31,6 +31,21 @@ generator = pipeline(
|
|
| 31 |
tokenizer=tokenizer,
|
| 32 |
return_full_text=False
|
| 33 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 34 |
|
| 35 |
# Function to generate stories
|
| 36 |
def generate_story(prompt, max_tokens=300, temperature=0.8):
|
|
|
|
| 31 |
tokenizer=tokenizer,
|
| 32 |
return_full_text=False
|
| 33 |
)
|
| 34 |
+
def chat_prompt(user_text: str) -> str:
    """Build a generation-ready prompt string for the storyteller model.

    Wraps *user_text* in a system/user chat exchange and renders it with the
    tokenizer's chat template when one is available; otherwise falls back to a
    plain ``System:/User:/Assistant:`` text layout.

    NOTE(review): depends on a module-level ``tokenizer`` defined earlier in
    this file — confirm it is loaded before this is called.
    """
    cleaned = (user_text or "").strip()
    messages = [
        {"role": "system", "content": "You are a helpful storyteller that writes engaging prose."},
        {"role": "user", "content": cleaned},
    ]
    # Use chat template if available, otherwise fall back to a simple format
    if hasattr(tokenizer, "apply_chat_template"):
        return tokenizer.apply_chat_template(
            messages, tokenize=False, add_generation_prompt=True
        )
    # Fallback prompt format
    return "System: You are a helpful storyteller that writes engaging prose.\nUser: " + cleaned + "\nAssistant:"
|
| 48 |
+
|
| 49 |
|
| 50 |
# Function to generate stories
|
| 51 |
def generate_story(prompt, max_tokens=300, temperature=0.8):
|