jsakshi committed on
Commit
bb50953
·
verified ·
1 Parent(s): 150b4cf

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +80 -29
app.py CHANGED
@@ -1,65 +1,116 @@
1
  import gradio as gr
2
  from transformers import pipeline
3
 
4
- # Load the TinyLlama model for text generation
5
  generator = pipeline("text-generation", model="TinyLlama/TinyLlama-1.1B-Chat-v1.0")
6
 
7
- # Store the current story in a global variable
8
  current_story = ""
 
9
 
10
  def start_story(prompt):
11
- global current_story
12
- # Generate the initial story segment, ensuring the prompt is the starting point
13
- result = generator(
14
- prompt,
15
- max_new_tokens=150, # Increased to ~100-150 words (approx. 10 lines)
16
- temperature=0.7, # Lower temperature for less randomness
17
- top_k=50, # Filter to top 50 likely next words
18
- do_sample=True, # Enable sampling for creativity
 
19
  truncation=True
20
  )[0]["generated_text"]
21
- current_story = result
22
- return current_story
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
23
 
24
- def continue_story():
25
- global current_story
26
  if not current_story:
27
  return "Please start a story first!"
28
- # Continue the story, using the current story as the prompt
29
- result = generator(
30
- current_story,
31
- max_new_tokens=150, # Increased to ~100-150 words (approx. 10 lines)
 
 
 
 
 
32
  temperature=0.7,
33
  top_k=50,
34
  do_sample=True,
35
  truncation=True
36
  )[0]["generated_text"]
37
- current_story = result
38
- return current_story
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
39
 
40
  def reset_story():
41
- global current_story
42
- # Reset the story to empty
43
  current_story = ""
 
44
  return "Story reset. Enter a new prompt to begin!"
45
 
46
  # Create the Gradio interface
47
- with gr.Blocks(title="AI Story Generator") as demo:
48
- gr.Markdown("# AI Story Generator")
49
- gr.Markdown("Enter a prompt to start your story, then continue or reset it!")
50
 
51
  # Input for the initial prompt
52
- prompt_input = gr.Textbox(label="Start your story with a prompt", placeholder="E.g., 'A knight rode into a dark forest...'")
53
- output = gr.Textbox(label="Your Story", lines=15) # Increased lines to fit longer output
54
 
55
- # Buttons for starting, continuing, and resetting
 
56
  start_button = gr.Button("Start Story")
57
  continue_button = gr.Button("Continue Story")
58
  reset_button = gr.Button("Reset Story")
59
 
60
  # Connect buttons to functions
61
  start_button.click(fn=start_story, inputs=prompt_input, outputs=output)
62
- continue_button.click(fn=continue_story, inputs=None, outputs=output)
63
  reset_button.click(fn=reset_story, inputs=None, outputs=output)
64
 
65
  # Launch the app
 
1
  import gradio as gr
2
  from transformers import pipeline
3
 
4
+ # Load TinyLlama model for text generation
5
  generator = pipeline("text-generation", model="TinyLlama/TinyLlama-1.1B-Chat-v1.0")
6
 
7
+ # Store the current story and last options in global variables
8
  current_story = ""
9
+ last_options = []
10
 
11
  def start_story(prompt):
12
+ global current_story, last_options
13
+ # Generate the initial story segment
14
+ formatted_prompt = f"Human: Write a creative story starting with: '{prompt}' in about 100-150 words."
15
+ story_result = generator(
16
+ formatted_prompt,
17
+ max_new_tokens=150, # ~10 lines (100-150 words)
18
+ temperature=0.7,
19
+ top_k=50,
20
+ do_sample=True,
21
  truncation=True
22
  )[0]["generated_text"]
23
+ current_story = story_result.replace(formatted_prompt, "").strip()
24
+
25
+ # Generate story-specific options
26
+ options_prompt = f"Human: Based on this story: '{current_story}', suggest three distinct options for what happens next. Each option should be 10-20 words and continue the narrative logically."
27
+ options_result = generator(
28
+ options_prompt,
29
+ max_new_tokens=100, # Enough for 3 options
30
+ temperature=0.8, # Slightly higher for variety
31
+ top_k=50,
32
+ do_sample=True,
33
+ truncation=True
34
+ )[0]["generated_text"]
35
+ options_text = options_result.replace(options_prompt, "").strip()
36
+
37
+ # Parse options, with fallback if generation is messy
38
+ options_lines = [line.strip() for line in options_text.split("\n") if line.strip()]
39
+ last_options = options_lines[:3] if len(options_lines) >= 3 else [
40
+ "The story took an unexpected twist.",
41
+ "A new challenge emerged from the shadows.",
42
+ "The journey led to an unlikely ally."
43
+ ]
44
+
45
+ options_display = "\n\n**Choose an option:**\n" + "\n".join([f"{i+1}. {opt}" for i, opt in enumerate(last_options)])
46
+ return current_story + options_display
47
 
48
+ def continue_story(option_num):
49
+ global current_story, last_options
50
  if not current_story:
51
  return "Please start a story first!"
52
+ if not last_options or not (1 <= int(option_num) <= 3):
53
+ return "Invalid option! Choose 1, 2, or 3."
54
+
55
+ # Continue the story with the chosen option
56
+ chosen_option = last_options[int(option_num) - 1]
57
+ formatted_prompt = f"Human: Continue this story: '{current_story}' with this direction: '{chosen_option}' in about 100-150 words."
58
+ story_result = generator(
59
+ formatted_prompt,
60
+ max_new_tokens=150, # ~10 lines
61
  temperature=0.7,
62
  top_k=50,
63
  do_sample=True,
64
  truncation=True
65
  )[0]["generated_text"]
66
+ current_story = story_result.replace(formatted_prompt, "").strip()
67
+
68
+ # Generate new story-specific options
69
+ options_prompt = f"Human: Based on this story: '{current_story}', suggest three distinct options for what happens next. Each option should be 10-20 words and continue the narrative logically."
70
+ options_result = generator(
71
+ options_prompt,
72
+ max_new_tokens=100,
73
+ temperature=0.8,
74
+ top_k=50,
75
+ do_sample=True,
76
+ truncation=True
77
+ )[0]["generated_text"]
78
+ options_text = options_result.replace(options_prompt, "").strip()
79
+
80
+ options_lines = [line.strip() for line in options_text.split("\n") if line.strip()]
81
+ last_options = options_lines[:3] if len(options_lines) >= 3 else [
82
+ "The story took an unexpected twist.",
83
+ "A new challenge emerged from the shadows.",
84
+ "The journey led to an unlikely ally."
85
+ ]
86
+
87
+ options_display = "\n\n**Choose an option:**\n" + "\n".join([f"{i+1}. {opt}" for i, opt in enumerate(last_options)])
88
+ return current_story + options_display
89
 
90
  def reset_story():
91
+ global current_story, last_options
 
92
  current_story = ""
93
+ last_options = []
94
  return "Story reset. Enter a new prompt to begin!"
95
 
96
  # Create the Gradio interface
97
+ with gr.Blocks(title="AI Story Game") as demo:
98
+ gr.Markdown("# AI Story Game")
99
+ gr.Markdown("Start your adventure with a prompt, then guide the story with options or reset it!")
100
 
101
  # Input for the initial prompt
102
+ prompt_input = gr.Textbox(label="Start your story with a prompt", placeholder="E.g., 'A thief crept through the shadows...'")
103
+ output = gr.Textbox(label="Your Story", lines=15)
104
 
105
+ # Option input and buttons
106
+ option_input = gr.Textbox(label="Enter option number (1-3)", placeholder="E.g., 1", lines=1)
107
  start_button = gr.Button("Start Story")
108
  continue_button = gr.Button("Continue Story")
109
  reset_button = gr.Button("Reset Story")
110
 
111
  # Connect buttons to functions
112
  start_button.click(fn=start_story, inputs=prompt_input, outputs=output)
113
+ continue_button.click(fn=continue_story, inputs=option_input, outputs=output)
114
  reset_button.click(fn=reset_story, inputs=None, outputs=output)
115
 
116
  # Launch the app