Rahatara committed on
Commit
07de933
·
verified Β·
1 Parent(s): 5b9e52b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +29 -66
app.py CHANGED
@@ -3,7 +3,7 @@ import gradio as gr
3
  from groq import Groq
4
 
5
  # -----------------------------
6
- # 1. Initialize Groq client
7
  # -----------------------------
8
  client = Groq(api_key=os.getenv("GROQ_API_KEY"))
9
 
@@ -13,13 +13,12 @@ SYSTEM_PROMPT = (
13
  )
14
 
15
  # -----------------------------
16
- # 2. Chat function
17
  # -----------------------------
18
  def chat_with_bot(user_input, history):
19
  if history is None:
20
  history = []
21
 
22
- # Add system message once
23
  if not history:
24
  history.append({"role": "system", "content": SYSTEM_PROMPT})
25
 
@@ -28,103 +27,67 @@ def chat_with_bot(user_input, history):
28
  completion = client.chat.completions.create(
29
  model="llama-3.3-70b-versatile",
30
  messages=history,
 
31
  temperature=1,
32
- max_tokens=1024,
33
  )
34
 
35
- assistant_reply = completion.choices[0].message.content
36
- history.append({"role": "assistant", "content": assistant_reply})
37
 
38
  return history, history
39
 
40
 
41
  # -----------------------------
42
- # 3. Storyboard generator
43
  # -----------------------------
44
  def generate_storyboard(scenario):
45
  if not scenario.strip():
46
- return "❗ Please enter a scenario."
47
-
48
- messages = [
49
- {
50
- "role": "system",
51
- "content": "Generate a 6-scene storyboard in a table format."
52
- },
53
- {
54
- "role": "user",
55
- "content": scenario
56
- }
57
- ]
58
 
59
  completion = client.chat.completions.create(
60
  model="llama-3.3-70b-versatile",
61
- messages=messages,
62
- temperature=1,
63
- max_tokens=1024,
 
 
64
  )
65
 
66
  return completion.choices[0].message.content
67
 
68
 
69
  # -----------------------------
70
- # 4. Gradio UI
71
  # -----------------------------
72
- with gr.Blocks(
73
- theme=gr.themes.Glass(),
74
- title="Storyboard Assistant"
75
- ) as demo:
76
-
77
- gr.Markdown(
78
- "<h1 style='text-align:center;'>πŸ“– Storyboard Assistant</h1>"
79
- )
80
 
81
  with gr.Tabs():
82
-
83
- # -------- Chat Tab --------
84
  with gr.Tab("πŸ’¬ Chat"):
85
- chatbot = gr.Chatbot(
86
- label="Storyboard Chatbot",
87
- type="messages"
88
- )
89
-
90
  state = gr.State([])
91
 
92
- user_input = gr.Textbox(
93
- placeholder="Ask about storyboards, scenes, pacing...",
94
- label="Your Message"
95
- )
96
-
97
- send_btn = gr.Button("Ask")
98
 
99
- send_btn.click(
100
- fn=chat_with_bot,
101
  inputs=[user_input, state],
102
  outputs=[chatbot, state],
103
  )
104
 
105
- # -------- Storyboard Tab --------
106
  with gr.Tab("πŸ“– Generate Storyboard"):
107
- scenario_input = gr.Textbox(
108
- label="Scenario",
109
- placeholder="A child discovers a hidden robot in the attic..."
110
- )
111
 
112
- generate_btn = gr.Button("Generate Storyboard")
113
-
114
- storyboard_output = gr.Textbox(
115
- label="Generated Storyboard",
116
- lines=14
117
- )
118
-
119
- generate_btn.click(
120
- fn=generate_storyboard,
121
- inputs=scenario_input,
122
- outputs=storyboard_output
123
- )
124
 
125
 
126
  # -----------------------------
127
- # 5. Launch app
128
  # -----------------------------
129
- if __name__ == "__main__":
130
- demo.launch()
 
 
 
3
  from groq import Groq
4
 
5
  # -----------------------------
6
+ # Initialize Groq
7
  # -----------------------------
8
  client = Groq(api_key=os.getenv("GROQ_API_KEY"))
9
 
 
13
  )
14
 
15
  # -----------------------------
16
+ # Chat function
17
  # -----------------------------
18
  def chat_with_bot(user_input, history):
19
  if history is None:
20
  history = []
21
 
 
22
  if not history:
23
  history.append({"role": "system", "content": SYSTEM_PROMPT})
24
 
 
27
  completion = client.chat.completions.create(
28
  model="llama-3.3-70b-versatile",
29
  messages=history,
30
+ max_tokens=512,
31
  temperature=1,
 
32
  )
33
 
34
+ reply = completion.choices[0].message.content
35
+ history.append({"role": "assistant", "content": reply})
36
 
37
  return history, history
38
 
39
 
40
  # -----------------------------
41
+ # Storyboard generator
42
  # -----------------------------
43
  def generate_storyboard(scenario):
44
  if not scenario.strip():
45
+ return "Please enter a scenario."
 
 
 
 
 
 
 
 
 
 
 
46
 
47
  completion = client.chat.completions.create(
48
  model="llama-3.3-70b-versatile",
49
+ messages=[
50
+ {"role": "system", "content": "Generate a 6-scene storyboard in table format."},
51
+ {"role": "user", "content": scenario},
52
+ ],
53
+ max_tokens=700,
54
  )
55
 
56
  return completion.choices[0].message.content
57
 
58
 
59
  # -----------------------------
60
+ # UI
61
  # -----------------------------
62
+ with gr.Blocks(title="Storyboard Assistant") as demo:
63
+ gr.Markdown("## πŸ“– Storyboard Assistant")
 
 
 
 
 
 
64
 
65
  with gr.Tabs():
 
 
66
  with gr.Tab("πŸ’¬ Chat"):
67
+ chatbot = gr.Chatbot(type="messages")
 
 
 
 
68
  state = gr.State([])
69
 
70
+ user_input = gr.Textbox(label="Your Message")
71
+ send = gr.Button("Ask")
 
 
 
 
72
 
73
+ send.click(
74
+ chat_with_bot,
75
  inputs=[user_input, state],
76
  outputs=[chatbot, state],
77
  )
78
 
 
79
  with gr.Tab("πŸ“– Generate Storyboard"):
80
+ scenario = gr.Textbox(label="Scenario")
81
+ generate = gr.Button("Generate")
82
+ output = gr.Textbox(lines=12)
 
83
 
84
+ generate.click(generate_storyboard, scenario, output)
 
 
 
 
 
 
 
 
 
 
 
85
 
86
 
87
  # -----------------------------
88
+ # 🚨 CRITICAL LAUNCH FIX
89
  # -----------------------------
90
+ demo.launch(
91
+ ssr=False, # βœ… disables schema bug
92
+ show_api=False # βœ… prevents /api/info crash
93
+ )