baconnier committed on
Commit
ac7873b
·
verified ·
1 Parent(s): 81a41a0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -21
app.py CHANGED
@@ -91,21 +91,18 @@ Try to be funny, use smart format to answer using bullet points.
91
  Begin the tutorial by introducing yourself and asking for the first prompt as described above.
92
  """
93
 
94
- if __name__ == '__main__':
95
- api_token = os.getenv('HF_API_TOKEN2')
96
- if not api_token:
97
- raise ValueError("HF_API_TOKEN not found in environment variables")
98
-
99
  client = InferenceClient("meta-llama/Meta-Llama-3-70B-Instruct", token=api_token)
100
 
101
- def respond(message, history: list[tuple[str, str]]):
102
  messages = [{"role": "system", "content": system_message}]
103
 
104
- for val in history:
105
- if val[0]:
106
- messages.append({"role": "user", "content": val[0]})
107
- if val[1]:
108
- messages.append({"role": "assistant", "content": val[1]})
109
 
110
  messages.append({"role": "user", "content": message})
111
 
@@ -119,12 +116,12 @@ def respond(message, history: list[tuple[str, str]]):
119
  top_p=0.95,
120
  ):
121
  token = message.choices[0].delta.content
122
-
123
  response += token
124
  yield response
125
 
126
  demo = gr.ChatInterface(
127
  respond,
 
128
  theme=gr.themes.Soft(primary_hue="indigo", secondary_hue="blue", neutral_hue="gray",font=[gr.themes.GoogleFont("Exo"), "ui-sans-serif", "system-ui", "sans-serif"]).set(
129
  body_background_fill_dark="#0f172a",
130
  block_background_fill_dark="#0f172a",
@@ -136,15 +133,9 @@ demo = gr.ChatInterface(
136
  color_accent_soft_dark="transparent"
137
  ),
138
  css=css,
139
- description="AI Prompt Engineering Tutor: Master the art of crafting effective prompts",
140
- chatbot=gr.Chatbot(scale=1, placeholder=PLACEHOLDER),
141
- additional_inputs=[
142
- gr.Button("Retry", variant="secondary"),
143
- gr.Button("Undo", variant="secondary"),
144
- gr.Button("Clear", variant="secondary"),
145
- ],
146
- submit_btn=gr.Button("Send", variant="primary"),
147
  )
148
 
149
  if __name__ == "__main__":
150
- demo.launch()
 
91
  Begin the tutorial by introducing yourself and asking for the first prompt as described above.
92
  """
93
 
94
+ api_token = os.getenv('HF_API_TOKEN2')
95
+ if not api_token:
96
+ raise ValueError("HF_API_TOKEN not found in environment variables")
97
+
 
98
  client = InferenceClient("meta-llama/Meta-Llama-3-70B-Instruct", token=api_token)
99
 
100
+ def respond(message, chat_history):
101
  messages = [{"role": "system", "content": system_message}]
102
 
103
+ for user_msg, assistant_msg in chat_history:
104
+ messages.append({"role": "user", "content": user_msg})
105
+ messages.append({"role": "assistant", "content": assistant_msg})
 
 
106
 
107
  messages.append({"role": "user", "content": message})
108
 
 
116
  top_p=0.95,
117
  ):
118
  token = message.choices[0].delta.content
 
119
  response += token
120
  yield response
121
 
122
  demo = gr.ChatInterface(
123
  respond,
124
+ chatbot=gr.Chatbot(scale=1, placeholder=PLACEHOLDER),
125
  theme=gr.themes.Soft(primary_hue="indigo", secondary_hue="blue", neutral_hue="gray",font=[gr.themes.GoogleFont("Exo"), "ui-sans-serif", "system-ui", "sans-serif"]).set(
126
  body_background_fill_dark="#0f172a",
127
  block_background_fill_dark="#0f172a",
 
133
  color_accent_soft_dark="transparent"
134
  ),
135
  css=css,
136
+ title="AI Prompt Engineering Tutor",
137
+ description="Master the art of crafting effective prompts",
 
 
 
 
 
 
138
  )
139
 
140
  if __name__ == "__main__":
141
+ demo.launch(share=True)