Antonis Bast committed on
Commit
fc917d7
·
1 Parent(s): 475e24a

Simplify paraphrase query and update examples format

Browse files
Files changed (1) hide show
  1. app.py +7 -11
app.py CHANGED
@@ -71,13 +71,9 @@ def generate_text(prompt, max_tokens=256, temperature=0.7):
71
 
72
  def paraphrase_query(query):
73
  """Generate 2 paraphrases of the query."""
74
- styles = [
75
- ("formal and academic", "a precise, scholarly manner"),
76
- ("simple and inquisitive", "simple, curious language"),
77
- ]
78
  paraphrases = []
79
- for style_name, style_desc in styles:
80
- prompt = f"""Paraphrase the following question in {style_desc}.
81
  Return ONLY the paraphrased question, nothing else.
82
 
83
  Original: {query}
@@ -189,12 +185,12 @@ def rag_pipeline(query):
189
  # =============================================================================
190
 
191
  EXAMPLES = [
192
- "What is dropout and why do we use it?",
193
  "Explain backpropagation.",
194
- "What is the vanishing gradient problem?",
195
- "Why do transformers use attention?",
196
- "What is batch normalization?",
197
- "Why do we use ReLU instead of sigmoid?",
198
  ]
199
 
200
  with gr.Blocks(
 
71
 
72
  def paraphrase_query(query):
73
  """Generate 2 paraphrases of the query."""
 
 
 
 
74
  paraphrases = []
75
+ for i in range(2):
76
+ prompt = f"""Paraphrase the following question. Use different wording while keeping the same meaning.
77
  Return ONLY the paraphrased question, nothing else.
78
 
79
  Original: {query}
 
185
  # =============================================================================
186
 
187
  EXAMPLES = [
188
+ "Explain dropout and why we use it.",
189
  "Explain backpropagation.",
190
+ "Explain the vanishing gradient problem.",
191
+ "Explain why transformers use attention.",
192
+ "Explain batch normalization.",
193
+ "Explain why we use ReLU instead of sigmoid.",
194
  ]
195
 
196
  with gr.Blocks(