Shreyas094 committed on
Commit
602e30b
·
verified ·
1 Parent(s): e13157e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -62
app.py CHANGED
@@ -1,7 +1,6 @@
1
  import gradio as gr
2
  from duckduckgo_search import DDGS
3
- from typing import List, Dict, Tuple
4
- from huggingface_hub import InferenceClient
5
  import os
6
 
7
  # Environment variables and configurations
@@ -36,87 +35,32 @@ def summarize_results(query: str, search_results: List[Dict[str, str]], model: s
36
 
37
  Article:"""
38
 
39
-
40
  summary = DDGS().chat(prompt, model="llama-3-70b")
41
  return summary
42
  except Exception as e:
43
  return f"An error occurred during summarization: {str(e)}"
44
 
45
- def rephrase_query(previous_response: str, new_query: str) -> str:
46
- client = InferenceClient(
47
- "mistralai/Mistral-7B-Instruct-v0.3",
48
- token=huggingface_token,
49
- )
50
-
51
- if previous_response:
52
- prompt = f"""Analyze the following previous response to understand the context:
53
-
54
- {previous_response}
55
-
56
- Now, given the new query:
57
-
58
- {new_query}
59
-
60
- Rephrase the new query to ensure it aligns with the context of the previous response. The rephrased query should be specific, concise, and optimized for a web search. Provide the rephrased query as a single-line response:"""
61
- else:
62
- prompt = f"""Given the new query:
63
-
64
- {new_query}
65
-
66
- Rephrase this query to make it more specific, concise, and optimized for a web search. Provide the rephrased query as a single-line response:"""
67
-
68
-
69
- response = client.text_generation(prompt, max_new_tokens=30, temperature=0.3)
70
-
71
- # Extract only the rephrased query from the response
72
- rephrased_query = response.split("\n")[-1].strip()
73
- return rephrased_query
74
-
75
  def respond(message, chat_history, model, temperature, num_api_calls):
76
- rephrased_query = message
77
- if chat_history:
78
- previous_response = chat_history[-1][1]
79
- rephrased_query = rephrase_query(previous_response, message)
80
-
81
- print(f"Initial Rephrased Query: {rephrased_query}")
82
-
83
  final_summary = ""
84
  for _ in range(num_api_calls):
85
- if not rephrased_query or not isinstance(rephrased_query, str):
86
- print(f"Invalid rephrased query: {rephrased_query}")
87
- break
88
-
89
- search_results = get_web_search_results(rephrased_query)
90
-
91
- # If no results or error, try rephrasing and searching again
92
- if not search_results or "error" in search_results[0]:
93
- print(f"No results found for: {rephrased_query}. Attempting to rephrase.")
94
- rephrased_query = rephrase_query("", rephrased_query) # Rephrase without context
95
- print(f"New Rephrased Query: {rephrased_query}")
96
- search_results = get_web_search_results(rephrased_query)
97
 
98
  if not search_results:
99
- final_summary += f"No search results found for the query: {rephrased_query}\n\n"
100
  elif "error" in search_results[0]:
101
  final_summary += search_results[0]["error"] + "\n\n"
102
  else:
103
- summary = summarize_results(rephrased_query, search_results, model)
104
  final_summary += summary + "\n\n"
105
 
106
  return final_summary if final_summary else "Unable to generate a response. Please try a different query."
107
 
108
- #def initial_conversation():
109
- # return [
110
- # (None, "Welcome! I'm your AI-powered Web Search and PDF Chat Assistant. I can help you find information on the web, summarize content, and analyze PDF documents. What would you like to know?")
111
- # ]
112
-
113
  css = """
114
  Your custom CSS here
115
  """
116
 
117
  custom_placeholder = "Ask me anything about web content"
118
 
119
- # Define the theme
120
  theme = gr.themes.Soft(
121
  primary_hue="orange",
122
  secondary_hue="amber",
@@ -136,7 +80,6 @@ theme = gr.themes.Soft(
136
  code_background_fill_dark="#140b0b"
137
  )
138
 
139
- # Apply the theme to the ChatInterface
140
  demo = gr.ChatInterface(
141
  respond,
142
  additional_inputs=[
@@ -161,7 +104,6 @@ demo = gr.ChatInterface(
161
  likeable=True,
162
  layout="bubble",
163
  height=400,
164
- # value=initial_conversation()
165
  )
166
  )
167
 
 
1
  import gradio as gr
2
  from duckduckgo_search import DDGS
3
+ from typing import List, Dict
 
4
  import os
5
 
6
  # Environment variables and configurations
 
35
 
36
  Article:"""
37
 
 
38
  summary = DDGS().chat(prompt, model="llama-3-70b")
39
  return summary
40
  except Exception as e:
41
  return f"An error occurred during summarization: {str(e)}"
42
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
43
def respond(message, chat_history, model, temperature, num_api_calls):
    """Gradio chat handler: search the web for `message` and return summaries.

    Performs `num_api_calls` independent search-and-summarize rounds and
    concatenates their outputs. `chat_history` and `temperature` are part of
    the ChatInterface callback signature but are not used by this handler.

    Returns the combined summary text, or a fallback message when every
    round produced nothing.
    """
    chunks = []
    for _ in range(num_api_calls):
        # Each round issues a fresh search; results may differ between calls.
        search_results = get_web_search_results(message)

        if not search_results:
            chunks.append(f"No search results found for the query: {message}\n\n")
            continue

        first = search_results[0]
        if "error" in first:
            # The search helper signals failure via an "error" key in the
            # first result — surface that message to the user.
            chunks.append(first["error"] + "\n\n")
            continue

        chunks.append(summarize_results(message, search_results, model) + "\n\n")

    combined = "".join(chunks)
    return combined if combined else "Unable to generate a response. Please try a different query."
57
 
 
 
 
 
 
58
  css = """
59
  Your custom CSS here
60
  """
61
 
62
  custom_placeholder = "Ask me anything about web content"
63
 
 
64
  theme = gr.themes.Soft(
65
  primary_hue="orange",
66
  secondary_hue="amber",
 
80
  code_background_fill_dark="#140b0b"
81
  )
82
 
 
83
  demo = gr.ChatInterface(
84
  respond,
85
  additional_inputs=[
 
104
  likeable=True,
105
  layout="bubble",
106
  height=400,
 
107
  )
108
  )
109