JayBene1 committed on
Commit
ae0eb4c
·
verified ·
1 Parent(s): 7fa4aa4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -8
app.py CHANGED
@@ -214,7 +214,11 @@ def extract_domain(url):
214
  """Extract domain from URL"""
215
  try:
216
  parsed = urlparse(url)
217
- return parsed.netlify.lower()
 
 
 
 
218
  except:
219
  return ""
220
 
@@ -224,26 +228,30 @@ def find_contacts_by_website(website_url):
224
  if not target_domain:
225
  return []
226
 
 
 
227
  matching_contacts = []
228
  for contact in CONTACTS_DB:
229
  contact_domain = extract_domain(contact['website'])
230
- if target_domain in contact_domain or contact_domain in target_domain:
 
 
 
231
  matching_contacts.append(contact)
232
 
 
233
  return matching_contacts
234
 
235
  def simulate_website_scraping(url):
236
  """Simulate scraping a website and finding contact information"""
237
  # Add some delay to simulate real scraping
238
- time.sleep(random.uniform(1, 3))
239
 
240
  # Find matching contacts from our database
241
  contacts = find_contacts_by_website(url)
242
 
243
- if not contacts:
244
- # If no exact match, return a random subset as if found on the website
245
- contacts = random.sample(CONTACTS_DB, min(3, len(CONTACTS_DB)))
246
-
247
  return contacts
248
 
249
  def search_website_contacts(website_url, max_results=10):
@@ -260,7 +268,7 @@ def search_website_contacts(website_url, max_results=10):
260
  contacts = simulate_website_scraping(website_url)
261
 
262
  if not contacts:
263
- return "No contacts found on this website.", ""
264
 
265
  # Limit results
266
  contacts = contacts[:max_results]
 
214
  """Extract domain from URL"""
215
  try:
216
  parsed = urlparse(url)
217
+ domain = parsed.netloc.lower()
218
+ # Remove www. if present
219
+ if domain.startswith('www.'):
220
+ domain = domain[4:]
221
+ return domain
222
  except:
223
  return ""
224
 
 
228
  if not target_domain:
229
  return []
230
 
231
+ print(f"Searching for domain: {target_domain}") # Debug info
232
+
233
  matching_contacts = []
234
  for contact in CONTACTS_DB:
235
  contact_domain = extract_domain(contact['website'])
236
+ print(f"Comparing with: {contact_domain}") # Debug info
237
+
238
+ # Exact domain match or subdomain match
239
+ if target_domain == contact_domain or target_domain in contact_domain or contact_domain in target_domain:
240
  matching_contacts.append(contact)
241
 
242
+ print(f"Found {len(matching_contacts)} matching contacts") # Debug info
243
  return matching_contacts
244
 
245
  def simulate_website_scraping(url):
246
  """Simulate scraping a website and finding contact information"""
247
  # Add some delay to simulate real scraping
248
+ time.sleep(random.uniform(1, 2))
249
 
250
  # Find matching contacts from our database
251
  contacts = find_contacts_by_website(url)
252
 
253
+ # Only return contacts if we found exact matches
254
+ # Don't return random contacts if no match found
 
 
255
  return contacts
256
 
257
  def search_website_contacts(website_url, max_results=10):
 
268
  contacts = simulate_website_scraping(website_url)
269
 
270
  if not contacts:
271
+ return f"No contacts found on {website_url}. \n\nThis website is not in our contact database. Try one of the sample websites listed below, or the website might not have publicly available contact information.", ""
272
 
273
  # Limit results
274
  contacts = contacts[:max_results]