# MatchHive-ai / backend / agents / rapidapi_linkedin.py
# NOTE: GitHub web-page residue (author avatar caption, commit id e6f086e)
# was pasted here and has been converted into this comment header so the
# module remains valid Python.
import requests
import os
# Relative import (same folder)
from .normalizer import normalize_job_data
# --- CONFIGURATION FOR SCRAPINGDOG API ---
# Resolve the ScrapingDog API key: prefer Streamlit secrets when running
# inside a Streamlit app, otherwise fall back to the environment variable.
# NOTE: st.secrets.get() returns None (it does NOT raise) when the key is
# absent, so the environment fallback must also cover a falsy result —
# the original code only fell back when an exception was raised.
try:
    import streamlit as st
    SCRAPINGDOG_API_KEY = st.secrets.get("SCRAPINGDOG_API_KEY")
except (ImportError, KeyError, FileNotFoundError):
    SCRAPINGDOG_API_KEY = None
if not SCRAPINGDOG_API_KEY:
    SCRAPINGDOG_API_KEY = os.getenv("SCRAPINGDOG_API_KEY")
def fetch_linkedin_jobs_stub(query="python developer", location="remote", limit=5):
    """Return one canned LinkedIn posting, normalized via normalize_job_data.

    The query/location/limit parameters exist only for interface parity with
    the real fetcher; the stub ignores them and always yields the same job.
    """
    print("LinkedIn agent is a stub. Returning mock data.")
    stub_posting = {
        'title': 'Senior Python Engineer (Stub)',
        'company_name': 'Innovate Inc.',
        'location': 'Remote',
        'description': 'Looking for a senior python developer with experience in Django and cloud services. This is a mock job from a stub function.',
        'posted_at': '2025-09-27',
        'job_url': 'https://www.linkedin.com/jobs'
    }
    return [normalize_job_data(stub_posting, "LinkedIn (Stub)")]
def fetch_linkedin_jobs_real(query="python developer", location="remote", limit=10, timeout=30):
    """Fetch LinkedIn job postings through the ScrapingDog API.

    Args:
        query: Search keywords (e.g. a job title).
        location: Location filter forwarded to the API.
        limit: Maximum number of results requested (API "num" parameter).
        timeout: Seconds to wait for the HTTP response before aborting.

    Returns:
        A list of jobs normalized with normalize_job_data, or [] when the
        API key is missing, the request fails, or the response is malformed.
    """
    if not SCRAPINGDOG_API_KEY:
        print("SCRAPINGDOG_API_KEY not set. Cannot fetch real data from LinkedIn.")
        return []
    url = "https://api.scrapingdog.com/linkedinjobs/"
    params = {
        "api_key": SCRAPINGDOG_API_KEY,
        "query": query,
        "location": location,
        "num": limit
    }
    try:
        # A timeout is mandatory: without one, a stalled connection would
        # hang the caller indefinitely (the original passed no timeout).
        # A timeout raises requests.exceptions.Timeout, a subclass of
        # RequestException, so it is handled below.
        response = requests.get(url, params=params, timeout=timeout)
        response.raise_for_status()
        jobs = response.json()
        if not isinstance(jobs, list):
            print("Unexpected API response format.")
            return []
        # Map ScrapingDog's field names onto the schema normalize_job_data expects.
        adapted_jobs = [
            {
                'title': job.get('job_title', 'N/A'),
                'company_name': job.get('company_name', 'N/A'),
                'location': job.get('job_location', 'Remote'),
                'description': job.get('job_description', ''),
                'posted_at': job.get('posted_at', 'N/A'),
                'job_url': job.get('job_url', '')
            }
            for job in jobs
        ]
        return [normalize_job_data(job, "LinkedIn") for job in adapted_jobs]
    except requests.exceptions.RequestException as e:
        print(f"Error fetching jobs from ScrapingDog API: {e}")
        return []
    except ValueError as e:
        print(f"Error parsing JSON: {e}")
        return []