"""Gradio app that scrapes job postings via python-jobspy and shows them in a table."""

import logging

import gradio as gr
import pandas as pd
from jobspy import scrape_jobs

# Configure logging
logging.basicConfig(
    filename="job_scraper.log",
    level=logging.INFO,
    format="%(asctime)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)

# Columns shown in the results table — must match what scrape_jobs_func returns.
DISPLAY_COLUMNS = [
    "site", "title", "company", "location",
    "date_posted", "is_remote", "job_url", "description",
]


def scrape_jobs_func(query, locations, time_filter, country):
    """Scrape job boards for *query* in *locations*.

    Args:
        query: Job title / search term, e.g. "Data Scientist".
        locations: Comma-separated location string, e.g. "Delhi, Bangalore".
        time_filter: "From Past Week" (168 h) or anything else (72 h).
        country: Country name passed through to Indeed.

    Returns:
        (DataFrame with DISPLAY_COLUMNS, status message string).
    """
    # Map the UI filter label to a posting-age limit in hours.
    hrs = 168 if time_filter == "From Past Week" else 72
    logger.info(
        "Scraping: query=%r locations=%r hours_old=%d country=%r",
        query, locations, hrs, country,
    )
    try:
        jobs = scrape_jobs(
            site_name=["linkedin", "glassdoor", "naukri", "google", "indeed"],
            search_term=f"{query} job at {locations}",
            google_search_term=f"{query} job at {locations}",
            location=locations,
            results_wanted=3,
            hours_old=hrs,
            country_indeed=country,
        )
    except Exception:
        # Board scrapers fail routinely (network, rate limiting); report the
        # failure in the UI instead of crashing the app.
        logger.exception("Scraping failed")
        return (
            pd.DataFrame(columns=DISPLAY_COLUMNS),
            "Scraping failed — see job_scraper.log for details",
        )

    message = f"Found {len(jobs)} jobs"
    logger.info(message)

    initial_df = pd.DataFrame(jobs)
    if initial_df.empty:
        # Fix: column selection on an empty frame raises KeyError because the
        # expected columns do not exist when zero jobs are returned.
        return pd.DataFrame(columns=DISPLAY_COLUMNS), message
    return initial_df[DISPLAY_COLUMNS], message


def gradio_interface(query, locations, time_filter, country):
    """Thin adapter between the Gradio UI and the scraper."""
    return scrape_jobs_func(query, locations, time_filter, country)


custom_css = """
.big-table .wrap.svelte-1ipelgc {
    max-width: 100% !important;
    overflow-x: auto;
}
.big-table table {
    min-width: 1000px; /* Adjust width as needed */
}
"""

# App Layout
iface = gr.Interface(
    fn=gradio_interface,
    inputs=[
        gr.Textbox(
            label="Job Query",
            placeholder="e.g., Data Scientist",
            value="Software Engineer",
        ),
        gr.Textbox(
            label="Location",
            placeholder="e.g., Delhi, Bangalore",
            value="Bangalore",
        ),
        gr.Dropdown(
            label="Time Filter",
            choices=["From Past Week", "From Past 3 Days"],
            value="From Past 3 Days",  # Default option
            type="value",
        ),
        gr.Dropdown(
            label="Country",
            choices=["India", "USA", "Canada", "UK", "Australia"],
            value="India",
            type="value",
        ),
    ],
    outputs=[
        # Fix: headers now match the columns the scraper actually returns
        # (previously ['Date','Company','ApplyLink'] vs. 8 real columns).
        gr.Dataframe(
            label="Job Results",
            headers=DISPLAY_COLUMNS,
            interactive=True,
            elem_classes="big-table",
        ),
        gr.Textbox(label="Message"),
    ],
    description=(
        "Enter a job query and locations to scrape job postings and display "
        "the results in a table."
    ),
    css=custom_css,
)

if __name__ == "__main__":
    iface.launch()