# Hugging Face Space: AI Career Intelligence Platform (Gradio app)
import asyncio
import os
from urllib.parse import quote

import gradio as gr
import requests
from crawl4ai import AsyncWebCrawler, BrowserConfig, CrawlerRunConfig
from crawl4ai.content_filter_strategy import PruningContentFilter
from crawl4ai.markdown_generation_strategy import DefaultMarkdownGenerator
from dotenv import load_dotenv
from openai import AzureOpenAI
# ---------------- ENV ----------------
# Pull variables from a local .env file into the process environment so the
# must_env() lookups below can resolve them during local development.
load_dotenv()
def must_env(name):
    """Return the value of environment variable *name*.

    Raises:
        RuntimeError: if the variable is unset or empty.
    """
    value = os.getenv(name)
    if value:
        return value
    raise RuntimeError(f"Missing env var: {name}")
# Azure OpenAI client — all credentials are required and come from the
# environment (fail fast at import time via must_env).
client = AzureOpenAI(
    api_key=must_env("AZURE_OPENAI_KEY").strip(),
    api_version="2025-01-01-preview",  # literal constant; .strip() was a no-op
    azure_endpoint=must_env("AZURE_OPENAI_ENDPOINT").strip(),
)

# Deployment (model) name used for every chat completion in this app.
DEPLOYMENT_NAME = must_env("AZURE_OPENAI_DEPLOYMENT").strip()
# API key for the Serper Google-search proxy used by the interview module.
SERPER_API_KEY = must_env("SERPER_API_KEY").strip()
| # ========================================================= | |
| # =============== INTERVIEW INSIGHTS MODULE ============== | |
| # ========================================================= | |
def search_company_interviews(company, max_results=3):
    """Search (via the Serper API) for interview-experience pages about *company*.

    Args:
        company: Company name to search for, e.g. "Amazon".
        max_results: Maximum number of result URLs to return (default 3,
            matching the previous hard-coded slice).

    Returns:
        A list of up to *max_results* result URLs.

    Raises:
        requests.HTTPError: if the Serper API responds with an error status.
    """
    headers = {
        "X-API-KEY": SERPER_API_KEY,
        "Content-Type": "application/json",
    }
    # Restrict results to sites that actually host interview write-ups.
    query = (
        f"{company} interview experience "
        "site:glassdoor.com OR site:geeksforgeeks.org OR site:prepinsta.com"
    )
    r = requests.post(
        "https://google.serper.dev/search",
        headers=headers,
        json={"q": query, "num": 5},
        timeout=15,
    )
    r.raise_for_status()
    organic = r.json().get("organic", [])
    # Some organic entries may lack a "link" key — skip them instead of
    # raising KeyError on a partial result.
    return [res["link"] for res in organic if "link" in res][:max_results]
async def crawl_url(url):
    """Crawl *url* with a headless browser and return pruned markdown.

    Uses crawl4ai's pruning content filter to strip boilerplate, then
    truncates the result to at most 2500 characters.
    """
    pruner = PruningContentFilter(threshold=0.48)
    markdown_gen = DefaultMarkdownGenerator(content_filter=pruner)
    run_config = CrawlerRunConfig(markdown_generator=markdown_gen)
    async with AsyncWebCrawler(config=BrowserConfig(headless=True)) as crawler:
        page = await crawler.arun(url=url, config=run_config)
        text = page.markdown.fit_markdown or ""
    return text[:2500]
async def fetch_and_summarize(company):
    """Search, crawl, and LLM-summarize interview experiences for *company*.

    Returns:
        A markdown summary produced by the chat model, followed by the list
        of source URLs; or a short notice when no sources were found.
    """
    urls = search_company_interviews(company)
    if not urls:
        # Nothing to crawl — don't ask the model to summarize empty context.
        return f"No interview experiences found for {company}."
    pages = await asyncio.gather(*[crawl_url(u) for u in urls])
    # Drop pages that yielded no usable markdown before building the prompt.
    context = "\n\n".join(p for p in pages if p)
    messages = [
        {"role": "system", "content": "Summarize interview experiences concisely."},
        {"role": "user", "content": f"""
Summarize interview process for {company}:
- Rounds
- Difficulty
- Topics asked
- Preparation tips
Content:
{context}
"""},
    ]
    response = client.chat.completions.create(
        model=DEPLOYMENT_NAME,
        messages=messages,
        max_tokens=700,
    )
    sources = "\n".join(f"- {u}" for u in urls)
    return f"{response.choices[0].message.content}\n\n🔗 Sources:\n{sources}"
| # ========================================================= | |
| # ========== ADAPTIVE LEARNING ECOSYSTEM MODULE =========== | |
| # ========================================================= | |
def fetch_github_stats(username):
    """Fetch the GitHub top-languages card (SVG markup) for *username*.

    Returns:
        The first 2000 characters of the SVG response body.

    Raises:
        requests.HTTPError: if the stats service responds with an error status.
    """
    # URL-encode the username so unusual characters can't break the query string.
    url = (
        "https://github-readme-stats-fast.vercel.app/api/top-langs/"
        f"?username={quote(username)}&layout=compact"
    )
    r = requests.get(url, timeout=10)
    # Fail loudly rather than feeding an HTML error page to the LLM as "SVG".
    r.raise_for_status()
    return r.text[:2000]  # SVG summary
def fetch_leetcode_data(username):
    """Fetch profile, solved, skill, and progress stats for a LeetCode user.

    Returns:
        dict mapping endpoint name -> parsed JSON. Endpoints that respond
        with a non-2xx status are omitted (best-effort, as before).
    """
    # URL-encode the username so unusual characters can't break the URL path
    # (consistent with fetch_github_stats).
    base = f"https://leetcode-api-vercel.vercel.app/{quote(username)}"
    endpoints = {
        "profile": "",
        "solved": "/solved",
        "skill": "/skill",
        "progress": "/progress",
    }
    data = {}
    for key, path in endpoints.items():
        r = requests.get(base + path, timeout=10)
        if r.ok:
            data[key] = r.json()
    return data
def generate_learning_plan(github_user, leetcode_user):
    """Generate a personalized, India-focused learning roadmap.

    Combines the user's GitHub language card (SVG) and LeetCode stats (JSON)
    into one prompt and asks the chat model for a structured plan.
    """
    languages_svg = fetch_github_stats(github_user)
    coding_stats = fetch_leetcode_data(leetcode_user)

    user_prompt = f"""
You are an adaptive learning ecosystem focused on India's job market.
GitHub language usage (SVG):
{languages_svg}
LeetCode performance (JSON):
{coding_stats}
Tasks:
1. Infer aptitude level
2. Identify strong & weak skills
3. Suggest 3 suitable job roles in India
4. Create a 6-week adaptive learning roadmap
5. Recommend LeetCode topics to focus next
Be structured and practical.
"""
    system_prompt = (
        "Design personalized learning paths. Make it practical. and use only "
        "the provided data. give correct output within 900 words or below"
    )
    completion = client.chat.completions.create(
        model=DEPLOYMENT_NAME,
        messages=[
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_prompt},
        ],
        max_tokens=900,
    )
    return completion.choices[0].message.content
| # ========================================================= | |
| # ======================= UI ============================= | |
| # ========================================================= | |
# Gradio UI: one tab per feature module, wired to the functions above.
with gr.Blocks() as demo:
    gr.Markdown("# 🚀 AI Career Intelligence Platform")
    with gr.Tabs():
        # -------- TAB 1 --------
        with gr.Tab("💼 Interview Insights"):
            company = gr.Textbox(label="Company Name", placeholder="Amazon, Infosys")
            interview_output = gr.Textbox(lines=18, label="Interview Summary")
            btn1 = gr.Button("Fetch Interview Experience")
            # fetch_and_summarize is async; Gradio awaits coroutines itself.
            btn1.click(fetch_and_summarize, company, interview_output)
        # -------- TAB 2 --------
        with gr.Tab("🎓 Adaptive Learning Ecosystem"):
            github_user = gr.Textbox(label="GitHub Username")
            leetcode_user = gr.Textbox(label="LeetCode Username")
            learning_output = gr.Textbox(lines=20, label="Personalized Learning Plan")
            btn2 = gr.Button("Generate Learning Roadmap")
            btn2.click(
                generate_learning_plan,
                inputs=[github_user, leetcode_user],
                outputs=learning_output
            )
if __name__ == "__main__":
    # Bind to all interfaces on port 7860 (the Hugging Face Spaces default);
    # no public Gradio share link is created.
    demo.launch(share=False, server_name="0.0.0.0", server_port=7860, pwa=True)