# hf spaces setup files — HuggingFace commit 4ea17aa (Kaito117)
#!/usr/bin/env python3
"""
FastAPI application for LinkedIn Candidate Sourcing Agent
Deployable to HuggingFace Spaces
"""
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel, Field
from typing import List, Optional
import asyncio
import logging
from datetime import datetime
# Import your existing components
from app.models.schemas import JobProcessingRequest, JobDescription
from app.services.agent import LinkedInSourcingAgent
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# FastAPI app — metadata below drives the auto-generated OpenAPI UIs
# served at /docs (Swagger) and /redoc.
app = FastAPI(
    title="LinkedIn Sourcing Agent API",
    description="AI-powered candidate sourcing and scoring system",
    version="1.0.0",
    docs_url="/docs",
    redoc_url="/redoc"
)

# Add CORS middleware so browser clients on any origin can call the API.
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# very permissive — acceptable for a public demo Space, but restrict origins
# before production use.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Initialize the agent once at import time; all request handlers share this
# single instance.
agent = LinkedInSourcingAgent()
# API Models
class JobInput(BaseModel):
    """Request payload for POST /source-candidates.

    Describes the job to source candidates for, plus tuning knobs for how
    many candidates to return and the minimum scoring confidence.
    """

    # Required job identity fields
    title: str = Field(..., description="Job title", example="Software Engineer, ML Research")
    company: str = Field(..., description="Company name", example="Windsurf")
    # Optional location; None means "not specified"
    location: Optional[str] = Field(None, description="Job location", example="Mountain View, CA")
    # Requirements default to an empty list (default_factory avoids a shared
    # mutable default across instances)
    requirements: List[str] = Field(
        default_factory=list,
        description="List of job requirements",
        example=[
            "Experience with large language models (LLMs)",
            "Strong background in machine learning and AI",
            "PhD or Master's in Computer Science or related field"
        ]
    )
    description: Optional[str] = Field(
        None,
        description="Detailed job description",
        example="We are looking for a talented ML Research Engineer to join our team working on cutting-edge AI technologies."
    )
    # Bounded by validation: 1..50 candidates, confidence in [0, 1]
    max_candidates: int = Field(10, ge=1, le=50, description="Maximum number of candidates to find")
    confidence_threshold: float = Field(0.3, ge=0, le=1, description="Minimum confidence threshold")
class CandidateOutput(BaseModel):
    """A single scored candidate as returned by the API.

    Produced from the internal ScoredCandidate via convert_scored_candidate().
    """

    name: str
    linkedin_url: str
    fit_score: float          # raw fit score from the scoring pipeline
    confidence: float         # scoring confidence
    adjusted_score: float     # fit score adjusted by confidence
    key_highlights: List[str]  # human-readable summary bullets (education, roles, skills)
    outreach_message: str     # generated personalized outreach text
    profile_summary: dict     # free-form profile details incl. score breakdown
class SourcingResponse(BaseModel):
    """Response payload for POST /source-candidates."""

    job_id: str
    job_title: str
    company: str
    candidates_found: int      # total candidates discovered by the search
    candidates_scored: int     # number that made it through scoring
    top_candidates: List[CandidateOutput]
    processing_time: float     # seconds, as reported by the agent
    status: str
    timestamp: datetime        # server-side time the response was built
# Helper function to convert ScoredCandidate to API format
def convert_scored_candidate(candidate) -> CandidateOutput:
    """Translate an internal ScoredCandidate into the public API shape.

    Builds a short list of human-readable highlights (education, current and
    previous roles, top skills, location) and a profile summary dict that
    includes the per-dimension score breakdown.
    """
    profile = candidate.profile
    highlights = []

    # Education: at most the first two entries, and only those with both
    # an institution and a degree.
    for edu in (profile.education or [])[:2]:
        if edu.institution and edu.degree:
            highlights.append(f"{edu.degree} from {edu.institution}")

    # Experience: first entry is treated as current, second as previous.
    if profile.experience:
        for label, exp in zip(("Current", "Previous"), profile.experience):
            highlights.append(f"{label}: {exp.title} at {exp.company}")

    # Top five skills, comma-joined.
    if profile.skills:
        highlights.append(f"Skills: {', '.join(profile.skills[:5])}")

    if profile.location:
        highlights.append(f"Location: {profile.location}")

    breakdown = candidate.score_breakdown
    summary = {
        "name": profile.name,
        "headline": profile.headline,
        "current_company": profile.current_company,
        "current_position": profile.current_position,
        "location": profile.location,
        "education_count": len(profile.education),
        "experience_count": len(profile.experience),
        "skills_count": len(profile.skills),
        "score_breakdown": {
            "education": breakdown.education,
            "career_trajectory": breakdown.career_trajectory,
            "company_relevance": breakdown.company_relevance,
            "experience_match": breakdown.experience_match,
        },
    }

    return CandidateOutput(
        name=profile.name,
        linkedin_url=profile.linkedin_url,
        fit_score=candidate.fit_score,
        confidence=candidate.confidence,
        adjusted_score=candidate.adjusted_score,
        key_highlights=highlights,
        outreach_message=candidate.outreach_message,
        profile_summary=summary,
    )
@app.get("/")
async def root():
    """Health check endpoint"""
    payload = {
        "message": "LinkedIn Sourcing Agent API",
        "status": "active",
        "version": "1.0.0",
        "docs": "/docs",
    }
    return payload
@app.get("/health")
async def health_check():
    """Detailed health check"""
    # Timestamp is computed per request so monitors see a fresh value.
    now = datetime.now().isoformat()
    return {
        "status": "healthy",
        "timestamp": now,
        "service": "linkedin-sourcing-agent",
    }
@app.post("/source-candidates", response_model=SourcingResponse)
async def source_candidates(job_input: JobInput):
    """
    Source and score candidates for a given job description

    This endpoint:
    1. Searches for LinkedIn candidates based on job requirements
    2. Extracts and analyzes candidate profiles
    3. Scores candidates using AI-powered algorithms
    4. Generates personalized outreach messages
    5. Returns top candidates ranked by fit score

    Raises:
        HTTPException: 500 with the underlying error message if processing fails.
    """
    try:
        logger.info("Processing job request: %s at %s", job_input.title, job_input.company)

        # Convert API input to internal format; synthesize a minimal
        # description when the caller omitted one.
        job_desc = JobDescription(
            title=job_input.title,
            company=job_input.company,
            location=job_input.location,
            requirements=job_input.requirements,
            description=job_input.description or f"Join {job_input.company} as a {job_input.title}"
        )

        # Create processing request
        request = JobProcessingRequest(
            job_description=job_desc,
            max_candidates=job_input.max_candidates,
            confidence_threshold=job_input.confidence_threshold
        )

        # Run the full pipeline (search -> extract -> score -> outreach)
        result = await agent.process_job(request)

        # Convert candidates to API format.
        # BUGFIX: this slice was hard-coded to 10, silently ignoring the
        # caller's max_candidates (validated up to 50). Honor it instead;
        # the default of 10 keeps previous behavior unchanged.
        api_candidates = []
        for candidate in result.candidates[:job_input.max_candidates]:
            try:
                api_candidates.append(convert_scored_candidate(candidate))
            except Exception as e:
                # Best-effort: skip candidates that fail conversion rather
                # than failing the whole request.
                logger.warning("Failed to convert candidate: %s", e)

        response = SourcingResponse(
            job_id=result.job_id,
            job_title=job_input.title,
            company=job_input.company,
            candidates_found=result.candidates_found,
            candidates_scored=len(result.candidates),
            top_candidates=api_candidates,
            processing_time=result.processing_time,
            status=result.status,
            timestamp=datetime.now()
        )

        logger.info("Successfully processed job. Found %d candidates", len(api_candidates))
        return response

    except Exception as e:
        # Top-level boundary: log and surface as a 500, preserving the cause
        # chain for server-side debugging.
        logger.error("Error processing job request: %s", e)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to process job request: {str(e)}"
        ) from e
@app.get("/example")
async def get_example():
    """Get an example job input for testing"""
    requirements = [
        "Experience with large language models (LLMs)",
        "Strong background in machine learning and AI",
        "PhD or Master's in Computer Science or related field",
        "Experience with search and ranking systems",
        "Python and deep learning frameworks",
    ]
    example = {
        "title": "Software Engineer, ML Research",
        "company": "Windsurf",
        "location": "Mountain View, CA",
        "requirements": requirements,
        "description": "We are looking for a talented ML Research Engineer to join our team working on cutting-edge AI technologies. You will be responsible for developing and improving large language models, search algorithms, and AI-powered features.",
        "max_candidates": 5,
        "confidence_threshold": 0.3,
    }
    return {"example_input": example}
if __name__ == "__main__":
    # Local / Spaces entry point. HuggingFace Spaces routes traffic to
    # port 7860, so bind there on all interfaces.
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=7860)