Parthiban97 committed on
Commit
e024e83
·
verified ·
1 Parent(s): 0315c73

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +26 -6
app.py CHANGED
@@ -423,10 +423,16 @@ def create_consistency_hash(resume_text, job_description, prompt_type):
423
  content = f"{resume_text[:1000]}{job_description[:1000]}{prompt_type}"
424
  return hashlib.md5(content.encode()).hexdigest()
425
 
426
- def get_available_model():
427
  """Get the first available model from the fallback chain"""
 
 
 
 
428
  for model in MODEL_FALLBACK_CHAIN:
429
  try:
 
 
430
  test_model = genai.GenerativeModel(
431
  model,
432
  safety_settings={
@@ -436,6 +442,7 @@ def get_available_model():
436
  genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
437
  }
438
  )
 
439
  # Test with a simple prompt
440
  test_response = test_model.generate_content(
441
  "Say 'OK'",
@@ -444,9 +451,15 @@ def get_available_model():
444
  max_output_tokens=10
445
  )
446
  )
 
447
  if test_response.text:
 
448
  return model
449
- except Exception:
 
 
 
 
450
  continue
451
 
452
  raise Exception("No available Gemini models found")
@@ -675,7 +688,7 @@ def load_system_status():
675
  issues = validate_configuration()
676
  return issues
677
 
678
- def perform_enhanced_analysis(resume_text, job_description, analysis_type, custom_query=None):
679
  """Main analysis function with all improvements"""
680
 
681
  # Initialize cache
@@ -688,7 +701,7 @@ def perform_enhanced_analysis(resume_text, job_description, analysis_type, custo
688
  consistency_hash = create_consistency_hash(optimized_resume, optimized_job, analysis_type)
689
 
690
  # Try to get from cache first
691
- model_id = get_available_model()
692
  cached_response = get_cached_response(consistency_hash, model_id)
693
 
694
  if cached_response:
@@ -802,10 +815,14 @@ with st.sidebar:
802
  api_key = st.text_input("πŸ” Google API Key", type="password", help="Your Gemini API key for AI analysis")
803
  st.session_state["api_key"] = api_key
804
 
 
 
 
 
805
  if api_key:
806
  try:
807
  genai.configure(api_key=api_key)
808
- model_id = get_available_model()
809
  st.success(f"βœ… Connected to {model_id}")
810
  except Exception as e:
811
  st.error(f"❌ API Key Error: {str(e)}")
@@ -902,9 +919,12 @@ if analysis_triggered:
902
  analysis_type = "custom"
903
 
904
  try:
 
 
 
905
  # Perform analysis
906
  response, consistency_hash = perform_enhanced_analysis(
907
- pdf_content, input_text, analysis_type, custom_query
908
  )
909
 
910
  # Display results
 
423
  content = f"{resume_text[:1000]}{job_description[:1000]}{prompt_type}"
424
  return hashlib.md5(content.encode()).hexdigest()
425
 
426
+ def get_available_model(force_primary=True):
427
  """Get the first available model from the fallback chain"""
428
+ if force_primary:
429
+ # Force use of primary model without testing
430
+ return MODEL_FALLBACK_CHAIN[0] # gemini-2.5-flash
431
+
432
  for model in MODEL_FALLBACK_CHAIN:
433
  try:
434
+ st.info(f"πŸ” Testing model: {model}")
435
+
436
  test_model = genai.GenerativeModel(
437
  model,
438
  safety_settings={
 
442
  genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
443
  }
444
  )
445
+
446
  # Test with a simple prompt
447
  test_response = test_model.generate_content(
448
  "Say 'OK'",
 
451
  max_output_tokens=10
452
  )
453
  )
454
+
455
  if test_response.text:
456
+ st.success(f"βœ… {model} - Test successful!")
457
  return model
458
+ else:
459
+ st.warning(f"⚠️ {model} - No response text")
460
+
461
+ except Exception as e:
462
+ st.error(f"❌ {model} - Failed: {str(e)}")
463
  continue
464
 
465
  raise Exception("No available Gemini models found")
 
688
  issues = validate_configuration()
689
  return issues
690
 
691
+ def perform_enhanced_analysis(resume_text, job_description, analysis_type, custom_query=None, force_primary=True):
692
  """Main analysis function with all improvements"""
693
 
694
  # Initialize cache
 
701
  consistency_hash = create_consistency_hash(optimized_resume, optimized_job, analysis_type)
702
 
703
  # Try to get from cache first
704
+ model_id = get_available_model(force_primary=force_primary)
705
  cached_response = get_cached_response(consistency_hash, model_id)
706
 
707
  if cached_response:
 
815
  api_key = st.text_input("πŸ” Google API Key", type="password", help="Your Gemini API key for AI analysis")
816
  st.session_state["api_key"] = api_key
817
 
818
+ # Model selection option
819
+ force_primary = st.checkbox("🎯 Force Primary Model", value=True, help="Use gemini-2.5-flash directly without testing")
820
+ st.session_state["force_primary"] = force_primary
821
+
822
  if api_key:
823
  try:
824
  genai.configure(api_key=api_key)
825
+ model_id = get_available_model(force_primary=force_primary)
826
  st.success(f"βœ… Connected to {model_id}")
827
  except Exception as e:
828
  st.error(f"❌ API Key Error: {str(e)}")
 
919
  analysis_type = "custom"
920
 
921
  try:
922
+ # Get force_primary setting
923
+ force_primary_setting = st.session_state.get("force_primary", True)
924
+
925
  # Perform analysis
926
  response, consistency_hash = perform_enhanced_analysis(
927
+ pdf_content, input_text, analysis_type, custom_query, force_primary_setting
928
  )
929
 
930
  # Display results