Chris committed on
Commit
fc0aaf2
·
1 Parent(s): 8430ff4

Final 6.3.3

Browse files
Files changed (1) hide show
  1. src/app.py +21 -13
src/app.py CHANGED
@@ -571,15 +571,16 @@ def format_auth_status(profile: gr.OAuthProfile | None) -> str:
571
  # Check HuggingFace Spaces OAuth configuration
572
  oauth_scopes = os.getenv("OAUTH_SCOPES")
573
  oauth_client_id = os.getenv("OAUTH_CLIENT_ID")
574
- has_inference_scope = oauth_scopes and "inference-api" in oauth_scopes
 
575
 
576
  if not profile:
577
  oauth_status = ""
578
  if oauth_client_id:
579
  if has_inference_scope:
580
- oauth_status = "**πŸ”‘ OAuth Configuration**: βœ… Space configured with `inference-api` scope"
581
  else:
582
- oauth_status = "**⚠️ OAuth Configuration**: Space OAuth enabled but missing `inference-api` scope"
583
  else:
584
  oauth_status = "**❌ OAuth Configuration**: Space not configured for OAuth (missing `hf_oauth: true` in README.md)"
585
 
@@ -591,11 +592,11 @@ Please log in to access GAIA evaluation with Qwen models and LangGraph workflow.
591
  {oauth_status}
592
 
593
  **What you need:**
594
- - πŸ”‘ HuggingFace login with `read` and `inference-api` permissions
595
  - πŸ€– Access to Qwen 2.5 models via HF Inference API
596
  - 🧠 LangGraph multi-agent system capabilities
597
 
598
- **πŸ”‘ OAuth Scopes**: Login requests `inference-api` scope for Qwen model access.
599
  **πŸ“ˆ Expected Performance**: 30%+ GAIA score with full LangGraph workflow and Qwen models.
600
  **⚠️ No Fallbacks**: System requires proper authentication - no simplified responses.
601
  """
@@ -641,9 +642,9 @@ Please log in to access GAIA evaluation with Qwen models and LangGraph workflow.
641
  # HuggingFace Spaces OAuth Environment Status
642
  if oauth_client_id:
643
  if has_inference_scope:
644
- status_parts.append("**🏠 Space OAuth**: βœ… Configured with `inference-api` scope")
645
  else:
646
- status_parts.append("**🏠 Space OAuth**: ⚠️ Missing `inference-api` scope in README.md")
647
  status_parts.append(f"**Available Scopes**: {oauth_scopes}")
648
  else:
649
  status_parts.append("**🏠 Space OAuth**: ❌ Not configured (`hf_oauth: true` missing)")
@@ -687,7 +688,7 @@ Please log in to access GAIA evaluation with Qwen models and LangGraph workflow.
687
  if not has_inference_scope:
688
  status_parts.extend([
689
  "",
690
- "πŸ”§ **Space Configuration Issue**: Add `inference-api` scope to README.md:",
691
  "```yaml",
692
  "hf_oauth_scopes:",
693
  " - inference-api",
@@ -737,8 +738,9 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
737
  if not oauth_client_id:
738
  return "❌ OAuth not configured. Please add 'hf_oauth: true' to README.md", None, format_auth_status(None), None, None, None
739
 
740
- if not oauth_scopes or "inference-api" not in oauth_scopes:
741
- return f"❌ Missing inference-api scope. Current scopes: {oauth_scopes}. Please add 'inference-api' scope to README.md", None, format_auth_status(None), None, None, None
 
742
 
743
  # Get space info for code submission
744
  space_id = os.getenv("SPACE_ID")
@@ -1583,7 +1585,9 @@ Please log in to access GAIA evaluation features with full inference access.
1583
  if oauth_scopes:
1584
  scopes_list = oauth_scopes.split()
1585
  debug_info.append(f"**Available Scopes**: {', '.join(scopes_list)}")
1586
- debug_info.append(f"**Has inference-api scope**: {'inference-api' in scopes_list}")
 
 
1587
  else:
1588
  debug_info.append("**⚠️ No OAuth scopes configured**")
1589
 
@@ -1630,7 +1634,9 @@ Please log in to access GAIA evaluation features with full inference access.
1630
  if oauth_client_id and oauth_scopes:
1631
  debug_info.append("**βœ… OAuth Environment**: Properly configured")
1632
 
1633
- if "inference-api" in oauth_scopes:
 
 
1634
  debug_info.append("**βœ… inference-api Scope**: Available for Qwen model access")
1635
  debug_info.append("**🎯 Expected Behavior**: Login should provide Qwen model access")
1636
  else:
@@ -1647,7 +1653,9 @@ Please log in to access GAIA evaluation features with full inference access.
1647
  else:
1648
  debug_info.append("- ❌ OAuth is not enabled (missing OAUTH_CLIENT_ID)")
1649
 
1650
- if oauth_scopes and "inference-api" in oauth_scopes:
 
 
1651
  debug_info.append("- βœ… inference-api scope is configured")
1652
  debug_info.append("- βœ… Should have Qwen model access when logged in")
1653
  else:
 
571
  # Check HuggingFace Spaces OAuth configuration
572
  oauth_scopes = os.getenv("OAUTH_SCOPES")
573
  oauth_client_id = os.getenv("OAUTH_CLIENT_ID")
574
+ # Accept both 'inference-api' and 'inference' as valid inference scopes
575
+ has_inference_scope = oauth_scopes and ("inference-api" in oauth_scopes or "inference" in oauth_scopes)
576
 
577
  if not profile:
578
  oauth_status = ""
579
  if oauth_client_id:
580
  if has_inference_scope:
581
+ oauth_status = "**πŸ”‘ OAuth Configuration**: βœ… Space configured with inference scope"
582
  else:
583
+ oauth_status = "**⚠️ OAuth Configuration**: Space OAuth enabled but missing inference scope"
584
  else:
585
  oauth_status = "**❌ OAuth Configuration**: Space not configured for OAuth (missing `hf_oauth: true` in README.md)"
586
 
 
592
  {oauth_status}
593
 
594
  **What you need:**
595
+ - πŸ”‘ HuggingFace login with `read` and `inference` permissions
596
  - πŸ€– Access to Qwen 2.5 models via HF Inference API
597
  - 🧠 LangGraph multi-agent system capabilities
598
 
599
+ **πŸ”‘ OAuth Scopes**: Login requests inference scope for Qwen model access.
600
  **πŸ“ˆ Expected Performance**: 30%+ GAIA score with full LangGraph workflow and Qwen models.
601
  **⚠️ No Fallbacks**: System requires proper authentication - no simplified responses.
602
  """
 
642
  # HuggingFace Spaces OAuth Environment Status
643
  if oauth_client_id:
644
  if has_inference_scope:
645
+ status_parts.append("**🏠 Space OAuth**: βœ… Configured with inference scope")
646
  else:
647
+ status_parts.append("**🏠 Space OAuth**: ⚠️ Missing inference scope in README.md")
648
  status_parts.append(f"**Available Scopes**: {oauth_scopes}")
649
  else:
650
  status_parts.append("**🏠 Space OAuth**: ❌ Not configured (`hf_oauth: true` missing)")
 
688
  if not has_inference_scope:
689
  status_parts.extend([
690
  "",
691
+ "πŸ”§ **Space Configuration Issue**: Add inference scope to README.md:",
692
  "```yaml",
693
  "hf_oauth_scopes:",
694
  " - inference-api",
 
738
  if not oauth_client_id:
739
  return "❌ OAuth not configured. Please add 'hf_oauth: true' to README.md", None, format_auth_status(None), None, None, None
740
 
741
+ # Accept both 'inference-api' and 'inference' as valid inference scopes
742
+ if not oauth_scopes or not ("inference-api" in oauth_scopes or "inference" in oauth_scopes):
743
+ return f"❌ Missing inference scope. Current scopes: {oauth_scopes}. Please add inference scope to README.md", None, format_auth_status(None), None, None, None
744
 
745
  # Get space info for code submission
746
  space_id = os.getenv("SPACE_ID")
 
1585
  if oauth_scopes:
1586
  scopes_list = oauth_scopes.split()
1587
  debug_info.append(f"**Available Scopes**: {', '.join(scopes_list)}")
1588
+ # Check for both 'inference-api' and 'inference' as valid inference scopes
1589
+ has_inference = 'inference-api' in scopes_list or 'inference' in scopes_list
1590
+ debug_info.append(f"**Has inference scope**: {has_inference}")
1591
  else:
1592
  debug_info.append("**⚠️ No OAuth scopes configured**")
1593
 
 
1634
  if oauth_client_id and oauth_scopes:
1635
  debug_info.append("**βœ… OAuth Environment**: Properly configured")
1636
 
1637
+ # Check for both scope formats
1638
+ has_inference_scope = "inference-api" in oauth_scopes or "inference" in oauth_scopes
1639
+ if has_inference_scope:
1640
  debug_info.append("**βœ… inference-api Scope**: Available for Qwen model access")
1641
  debug_info.append("**🎯 Expected Behavior**: Login should provide Qwen model access")
1642
  else:
 
1653
  else:
1654
  debug_info.append("- ❌ OAuth is not enabled (missing OAUTH_CLIENT_ID)")
1655
 
1656
+ # Check for both scope formats in success indicators
1657
+ inference_available = oauth_scopes and ("inference-api" in oauth_scopes or "inference" in oauth_scopes)
1658
+ if inference_available:
1659
  debug_info.append("- βœ… inference-api scope is configured")
1660
  debug_info.append("- βœ… Should have Qwen model access when logged in")
1661
  else: