Spaces:
Sleeping
Sleeping
Chris
committed on
Commit
·
fc0aaf2
1
Parent(s):
8430ff4
Final 6.3.3
Browse files
- src/app.py +21 -13
src/app.py
CHANGED
|
@@ -571,15 +571,16 @@ def format_auth_status(profile: gr.OAuthProfile | None) -> str:
|
|
| 571 |
# Check HuggingFace Spaces OAuth configuration
|
| 572 |
oauth_scopes = os.getenv("OAUTH_SCOPES")
|
| 573 |
oauth_client_id = os.getenv("OAUTH_CLIENT_ID")
|
| 574 |
-
|
|
|
|
| 575 |
|
| 576 |
if not profile:
|
| 577 |
oauth_status = ""
|
| 578 |
if oauth_client_id:
|
| 579 |
if has_inference_scope:
|
| 580 |
-
oauth_status = "**π OAuth Configuration**: β
Space configured with
|
| 581 |
else:
|
| 582 |
-
oauth_status = "**β οΈ OAuth Configuration**: Space OAuth enabled but missing
|
| 583 |
else:
|
| 584 |
oauth_status = "**β OAuth Configuration**: Space not configured for OAuth (missing `hf_oauth: true` in README.md)"
|
| 585 |
|
|
@@ -591,11 +592,11 @@ Please log in to access GAIA evaluation with Qwen models and LangGraph workflow.
|
|
| 591 |
{oauth_status}
|
| 592 |
|
| 593 |
**What you need:**
|
| 594 |
-
- π HuggingFace login with `read` and `inference
|
| 595 |
- π€ Access to Qwen 2.5 models via HF Inference API
|
| 596 |
- π§ LangGraph multi-agent system capabilities
|
| 597 |
|
| 598 |
-
**π OAuth Scopes**: Login requests
|
| 599 |
**π Expected Performance**: 30%+ GAIA score with full LangGraph workflow and Qwen models.
|
| 600 |
**β οΈ No Fallbacks**: System requires proper authentication - no simplified responses.
|
| 601 |
"""
|
|
@@ -641,9 +642,9 @@ Please log in to access GAIA evaluation with Qwen models and LangGraph workflow.
|
|
| 641 |
# HuggingFace Spaces OAuth Environment Status
|
| 642 |
if oauth_client_id:
|
| 643 |
if has_inference_scope:
|
| 644 |
-
status_parts.append("**π Space OAuth**: β
Configured with
|
| 645 |
else:
|
| 646 |
-
status_parts.append("**π Space OAuth**: β οΈ Missing
|
| 647 |
status_parts.append(f"**Available Scopes**: {oauth_scopes}")
|
| 648 |
else:
|
| 649 |
status_parts.append("**π Space OAuth**: β Not configured (`hf_oauth: true` missing)")
|
|
@@ -687,7 +688,7 @@ Please log in to access GAIA evaluation with Qwen models and LangGraph workflow.
|
|
| 687 |
if not has_inference_scope:
|
| 688 |
status_parts.extend([
|
| 689 |
"",
|
| 690 |
-
"π§ **Space Configuration Issue**: Add
|
| 691 |
"```yaml",
|
| 692 |
"hf_oauth_scopes:",
|
| 693 |
" - inference-api",
|
|
@@ -737,8 +738,9 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
|
|
| 737 |
if not oauth_client_id:
|
| 738 |
return "β OAuth not configured. Please add 'hf_oauth: true' to README.md", None, format_auth_status(None), None, None, None
|
| 739 |
|
| 740 |
-
|
| 741 |
-
|
|
|
|
| 742 |
|
| 743 |
# Get space info for code submission
|
| 744 |
space_id = os.getenv("SPACE_ID")
|
|
@@ -1583,7 +1585,9 @@ Please log in to access GAIA evaluation features with full inference access.
|
|
| 1583 |
if oauth_scopes:
|
| 1584 |
scopes_list = oauth_scopes.split()
|
| 1585 |
debug_info.append(f"**Available Scopes**: {', '.join(scopes_list)}")
|
| 1586 |
-
|
|
|
|
|
|
|
| 1587 |
else:
|
| 1588 |
debug_info.append("**β οΈ No OAuth scopes configured**")
|
| 1589 |
|
|
@@ -1630,7 +1634,9 @@ Please log in to access GAIA evaluation features with full inference access.
|
|
| 1630 |
if oauth_client_id and oauth_scopes:
|
| 1631 |
debug_info.append("**β
OAuth Environment**: Properly configured")
|
| 1632 |
|
| 1633 |
-
|
|
|
|
|
|
|
| 1634 |
debug_info.append("**β
inference-api Scope**: Available for Qwen model access")
|
| 1635 |
debug_info.append("**π― Expected Behavior**: Login should provide Qwen model access")
|
| 1636 |
else:
|
|
@@ -1647,7 +1653,9 @@ Please log in to access GAIA evaluation features with full inference access.
|
|
| 1647 |
else:
|
| 1648 |
debug_info.append("- β OAuth is not enabled (missing OAUTH_CLIENT_ID)")
|
| 1649 |
|
| 1650 |
-
|
|
|
|
|
|
|
| 1651 |
debug_info.append("- β
inference-api scope is configured")
|
| 1652 |
debug_info.append("- β
Should have Qwen model access when logged in")
|
| 1653 |
else:
|
|
|
|
| 571 |
# Check HuggingFace Spaces OAuth configuration
|
| 572 |
oauth_scopes = os.getenv("OAUTH_SCOPES")
|
| 573 |
oauth_client_id = os.getenv("OAUTH_CLIENT_ID")
|
| 574 |
+
# Accept both 'inference-api' and 'inference' as valid inference scopes
|
| 575 |
+
has_inference_scope = oauth_scopes and ("inference-api" in oauth_scopes or "inference" in oauth_scopes)
|
| 576 |
|
| 577 |
if not profile:
|
| 578 |
oauth_status = ""
|
| 579 |
if oauth_client_id:
|
| 580 |
if has_inference_scope:
|
| 581 |
+
oauth_status = "**🔐 OAuth Configuration**: ✅ Space configured with inference scope"
|
| 582 |
else:
|
| 583 |
+
oauth_status = "**⚠️ OAuth Configuration**: Space OAuth enabled but missing inference scope"
|
| 584 |
else:
|
| 585 |
oauth_status = "**❌ OAuth Configuration**: Space not configured for OAuth (missing `hf_oauth: true` in README.md)"
|
| 586 |
|
|
|
|
| 592 |
{oauth_status}
|
| 593 |
|
| 594 |
**What you need:**
|
| 595 |
+
- 🔐 HuggingFace login with `read` and `inference` permissions
|
| 596 |
- 🤖 Access to Qwen 2.5 models via HF Inference API
|
| 597 |
- 🧠 LangGraph multi-agent system capabilities
|
| 598 |
|
| 599 |
+
**🔐 OAuth Scopes**: Login requests inference scope for Qwen model access.
|
| 600 |
**📈 Expected Performance**: 30%+ GAIA score with full LangGraph workflow and Qwen models.
|
| 601 |
**⚠️ No Fallbacks**: System requires proper authentication - no simplified responses.
|
| 602 |
"""
|
|
|
|
| 642 |
# HuggingFace Spaces OAuth Environment Status
|
| 643 |
if oauth_client_id:
|
| 644 |
if has_inference_scope:
|
| 645 |
+
status_parts.append("**🔐 Space OAuth**: ✅ Configured with inference scope")
|
| 646 |
else:
|
| 647 |
+
status_parts.append("**🔐 Space OAuth**: ⚠️ Missing inference scope in README.md")
|
| 648 |
status_parts.append(f"**Available Scopes**: {oauth_scopes}")
|
| 649 |
else:
|
| 650 |
status_parts.append("**🔐 Space OAuth**: ❌ Not configured (`hf_oauth: true` missing)")
|
|
|
|
| 688 |
if not has_inference_scope:
|
| 689 |
status_parts.extend([
|
| 690 |
"",
|
| 691 |
+
"🔧 **Space Configuration Issue**: Add inference scope to README.md:",
|
| 692 |
"```yaml",
|
| 693 |
"hf_oauth_scopes:",
|
| 694 |
" - inference-api",
|
|
|
|
| 738 |
if not oauth_client_id:
|
| 739 |
return "❌ OAuth not configured. Please add 'hf_oauth: true' to README.md", None, format_auth_status(None), None, None, None
|
| 740 |
|
| 741 |
+
# Accept both 'inference-api' and 'inference' as valid inference scopes
|
| 742 |
+
if not oauth_scopes or not ("inference-api" in oauth_scopes or "inference" in oauth_scopes):
|
| 743 |
+
return f"❌ Missing inference scope. Current scopes: {oauth_scopes}. Please add inference scope to README.md", None, format_auth_status(None), None, None, None
|
| 744 |
|
| 745 |
# Get space info for code submission
|
| 746 |
space_id = os.getenv("SPACE_ID")
|
|
|
|
| 1585 |
if oauth_scopes:
|
| 1586 |
scopes_list = oauth_scopes.split()
|
| 1587 |
debug_info.append(f"**Available Scopes**: {', '.join(scopes_list)}")
|
| 1588 |
+
# Check for both 'inference-api' and 'inference' as valid inference scopes
|
| 1589 |
+
has_inference = 'inference-api' in scopes_list or 'inference' in scopes_list
|
| 1590 |
+
debug_info.append(f"**Has inference scope**: {has_inference}")
|
| 1591 |
else:
|
| 1592 |
debug_info.append("**⚠️ No OAuth scopes configured**")
|
| 1593 |
|
|
|
|
| 1634 |
if oauth_client_id and oauth_scopes:
|
| 1635 |
debug_info.append("**✅ OAuth Environment**: Properly configured")
|
| 1636 |
|
| 1637 |
+
# Check for both scope formats
|
| 1638 |
+
has_inference_scope = "inference-api" in oauth_scopes or "inference" in oauth_scopes
|
| 1639 |
+
if has_inference_scope:
|
| 1640 |
debug_info.append("**✅ inference-api Scope**: Available for Qwen model access")
|
| 1641 |
debug_info.append("**🎯 Expected Behavior**: Login should provide Qwen model access")
|
| 1642 |
else:
|
|
|
|
| 1653 |
else:
|
| 1654 |
debug_info.append("- ❌ OAuth is not enabled (missing OAUTH_CLIENT_ID)")
|
| 1655 |
|
| 1656 |
+
# Check for both scope formats in success indicators
|
| 1657 |
+
inference_available = oauth_scopes and ("inference-api" in oauth_scopes or "inference" in oauth_scopes)
|
| 1658 |
+
if inference_available:
|
| 1659 |
debug_info.append("- ✅ inference-api scope is configured")
|
| 1660 |
debug_info.append("- ✅ Should have Qwen model access when logged in")
|
| 1661 |
else:
|