Upload folder using huggingface_hub
- app.py +17 -12
- deploy_fresh.py +100 -0
app.py CHANGED
@@ -18,22 +18,27 @@ def predict():
     data = request.json
     year = data.get("year", "2026")

-    # Construct technical context for SciBERT
+    # Construct technical context for SciBERT for different tech categories
+    categories = ["AI/ML", "Quantum", "Biotech", "Computing"]
+    tech_scores = {}

+    for category in categories:
+        # Create category-specific input text
+        input_text = f"Scientific and technological advancements in {category} emergent in the year {year}."
+
+        # Tokenization
+        inputs = tokenizer(input_text, return_tensors="pt", truncation=True, max_length=512)
+
+        # Prediction
+        with torch.no_grad():
+            outputs = model(**inputs)
+        # Get the first prediction score for this category
+        prediction = torch.softmax(outputs.logits, dim=1).tolist()[0]
+        tech_scores[category] = prediction[0]

-    # This result would then be sent to the Conductor/LangGraph for Econ processing
     return jsonify({
         "year": year,
-        "
+        "tech_scores": tech_scores,
         "status": "SENT_TO_CONDUCTOR"
     })
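For reference, the handler still reads a JSON body with a `year` field and now returns the per-category scores alongside the status. A minimal client-side sketch, assuming the handler is exposed as /predict on the Space's default port (neither the route path nor the port appears in this diff):

# Hypothetical client call; the /predict path, host, and port are assumptions.
import requests

resp = requests.post("http://localhost:7860/predict", json={"year": "2026"})
print(resp.json())
# e.g. {"year": "2026", "tech_scores": {"AI/ML": 0.41, ...}, "status": "SENT_TO_CONDUCTOR"}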
deploy_fresh.py ADDED
@@ -0,0 +1,100 @@
#!/usr/bin/env python3
"""
Fresh deployment script - creates a new HF Space from scratch
"""

import os
import subprocess
from pathlib import Path

def run_command(cmd):
    """Run a command and return the result"""
    try:
        result = subprocess.run(cmd, shell=True, capture_output=True, text=True)
        if result.returncode != 0:
            print(f"Error: {result.stderr}")
            return False
        print(result.stdout)
        return True
    except Exception as e:
        print(f"Exception: {e}")
        return False

def deploy_fresh():
    """Deploy to a fresh HF Space"""
    hf_token = os.getenv('HF_TOKEN')
    if not hf_token:
        print("HF_TOKEN not set. Please run:")
        print("set HF_TOKEN=your_huggingface_token_here")
        return False

    print("Creating fresh HF Space deployment...")

    # Create deployment script
    deploy_script = f'''
from huggingface_hub import HfApi

api = HfApi(token="{hf_token}")

# Get username first
try:
    user_info = api.whoami()
    username = user_info["name"]
    print(f"Username: {{username}}")
except Exception as e:
    print(f"Could not get username: {{e}}")
    username = "gsstec"  # fallback

repo_id = f"{{username}}/tec-app"
print(f"Creating space: {{repo_id}}")

try:
    # Create the space
    api.create_repo(
        repo_id=repo_id,
        repo_type="space",
        space_sdk="docker",
        exist_ok=True,
        private=False
    )

    print("Space created successfully!")
    print("Uploading files...")

    # Upload files
    api.upload_folder(
        folder_path=".",
        repo_id=repo_id,
        repo_type="space",
        ignore_patterns=[".git", "__pycache__", "*.pyc", "temp_deploy.py", "deploy_fresh.py", "redeploy.py"]
    )

    print(f"Successfully deployed to https://huggingface.co/spaces/{{repo_id}}")
    print("Your app will be available in a few minutes!")
    print("The Docker container will build and start automatically.")

except Exception as e:
    print(f"Deployment failed: {{e}}")
    import traceback
    traceback.print_exc()
'''

    with open('temp_deploy.py', 'w') as f:
        f.write(deploy_script)

    success = run_command("python temp_deploy.py")

    # Cleanup
    if Path('temp_deploy.py').exists():
        Path('temp_deploy.py').unlink()

    return success

if __name__ == "__main__":
    print("Fresh HF Space Deployment")
    print("=" * 25)

    if deploy_fresh():
        print("Deployment completed!")
    else:
        print("Deployment failed!")