# NOTE(review): the three lines below were Hugging Face Spaces page-status
# residue ("Spaces: Sleeping") left over from scraping, not program text.
# Kept as a comment so the file remains valid Python.
import argparse  # Command-line argument parsing
import json
import os
import tempfile
from datetime import datetime
from typing import Optional

import boto3
import requests
from botocore.exceptions import NoCredentialsError
def generate_aibom(api_url: str, model_id: str, include_inference: bool = True,
                   use_best_practices: bool = True, hf_token: Optional[str] = None,
                   timeout: float = 120.0) -> dict:
    """Generate an AI SBOM (AIBOM) for a Hugging Face model via a remote API.

    Args:
        api_url: Endpoint of the AIBOM generation service.
        model_id: Hugging Face model identifier (e.g. "org/model").
        include_inference: Ask the service to include inference information.
        use_best_practices: Ask the service to apply best practices.
        hf_token: Optional Hugging Face token for private models; only added
            to the payload when truthy.
        timeout: Seconds to wait for the HTTP request. New parameter with a
            generous default — SBOM generation can be slow, but the original
            call had no timeout at all and could hang indefinitely.

    Returns:
        The service's JSON response parsed into a dict.

    Raises:
        requests.exceptions.RequestException: on network failure, timeout,
            or a non-2xx HTTP status.
    """
    headers = {"Content-Type": "application/json"}
    payload = {
        "model_id": model_id,
        "include_inference": include_inference,
        "use_best_practices": use_best_practices,
    }
    if hf_token:
        payload["hf_token"] = hf_token
    # timeout= prevents the script from blocking forever if the service stalls.
    response = requests.post(api_url, json=payload, headers=headers, timeout=timeout)
    response.raise_for_status()  # Surface 4xx/5xx as an exception.
    return response.json()
def upload_to_s3(bucket_name: str, file_name: str, file_content: bytes, aws_access_key: str, aws_secret_key: str):
    """Store *file_content* in S3 bucket *bucket_name* under key *file_name*.

    Credentials are passed in explicitly. A missing-credentials condition is
    reported on stdout rather than raised, matching the script's best-effort
    style.
    """
    client = boto3.client(
        's3',
        aws_access_key_id=aws_access_key,
        aws_secret_access_key=aws_secret_key,
    )
    try:
        client.put_object(Bucket=bucket_name, Key=file_name, Body=file_content)
    except NoCredentialsError:
        print("Credentials not available.")
    else:
        print(f"File {file_name} uploaded successfully to bucket {bucket_name}.")
if __name__ == "__main__":
    # --- Command-line interface -------------------------------------------
    parser = argparse.ArgumentParser(description='Generate AI SBOM for a Hugging Face model')
    parser.add_argument('--model_id', type=str, required=True,
                        help='The Hugging Face model ID (e.g., meta-llama/Llama-2-7b-chat-hf)')
    # BUG FIX: the original used action='store_true' together with
    # default=True, which made both flags no-ops — they could never be
    # turned off. Keep the default of True (backward compatible) and add
    # explicit --no-* switches so callers can actually disable them.
    parser.add_argument('--include_inference', dest='include_inference',
                        action='store_true', default=True,
                        help='Include inference information in the AIBOM (default)')
    parser.add_argument('--no-include_inference', dest='include_inference',
                        action='store_false',
                        help='Exclude inference information from the AIBOM')
    parser.add_argument('--use_best_practices', dest='use_best_practices',
                        action='store_true', default=True,
                        help='Use best practices in the AIBOM generation (default)')
    parser.add_argument('--no-use_best_practices', dest='use_best_practices',
                        action='store_false',
                        help='Do not use best practices in the AIBOM generation')
    parser.add_argument('--hf_token', type=str, default=None,
                        help='Hugging Face token for accessing private models')
    args = parser.parse_args()

    # --- Configuration -----------------------------------------------------
    API_URL = "https://nothuman2718-aibom.hf.space/api/generate"
    # SECURITY: prefer environment variables over credentials hard-coded in
    # source; the literal placeholders remain only as a last-resort fallback
    # so existing behavior is unchanged when the variables are unset.
    BUCKET_NAME = os.environ.get("AIBOM_S3_BUCKET", "your-s3-bucket-name")
    AWS_ACCESS_KEY = os.environ.get("AWS_ACCESS_KEY_ID", "your-aws-access-key")
    AWS_SECRET_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "your-aws-secret-key")

    # Model configuration from command line arguments
    MODEL_ID = args.model_id
    INCLUDE_INFERENCE = args.include_inference
    USE_BEST_PRACTICES = args.use_best_practices
    HF_TOKEN = args.hf_token

    try:
        print(f"Generating AIBOM for model: {MODEL_ID}...")
        aibom_response = generate_aibom(
            API_URL,
            MODEL_ID,
            include_inference=INCLUDE_INFERENCE,
            use_best_practices=USE_BEST_PRACTICES,
            hf_token=HF_TOKEN
        )

        # Build a filesystem-safe, timestamped file name from the model ID.
        safe_model_id = MODEL_ID.replace("/", "_")
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        file_name = f"aibom_{safe_model_id}_{timestamp}.json"

        # Save locally first. Use the platform temp dir instead of a
        # hard-coded /tmp (which does not exist on Windows), and an explicit
        # encoding so the JSON file is UTF-8 regardless of locale.
        local_file_path = os.path.join(tempfile.gettempdir(), file_name)
        with open(local_file_path, 'w', encoding='utf-8') as f:
            json.dump(aibom_response, f, indent=2)
        print(f"AIBOM saved locally to {local_file_path}")

        # Upload to S3
        print("Uploading AIBOM to S3...")
        with open(local_file_path, 'rb') as f:
            file_content = f.read()
        upload_to_s3(BUCKET_NAME, file_name, file_content, AWS_ACCESS_KEY, AWS_SECRET_KEY)

        # Remove the local copy once the upload attempt is done.
        os.remove(local_file_path)
        print(f"Local file removed. AIBOM processing completed.")
    except requests.exceptions.RequestException as e:
        print(f"Error generating AIBOM: {e}")
    except Exception as e:
        # Top-level boundary: report anything unexpected instead of crashing.
        print(f"An error occurred: {e}")