| import marimo |
|
|
# Version of marimo that generated this notebook file.
__generated_with = "0.11.17"
# Top-level marimo application; the cells below register themselves on it.
app = marimo.App(width="medium")
|
|
|
|
@app.cell
def _():
    # Base imports shared (via the return tuple) with every other cell.
    import marimo as mo
    import os
    return mo, os
|
|
|
|
@app.cell
def _(os):
    # Stdlib and third-party imports used throughout the notebook; all are
    # re-exported through the return tuple so downstream cells can use them.
    from typing import (
        Any, Dict, List, Optional, Pattern, Set, Union, Tuple
    )
    from pathlib import Path
    from urllib.request import urlopen

    from rich.console import Console
    from rich.theme import Theme
    from rich.text import Text
    from rich import print
    from tqdm import tqdm
    from enum import Enum
    import pandas as pd
    import tempfile
    import requests
    import getpass
    import urllib3
    import base64
    import time
    import json
    import uuid
    import ssl
    import ast
    import re

    # Render dataframes without any truncation (columns, rows, cell width).
    pd.set_option('display.max_columns', None)
    pd.set_option('display.max_rows', None)
    pd.set_option('display.max_colwidth', None)
    pd.set_option('display.width', None)

    # Rich color theme for console output below.
    custom_theme = Theme({
        "info": "blue_violet", "warning": "yellow", "danger": "red", "python": "blue_violet", "string": "cyan", "number": "magenta", "keyword": "bright_blue", "comment": "dim blue_violet", "json":"blue_violet"
    })

    # Force temp files into /tmp, both via env var (child processes) ...
    os.environ['TMPDIR'] = '/tmp'

    # ... and directly on the tempfile module for this process.
    tempfile.tempdir = '/tmp'

    # Wide console; force_jupyter makes rich render inside the notebook.
    console = Console(width=250, color_system="auto", force_jupyter=True)
    return (
        Any,
        Console,
        Dict,
        Enum,
        List,
        Optional,
        Path,
        Pattern,
        Set,
        Text,
        Theme,
        Tuple,
        Union,
        ast,
        base64,
        console,
        custom_theme,
        getpass,
        json,
        pd,
        print,
        re,
        requests,
        ssl,
        tempfile,
        time,
        tqdm,
        urllib3,
        urlopen,
        uuid,
    )
|
|
|
|
@app.cell
def _(mo):
    # Data platform endpoint plus the regional watsonx.ai ML API endpoints
    # offered in the region dropdown below.
    wx_platform_url = "https://api.dataplatform.cloud.ibm.com"
    regions = {
        "US": "https://us-south.ml.cloud.ibm.com",
        "EU": "https://eu-de.ml.cloud.ibm.com",
        "GB": "https://eu-gb.ml.cloud.ibm.com",
        "JP": "https://jp-tok.ml.cloud.ibm.com",
        "AU": "https://au-syd.ml.cloud.ibm.com",
        "CA": "https://ca-tor.ml.cloud.ibm.com"
    }

    # Credentials form: region dropdown, masked api-key field, and space id.
    # .batch wires the named widgets into the markdown placeholders; .form
    # defers downstream reaction until the user submits.
    client_instantiation_form = (
        mo.md('''
    ###**watsonx.ai credentials:**

    {wx_region}

    {wx_api_key}

    {space_id}
    ''').style(max_height="300px", overflow="auto", border_color="blue")
        .batch(
            wx_region = mo.ui.dropdown(regions, label="Select your watsonx.ai region:", value="US", searchable=True),
            wx_api_key = mo.ui.text(placeholder="Add your IBM Cloud api-key...", label="IBM Cloud Api-key:", kind="password"),
            space_id = mo.ui.text(placeholder="Add your watsonx.ai space_id...", label="Space_ID:", kind="text")
        ,)
        .form(show_clear_button=True, bordered=False)
    )

    # Last expression renders the form as the cell's output.
    client_instantiation_form
    return client_instantiation_form, regions, wx_platform_url
|
|
|
|
@app.cell
def _(client_instantiation_form, mo):
    from ibm_watsonx_ai import APIClient, Credentials

    def setup_task_credentials(deployment_client):
        """Delete any existing task credentials in the space and store a fresh one.

        Parameters:
            deployment_client (APIClient): Authenticated watsonx.ai client.

        Returns:
            dict: Details of the newly stored task credential.
        """
        existing_credentials = deployment_client.task_credentials.get_details()

        # Remove stale credentials first so only the new one remains active.
        if "resources" in existing_credentials and existing_credentials["resources"]:
            for cred in existing_credentials["resources"]:
                cred_id = deployment_client.task_credentials.get_id(cred)
                deployment_client.task_credentials.delete(cred_id)

        return deployment_client.task_credentials.store()

    if client_instantiation_form.value:
        wx_credentials = Credentials(
            url=client_instantiation_form.value["wx_region"],
            api_key=client_instantiation_form.value["wx_api_key"]
        )

        deployment_client = APIClient(credentials=wx_credentials, space_id=client_instantiation_form.value["space_id"])

        task_credentials_details = setup_task_credentials(deployment_client)
    else:
        # Form not submitted yet. FIX: wx_credentials was previously left
        # undefined on this branch, so the return tuple raised NameError.
        wx_credentials = None
        deployment_client = None
        task_credentials_details = None

    # Template chooser shown inside the status callout below.
    template_variant = mo.ui.dropdown(["Base","Stream Files to IBM COS [Example]"], label="Code Template:", value="Base")

    # Green callout once a client exists, neutral otherwise.
    if deployment_client is not None:
        client_callout_kind = "success"
    else:
        client_callout_kind = "neutral"

    client_callout = mo.callout(template_variant, kind=client_callout_kind)

    client_callout
    return (
        APIClient,
        Credentials,
        client_callout,
        client_callout_kind,
        deployment_client,
        task_credentials_details,
        template_variant,
        wx_credentials,
    )
|
|
|
|
@app.cell
def _(mo, template_variant):
    # FIX: `file` is part of the return tuple but was only bound on the
    # example branch, so selecting "Base" raised NameError on return.
    file = None
    if template_variant.value == "Stream Files to IBM COS [Example]":
        with open("stream_files_to_cos.py", "r") as file:
            template = file.read()
    else:
        # Default editable skeleton: an outer closure that installs its
        # dependency and returns a watsonx-style `score` callable.
        template = '''def your_function_name():

    import subprocess
    subprocess.check_output('pip install gensim', shell=True)
    import gensim

    def score(input_data):
        message_from_input_payload = payload.get("input_data")[0].get("values")[0][0]
        response_message = "Received message - {0}".format(message_from_input_payload)

        # Score using the pre-defined model
        score_response = {
            'predictions': [{'fields': ['Response_message_field', 'installed_lib_version'],
                            'values': [[response_message, gensim.__version__]]
                            }]
        }
        return score_response

    return score

score = your_function_name()
'''

    # Code editor pre-filled with the chosen template; submits as a form.
    function_editor = (
        mo.md('''
    #### **Create your function by editing the template:**

    {editor}

    ''')
        .batch(
            editor = mo.ui.code_editor(value=template, language="python", min_height=50)
        )
        .form(show_clear_button=True, bordered=False)
    )

    function_editor
    return file, function_editor, template
|
|
|
|
@app.cell
def _(function_editor, mo, os):
    # FIX: every name in the return tuple below was previously defined only
    # inside the conditional branches, raising NameError before the editor
    # form was submitted. Pre-declare them all.
    code = None
    deployable_function = None
    f = None
    file_path = None
    function_name = None
    name = None
    namespace = {}
    obj = None
    save_dir = None

    if function_editor.value:
        code = function_editor.value['editor']

        # NOTE(security): exec() runs arbitrary code from the editor. This is
        # intentional here — the notebook author supplies the code — but do
        # not feed untrusted input through this cell.
        exec(code, namespace)

        # Pick the first callable defined by the executed code.
        for name, obj in namespace.items():
            if callable(obj) and name != "__builtins__":
                function_name = name
                break

        if function_name:
            deployable_function = namespace[function_name]

            mo.md(f"Created deployable function from '{function_name}'")

            # Persist the source so it can later be uploaded from file.
            save_dir = "/tmp/notebook_functions"
            os.makedirs(save_dir, exist_ok=True)

            file_path = os.path.join(save_dir, f"{function_name}.py")
            with open(file_path, "w") as f:
                f.write(code)
        else:
            mo.md("No function found in the editor code")
    return (
        code,
        deployable_function,
        f,
        file_path,
        function_name,
        name,
        namespace,
        obj,
        save_dir,
    )
|
|
|
|
@app.cell
def _(deployment_client, mo, pd):
    # FIX: these names are returned below but were branch-local, so whichever
    # branch ran, the return tuple raised NameError. Pre-declare them.
    framework_mapping = None
    preferred_order = None
    supported_specs = None
    sel_df = None

    if deployment_client:
        # FIX: the listing API was previously called twice; fetch once.
        specs_df = deployment_client.software_specifications.list()
        supported_specs = specs_df[specs_df['STATE'] == 'supported'].reset_index(drop=True)

        # Human-readable framework notes per software-spec name.
        framework_mapping = {
            "tensorflow_rt24.1-py3.11": "TensorFlow",
            "pytorch-onnx_rt24.1-py3.11": "PyTorch",
            "onnxruntime_opset_19": "ONNX or ONNXRuntime",
            "runtime-24.1-py3.11": "AI Services/Python Functions/Python Scripts",
            "autoai-ts_rt24.1-py3.11": "AutoAI",
            "autoai-kb_rt24.1-py3.11": "AutoAI",
            "runtime-24.1-py3.11-cuda": "CUDA-enabled (GPU) Python Runtime",
            "runtime-24.1-r4.3": "R Runtime 4.3",
            "spark-mllib_3.4": "Apache Spark 3.4",
            "autoai-rag_rt24.1-py3.11": "AutoAI RAG"
        }

        # Display order: general-purpose Python runtimes first.
        preferred_order = [
            "runtime-24.1-py3.11",
            "runtime-24.1-py3.11-cuda",
            "runtime-24.1-r4.3",
            "ai-service-v5-software-specification",
            "autoai-rag_rt24.1-py3.11",
            "autoai-ts_rt24.1-py3.11",
            "autoai-kb_rt24.1-py3.11",
            "tensorflow_rt24.1-py3.11",
            "pytorch-onnx_rt24.1-py3.11",
            "onnxruntime_opset_19",
            "spark-mllib_3.4",
        ]

        # Unknown specs sort after all preferred ones.
        supported_specs['SORT_ORDER'] = supported_specs['NAME'].apply(
            lambda x: preferred_order.index(x) if x in preferred_order else len(preferred_order)
        )

        supported_specs = supported_specs.sort_values('SORT_ORDER').reset_index(drop=True)

        supported_specs = supported_specs.drop(columns=['SORT_ORDER'])

        # Drop the deprecation column if the API returned one.
        if 'REPLACEMENT' in supported_specs.columns:
            supported_specs = supported_specs.drop(columns=['REPLACEMENT'])

        supported_specs['NOTES'] = supported_specs['NAME'].map(framework_mapping).fillna("Other")

        selection_table = mo.ui.table(
            supported_specs,
            selection="single",
            label="#### **Select a supported software_spec runtime for your function asset** (For Python Functions select - *'runtime-24.1-py3.11'* ):",
            initial_selection=[0],
            page_size=6
        )
    else:
        # Placeholder table shown until a client has been instantiated.
        sel_df = pd.DataFrame(
            data=[["ID", "Activate deployment_client."]],
            columns=["ID", "VALUE"]
        )

        selection_table = mo.ui.table(
            sel_df,
            selection="single",
            label="You haven't activated the Deployment_Client",
            initial_selection=[0]
        )

    mo.md(f"""---
    <br>
    <br>
    {selection_table}
    <br>
    <br>
    ---
    <br>
    <br>
    """)
    return (
        framework_mapping,
        preferred_order,
        sel_df,
        selection_table,
        supported_specs,
    )
|
|
|
|
@app.cell
def _(mo):
    # Three opt-in toggles controlling which schema metadata is attached
    # to the function asset at upload time.
    input_schema_checkbox, output_schema_checkbox, sample_input_checkbox = (
        mo.ui.checkbox(label="Add input schema (optional)"),
        mo.ui.checkbox(label="Add output schema (optional)"),
        mo.ui.checkbox(label="Add sample input example (optional)"),
    )
    return input_schema_checkbox, output_schema_checkbox, sample_input_checkbox
|
|
|
|
@app.cell
def _(
    input_schema_checkbox,
    mo,
    output_schema_checkbox,
    sample_input_checkbox,
    selection_table,
    template_variant,
):
    # FIX: all seven returned names were defined only inside the `if`,
    # raising NameError on return when nothing was selected yet.
    fnc_nm = None
    uploaded_function_name = None
    tags_editor = None
    software_spec = None
    description_input = None
    func_metadata = None
    schema_metadata = None

    # FIX: guard against an empty selection dataframe before .iloc[0].
    if len(selection_table.value) and selection_table.value['ID'].iloc[0]:
        # Default function name must match the name used in the editor.
        if template_variant.value == "Stream Files to IBM COS [Example]":
            fnc_nm = "stream_file_to_cos"
        else:
            fnc_nm = "your_function_name"

        uploaded_function_name = mo.ui.text(placeholder="<Must be the same as the name in editor>", label="Function Name:", kind="text", value=f"{fnc_nm}", full_width=False)
        tags_editor = mo.ui.array(
            [mo.ui.text(placeholder="Metadata Tags..."), mo.ui.text(), mo.ui.text()],
            label="Optional Metadata Tags"
        )
        # Software spec id taken from the selected table row.
        software_spec = selection_table.value['ID'].iloc[0]

        description_input = mo.ui.text_area(
            placeholder="Write a description for your function...)",
            label="Description",
            max_length=256,
            rows=5,
            full_width=True
        )

        # Description on the left (60%), name + tags on the right (40%).
        func_metadata = mo.hstack([
            description_input,
            mo.hstack([
                uploaded_function_name,
                tags_editor,
            ], justify="start", gap=1, align="start", wrap=True)
        ],
        widths=[0.6,0.4],
        gap=2.75
        )

        schema_metadata = mo.hstack([
            input_schema_checkbox,
            output_schema_checkbox,
            sample_input_checkbox
        ],
        justify="center", gap=1, align="center", wrap=True
        )

        mo.vstack([
            func_metadata,
            mo.md("**Make sure to click the checkboxes before filling in descriptions and tags or they will reset.**"),
            schema_metadata
        ],
        align="center",
        gap=2
        )
    return (
        description_input,
        fnc_nm,
        func_metadata,
        schema_metadata,
        software_spec,
        tags_editor,
        uploaded_function_name,
    )
|
|
|
|
@app.cell
def _(json, mo, template_variant):
    # FIX: `template_variant` is read below but was missing from the cell
    # signature (`def _(json, mo)`), which makes this cell fail with an
    # unresolved-name error in marimo.
    if template_variant.value == "Stream Files to IBM COS [Example]":
        from cos_stream_schema_examples import input_schema, output_schema, sample_input
    else:
        # Placeholder schemas for the user to edit in the editors below.
        input_schema = [
            {
                'id': '1',
                'type': 'struct',
                'fields': [
                    {
                        'name': '<variable name 1>',
                        'type': 'string',
                        'nullable': False,
                        'metadata': {}
                    },
                    {
                        'name': '<variable name 2>',
                        'type': 'string',
                        'nullable': False,
                        'metadata': {}
                    }
                ]
            }
        ]

        output_schema = [
            {
                'id': '1',
                'type': 'struct',
                'fields': [
                    {
                        'name': '<output return name>',
                        'type': 'string',
                        'nullable': False,
                        'metadata': {}
                    }
                ]
            }
        ]

        sample_input = {
            'input_data': [
                {
                    'fields': ['<variable name 1>', '<variable name 2>'],
                    'values': [
                        ['<sample input value for variable 1>', '<sample input value for variable 2>']
                    ]
                }
            ]
        }

    # Editable JSON views of the three metadata structures.
    input_schema_editor = mo.ui.code_editor(value=json.dumps(input_schema, indent=4), language="python", min_height=25)
    output_schema_editor = mo.ui.code_editor(value=json.dumps(output_schema, indent=4), language="python", min_height=25)
    sample_input_editor = mo.ui.code_editor(value=json.dumps(sample_input, indent=4), language="python", min_height=25)

    schema_editors = mo.accordion(
        {
            """**Input Schema Metadata Editor**""": input_schema_editor,
            """**Output Schema Metadata Editor**""": output_schema_editor,
            """**Sample Input Metadata Editor**""": sample_input_editor
        }, multiple=True
    )

    schema_editors
    return (
        input_schema,
        input_schema_editor,
        output_schema,
        output_schema_editor,
        sample_input,
        sample_input_editor,
        schema_editors,
    )
|
|
|
|
@app.cell
def _(
    ast,
    deployment_client,
    description_input,
    function_editor,
    input_schema_checkbox,
    input_schema_editor,
    json,
    mo,
    os,
    output_schema_checkbox,
    output_schema_editor,
    sample_input_checkbox,
    sample_input_editor,
    selection_table,
    software_spec,
    tags_editor,
    uploaded_function_name,
):
    # Reactive status line updated by upload_function below.
    get_upload_status, set_upload_status = mo.state("No uploads yet")

    function_meta = {}
    # FIX: returned below but previously only assigned when tags existed,
    # so the return tuple raised NameError.
    filtered_tags = None

    # FIX: guard against an empty selection dataframe before .iloc[0].
    if len(selection_table.value) and selection_table.value['ID'].iloc[0] and deployment_client is not None:
        # Core metadata: name plus software spec (falls back to the
        # runtime-24.1-py3.11 spec id when none was selected).
        function_meta = {
            deployment_client.repository.FunctionMetaNames.NAME: f"{uploaded_function_name.value}" or "your_function_name",
            deployment_client.repository.FunctionMetaNames.SOFTWARE_SPEC_ID: software_spec or "45f12dfe-aa78-5b8d-9f38-0ee223c47309"
        }

        if tags_editor.value:
            # Drop empty or whitespace-only tag fields.
            filtered_tags = [tag for tag in tags_editor.value if tag and tag.strip()]
            if filtered_tags:
                function_meta[deployment_client.repository.FunctionMetaNames.TAGS] = filtered_tags

        if description_input.value:
            function_meta[deployment_client.repository.FunctionMetaNames.DESCRIPTION] = description_input.value

        # Each editor may hold strict JSON or a Python literal: try JSON
        # first, then fall back to ast.literal_eval (safe literal parsing).
        if input_schema_checkbox.value:
            try:
                function_meta[deployment_client.repository.FunctionMetaNames.INPUT_DATA_SCHEMAS] = json.loads(input_schema_editor.value)
            except json.JSONDecodeError:
                function_meta[deployment_client.repository.FunctionMetaNames.INPUT_DATA_SCHEMAS] = ast.literal_eval(input_schema_editor.value)

        if output_schema_checkbox.value:
            try:
                function_meta[deployment_client.repository.FunctionMetaNames.OUTPUT_DATA_SCHEMAS] = json.loads(output_schema_editor.value)
            except json.JSONDecodeError:
                function_meta[deployment_client.repository.FunctionMetaNames.OUTPUT_DATA_SCHEMAS] = ast.literal_eval(output_schema_editor.value)

        if sample_input_checkbox.value:
            try:
                function_meta[deployment_client.repository.FunctionMetaNames.SAMPLE_SCORING_INPUT] = json.loads(sample_input_editor.value)
            except json.JSONDecodeError:
                function_meta[deployment_client.repository.FunctionMetaNames.SAMPLE_SCORING_INPUT] = ast.literal_eval(sample_input_editor.value)

    def upload_function(function_meta, use_function_object=True):
        """
        Uploads a Python function to watsonx.ai as a deployable asset.
        Parameters:
            function_meta (dict): Metadata for the function
            use_function_object (bool): Whether to use function object (True) or file path (False)
        Returns:
            dict: Details of the uploaded function
        """
        # Remember the cwd so it is restored even if the upload fails.
        original_dir = os.getcwd()

        try:
            code_to_deploy = function_editor.value['editor']

            func_name = uploaded_function_name.value or "your_function_name"

            function_meta[deployment_client.repository.FunctionMetaNames.NAME] = func_name

            # Persist the editor code so it can be imported as a module.
            save_dir = "/tmp/notebook_functions"
            os.makedirs(save_dir, exist_ok=True)
            file_path = f"{save_dir}/{func_name}.py"
            with open(file_path, "w", encoding="utf-8") as f:
                f.write(code_to_deploy)

            if use_function_object:
                import sys
                import importlib.util

                sys.path.append(save_dir)

                # Import the just-written file and pull out the function.
                spec = importlib.util.spec_from_file_location(func_name, file_path)
                module = importlib.util.module_from_spec(spec)
                spec.loader.exec_module(module)

                function_object = getattr(module, func_name)

                # store_function writes artifacts relative to cwd; use /tmp.
                os.chdir('/tmp')

                mo.md(f"Uploading function object: {func_name}")
                func_details = deployment_client.repository.store_function(function_object, function_meta)
            else:
                os.chdir('/tmp')

                mo.md(f"Uploading function from file: {file_path}")
                func_details = deployment_client.repository.store_function(file_path, function_meta)

            set_upload_status(f"Latest Upload - id - {func_details['metadata']['id']}")
            return func_details
        except Exception as e:
            set_upload_status(f"Error uploading function: {str(e)}")
            mo.md(f"Detailed error: {str(e)}")
            raise
        finally:
            os.chdir(original_dir)

    # NOTE(review): mo.state returns a (getter, setter) pair; this binding is
    # never read in the notebook and is kept only because it is returned.
    upload_status = mo.state("No uploads yet")

    upload_button = mo.ui.button(
        label="Upload Function",
        on_click=lambda _: upload_function(function_meta, use_function_object=True),
        kind="success",
        tooltip="Click to upload function to watsonx.ai"
    )

    function_meta
    return (
        filtered_tags,
        function_meta,
        get_upload_status,
        set_upload_status,
        upload_button,
        upload_function,
        upload_status,
    )
|
|
|
|
@app.cell
def _(get_upload_status, mo, upload_button):
    # FIX: both returned names were only assigned after the first click,
    # so the initial render raised NameError on the return statement.
    artifact_id = None
    upload_result = None

    if upload_button.value:
        try:
            # upload_button.value holds the dict returned by upload_function.
            upload_result = upload_button.value
            artifact_id = upload_result['metadata']['id']
        except Exception as e:
            mo.md(f"Error: {str(e)}")

    # Button plus live status line from the upload state.
    upload_func = mo.vstack([
        upload_button,
        mo.md(f"**Status:** {get_upload_status()}")
    ], justify="space-around", align="center")
    return artifact_id, upload_func, upload_result
|
|
|
|
@app.cell
def _(deployment_client, mo, pd, upload_button, upload_func, uuid):
    def reorder_hardware_specifications(df):
        """
        Reorders a hardware specifications dataframe by type and size of environment
        without hardcoding specific hardware types.

        Parameters:
            df (pandas.DataFrame): The hardware specifications dataframe to reorder

        Returns:
            pandas.DataFrame: Reordered dataframe with reset index
        """
        result_df = df.copy()

        def get_sort_key(name):
            # Known sizes/types in display order. Unknown names sort after
            # every known one, alphabetically: (0, idx) < (1, name).
            custom_order = [
                "XXS", "XS", "S", "M", "L", "XL",
                "XS-Spark", "S-Spark", "M-Spark", "L-Spark", "XL-Spark",
                "K80", "K80x2", "K80x4",
                "V100", "V100x2",
                "WXaaS-XS", "WXaaS-S", "WXaaS-M", "WXaaS-L", "WXaaS-XL",
                "Default Spark", "Notebook Default Spark", "ML"
            ]

            if name in custom_order:
                return (0, custom_order.index(name))

            return (1, name)

        result_df['sort_key'] = result_df['NAME'].apply(get_sort_key)
        result_df = result_df.sort_values('sort_key').drop('sort_key', axis=1)
        result_df = result_df.reset_index(drop=True)

        return result_df

    # FIX: these six names are returned below but were branch-local, so
    # whichever branch ran, the return tuple raised NameError.
    hardware_specs = None
    hardware_specs_df = None
    deployment_type = None
    deployment_name = None
    uuid_suffix = None
    hw_df = None

    if deployment_client and upload_button.value:
        hardware_specs = deployment_client.hardware_specifications.list()
        hardware_specs_df = reorder_hardware_specifications(hardware_specs)

        hw_selection_table = mo.ui.table(
            hardware_specs_df,
            selection="single",
            label="#### **Select a supported hardware_specification for your deployment** *(Default: 'XS' - 1vCPU_4GB Ram)*",
            initial_selection=[1],
            page_size=6,
            wrapped_columns=['DESCRIPTION']
        )

        deployment_type = mo.ui.radio(
            options={"Function":"Online (Function Endpoint)","Runnable Job":"Batch (Runnable Jobs)"}, value="Function", label="Select the Type of Deployment:", inline=True
        )
        # Short random suffix keeps default deployment names unique.
        uuid_suffix = str(uuid.uuid4())[:4]

        deployment_name = mo.ui.text(value=f"deployed_func_{uuid_suffix}", label="Deployment Name:", placeholder="<Must be completely unique>")
    else:
        # Placeholder table until a client exists and an upload happened.
        hw_df = pd.DataFrame(
            data=[["ID", "Activate deployment_client."]],
            columns=["ID", "VALUE"]
        )

        hw_selection_table = mo.ui.table(
            hw_df,
            selection="single",
            label="You haven't activated the Deployment_Client",
            initial_selection=[0]
        )

    mo.md(f"""
    <br>
    <br>
    {upload_func}
    <br>
    <br>
    ---
    {hw_selection_table}
    <br>
    <br>


    """)
    return (
        deployment_name,
        deployment_type,
        hardware_specs,
        hardware_specs_df,
        hw_df,
        hw_selection_table,
        reorder_hardware_specifications,
        uuid_suffix,
    )
|
|
|
|
@app.cell
def _(
    artifact_id,
    deployment_client,
    deployment_name,
    deployment_type,
    hw_selection_table,
    mo,
    print,
    upload_button,
):
    # FIX: the original signature also listed `deployment_details`, which no
    # cell ever defines (marimo unresolvable-name error); it was only read —
    # mistakenly — inside get_deployment_id and is removed with that bug.
    def deploy_function(artifact_id, deployment_type):
        """
        Deploys a function asset to watsonx.ai.

        Parameters:
            artifact_id (str): ID of the function artifact to deploy
            deployment_type (object): Type of deployment (online or batch)

        Returns:
            dict: Details of the deployed function
        """
        if not artifact_id:
            print("Error: No artifact ID provided. Please upload a function first.")
            return None

        if deployment_type.value == "Online (Function Endpoint)":
            deployment_props = {
                deployment_client.deployments.ConfigurationMetaNames.NAME: deployment_name.value,
                deployment_client.deployments.ConfigurationMetaNames.ONLINE: {},
                deployment_client.deployments.ConfigurationMetaNames.HARDWARE_SPEC: {"id": selected_hw_config},
                deployment_client.deployments.ConfigurationMetaNames.SERVING_NAME: deployment_name.value,
            }
        else:
            deployment_props = {
                deployment_client.deployments.ConfigurationMetaNames.NAME: deployment_name.value,
                deployment_client.deployments.ConfigurationMetaNames.BATCH: {},
                deployment_client.deployments.ConfigurationMetaNames.HARDWARE_SPEC: {"id": selected_hw_config},
            }

        try:
            print(deployment_props)

            # Verify the asset exists before attempting the deployment.
            asset_details = deployment_client.repository.get_details(artifact_id)
            print(f"Asset found: {asset_details['metadata']['name']} with ID: {asset_details['metadata']['id']}")

            deployed_function = deployment_client.deployments.create(artifact_id, deployment_props)
            print(f"Creating deployment from Asset: {artifact_id} with deployment properties {str(deployment_props)}")
            return deployed_function
        except Exception as e:
            print(f"Deployment error: {str(e)}")
            return None

    def get_deployment_id(deployed_function):
        """Return the deployment id for the given deployment-details dict."""
        # FIX: previously read the undefined `deployment_details` instead of
        # this function's own argument.
        deployment_id = deployment_client.deployments.get_uid(deployed_function)
        return deployment_id

    def get_deployment_info(deployment_id):
        """Fetch full deployment details for the given deployment id."""
        deployment_info = deployment_client.deployments.get_details(deployment_id)
        return deployment_info

    deployment_status = mo.state("No deployments yet")

    # FIX: returned (and captured by deploy_function) but previously only
    # assigned when a row was selected — default to None and guard .iloc[0].
    selected_hw_config = None
    if len(hw_selection_table.value) and hw_selection_table.value['ID'].iloc[0]:
        selected_hw_config = hw_selection_table.value['ID'].iloc[0]

    deploy_button = mo.ui.button(
        label="Deploy Function",
        on_click=lambda _: deploy_function(artifact_id, deployment_type),
        kind="success",
        tooltip="Click to deploy function to watsonx.ai"
    )

    if deployment_client and upload_button.value:
        deployment_definition = mo.hstack([
            deployment_type,
            deployment_name
        ], justify="space-around")
    else:
        deployment_definition = mo.hstack([
            "No Deployment Type Selected",
            "No Deployment Name Provided"
        ], justify="space-around")

    return (
        deploy_button,
        deploy_function,
        deployment_definition,
        deployment_status,
        get_deployment_id,
        get_deployment_info,
        selected_hw_config,
    )
|
|
|
|
@app.cell
def _(deploy_button, deployment_definition, mo):
    # Touch the dependency so this cell re-renders when it changes.
    _ = deployment_definition

    # Button on top, its latest click result underneath.
    deploy_fnc = mo.vstack(
        [deploy_button, deploy_button.value],
        justify="space-around",
        align="center",
    )

    mo.md(f"""
    {deployment_definition}
    <br>
    <br>
    {deploy_fnc}

    ---
    """)
    return (deploy_fnc,)
|
|
|
|
@app.cell(hide_code=True)
def _(deployment_client, mo):
    # --- listing helpers -------------------------------------------------
    def get_deployment_list():
        """Return a dataframe listing all deployments in the space."""
        return deployment_client.deployments.list()

    def get_deployment_ids(df):
        """Extract deployment IDs from a deployments dataframe."""
        return df['ID'].tolist()

    def get_data_assets_list():
        """Return a dataframe listing all data assets in the space."""
        return deployment_client.data_assets.list()

    def get_data_asset_ids(df):
        """Extract asset IDs from a data-assets dataframe."""
        return df['ASSET_ID'].tolist()

    def get_repository_list():
        """Return a dataframe listing all repository assets in the space."""
        return deployment_client.repository.list()

    def get_repository_ids(df):
        """Extract asset IDs from a repository dataframe."""
        return df['ID'].tolist()

    # --- deletion helpers ------------------------------------------------
    def delete_with_progress(ids_list, delete_function, item_type="items"):
        """
        Generic wrapper that adds a progress bar to any deletion function

        Parameters:
            ids_list: List of IDs to delete
            delete_function: Function that deletes a single ID
            item_type: String describing what's being deleted (for display)
        """
        # `or 1` keeps the progress bar valid for an empty list.
        with mo.status.progress_bar(
            total=len(ids_list) or 1,
            title=f"Purging {item_type}",
            subtitle=f"Deleting {item_type}...",
            completion_title="Purge Complete",
            completion_subtitle=f"Successfully deleted {len(ids_list)} {item_type}"
        ) as progress:
            for item_id in ids_list:
                delete_function(item_id)
                progress.update(increment=1)
        return f"Deleted {len(ids_list)} {item_type} successfully"

    # FIX: the lambdas below previously named their parameter `id`,
    # shadowing the builtin; renamed to `asset_id`.
    def delete_deployments(deployment_ids):
        """Delete every deployment in `deployment_ids` with progress."""
        return delete_with_progress(
            deployment_ids,
            lambda asset_id: deployment_client.deployments.delete(asset_id),
            "deployments"
        )

    def delete_data_assets(data_asset_ids):
        """Delete every data asset in `data_asset_ids` with progress."""
        return delete_with_progress(
            data_asset_ids,
            lambda asset_id: deployment_client.data_assets.delete(asset_id),
            "data assets"
        )

    def delete_repository_items(repository_ids):
        """Delete every repository asset in `repository_ids` with progress."""
        return delete_with_progress(
            repository_ids,
            lambda asset_id: deployment_client.repository.delete(asset_id),
            "repository items"
        )
    return (
        delete_data_assets,
        delete_deployments,
        delete_repository_items,
        get_data_asset_ids,
        get_data_assets_list,
        get_deployment_ids,
        get_deployment_list,
        get_repository_ids,
        get_repository_list,
    )
|
|
|
|
@app.cell
def _(get_deployment_id_list, get_deployments_button, mo, purge_deployments):
    # Controls row on top; their latest results stacked underneath.
    _controls = [get_deployments_button, get_deployment_id_list, purge_deployments]
    deployments_purge_stack = mo.hstack(_controls)
    deployments_purge_stack_results = mo.vstack([w.value for w in _controls])

    deployments_purge_tab = mo.vstack([deployments_purge_stack, deployments_purge_stack_results])
    return (
        deployments_purge_stack,
        deployments_purge_stack_results,
        deployments_purge_tab,
    )
|
|
|
|
@app.cell
def _(get_repository_button, get_repository_id_list, mo, purge_repository):
    # Controls row on top; their latest results stacked underneath.
    _controls = [get_repository_button, get_repository_id_list, purge_repository]
    repository_purge_stack = mo.hstack(_controls)

    repository_purge_stack_results = mo.vstack([w.value for w in _controls])

    repository_purge_tab = mo.vstack([repository_purge_stack, repository_purge_stack_results])
    return (
        repository_purge_stack,
        repository_purge_stack_results,
        repository_purge_tab,
    )
|
|
|
|
@app.cell
def _(get_data_asset_id_list, get_data_assets_button, mo, purge_data_assets):
    # Controls row on top; their latest results stacked underneath.
    _controls = [get_data_assets_button, get_data_asset_id_list, purge_data_assets]
    data_assets_purge_stack = mo.hstack(_controls)
    data_assets_purge_stack_results = mo.vstack([w.value for w in _controls])

    data_assets_purge_tab = mo.vstack([data_assets_purge_stack, data_assets_purge_stack_results])
    return (
        data_assets_purge_stack,
        data_assets_purge_stack_results,
        data_assets_purge_tab,
    )
|
|
|
|
@app.cell
def _(data_assets_purge_tab, deployments_purge_tab, mo, repository_purge_tab):
    # Tabbed interface bundling the three purge workflows.
    purge_tabs = mo.ui.tabs(
        {"Purge Deployments": deployments_purge_tab, "Purge Repository Assets": repository_purge_tab,"Purge Data Assets": data_assets_purge_tab }, lazy=False
    )

    # Collapsed accordion keeps destructive tooling out of the way by default.
    asset_purge = mo.accordion(
        {
            """<br>
    #### **Supporting Cleanup Functionality, lists of different assets and purge them if needed** *(purges all detected)*
    <br>""": purge_tabs,
        }
    )

    asset_purge
    return asset_purge, purge_tabs
|
|
|
|
@app.cell(hide_code=True)
def _(
    delete_data_assets,
    delete_deployments,
    delete_repository_items,
    get_data_asset_ids,
    get_data_assets_list,
    get_deployment_ids,
    get_deployment_list,
    get_repository_ids,
    get_repository_list,
    mo,
):
    # Each asset class gets the same three-step control set:
    # fetch dataframe -> convert to ID list -> purge everything listed.

    # --- deployments -----------------------------------------------------
    get_deployments_button = mo.ui.button(
        label="Get Deployments Dataframe",
        on_click=lambda _: get_deployment_list(),
        kind="neutral",
    )
    get_deployment_id_list = mo.ui.button(
        label="Turn Dataframe into List of IDs",
        on_click=lambda _: get_deployment_ids(get_deployments_button.value),
        kind="neutral",
    )
    purge_deployments = mo.ui.button(
        label="Purge Deployments",
        on_click=lambda _: delete_deployments(get_deployment_id_list.value),
        kind="danger",
    )

    # --- repository assets ----------------------------------------------
    get_repository_button = mo.ui.button(
        label="Get Repository Dataframe",
        on_click=lambda _: get_repository_list(),
        kind="neutral",
    )
    get_repository_id_list = mo.ui.button(
        label="Turn Dataframe into List of IDs",
        on_click=lambda _: get_repository_ids(get_repository_button.value),
        kind="neutral",
    )
    purge_repository = mo.ui.button(
        label="Purge Repository Items",
        on_click=lambda _: delete_repository_items(get_repository_id_list.value),
        kind="danger",
    )

    # --- data assets -----------------------------------------------------
    get_data_assets_button = mo.ui.button(
        label="Get Data Assets Dataframe",
        on_click=lambda _: get_data_assets_list(),
        kind="neutral",
    )
    get_data_asset_id_list = mo.ui.button(
        label="Turn Dataframe into List of IDs",
        on_click=lambda _: get_data_asset_ids(get_data_assets_button.value),
        kind="neutral",
    )
    purge_data_assets = mo.ui.button(
        label="Purge Data Assets",
        on_click=lambda _: delete_data_assets(get_data_asset_id_list.value),
        kind="danger",
    )
    return (
        get_data_asset_id_list,
        get_data_assets_button,
        get_deployment_id_list,
        get_deployments_button,
        get_repository_button,
        get_repository_id_list,
        purge_data_assets,
        purge_deployments,
        purge_repository,
    )
|
|
|
|
if __name__ == "__main__":
    # Entry point: run the marimo app when this file is executed directly.
    app.run()
|
|