File size: 2,658 Bytes
c2feb3e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
# SPDX-FileCopyrightText: 2025 UL Research Institutes
# SPDX-License-Identifier: Apache-2.0

import sys
import time
from pathlib import Path

import click

from dyff.client import Client
from dyff.schema.platform import *
from dyff.schema.requests import *

from app.api.models import PredictionResponse

# ----------------------------------------------------------------------------

# Absolute directory containing this script.
# NOTE(review): WORKDIR is not referenced anywhere in this file — presumably
# kept for locating files relative to the script; confirm before removing.
WORKDIR = Path(__file__).resolve().parent


@click.command()
@click.option(
    "--account",
    type=str,
    required=True,
    help="Your account ID",
)
@click.option(
    "--name",
    type=str,
    required=True,
    help="The name of your detector model. For display and querying purposes only.",
)
@click.option(
    "--image",
    type=str,
    required=True,
    help="The Docker image to upload. Must exist in your local Docker daemon.",
)
@click.option(
    "--endpoint",
    type=str,
    default="predict",
    help="The endpoint to call on your model to make a prediction.",
)
def main(account: str, name: str, image: str, endpoint: str) -> None:
    """Upload a local Docker image to the Dyff platform and register it as a
    runnable InferenceService.

    The IDs of the created resources are echoed so they can be pasted into
    ``artifact_id`` / ``service_id`` below to skip the corresponding step on
    a re-run.
    """
    dyffapi = Client()

    # You can set these to a known ID to skip that step
    artifact_id = None
    service_id = None

    artifact = _get_or_create_artifact(dyffapi, account, image, artifact_id)
    _get_or_create_service(dyffapi, account, name, endpoint, artifact, service_id)


def _get_or_create_artifact(dyffapi, account, image, artifact_id):
    """Push ``image`` as a new Artifact, or fetch an existing one by ID.

    Returns the Artifact resource; raises ``click.ClickException`` if a given
    ``artifact_id`` does not resolve to an existing Artifact.
    """
    if artifact_id is not None:
        artifact = dyffapi.artifacts.get(artifact_id)
        if artifact is None:
            # Raise instead of `assert`: asserts are stripped under `python -O`.
            raise click.ClickException(f"artifact {artifact_id} not found")
        return artifact

    # Create an Artifact resource
    artifact = dyffapi.artifacts.create(ArtifactCreateRequest(account=account))
    click.echo(f'artifact_id = "{artifact.id}"')
    # Brief pauses let the platform finish processing between steps.
    time.sleep(5)
    # Push the image from the local Docker daemon
    dyffapi.artifacts.push(artifact, source=f"docker-daemon:{image}")
    time.sleep(5)
    # Indicate that we're done pushing
    dyffapi.artifacts.finalize(artifact.id)
    return artifact


def _get_or_create_service(dyffapi, account, name, endpoint, artifact, service_id):
    """Create an InferenceService that runs ``artifact`` as a container, or
    fetch an existing one by ID.

    Returns the InferenceService resource; raises ``click.ClickException`` if
    a given ``service_id`` does not resolve to an existing service.
    """
    if service_id is not None:
        service = dyffapi.inferenceservices.get(service_id)
        if service is None:
            raise click.ClickException(f"service {service_id} not found")
        return service

    # Don't change this
    service_request = InferenceServiceCreateRequest(
        account=account,
        name=name,
        model=None,
        runner=InferenceServiceRunner(
            kind=InferenceServiceRunnerKind.CONTAINER,
            imageRef=EntityIdentifier.of(artifact),
            resources=ModelResources(),
        ),
        interface=InferenceInterface(
            endpoint=endpoint,
            outputSchema=DataSchema.make_output_schema(PredictionResponse),
        ),
    )
    service = dyffapi.inferenceservices.create(service_request)
    click.echo(f'service_id = "{service.id}"')
    return service


# Script entry point: run the Click command only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()