File size: 7,151 Bytes
476455e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Data wrangler helpers for data ingestion."""
from __future__ import absolute_import

from typing import Dict, Optional
from uuid import uuid4

from sagemaker.dataset_definition.inputs import (
    RedshiftDatasetDefinition,
    AthenaDatasetDefinition,
)


def generate_data_ingestion_flow_from_s3_input(
    input_name: str,
    s3_uri: str,
    s3_content_type: str = "csv",
    s3_has_header: bool = False,
    operator_version: str = "0.1",
    schema: Optional[Dict] = None,
):
    """Generate the data ingestion only flow from s3 input

    Args:
        input_name (str): the name of the input to flow source node
        s3_uri (str): uri for the s3 input to flow source node
        s3_content_type (str): s3 input content type, defaults to "csv"
        s3_has_header (bool): flag indicating whether the input has a header row
        operator_version (str): the version of the operator
        schema (typing.Optional[typing.Dict]): the schema for the data to be
            ingested; when None an empty schema is used

    Returns:
        dict (typing.Dict): A flow only conduct data ingestion with 1-1 mapping
        output_name (str): The output name used to configure
        `sagemaker.processing.FeatureStoreOutput`
    """
    # Source node: declares the S3 object(s) to ingest. Each node gets a
    # fresh uuid so repeated calls produce independent flows.
    source_node = {
        "node_id": str(uuid4()),
        "type": "SOURCE",
        "inputs": [],
        "outputs": [{"name": "default"}],
        "operator": f"sagemaker.s3_source_{operator_version}",
        "parameters": {
            "dataset_definition": {
                "datasetSourceType": "S3",
                "name": input_name,
                "s3ExecutionContext": {
                    "s3Uri": s3_uri,
                    "s3ContentType": s3_content_type,
                    "s3HasHeader": s3_has_header,
                },
            }
        },
    }

    # Transform node that infers/casts column types; callers reference its
    # "default" output when configuring processing outputs.
    output_node = _get_output_node(source_node["node_id"], operator_version, schema)

    flow = {
        "metadata": {"version": 1, "disable_limits": False},
        "nodes": [source_node, output_node],
    }

    return flow, f'{output_node["node_id"]}.default'


def generate_data_ingestion_flow_from_athena_dataset_definition(
    input_name: str,
    athena_dataset_definition: AthenaDatasetDefinition,
    operator_version: str = "0.1",
    schema: Optional[Dict] = None,
):
    """Generate the data ingestion only flow from athena input

    Args:
        input_name (str): the name of the input to flow source node
        athena_dataset_definition (AthenaDatasetDefinition): athena input to flow source node
        operator_version (str): the version of the operator
        schema (typing.Optional[typing.Dict]): the schema for the data to be
            ingested; when None an empty schema is used

    Returns:
        dict (typing.Dict): A flow only conduct data ingestion with 1-1 mapping
        output_name (str): The output name used to configure
        `sagemaker.processing.FeatureStoreOutput`
    """
    # Source node: flattens the AthenaDatasetDefinition into the flow's
    # camelCase dataset_definition parameters.
    source_node = {
        "node_id": str(uuid4()),
        "type": "SOURCE",
        "inputs": [],
        "outputs": [{"name": "default"}],
        "operator": f"sagemaker.athena_source_{operator_version}",
        "parameters": {
            "dataset_definition": {
                "datasetSourceType": "Athena",
                "name": input_name,
                "catalogName": athena_dataset_definition.catalog,
                "databaseName": athena_dataset_definition.database,
                "queryString": athena_dataset_definition.query_string,
                "s3OutputLocation": athena_dataset_definition.output_s3_uri,
                "outputFormat": athena_dataset_definition.output_format,
            }
        },
    }

    # Transform node that infers/casts column types; callers reference its
    # "default" output when configuring processing outputs.
    output_node = _get_output_node(source_node["node_id"], operator_version, schema)

    flow = {
        "metadata": {"version": 1, "disable_limits": False},
        "nodes": [source_node, output_node],
    }

    return flow, f'{output_node["node_id"]}.default'


def generate_data_ingestion_flow_from_redshift_dataset_definition(
    input_name: str,
    redshift_dataset_definition: RedshiftDatasetDefinition,
    operator_version: str = "0.1",
    schema: Optional[Dict] = None,
):
    """Generate the data ingestion only flow from redshift input

    Args:
        input_name (str): the name of the input to flow source node
        redshift_dataset_definition (RedshiftDatasetDefinition): redshift input to flow source node
        operator_version (str): the version of the operator
        schema (typing.Optional[typing.Dict]): the schema for the data to be
            ingested; when None an empty schema is used

    Returns:
        dict (typing.Dict): A flow only conduct data ingestion with 1-1 mapping
        output_name (str): The output name used to configure
        `sagemaker.processing.FeatureStoreOutput`
    """
    # Source node: flattens the RedshiftDatasetDefinition into the flow's
    # camelCase dataset_definition parameters.
    source_node = {
        "node_id": str(uuid4()),
        "type": "SOURCE",
        "inputs": [],
        "outputs": [{"name": "default"}],
        "operator": f"sagemaker.redshift_source_{operator_version}",
        "parameters": {
            "dataset_definition": {
                "datasetSourceType": "Redshift",
                "name": input_name,
                "clusterIdentifier": redshift_dataset_definition.cluster_id,
                "database": redshift_dataset_definition.database,
                "dbUser": redshift_dataset_definition.db_user,
                "queryString": redshift_dataset_definition.query_string,
                "unloadIamRole": redshift_dataset_definition.cluster_role_arn,
                "s3OutputLocation": redshift_dataset_definition.output_s3_uri,
                "outputFormat": redshift_dataset_definition.output_format,
            }
        },
    }

    # Transform node that infers/casts column types; callers reference its
    # "default" output when configuring processing outputs.
    output_node = _get_output_node(source_node["node_id"], operator_version, schema)

    flow = {
        "metadata": {"version": 1, "disable_limits": False},
        "nodes": [source_node, output_node],
    }

    return flow, f'{output_node["node_id"]}.default'


def _get_output_node(source_node_id: str, operator_version: str, schema: Dict):
    """A helper function to generate output node, for internal use only

    Args:
        source_node_id (str): source node id
        operator_version: (str): the version of the operator
        schema: (typing.Dict): the schema for the data to be ingested
    Returns:
        dict (typing.Dict): output node
    """
    return {
        "node_id": str(uuid4()),
        "type": "TRANSFORM",
        "operator": f"sagemaker.spark.infer_and_cast_type_{operator_version}",
        "trained_parameters": {} if schema is None else schema,
        "parameters": {},
        "inputs": [{"name": "default", "node_id": source_node_id, "output_name": "default"}],
        "outputs": [{"name": "default"}],
    }