code stringclasses 16 values | apis sequence | extract_api stringclasses 16 values |
|---|---|---|
import sys
import os
# We have to append the user's current path to sys.path so the modules can be resolved
# Otherwise we will get a "no module named feathr" error
sys.path.append(os.path.abspath(os.getcwd()))
from feathrcli.cli import init
from click.testing import CliRunner
from feathr.client import FeathrClient
def initialize_data():
    """Push feature test data to the Azure online store.

    WARNING: any existing test data will be overridden.
    """
    print('Creating test data. This might override existing test data.')
    # Kick off a materialization job that writes the configured features
    # to the online store; the job runs asynchronously on the cluster.
    feathr_client = FeathrClient()
    feathr_client._materialize_features_with_config('feature_gen_conf/feature_gen.conf')
    print('Test data push job has started. It will take some time to complete.')
# Run `feathr init` inside an isolated temporary filesystem so the generated
# workspace does not pollute the caller's working directory.
runner = CliRunner()
with runner.isolated_filesystem():
    runner.invoke(init, [])
    # Need to be in the workspace so it won't complain
    os.chdir('feathr_user_workspace')
    initialize_data()
| [
"feathr.client.FeathrClient"
] | [((775, 786), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (784, 786), False, 'from click.testing import CliRunner\n'), ((544, 558), 'feathr.client.FeathrClient', 'FeathrClient', ([], {}), '()\n', (556, 558), False, 'from feathr.client import FeathrClient\n'), ((909, 942), 'os.chdir', 'os.chdir', (['"""feathr_user_workspace"""'], {}), "('feathr_user_workspace')\n", (917, 942), False, 'import os\n'), ((192, 203), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (201, 203), False, 'import os\n')] |
import os
from datetime import datetime, timedelta
from pathlib import Path
from feathr.client import FeathrClient
from feathr import ValueType
from feathr.job_utils import get_result_df
from feathr import (BackfillTime, MaterializationSettings)
from feathr import FeatureQuery
from feathr import ObservationSettings
from feathr import RedisSink
from feathr import TypedKey
from test_fixture import snowflake_test_setup
def test_feathr_online_store_agg_features():
    """
    Test FeathrClient() get_online_features and batch_get can get feature data correctly.
    """
    workspace = Path(__file__).parent.resolve() / "test_user_workspace"
    client = snowflake_test_setup(os.path.join(workspace, "feathr_config.yaml"))
    # Materialize a single day (2020-05-20) of the snowflake demo features into Redis.
    day = datetime(2020, 5, 20)
    backfill = BackfillTime(start=day, end=day, step=timedelta(days=1))
    sink = RedisSink(table_name="snowflakeSampleDemoFeature")
    feature_names = ['f_snowflake_call_center_division_name',
                     'f_snowflake_call_center_zipcode']
    settings = MaterializationSettings(name="snowflakeSampleDemoFeature",
                                       sinks=[sink],
                                       feature_names=feature_names,
                                       backfill_time=backfill)
    client.materialize_features(settings)
    # just assume the job is successful without validating the actual result in Redis. Might need to consolidate
    # this part with the test_feathr_online_store test case
    client.wait_job_to_finish(timeout_sec=600)
    # Single-key lookup returns one value per requested feature.
    single = client.get_online_features('snowflakeSampleDemoFeature', '1', feature_names)
    assert len(single) == 2
    assert single[0] is not None
    assert single[1] is not None
    # Batch lookup returns a dict keyed by the requested entity ids.
    batch = client.multi_get_online_features('snowflakeSampleDemoFeature',
                                            ['1', '2'],
                                            feature_names)
    for entity_id in ('1', '2'):
        assert batch[entity_id][0] is not None
        assert batch[entity_id][1] is not None
def test_feathr_get_offline_features():
    """
    Test get_offline_features() can get feature data from Snowflake source correctly.
    """
    workspace = Path(__file__).parent.resolve() / "test_user_workspace"
    client = snowflake_test_setup(os.path.join(workspace, "feathr_config.yaml"))
    # Entity key of the snowflake CALL_CENTER sample table.
    call_center_key = TypedKey(key_column="CC_CALL_CENTER_SK",
                              key_column_type=ValueType.INT32,
                              description="call center sk",
                              full_name="snowflake.CC_CALL_CENTER_SK")
    query = FeatureQuery(
        feature_list=['f_snowflake_call_center_division_name', 'f_snowflake_call_center_zipcode'],
        key=call_center_key)
    observation = ObservationSettings(
        observation_path='jdbc:snowflake://dqllago-ol19457.snowflakecomputing.com/?user=feathrintegration&sfWarehouse=COMPUTE_WH&dbtable=CALL_CENTER&sfDatabase=SNOWFLAKE_SAMPLE_DATA&sfSchema=TPCDS_SF10TCL')
    # Unique-ish output path per run so concurrent CI runs don't collide.
    now = datetime.now()
    output_path = ('abfss://feathrazuretest3fs@feathrazuretest3storage.dfs.core.windows.net/demo_data/snowflake_output'
                   + f'_{now.minute}_{now.second}.avro')
    client.get_offline_features(observation_settings=observation,
                                feature_query=query,
                                output_path=output_path)
    # assuming the job can successfully run; otherwise it will throw exception
    client.wait_job_to_finish(timeout_sec=900)
| [
"feathr.TypedKey",
"feathr.MaterializationSettings",
"feathr.RedisSink",
"feathr.ObservationSettings",
"feathr.FeatureQuery"
] | [((875, 925), 'feathr.RedisSink', 'RedisSink', ([], {'table_name': '"""snowflakeSampleDemoFeature"""'}), "(table_name='snowflakeSampleDemoFeature')\n", (884, 925), False, 'from feathr import RedisSink\n'), ((941, 1148), 'feathr.MaterializationSettings', 'MaterializationSettings', ([], {'name': '"""snowflakeSampleDemoFeature"""', 'sinks': '[redisSink]', 'feature_names': "['f_snowflake_call_center_division_name', 'f_snowflake_call_center_zipcode']", 'backfill_time': 'backfill_time'}), "(name='snowflakeSampleDemoFeature', sinks=[redisSink\n ], feature_names=['f_snowflake_call_center_division_name',\n 'f_snowflake_call_center_zipcode'], backfill_time=backfill_time)\n", (964, 1148), False, 'from feathr import BackfillTime, MaterializationSettings\n'), ((2515, 2663), 'feathr.TypedKey', 'TypedKey', ([], {'key_column': '"""CC_CALL_CENTER_SK"""', 'key_column_type': 'ValueType.INT32', 'description': '"""call center sk"""', 'full_name': '"""snowflake.CC_CALL_CENTER_SK"""'}), "(key_column='CC_CALL_CENTER_SK', key_column_type=ValueType.INT32,\n description='call center sk', full_name='snowflake.CC_CALL_CENTER_SK')\n", (2523, 2663), False, 'from feathr import TypedKey\n'), ((2759, 2882), 'feathr.FeatureQuery', 'FeatureQuery', ([], {'feature_list': "['f_snowflake_call_center_division_name', 'f_snowflake_call_center_zipcode']", 'key': 'call_sk_id'}), "(feature_list=['f_snowflake_call_center_division_name',\n 'f_snowflake_call_center_zipcode'], key=call_sk_id)\n", (2771, 2882), False, 'from feathr import FeatureQuery\n'), ((2911, 3139), 'feathr.ObservationSettings', 'ObservationSettings', ([], {'observation_path': '"""jdbc:snowflake://dqllago-ol19457.snowflakecomputing.com/?user=feathrintegration&sfWarehouse=COMPUTE_WH&dbtable=CALL_CENTER&sfDatabase=SNOWFLAKE_SAMPLE_DATA&sfSchema=TPCDS_SF10TCL"""'}), "(observation_path=\n 
'jdbc:snowflake://dqllago-ol19457.snowflakecomputing.com/?user=feathrintegration&sfWarehouse=COMPUTE_WH&dbtable=CALL_CENTER&sfDatabase=SNOWFLAKE_SAMPLE_DATA&sfSchema=TPCDS_SF10TCL'\n )\n", (2930, 3139), False, 'from feathr import ObservationSettings\n'), ((3177, 3191), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3189, 3191), False, 'from datetime import datetime, timedelta\n'), ((689, 743), 'os.path.join', 'os.path.join', (['test_workspace_dir', '"""feathr_config.yaml"""'], {}), "(test_workspace_dir, 'feathr_config.yaml')\n", (701, 743), False, 'import os\n'), ((2441, 2495), 'os.path.join', 'os.path.join', (['test_workspace_dir', '"""feathr_config.yaml"""'], {}), "(test_workspace_dir, 'feathr_config.yaml')\n", (2453, 2495), False, 'import os\n'), ((785, 806), 'datetime.datetime', 'datetime', (['(2020)', '(5)', '(20)'], {}), '(2020, 5, 20)\n', (793, 806), False, 'from datetime import datetime, timedelta\n'), ((812, 833), 'datetime.datetime', 'datetime', (['(2020)', '(5)', '(20)'], {}), '(2020, 5, 20)\n', (820, 833), False, 'from datetime import datetime, timedelta\n'), ((840, 857), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (849, 857), False, 'from datetime import datetime, timedelta\n'), ((599, 613), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (603, 613), False, 'from pathlib import Path\n'), ((2351, 2365), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (2355, 2365), False, 'from pathlib import Path\n')] |
from feathr import Feature
from feathr import ExpressionTransformation
from feathr import DerivedFeature
from feathr import BOOLEAN, FLOAT, FLOAT_VECTOR, ValueType
from feathr import TypedKey
import pytest
def assert_config_equals(one, another):
    """Assert that two feature-config strings are equal, ignoring all whitespace.

    Generated HOCON configs may legitimately differ in spaces, newlines, tabs
    and carriage returns, so both sides are stripped of those characters
    before comparison.

    Args:
        one: first config string.
        another: second config string.

    Raises:
        AssertionError: if the whitespace-stripped strings differ.
    """
    # Build the (identical) translation table once instead of twice per call.
    strip_whitespace = str.maketrans('', '', ' \n\t\r')
    assert one.translate(strip_whitespace) == another.translate(strip_whitespace)
def test_single_key_derived_feature_to_config():
    """Single key derived feature config generation should work"""
    key = TypedKey(full_name="mockdata.user",
                   key_column="user_id",
                   key_column_type=ValueType.INT32,
                   description="An user identifier")
    embedding = Feature(name="user_embedding", feature_type=FLOAT_VECTOR, key=key)
    # Derive a feature from the single-key anchored embedding; the derived
    # feature shares the same key as its input.
    derived = DerivedFeature(name="user_embemdding_derived",
                             feature_type=FLOAT,
                             key=key,
                             input_features=embedding,
                             transform="if_else(user_embedding, user_embedding, [])")
    # Expected HOCON; assert_config_equals ignores all whitespace.
    expected_config = """
        user_embemdding_derived: {
            key: [user_id]
            inputs: {
                user_embedding: {key: [user_id], feature: user_embedding}
            }
            definition: "if_else(user_embedding, user_embedding, [])"
            type: {
                type: TENSOR
                tensorCategory: DENSE
                dimensionType: []
                valType: FLOAT
            }
        }"""
    assert_config_equals(derived.to_feature_config(), expected_config)
def test_multikey_derived_feature_to_config():
    """Multikey derived feature config generation should work"""
    # Two independent single-column keys: one identifying users, one items.
    user_key = TypedKey(full_name="mockdata.user", key_column="user_id", key_column_type=ValueType.INT32, description="An user identifier")
    item_key = TypedKey(full_name="mockdata.item", key_column="item_id", key_column_type=ValueType.INT32, description="An item identifier")
    user_embedding = Feature(name="user_embedding", feature_type=FLOAT_VECTOR, key=user_key)
    item_embedding = Feature(name="item_embedding", feature_type=FLOAT_VECTOR, key=item_key)
    # A derived feature whose key list is the concatenation of both input keys.
    user_item_similarity = DerivedFeature(name="user_item_similarity",
                                          feature_type=FLOAT,
                                          key=[user_key, item_key],
                                          input_features=[user_embedding, item_embedding],
                                          transform="similarity(user_embedding, item_embedding)")
    # Expected HOCON output; assert_config_equals strips all whitespace, so the
    # missing space after the comma in "definition" below is deliberate/harmless.
    derived_feature_config = """
        user_item_similarity: {
            key: [user_id, item_id]
            inputs: {
                user_embedding: {key: [user_id], feature: user_embedding}
                item_embedding: {key: [item_id], feature: item_embedding}
            }
            definition: "similarity(user_embedding,item_embedding)"
            type: {
                type: TENSOR
                tensorCategory: DENSE
                dimensionType: []
                valType: FLOAT
            }
        }"""
    assert_config_equals(user_item_similarity.to_feature_config(), derived_feature_config)
def test_derived_feature_to_config_with_alias():
    """Derived-feature config generation with key aliases should work."""
    # More complicated use case: the same user key feature is referenced
    # twice under the aliases "viewer" and "viewee".
    key = TypedKey(full_name="mockdata.user",
                   key_column="user_id",
                   key_column_type=ValueType.INT32,
                   description="An user identifier")
    embedding = Feature(name="user_embedding", key=key, feature_type=FLOAT_VECTOR)
    distance = DerivedFeature(name="viewer_viewee_distance",
                   key=[key.as_key("viewer"), key.as_key("viewee")],
                   feature_type=FLOAT,
                   input_features=[embedding.with_key("viewer").as_feature("viewer_embedding"),
                                   embedding.with_key("viewee").as_feature("viewee_embedding")],
                   transform="distance(viewer_embedding, viewee_embedding)")
    # Expected HOCON; assert_config_equals ignores all whitespace.
    expected = """
        viewer_viewee_distance: {
            key: [viewer, viewee]
            inputs: {
                viewer_embedding: {key: [viewer], feature: user_embedding}
                viewee_embedding: {key: [viewee], feature: user_embedding}
            }
            definition: "distance(viewer_embedding, viewee_embedding)"
            type: {
                type: TENSOR
                tensorCategory: DENSE
                dimensionType: []
                valType: FLOAT
            }
        }"""
    assert_config_equals(distance.to_feature_config(), expected)
def test_multi_key_derived_feature_to_config_with_alias():
    # References the same relation feature key alias with different alias
    # Note that in this case, it is possible that distance(a, b) != distance(b,a),
    # so the alias order in each with_key(...) call below is significant.
    user_key = TypedKey(full_name="mockdata.user", key_column="user_id", key_column_type=ValueType.INT32, description="An user identifier")
    user_embedding = Feature(name="user_embedding", key=user_key, feature_type=FLOAT_VECTOR)
    # First-level derived feature: distance between two aliased copies of the
    # same user embedding.
    viewer_viewee_distance = DerivedFeature(name="viewer_viewee_distance",
                        key=[user_key.as_key("viewer"), user_key.as_key("viewee")],
                        feature_type=FLOAT,
                        input_features=[user_embedding.with_key("viewer").as_feature("viewer_embedding"),
                                        user_embedding.with_key("viewee").as_feature("viewee_embedding")],
                        transform="distance(viewer_embedding, viewee_embedding)")
    # Second-level derived feature: consumes the first one twice, with the
    # alias order swapped in the second reference.
    viewee_viewer_combined = DerivedFeature(name = "viewee_viewer_combined_distance",
                        key=[user_key.as_key("viewer"), user_key.as_key("viewee")],
                        feature_type=FLOAT,
                        input_features=[viewer_viewee_distance.with_key(["viewer", "viewee"])
                                            .as_feature("viewer_viewee_distance"),
                                        viewer_viewee_distance.with_key(["viewee", "viewer"])
                                            .as_feature("viewee_viewer_distance"),],
                        transform=ExpressionTransformation("viewer_viewee_distance + viewee_viewer_distance"))
    # Note that unlike key features, a relation feature does not need a feature anchor.
    expected_feature_config = """
        viewee_viewer_combined_distance: {
            key: [viewer, viewee]
            inputs: {
                viewer_viewee_distance: {key: [viewer, viewee], feature: viewer_viewee_distance}
                viewee_viewer_distance: {key: [viewee, viewer], feature: viewer_viewee_distance}
            }
            definition: "viewer_viewee_distance + viewee_viewer_distance"
            type: {
                type: TENSOR
                tensorCategory: DENSE
                dimensionType: []
                valType: FLOAT
            }
        }"""
    assert_config_equals(viewee_viewer_combined.to_feature_config(), expected_feature_config)
def test_derived_feature_on_multikey_anchored_feature_to_config():
    """Multikey derived feature config generation should work"""
    # The anchored feature itself has a composite (two-column) key.
    user_key = TypedKey(full_name="mockdata.user", key_column="user_id", key_column_type=ValueType.INT32, description="First part of an user identifier")
    user_key2 = TypedKey(full_name="mockdata.user2", key_column="user_id2", key_column_type=ValueType.INT32, description="Second part of an user identifier")
    user_embedding = Feature(name="user_embedding", feature_type=FLOAT_VECTOR, key=[user_key, user_key2])
    # A derived feature that aliases both parts of the composite key
    # ("viewer"/"viewee") and passes the aliases through with_key.
    user_item_derived = DerivedFeature(name="user_item_similarity",
                              feature_type=FLOAT,
                              key=[user_key.as_key("viewer"), user_key2.as_key("viewee")],
                              input_features=user_embedding.with_key(["viewer", "viewee"]),
                              transform="if_else(user_embedding, user_embedding, [])")
    # Expected HOCON; assert_config_equals ignores all whitespace.
    derived_feature_config = """
        user_item_similarity: {
            key: [viewer, viewee]
            inputs: {
                user_embedding: {key: [viewer, viewee], feature: user_embedding}
            }
            definition: "if_else(user_embedding, user_embedding, [])"
            type: {
                type: TENSOR
                tensorCategory: DENSE
                dimensionType: []
                valType: FLOAT
            }
        }"""
    assert_config_equals(user_item_derived.to_feature_config(), derived_feature_config)
def test_multi_key_derived_feature_to_config_with_wrong_alias():
    """Referencing a key alias the inputs never use should raise."""
    # "non_exist_alias" is declared as a derived-feature key but no input
    # feature references it, so DerivedFeature must reject the definition.
    key = TypedKey(full_name="mockdata.user",
                   key_column="user_id",
                   key_column_type=ValueType.INT32,
                   description="An user identifier")
    embedding = Feature(name="user_embedding", key=key, feature_type=FLOAT_VECTOR)
    with pytest.raises(AssertionError):
        DerivedFeature(name="viewer_viewee_distance",
                       key=[key.as_key("non_exist_alias"), key.as_key("viewee")],
                       feature_type=FLOAT,
                       input_features=[embedding.with_key("viewer").as_feature("viewer_embedding"),
                                       embedding.with_key("viewee").as_feature("viewee_embedding")],
                       transform="distance(viewer_embedding, viewee_embedding)")
| [
"feathr.Feature",
"feathr.TypedKey",
"feathr.DerivedFeature",
"feathr.ExpressionTransformation"
] | [((493, 622), 'feathr.TypedKey', 'TypedKey', ([], {'full_name': '"""mockdata.user"""', 'key_column': '"""user_id"""', 'key_column_type': 'ValueType.INT32', 'description': '"""An user identifier"""'}), "(full_name='mockdata.user', key_column='user_id', key_column_type=\n ValueType.INT32, description='An user identifier')\n", (501, 622), False, 'from feathr import TypedKey\n'), ((639, 710), 'feathr.Feature', 'Feature', ([], {'name': '"""user_embedding"""', 'feature_type': 'FLOAT_VECTOR', 'key': 'user_key'}), "(name='user_embedding', feature_type=FLOAT_VECTOR, key=user_key)\n", (646, 710), False, 'from feathr import Feature\n'), ((758, 936), 'feathr.DerivedFeature', 'DerivedFeature', ([], {'name': '"""user_embemdding_derived"""', 'feature_type': 'FLOAT', 'key': 'user_key', 'input_features': 'user_embedding', 'transform': '"""if_else(user_embedding, user_embedding, [])"""'}), "(name='user_embemdding_derived', feature_type=FLOAT, key=\n user_key, input_features=user_embedding, transform=\n 'if_else(user_embedding, user_embedding, [])')\n", (772, 936), False, 'from feathr import DerivedFeature\n'), ((1704, 1833), 'feathr.TypedKey', 'TypedKey', ([], {'full_name': '"""mockdata.user"""', 'key_column': '"""user_id"""', 'key_column_type': 'ValueType.INT32', 'description': '"""An user identifier"""'}), "(full_name='mockdata.user', key_column='user_id', key_column_type=\n ValueType.INT32, description='An user identifier')\n", (1712, 1833), False, 'from feathr import TypedKey\n'), ((1844, 1973), 'feathr.TypedKey', 'TypedKey', ([], {'full_name': '"""mockdata.item"""', 'key_column': '"""item_id"""', 'key_column_type': 'ValueType.INT32', 'description': '"""An item identifier"""'}), "(full_name='mockdata.item', key_column='item_id', key_column_type=\n ValueType.INT32, description='An item identifier')\n", (1852, 1973), False, 'from feathr import TypedKey\n'), ((1991, 2062), 'feathr.Feature', 'Feature', ([], {'name': '"""user_embedding"""', 'feature_type': 'FLOAT_VECTOR', 'key': 
'user_key'}), "(name='user_embedding', feature_type=FLOAT_VECTOR, key=user_key)\n", (1998, 2062), False, 'from feathr import Feature\n'), ((2084, 2155), 'feathr.Feature', 'Feature', ([], {'name': '"""item_embedding"""', 'feature_type': 'FLOAT_VECTOR', 'key': 'item_key'}), "(name='item_embedding', feature_type=FLOAT_VECTOR, key=item_key)\n", (2091, 2155), False, 'from feathr import Feature\n'), ((2208, 2411), 'feathr.DerivedFeature', 'DerivedFeature', ([], {'name': '"""user_item_similarity"""', 'feature_type': 'FLOAT', 'key': '[user_key, item_key]', 'input_features': '[user_embedding, item_embedding]', 'transform': '"""similarity(user_embedding, item_embedding)"""'}), "(name='user_item_similarity', feature_type=FLOAT, key=[\n user_key, item_key], input_features=[user_embedding, item_embedding],\n transform='similarity(user_embedding, item_embedding)')\n", (2222, 2411), False, 'from feathr import DerivedFeature\n'), ((3321, 3450), 'feathr.TypedKey', 'TypedKey', ([], {'full_name': '"""mockdata.user"""', 'key_column': '"""user_id"""', 'key_column_type': 'ValueType.INT32', 'description': '"""An user identifier"""'}), "(full_name='mockdata.user', key_column='user_id', key_column_type=\n ValueType.INT32, description='An user identifier')\n", (3329, 3450), False, 'from feathr import TypedKey\n'), ((3467, 3538), 'feathr.Feature', 'Feature', ([], {'name': '"""user_embedding"""', 'key': 'user_key', 'feature_type': 'FLOAT_VECTOR'}), "(name='user_embedding', key=user_key, feature_type=FLOAT_VECTOR)\n", (3474, 3538), False, 'from feathr import Feature\n'), ((4846, 4975), 'feathr.TypedKey', 'TypedKey', ([], {'full_name': '"""mockdata.user"""', 'key_column': '"""user_id"""', 'key_column_type': 'ValueType.INT32', 'description': '"""An user identifier"""'}), "(full_name='mockdata.user', key_column='user_id', key_column_type=\n ValueType.INT32, description='An user identifier')\n", (4854, 4975), False, 'from feathr import TypedKey\n'), ((4992, 5063), 'feathr.Feature', 'Feature', ([], 
{'name': '"""user_embedding"""', 'key': 'user_key', 'feature_type': 'FLOAT_VECTOR'}), "(name='user_embedding', key=user_key, feature_type=FLOAT_VECTOR)\n", (4999, 5063), False, 'from feathr import Feature\n'), ((7258, 7401), 'feathr.TypedKey', 'TypedKey', ([], {'full_name': '"""mockdata.user"""', 'key_column': '"""user_id"""', 'key_column_type': 'ValueType.INT32', 'description': '"""First part of an user identifier"""'}), "(full_name='mockdata.user', key_column='user_id', key_column_type=\n ValueType.INT32, description='First part of an user identifier')\n", (7266, 7401), False, 'from feathr import TypedKey\n'), ((7413, 7559), 'feathr.TypedKey', 'TypedKey', ([], {'full_name': '"""mockdata.user2"""', 'key_column': '"""user_id2"""', 'key_column_type': 'ValueType.INT32', 'description': '"""Second part of an user identifier"""'}), "(full_name='mockdata.user2', key_column='user_id2', key_column_type\n =ValueType.INT32, description='Second part of an user identifier')\n", (7421, 7559), False, 'from feathr import TypedKey\n'), ((7577, 7665), 'feathr.Feature', 'Feature', ([], {'name': '"""user_embedding"""', 'feature_type': 'FLOAT_VECTOR', 'key': '[user_key, user_key2]'}), "(name='user_embedding', feature_type=FLOAT_VECTOR, key=[user_key,\n user_key2])\n", (7584, 7665), False, 'from feathr import Feature\n'), ((8799, 8928), 'feathr.TypedKey', 'TypedKey', ([], {'full_name': '"""mockdata.user"""', 'key_column': '"""user_id"""', 'key_column_type': 'ValueType.INT32', 'description': '"""An user identifier"""'}), "(full_name='mockdata.user', key_column='user_id', key_column_type=\n ValueType.INT32, description='An user identifier')\n", (8807, 8928), False, 'from feathr import TypedKey\n'), ((8945, 9016), 'feathr.Feature', 'Feature', ([], {'name': '"""user_embedding"""', 'key': 'user_key', 'feature_type': 'FLOAT_VECTOR'}), "(name='user_embedding', key=user_key, feature_type=FLOAT_VECTOR)\n", (8952, 9016), False, 'from feathr import Feature\n'), ((9027, 9056), 'pytest.raises', 
'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (9040, 9056), False, 'import pytest\n'), ((6312, 6387), 'feathr.ExpressionTransformation', 'ExpressionTransformation', (['"""viewer_viewee_distance + viewee_viewer_distance"""'], {}), "('viewer_viewee_distance + viewee_viewer_distance')\n", (6336, 6387), False, 'from feathr import ExpressionTransformation\n')] |
import glob
import os
import time
from datetime import datetime, timedelta
from pathlib import Path
import pytest
from click.testing import CliRunner
from feathr import (FeatureAnchor, FeatureQuery, ObservationSettings, TypedKey,
ValueType)
from feathr._feature_registry import _FeatureRegistry
from feathr.client import FeathrClient
from feathr.feature_derivations import DerivedFeature
from feathrcli.cli import init
from numpy import equal
from test_fixture import basic_test_setup, registry_test_setup
def test_feathr_register_features_e2e():
    """
    This test will register features, get all the registered features, then query a set of already registered features.
    """
    test_workspace_dir = Path(
        __file__).parent.resolve() / "test_user_workspace"
    client: FeathrClient = registry_test_setup(os.path.join(test_workspace_dir, "feathr_config.yaml"))
    # set output folder based on different runtime
    now = datetime.now()
    if client.spark_runtime == 'databricks':
        output_path = ''.join(['dbfs:/feathrazure_cijob','_', str(now.minute), '_', str(now.second), ".parquet"])
    else:
        # NOTE(review): this URL appears mangled by redaction ("<EMAIL>") —
        # verify the real storage-account path before relying on this branch.
        output_path = ''.join(['abfss://feathrazuretest3fs@fe<EMAIL>.<EMAIL>.core.windows.net/demo_data/output','_', str(now.minute), '_', str(now.second), ".parquet"])
    client.register_features()
    # Allow purview to process a bit
    time.sleep(5)
    # in CI test, the project name is set by the CI pipeline so we read it here
    all_features = client.list_registered_features(project_name=client.project_name)
    all_feature_names = [x['name'] for x in all_features]
    assert 'f_is_long_trip_distance' in all_feature_names # test regular ones
    assert 'f_trip_time_rounded' in all_feature_names # make sure derived features are there
    assert 'f_location_avg_fare' in all_feature_names # make sure aggregated features are there
    assert 'f_trip_time_rounded_plus' in all_feature_names # make sure derived features are there
    assert 'f_trip_time_distance' in all_feature_names # make sure derived features are there
    # Sync workspace from registry, will get all conf files back
    client.get_features_from_registry(client.project_name)
    # Query a subset of the just-registered features against sample data.
    feature_query = FeatureQuery(
        feature_list=["f_location_avg_fare", "f_trip_time_rounded", "f_is_long_trip_distance"],
        key=TypedKey(key_column="DOLocationID",key_column_type=ValueType.INT32))
    settings = ObservationSettings(
        observation_path="wasbs://<EMAIL>@azure<EMAIL>r<EMAIL>.blob.core.windows.net/sample_data/green_tripdata_2020-04_with_index.csv",
        event_timestamp_column="lpep_dropoff_datetime",
        timestamp_format="yyyy-MM-dd HH:mm:ss")
    client.get_offline_features(observation_settings=settings,
                                feature_query=feature_query,
                                output_path=output_path)
    client.wait_job_to_finish(timeout_sec=900)
def test_get_feature_from_registry():
    """search_input_anchor_features should resolve a derived feature's anchor
    inputs, including transitively through other derived features."""
    registry = _FeatureRegistry("mock_project","mock_purview","mock_delimeter")
    # A derived feature fed directly by two anchor features.
    derived_feature_with_multiple_inputs = {
        "guid": "derived_feature_with_multiple_input_anchors",
        "typeName": "feathr_derived_feature_v1",
        "attributes": {
            "input_derived_features": [],
            "input_anchor_features": [
                {
                    "guid": "input_anchorA",
                    "typeName": "feathr_anchor_feature_v1",
                },
                {
                    "guid": "input_anchorB",
                    "typeName": "feathr_anchor_feature_v1",
                }
            ]
        },
    }
    # A derived feature fed by the derived feature above plus one more anchor,
    # exercising the transitive (hierarchical) resolution path.
    hierarchical_derived_feature = {
        "guid": "hierarchical_derived_feature",
        "typeName": "feathr_derived_feature_v1",
        "attributes": {
            "input_derived_features": [
                {
                    "guid": "derived_feature_with_multiple_input_anchors",
                    "typeName": "feathr_derived_feature_v1",
                }
            ],
            "input_anchor_features": [
                {
                    "guid": "input_anchorC",
                    "typeName": "feathr_anchor_feature_v1",
                }
            ],
        }
    }
    anchors = [
        {
            "guid": "input_anchorA",
            "typeName": "feathr_anchor_feature_v1",
        },
        {
            "guid": "input_anchorC",
            "typeName": "feathr_anchor_feature_v1",
        },
        {
            "guid": "input_anchorB",
            "typeName": "feathr_anchor_feature_v1",
        }]
    def entity_array_to_dict(arr):
        # Index entities by guid, which is the lookup shape the registry expects.
        return {x['guid']:x for x in arr}
    # Direct case: only the two immediate anchors are returned.
    inputs = registry.search_input_anchor_features(['derived_feature_with_multiple_input_anchors'],entity_array_to_dict(anchors+[derived_feature_with_multiple_inputs]))
    assert len(inputs)==2
    assert "input_anchorA" in inputs and "input_anchorB" in inputs
    # Transitive case: anchors of the nested derived feature are included too.
    inputs = registry.search_input_anchor_features(['hierarchical_derived_feature'],entity_array_to_dict(anchors+[derived_feature_with_multiple_inputs,hierarchical_derived_feature]))
    assert len(inputs)==3
    assert "input_anchorA" in inputs and "input_anchorB" in inputs and "input_anchorC" in inputs
@pytest.mark.skip(reason="Add back get_features is not supported in feature registry for now and needs further discussion")
def test_feathr_get_features_from_registry():
    """
    Test FeathrClient() sync features and get all the conf files from registry
    """
    # Scaffold a fresh workspace in an isolated temp filesystem so nothing
    # leaks into the caller's working directory.
    runner = CliRunner()
    with runner.isolated_filesystem():
        result = runner.invoke(init, [])
        assert result.exit_code == 0
        assert os.path.isdir("./feathr_user_workspace")
        os.chdir('feathr_user_workspace')
        # Look for conf files, we shouldn't have any
        total_conf_files = glob.glob('*/*.conf', recursive=True)
        assert len(total_conf_files) == 0
        client = FeathrClient()
        # Sync workspace from registry, will get all conf files back
        client.get_features_from_registry("frame_getting_started")
        total_conf_files = glob.glob('*/*.conf', recursive=True)
        # we should have at least 3 conf files
        assert len(total_conf_files) == 3
| [
"feathr._feature_registry._FeatureRegistry",
"feathr.client.FeathrClient",
"feathr.TypedKey",
"feathr.ObservationSettings"
] | [((5374, 5506), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""Add back get_features is not supported in feature registry for now and needs further discussion"""'}), "(reason=\n 'Add back get_features is not supported in feature registry for now and needs further discussion'\n )\n", (5390, 5506), False, 'import pytest\n'), ((967, 981), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (979, 981), False, 'from datetime import datetime, timedelta\n'), ((1398, 1411), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1408, 1411), False, 'import time\n'), ((2455, 2706), 'feathr.ObservationSettings', 'ObservationSettings', ([], {'observation_path': '"""wasbs://<EMAIL>@azure<EMAIL>r<EMAIL>.blob.core.windows.net/sample_data/green_tripdata_2020-04_with_index.csv"""', 'event_timestamp_column': '"""lpep_dropoff_datetime"""', 'timestamp_format': '"""yyyy-MM-dd HH:mm:ss"""'}), "(observation_path=\n 'wasbs://<EMAIL>@azure<EMAIL>r<EMAIL>.blob.core.windows.net/sample_data/green_tripdata_2020-04_with_index.csv'\n , event_timestamp_column='lpep_dropoff_datetime', timestamp_format=\n 'yyyy-MM-dd HH:mm:ss')\n", (2474, 2706), False, 'from feathr import FeatureAnchor, FeatureQuery, ObservationSettings, TypedKey, ValueType\n'), ((3005, 3071), 'feathr._feature_registry._FeatureRegistry', '_FeatureRegistry', (['"""mock_project"""', '"""mock_purview"""', '"""mock_delimeter"""'], {}), "('mock_project', 'mock_purview', 'mock_delimeter')\n", (3021, 3071), False, 'from feathr._feature_registry import _FeatureRegistry\n'), ((5651, 5662), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (5660, 5662), False, 'from click.testing import CliRunner\n'), ((845, 899), 'os.path.join', 'os.path.join', (['test_workspace_dir', '"""feathr_config.yaml"""'], {}), "(test_workspace_dir, 'feathr_config.yaml')\n", (857, 899), False, 'import os\n'), ((5796, 5836), 'os.path.isdir', 'os.path.isdir', (['"""./feathr_user_workspace"""'], {}), "('./feathr_user_workspace')\n", (5809, 
5836), False, 'import os\n'), ((5845, 5878), 'os.chdir', 'os.chdir', (['"""feathr_user_workspace"""'], {}), "('feathr_user_workspace')\n", (5853, 5878), False, 'import os\n'), ((5960, 5997), 'glob.glob', 'glob.glob', (['"""*/*.conf"""'], {'recursive': '(True)'}), "('*/*.conf', recursive=True)\n", (5969, 5997), False, 'import glob\n'), ((6066, 6080), 'feathr.client.FeathrClient', 'FeathrClient', ([], {}), '()\n', (6078, 6080), False, 'from feathr.client import FeathrClient\n'), ((6245, 6282), 'glob.glob', 'glob.glob', (['"""*/*.conf"""'], {'recursive': '(True)'}), "('*/*.conf', recursive=True)\n", (6254, 6282), False, 'import glob\n'), ((2371, 2439), 'feathr.TypedKey', 'TypedKey', ([], {'key_column': '"""DOLocationID"""', 'key_column_type': 'ValueType.INT32'}), "(key_column='DOLocationID', key_column_type=ValueType.INT32)\n", (2379, 2439), False, 'from feathr import FeatureAnchor, FeatureQuery, ObservationSettings, TypedKey, ValueType\n'), ((733, 747), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (737, 747), False, 'from pathlib import Path\n')] |
from feathrcli.cli import init
from click.testing import CliRunner
from feathr.client import FeathrClient
import os
import glob
import pandavro as pdx
import pandas as pd
import tempfile
def test_feathr_online_store_databricks():
    """
    Test FeathrClient() online_get_features and batch_get can get data correctly.
    """
    cli = CliRunner()
    with cli.isolated_filesystem():
        # Scaffold a fresh workspace and run everything from inside it.
        cli.invoke(init, [])
        os.chdir('feathr_user_workspace')
        client = FeathrClient()
        client.materialize_features()
        # just assume the job is successful without validating the actual result in Redis. Might need to consolidate
        # this part with the test_feathr_online_store test case
        client.wait_job_to_finish(timeout_sec=900)
        # just assme there are values. We don't hard code the values for now for testing
        # the correctness of the feature generation should be garunteed by feathr runtime.
        # ID 239 and 265 are available in the `DOLocationID` column in this file:
        # https://s3.amazonaws.com/nyc-tlc/trip+data/green_tripdata_2020-04.csv
        # View more detials on this dataset: https://www1.nyc.gov/site/tlc/about/tlc-trip-record-data.page
        single = client.online_get_features('nycTaxiDemoFeature', '265', ['f_location_avg_fare', 'f_location_max_fare'])
        assert len(single) == 2
        assert single[0] is not None
        assert single[1] is not None
        batch = client.online_batch_get_features('nycTaxiDemoFeature',
                                                ['239', '265'],
                                                ['f_location_avg_fare', 'f_location_max_fare'])
        for location_id in ('239', '265'):
            assert batch[location_id][0] is not None
            assert batch[location_id][1] is not None
def test_get_offline_features_databricks():
"""
Test FeathrClient() to make sure offline features can be get successfully
"""
runner = CliRunner()
with runner.isolated_filesystem():
runner.invoke(init, [])
os.chdir('feathr_user_workspace')
client = FeathrClient()
returned_spark_job = client.join_offline_features()
res_url = client.wait_job_to_finish(timeout_sec=900)
# just assume the job is successful. if not, the test will fail
| [
"feathr.client.FeathrClient"
] | [((343, 354), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (352, 354), False, 'from click.testing import CliRunner\n'), ((1950, 1961), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (1959, 1961), False, 'from click.testing import CliRunner\n'), ((434, 467), 'os.chdir', 'os.chdir', (['"""feathr_user_workspace"""'], {}), "('feathr_user_workspace')\n", (442, 467), False, 'import os\n'), ((485, 499), 'feathr.client.FeathrClient', 'FeathrClient', ([], {}), '()\n', (497, 499), False, 'from feathr.client import FeathrClient\n'), ((2041, 2074), 'os.chdir', 'os.chdir', (['"""feathr_user_workspace"""'], {}), "('feathr_user_workspace')\n", (2049, 2074), False, 'import os\n'), ((2092, 2106), 'feathr.client.FeathrClient', 'FeathrClient', ([], {}), '()\n', (2104, 2106), False, 'from feathr.client import FeathrClient\n')] |
from feathrcli.cli import init
from click.testing import CliRunner
from feathr.client import FeathrClient
import os
def test_feathr_feature_register():
"""
Test FeathrClient() can register features correctly.
"""
runner = CliRunner()
with runner.isolated_filesystem():
runner.invoke(init, [])
os.chdir('feathr_user_workspace')
client = FeathrClient()
client.register_features()
| [
"feathr.client.FeathrClient"
] | [((240, 251), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (249, 251), False, 'from click.testing import CliRunner\n'), ((331, 364), 'os.chdir', 'os.chdir', (['"""feathr_user_workspace"""'], {}), "('feathr_user_workspace')\n", (339, 364), False, 'import os\n'), ((382, 396), 'feathr.client.FeathrClient', 'FeathrClient', ([], {}), '()\n', (394, 396), False, 'from feathr.client import FeathrClient\n')] |
from feathr import FeatureAnchor
from feathr.client import FeathrClient
from feathr import BOOLEAN, FLOAT, INT32, STRING, ValueType
from feathr import Feature
from feathr import DerivedFeature
from feathr import INPUT_CONTEXT, HdfsSource
from feathr import WindowAggTransformation
from feathr import TypedKey
def basic_test_setup(config_path: str):
client = FeathrClient(config_path=config_path)
batch_source = HdfsSource(name="nycTaxiBatchSource",
path="abfss://feathrazuretest3fs@fe<EMAIL>test3storage.dfs.core.windows.net/demo_data/green_tripdata_2020-04.csv",
event_timestamp_column="lpep_dropoff_datetime",
timestamp_format="yyyy-MM-dd HH:mm:ss")
f_trip_distance = Feature(name="f_trip_distance",
feature_type=FLOAT, transform="trip_distance")
f_trip_time_duration = Feature(name="f_trip_time_duration",
feature_type=INT32,
transform="time_duration(lpep_pickup_datetime, lpep_dropoff_datetime, 'minutes')")
features = [
f_trip_distance,
f_trip_time_duration,
Feature(name="f_is_long_trip_distance",
feature_type=BOOLEAN,
transform="cast_float(trip_distance)>30"),
Feature(name="f_day_of_week",
feature_type=INT32,
transform="dayofweek(lpep_dropoff_datetime)"),
]
request_anchor = FeatureAnchor(name="request_features",
source=INPUT_CONTEXT,
features=features)
f_trip_time_distance = DerivedFeature(name="f_trip_time_distance",
feature_type=FLOAT,
input_features=[
f_trip_distance, f_trip_time_duration],
transform="f_trip_distance * f_trip_time_duration")
f_trip_time_rounded = DerivedFeature(name="f_trip_time_rounded",
feature_type=INT32,
input_features=[f_trip_time_duration],
transform="f_trip_time_duration % 10")
location_id = TypedKey(key_column="DOLocationID",
key_column_type=ValueType.INT32,
description="location id in NYC",
full_name="nyc_taxi.location_id")
agg_features = [Feature(name="f_location_avg_fare",
key=location_id,
feature_type=FLOAT,
transform=WindowAggTransformation(agg_expr="cast_float(fare_amount)",
agg_func="AVG",
window="90d")),
Feature(name="f_location_max_fare",
key=location_id,
feature_type=FLOAT,
transform=WindowAggTransformation(agg_expr="cast_float(fare_amount)",
agg_func="MAX",
window="90d"))
]
agg_anchor = FeatureAnchor(name="aggregationFeatures",
source=batch_source,
features=agg_features)
client.build_features(anchor_list=[agg_anchor, request_anchor], derived_feature_list=[
f_trip_time_distance, f_trip_time_rounded])
return client
def snowflake_test_setup(config_path: str):
client = FeathrClient(config_path=config_path)
batch_source = HdfsSource(name="snowflakeSampleBatchSource",
path="jdbc:snowflake://dqllago-ol19457.snowflakecomputing.com/?user=feathrintegration&sfWarehouse=COMPUTE_WH&dbtable=CALL_CENTER&sfDatabase=SNOWFLAKE_SAMPLE_DATA&sfSchema=TPCDS_SF10TCL",
)
call_sk_id = TypedKey(key_column="CC_CALL_CENTER_SK",
key_column_type=ValueType.INT32,
description="call center sk",
full_name="snowflake.CC_CALL_CENTER_SK")
f_snowflake_call_center_division_name = Feature(name="f_snowflake_call_center_division_name",feature_type=STRING, transform="CC_DIVISION_NAME", key=call_sk_id)
f_snowflake_call_center_zipcode = Feature(name="f_snowflake_call_center_zipcode",feature_type=STRING, transform="CC_ZIP", key=call_sk_id)
features = [f_snowflake_call_center_division_name, f_snowflake_call_center_zipcode ]
snowflakeFeatures = FeatureAnchor(name="snowflakeFeatures",
source=batch_source,
features=features)
client.build_features(anchor_list=[snowflakeFeatures])
return client
| [
"feathr.WindowAggTransformation",
"feathr.Feature",
"feathr.TypedKey",
"feathr.client.FeathrClient",
"feathr.DerivedFeature",
"feathr.HdfsSource",
"feathr.FeatureAnchor"
] | [((365, 402), 'feathr.client.FeathrClient', 'FeathrClient', ([], {'config_path': 'config_path'}), '(config_path=config_path)\n', (377, 402), False, 'from feathr.client import FeathrClient\n'), ((422, 677), 'feathr.HdfsSource', 'HdfsSource', ([], {'name': '"""nycTaxiBatchSource"""', 'path': '"""abfss://feathrazuretest3fs@fe<EMAIL>test3storage.dfs.core.windows.net/demo_data/green_tripdata_2020-04.csv"""', 'event_timestamp_column': '"""lpep_dropoff_datetime"""', 'timestamp_format': '"""yyyy-MM-dd HH:mm:ss"""'}), "(name='nycTaxiBatchSource', path=\n 'abfss://feathrazuretest3fs@fe<EMAIL>test3storage.dfs.core.windows.net/demo_data/green_tripdata_2020-04.csv'\n , event_timestamp_column='lpep_dropoff_datetime', timestamp_format=\n 'yyyy-MM-dd HH:mm:ss')\n", (432, 677), False, 'from feathr import INPUT_CONTEXT, HdfsSource\n'), ((776, 854), 'feathr.Feature', 'Feature', ([], {'name': '"""f_trip_distance"""', 'feature_type': 'FLOAT', 'transform': '"""trip_distance"""'}), "(name='f_trip_distance', feature_type=FLOAT, transform='trip_distance')\n", (783, 854), False, 'from feathr import Feature\n'), ((912, 1056), 'feathr.Feature', 'Feature', ([], {'name': '"""f_trip_time_duration"""', 'feature_type': 'INT32', 'transform': '"""time_duration(lpep_pickup_datetime, lpep_dropoff_datetime, \'minutes\')"""'}), '(name=\'f_trip_time_duration\', feature_type=INT32, transform=\n "time_duration(lpep_pickup_datetime, lpep_dropoff_datetime, \'minutes\')")\n', (919, 1056), False, 'from feathr import Feature\n'), ((1506, 1585), 'feathr.FeatureAnchor', 'FeatureAnchor', ([], {'name': '"""request_features"""', 'source': 'INPUT_CONTEXT', 'features': 'features'}), "(name='request_features', source=INPUT_CONTEXT, features=features)\n", (1519, 1585), False, 'from feathr import FeatureAnchor\n'), ((1684, 1864), 'feathr.DerivedFeature', 'DerivedFeature', ([], {'name': '"""f_trip_time_distance"""', 'feature_type': 'FLOAT', 'input_features': '[f_trip_distance, f_trip_time_duration]', 'transform': 
'"""f_trip_distance * f_trip_time_duration"""'}), "(name='f_trip_time_distance', feature_type=FLOAT,\n input_features=[f_trip_distance, f_trip_time_duration], transform=\n 'f_trip_distance * f_trip_time_duration')\n", (1698, 1864), False, 'from feathr import DerivedFeature\n'), ((2056, 2205), 'feathr.DerivedFeature', 'DerivedFeature', ([], {'name': '"""f_trip_time_rounded"""', 'feature_type': 'INT32', 'input_features': '[f_trip_time_duration]', 'transform': '"""f_trip_time_duration % 10"""'}), "(name='f_trip_time_rounded', feature_type=INT32,\n input_features=[f_trip_time_duration], transform=\n 'f_trip_time_duration % 10')\n", (2070, 2205), False, 'from feathr import DerivedFeature\n'), ((2339, 2479), 'feathr.TypedKey', 'TypedKey', ([], {'key_column': '"""DOLocationID"""', 'key_column_type': 'ValueType.INT32', 'description': '"""location id in NYC"""', 'full_name': '"""nyc_taxi.location_id"""'}), "(key_column='DOLocationID', key_column_type=ValueType.INT32,\n description='location id in NYC', full_name='nyc_taxi.location_id')\n", (2347, 2479), False, 'from feathr import TypedKey\n'), ((3402, 3492), 'feathr.FeatureAnchor', 'FeatureAnchor', ([], {'name': '"""aggregationFeatures"""', 'source': 'batch_source', 'features': 'agg_features'}), "(name='aggregationFeatures', source=batch_source, features=\n agg_features)\n", (3415, 3492), False, 'from feathr import FeatureAnchor\n'), ((3775, 3812), 'feathr.client.FeathrClient', 'FeathrClient', ([], {'config_path': 'config_path'}), '(config_path=config_path)\n', (3787, 3812), False, 'from feathr.client import FeathrClient\n'), ((3832, 4074), 'feathr.HdfsSource', 'HdfsSource', ([], {'name': '"""snowflakeSampleBatchSource"""', 'path': '"""jdbc:snowflake://dqllago-ol19457.snowflakecomputing.com/?user=feathrintegration&sfWarehouse=COMPUTE_WH&dbtable=CALL_CENTER&sfDatabase=SNOWFLAKE_SAMPLE_DATA&sfSchema=TPCDS_SF10TCL"""'}), "(name='snowflakeSampleBatchSource', path=\n 
'jdbc:snowflake://dqllago-ol19457.snowflakecomputing.com/?user=feathrintegration&sfWarehouse=COMPUTE_WH&dbtable=CALL_CENTER&sfDatabase=SNOWFLAKE_SAMPLE_DATA&sfSchema=TPCDS_SF10TCL'\n )\n", (3842, 4074), False, 'from feathr import INPUT_CONTEXT, HdfsSource\n'), ((4144, 4292), 'feathr.TypedKey', 'TypedKey', ([], {'key_column': '"""CC_CALL_CENTER_SK"""', 'key_column_type': 'ValueType.INT32', 'description': '"""call center sk"""', 'full_name': '"""snowflake.CC_CALL_CENTER_SK"""'}), "(key_column='CC_CALL_CENTER_SK', key_column_type=ValueType.INT32,\n description='call center sk', full_name='snowflake.CC_CALL_CENTER_SK')\n", (4152, 4292), False, 'from feathr import TypedKey\n'), ((4412, 4536), 'feathr.Feature', 'Feature', ([], {'name': '"""f_snowflake_call_center_division_name"""', 'feature_type': 'STRING', 'transform': '"""CC_DIVISION_NAME"""', 'key': 'call_sk_id'}), "(name='f_snowflake_call_center_division_name', feature_type=STRING,\n transform='CC_DIVISION_NAME', key=call_sk_id)\n", (4419, 4536), False, 'from feathr import Feature\n'), ((4571, 4679), 'feathr.Feature', 'Feature', ([], {'name': '"""f_snowflake_call_center_zipcode"""', 'feature_type': 'STRING', 'transform': '"""CC_ZIP"""', 'key': 'call_sk_id'}), "(name='f_snowflake_call_center_zipcode', feature_type=STRING,\n transform='CC_ZIP', key=call_sk_id)\n", (4578, 4679), False, 'from feathr import Feature\n'), ((4793, 4872), 'feathr.FeatureAnchor', 'FeatureAnchor', ([], {'name': '"""snowflakeFeatures"""', 'source': 'batch_source', 'features': 'features'}), "(name='snowflakeFeatures', source=batch_source, features=features)\n", (4806, 4872), False, 'from feathr import FeatureAnchor\n'), ((1203, 1311), 'feathr.Feature', 'Feature', ([], {'name': '"""f_is_long_trip_distance"""', 'feature_type': 'BOOLEAN', 'transform': '"""cast_float(trip_distance)>30"""'}), "(name='f_is_long_trip_distance', feature_type=BOOLEAN, transform=\n 'cast_float(trip_distance)>30')\n", (1210, 1311), False, 'from feathr import Feature\n'), 
((1348, 1448), 'feathr.Feature', 'Feature', ([], {'name': '"""f_day_of_week"""', 'feature_type': 'INT32', 'transform': '"""dayofweek(lpep_dropoff_datetime)"""'}), "(name='f_day_of_week', feature_type=INT32, transform=\n 'dayofweek(lpep_dropoff_datetime)')\n", (1355, 1448), False, 'from feathr import Feature\n'), ((2744, 2837), 'feathr.WindowAggTransformation', 'WindowAggTransformation', ([], {'agg_expr': '"""cast_float(fare_amount)"""', 'agg_func': '"""AVG"""', 'window': '"""90d"""'}), "(agg_expr='cast_float(fare_amount)', agg_func='AVG',\n window='90d')\n", (2767, 2837), False, 'from feathr import WindowAggTransformation\n'), ((3147, 3240), 'feathr.WindowAggTransformation', 'WindowAggTransformation', ([], {'agg_expr': '"""cast_float(fare_amount)"""', 'agg_func': '"""MAX"""', 'window': '"""90d"""'}), "(agg_expr='cast_float(fare_amount)', agg_func='MAX',\n window='90d')\n", (3170, 3240), False, 'from feathr import WindowAggTransformation\n')] |
import sys
import os
# We have to append user's current path to sys path so the modules can be resolved
# Otherwise we will got "no module named feathr" error
sys.path.append(os.path.abspath(os.getcwd()))
from feathrcli.cli import init
from click.testing import CliRunner
from feathr.client import FeathrClient
def clean_data():
"""
Remove the test data(feature table: nycTaxiDemoFeature) in Azure.
"""
client = FeathrClient()
table_name = 'nycTaxiDemoFeature'
client._clean_test_data(table_name)
print('Redis table cleaned: ' + table_name)
runner = CliRunner()
with runner.isolated_filesystem():
runner.invoke(init, [])
# Need to be in the workspace so it won't complain
os.chdir('feathr_user_workspace')
clean_data()
| [
"feathr.client.FeathrClient"
] | [((584, 595), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (593, 595), False, 'from click.testing import CliRunner\n'), ((432, 446), 'feathr.client.FeathrClient', 'FeathrClient', ([], {}), '()\n', (444, 446), False, 'from feathr.client import FeathrClient\n'), ((718, 751), 'os.chdir', 'os.chdir', (['"""feathr_user_workspace"""'], {}), "('feathr_user_workspace')\n", (726, 751), False, 'import os\n'), ((192, 203), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (201, 203), False, 'import os\n')] |
import os
from datetime import datetime
from pathlib import Path
from unittest import result
from click.testing import CliRunner
from feathr import (BOOLEAN, FLOAT, INT32, FeatureQuery, ObservationSettings,
SparkExecutionConfiguration, TypedKey, ValueType)
from feathr.job_utils import get_result_df
from test_fixture import basic_test_setup
from feathr.constants import OUTPUT_FORMAT
# test parquet file read/write without an extension name
def test_feathr_get_offline_features_with_parquet():
"""
Test if the program can read and write parquet files
"""
test_workspace_dir = Path(
__file__).parent.resolve() / "test_user_workspace"
client = basic_test_setup(os.path.join(test_workspace_dir, "feathr_config.yaml"))
location_id = TypedKey(key_column="DOLocationID",
key_column_type=ValueType.INT32)
feature_query = FeatureQuery(
feature_list=["f_location_avg_fare"], key=location_id)
settings = ObservationSettings(
observation_path="wasbs://public@azurefeathrstorage.blob.core.windows.net/sample_data/green_tripdata_2020-04",
event_timestamp_column="lpep_dropoff_datetime",
timestamp_format="yyyy-MM-dd HH:mm:ss")
now = datetime.now()
# set output folder based on different runtime
if client.spark_runtime == 'databricks':
output_path = ''.join(['dbfs:/feathrazure_cijob','_', str(now.minute), '_', str(now.second), ".parquet"])
else:
output_path = ''.join(['abfss://feathrazuretest3fs@feathrazuretest3storage.dfs.core.windows.net/demo_data/output','_', str(now.minute), '_', str(now.second), ".parquet"])
client.get_offline_features(observation_settings=settings,
feature_query=feature_query,
output_path=output_path,
execution_configuratons=SparkExecutionConfiguration({"spark.feathr.inputFormat": "parquet", "spark.feathr.outputFormat": "parquet"})
)
# assuming the job can successfully run; otherwise it will throw exception
client.wait_job_to_finish(timeout_sec=900)
# download result and just assert the returned result is not empty
res_df = get_result_df(client)
assert res_df.shape[0] > 0
# test delta lake read/write without an extension name
def test_feathr_get_offline_features_with_delta_lake():
"""
Test if the program can read and write delta lake
"""
test_workspace_dir = Path(
__file__).parent.resolve() / "test_user_workspace"
client = basic_test_setup(os.path.join(test_workspace_dir, "feathr_config.yaml"))
location_id = TypedKey(key_column="DOLocationID",
key_column_type=ValueType.INT32)
feature_query = FeatureQuery(
feature_list=["f_location_avg_fare"], key=location_id)
settings = ObservationSettings(
observation_path="wasbs://public@azurefeathrstorage.blob.core.windows.net/sample_data/feathr_delta_table",
event_timestamp_column="lpep_dropoff_datetime",
timestamp_format="yyyy-MM-dd HH:mm:ss")
now = datetime.now()
# set output folder based on different runtime
if client.spark_runtime == 'databricks':
output_path = ''.join(['dbfs:/feathrazure_cijob','_', str(now.minute), '_', str(now.second), "_deltalake"])
else:
output_path = ''.join(['abfss://feathrazuretest3fs@feathrazuretest3storage.dfs.core.windows.net/demo_data/output','_', str(now.minute), '_', str(now.second), "_deltalake"])
client.get_offline_features(observation_settings=settings,
feature_query=feature_query,
output_path=output_path,
execution_configuratons=SparkExecutionConfiguration({"spark.feathr.inputFormat": "delta", "spark.feathr.outputFormat": "delta"})
)
# assuming the job can successfully run; otherwise it will throw exception
client.wait_job_to_finish(timeout_sec=900)
# download result and just assert the returned result is not empty
res_df = get_result_df(client)
result_format: str = client.get_job_tags().get(OUTPUT_FORMAT, "")
if not (client.spark_runtime == 'azure_synapse' and result_format == 'delta'):
# if users are using delta format in synapse, skip this check, due to issue https://github.com/delta-io/delta-rs/issues/582
assert res_df.shape[0] > 0
| [
"feathr.FeatureQuery",
"feathr.job_utils.get_result_df",
"feathr.ObservationSettings",
"feathr.TypedKey",
"feathr.SparkExecutionConfiguration"
] | [((789, 857), 'feathr.TypedKey', 'TypedKey', ([], {'key_column': '"""DOLocationID"""', 'key_column_type': 'ValueType.INT32'}), "(key_column='DOLocationID', key_column_type=ValueType.INT32)\n", (797, 857), False, 'from feathr import BOOLEAN, FLOAT, INT32, FeatureQuery, ObservationSettings, SparkExecutionConfiguration, TypedKey, ValueType\n'), ((907, 974), 'feathr.FeatureQuery', 'FeatureQuery', ([], {'feature_list': "['f_location_avg_fare']", 'key': 'location_id'}), "(feature_list=['f_location_avg_fare'], key=location_id)\n", (919, 974), False, 'from feathr import BOOLEAN, FLOAT, INT32, FeatureQuery, ObservationSettings, SparkExecutionConfiguration, TypedKey, ValueType\n'), ((999, 1232), 'feathr.ObservationSettings', 'ObservationSettings', ([], {'observation_path': '"""wasbs://public@azurefeathrstorage.blob.core.windows.net/sample_data/green_tripdata_2020-04"""', 'event_timestamp_column': '"""lpep_dropoff_datetime"""', 'timestamp_format': '"""yyyy-MM-dd HH:mm:ss"""'}), "(observation_path=\n 'wasbs://public@azurefeathrstorage.blob.core.windows.net/sample_data/green_tripdata_2020-04'\n , event_timestamp_column='lpep_dropoff_datetime', timestamp_format=\n 'yyyy-MM-dd HH:mm:ss')\n", (1018, 1232), False, 'from feathr import BOOLEAN, FLOAT, INT32, FeatureQuery, ObservationSettings, SparkExecutionConfiguration, TypedKey, ValueType\n'), ((1254, 1268), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1266, 1268), False, 'from datetime import datetime\n'), ((2270, 2291), 'feathr.job_utils.get_result_df', 'get_result_df', (['client'], {}), '(client)\n', (2283, 2291), False, 'from feathr.job_utils import get_result_df\n'), ((2705, 2773), 'feathr.TypedKey', 'TypedKey', ([], {'key_column': '"""DOLocationID"""', 'key_column_type': 'ValueType.INT32'}), "(key_column='DOLocationID', key_column_type=ValueType.INT32)\n", (2713, 2773), False, 'from feathr import BOOLEAN, FLOAT, INT32, FeatureQuery, ObservationSettings, SparkExecutionConfiguration, TypedKey, ValueType\n'), 
((2823, 2890), 'feathr.FeatureQuery', 'FeatureQuery', ([], {'feature_list': "['f_location_avg_fare']", 'key': 'location_id'}), "(feature_list=['f_location_avg_fare'], key=location_id)\n", (2835, 2890), False, 'from feathr import BOOLEAN, FLOAT, INT32, FeatureQuery, ObservationSettings, SparkExecutionConfiguration, TypedKey, ValueType\n'), ((2915, 3144), 'feathr.ObservationSettings', 'ObservationSettings', ([], {'observation_path': '"""wasbs://public@azurefeathrstorage.blob.core.windows.net/sample_data/feathr_delta_table"""', 'event_timestamp_column': '"""lpep_dropoff_datetime"""', 'timestamp_format': '"""yyyy-MM-dd HH:mm:ss"""'}), "(observation_path=\n 'wasbs://public@azurefeathrstorage.blob.core.windows.net/sample_data/feathr_delta_table'\n , event_timestamp_column='lpep_dropoff_datetime', timestamp_format=\n 'yyyy-MM-dd HH:mm:ss')\n", (2934, 3144), False, 'from feathr import BOOLEAN, FLOAT, INT32, FeatureQuery, ObservationSettings, SparkExecutionConfiguration, TypedKey, ValueType\n'), ((3166, 3180), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3178, 3180), False, 'from datetime import datetime\n'), ((4182, 4203), 'feathr.job_utils.get_result_df', 'get_result_df', (['client'], {}), '(client)\n', (4195, 4203), False, 'from feathr.job_utils import get_result_df\n'), ((714, 768), 'os.path.join', 'os.path.join', (['test_workspace_dir', '"""feathr_config.yaml"""'], {}), "(test_workspace_dir, 'feathr_config.yaml')\n", (726, 768), False, 'import os\n'), ((2630, 2684), 'os.path.join', 'os.path.join', (['test_workspace_dir', '"""feathr_config.yaml"""'], {}), "(test_workspace_dir, 'feathr_config.yaml')\n", (2642, 2684), False, 'import os\n'), ((1911, 2023), 'feathr.SparkExecutionConfiguration', 'SparkExecutionConfiguration', (["{'spark.feathr.inputFormat': 'parquet', 'spark.feathr.outputFormat': 'parquet'}"], {}), "({'spark.feathr.inputFormat': 'parquet',\n 'spark.feathr.outputFormat': 'parquet'})\n", (1938, 2023), False, 'from feathr import BOOLEAN, FLOAT, 
INT32, FeatureQuery, ObservationSettings, SparkExecutionConfiguration, TypedKey, ValueType\n'), ((3827, 3935), 'feathr.SparkExecutionConfiguration', 'SparkExecutionConfiguration', (["{'spark.feathr.inputFormat': 'delta', 'spark.feathr.outputFormat': 'delta'}"], {}), "({'spark.feathr.inputFormat': 'delta',\n 'spark.feathr.outputFormat': 'delta'})\n", (3854, 3935), False, 'from feathr import BOOLEAN, FLOAT, INT32, FeatureQuery, ObservationSettings, SparkExecutionConfiguration, TypedKey, ValueType\n'), ((618, 632), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (622, 632), False, 'from pathlib import Path\n'), ((2534, 2548), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (2538, 2548), False, 'from pathlib import Path\n')] |
from datetime import datetime, timedelta
from feathr._materialization_utils import _to_materialization_config
from feathr import (BackfillTime, MaterializationSettings)
from feathr import RedisSink
def test_feature_materialization_config():
backfill_time = BackfillTime(start=datetime(2020, 5, 20), end=datetime(2020, 5,20), step=timedelta(days=1))
redisSink = RedisSink(table_name="nycTaxiDemoFeature")
settings = MaterializationSettings("nycTaxiTable",
sinks=[redisSink],
feature_names=["f_location_avg_fare", "f_location_max_fare"],
backfill_time=backfill_time)
config = _to_materialization_config(settings)
expected_config = """
operational: {
name: nycTaxiTable
endTime: "2020-05-20 00:00:00"
endTimeFormat: "yyyy-MM-dd HH:mm:ss"
resolution: DAILY
output:[
{
name: REDIS
params: {
table_name: "nycTaxiDemoFeature"
}
}
]
}
features: [f_location_avg_fare, f_location_max_fare]
"""
assert ''.join(config.split()) == ''.join(expected_config.split())
def test_feature_materialization_daily_schedule():
"""Test back fill cutoff time for a daily range"""
backfill_time = BackfillTime(start=datetime(2022, 3, 1), end=datetime(2022, 3, 5), step=timedelta(days=1))
settings = MaterializationSettings("", [], [], backfill_time)
expected = [datetime(2022, 3, day) for day in range(1, 6)]
assert settings.get_backfill_cutoff_time() == expected
def test_feature_materialization_hourly_schedule():
"""Test back fill cutoff time for a hourly range"""
backfill_time = BackfillTime(start=datetime(2022, 3, 1, 1), end=datetime(2022, 3, 1, 5), step=timedelta(hours=1))
settings = MaterializationSettings("", [], [], backfill_time)
expected = [datetime(2022,3, 1, hour) for hour in range(1, 6)]
assert settings.get_backfill_cutoff_time() == expected
def test_feature_materialization_now_schedule():
"""Test back fill cutoff time without backfill."""
settings = MaterializationSettings("", [], [])
date = settings.get_backfill_cutoff_time()[0]
expected = datetime.now()
assert expected.year == date.year
assert expected.month == date.month
assert expected.day == date.day
| [
"feathr.RedisSink",
"feathr._materialization_utils._to_materialization_config",
"feathr.MaterializationSettings"
] | [((372, 414), 'feathr.RedisSink', 'RedisSink', ([], {'table_name': '"""nycTaxiDemoFeature"""'}), "(table_name='nycTaxiDemoFeature')\n", (381, 414), False, 'from feathr import RedisSink\n'), ((430, 584), 'feathr.MaterializationSettings', 'MaterializationSettings', (['"""nycTaxiTable"""'], {'sinks': '[redisSink]', 'feature_names': "['f_location_avg_fare', 'f_location_max_fare']", 'backfill_time': 'backfill_time'}), "('nycTaxiTable', sinks=[redisSink], feature_names=[\n 'f_location_avg_fare', 'f_location_max_fare'], backfill_time=backfill_time)\n", (453, 584), False, 'from feathr import BackfillTime, MaterializationSettings\n'), ((713, 749), 'feathr._materialization_utils._to_materialization_config', '_to_materialization_config', (['settings'], {}), '(settings)\n', (739, 749), False, 'from feathr._materialization_utils import _to_materialization_config\n'), ((1553, 1603), 'feathr.MaterializationSettings', 'MaterializationSettings', (['""""""', '[]', '[]', 'backfill_time'], {}), "('', [], [], backfill_time)\n", (1576, 1603), False, 'from feathr import BackfillTime, MaterializationSettings\n'), ((1969, 2019), 'feathr.MaterializationSettings', 'MaterializationSettings', (['""""""', '[]', '[]', 'backfill_time'], {}), "('', [], [], backfill_time)\n", (1992, 2019), False, 'from feathr import BackfillTime, MaterializationSettings\n'), ((2267, 2302), 'feathr.MaterializationSettings', 'MaterializationSettings', (['""""""', '[]', '[]'], {}), "('', [], [])\n", (2290, 2302), False, 'from feathr import BackfillTime, MaterializationSettings\n'), ((2368, 2382), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2380, 2382), False, 'from datetime import datetime, timedelta\n'), ((1620, 1642), 'datetime.datetime', 'datetime', (['(2022)', '(3)', 'day'], {}), '(2022, 3, day)\n', (1628, 1642), False, 'from datetime import datetime, timedelta\n'), ((2036, 2062), 'datetime.datetime', 'datetime', (['(2022)', '(3)', '(1)', 'hour'], {}), '(2022, 3, 1, hour)\n', (2044, 2062), 
False, 'from datetime import datetime, timedelta\n'), ((283, 304), 'datetime.datetime', 'datetime', (['(2020)', '(5)', '(20)'], {}), '(2020, 5, 20)\n', (291, 304), False, 'from datetime import datetime, timedelta\n'), ((310, 331), 'datetime.datetime', 'datetime', (['(2020)', '(5)', '(20)'], {}), '(2020, 5, 20)\n', (318, 331), False, 'from datetime import datetime, timedelta\n'), ((337, 354), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (346, 354), False, 'from datetime import datetime, timedelta\n'), ((1466, 1486), 'datetime.datetime', 'datetime', (['(2022)', '(3)', '(1)'], {}), '(2022, 3, 1)\n', (1474, 1486), False, 'from datetime import datetime, timedelta\n'), ((1492, 1512), 'datetime.datetime', 'datetime', (['(2022)', '(3)', '(5)'], {}), '(2022, 3, 5)\n', (1500, 1512), False, 'from datetime import datetime, timedelta\n'), ((1519, 1536), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (1528, 1536), False, 'from datetime import datetime, timedelta\n'), ((1875, 1898), 'datetime.datetime', 'datetime', (['(2022)', '(3)', '(1)', '(1)'], {}), '(2022, 3, 1, 1)\n', (1883, 1898), False, 'from datetime import datetime, timedelta\n'), ((1904, 1927), 'datetime.datetime', 'datetime', (['(2022)', '(3)', '(1)', '(5)'], {}), '(2022, 3, 1, 5)\n', (1912, 1927), False, 'from datetime import datetime, timedelta\n'), ((1934, 1952), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (1943, 1952), False, 'from datetime import datetime, timedelta\n')] |
import os
from pathlib import Path
import pytest
from feathr import (BackfillTime, MaterializationSettings)
from feathr import RedisSink
from test_fixture import kafka_test_setup
@pytest.mark.skipif(os.environ.get('SPARK_CONFIG__SPARK_CLUSTER') != "azure_synapse",
reason="skip for databricks, as it cannot stop streaming job automatically for now.")
def test_feathr_kafa_streaming_features():
"""
Test FeathrClient() materialize_features can ingest streaming feature correctly
"""
test_workspace_dir = Path(__file__).parent.resolve() / "test_user_workspace"
client = kafka_test_setup(os.path.join(test_workspace_dir, "feathr_config.yaml"))
redisSink = RedisSink(table_name="kafkaSampleDemoFeature", streaming=True, streamingTimeoutMs=10000)
settings = MaterializationSettings(name="kafkaSampleDemo",
sinks=[redisSink],
feature_names=['f_modified_streaming_count']
)
client.materialize_features(settings)
client.wait_job_to_finish(timeout_sec=600)
| [
"feathr.RedisSink",
"feathr.MaterializationSettings"
] | [((699, 791), 'feathr.RedisSink', 'RedisSink', ([], {'table_name': '"""kafkaSampleDemoFeature"""', 'streaming': '(True)', 'streamingTimeoutMs': '(10000)'}), "(table_name='kafkaSampleDemoFeature', streaming=True,\n streamingTimeoutMs=10000)\n", (708, 791), False, 'from feathr import RedisSink\n'), ((803, 919), 'feathr.MaterializationSettings', 'MaterializationSettings', ([], {'name': '"""kafkaSampleDemo"""', 'sinks': '[redisSink]', 'feature_names': "['f_modified_streaming_count']"}), "(name='kafkaSampleDemo', sinks=[redisSink],\n feature_names=['f_modified_streaming_count'])\n", (826, 919), False, 'from feathr import BackfillTime, MaterializationSettings\n'), ((627, 681), 'os.path.join', 'os.path.join', (['test_workspace_dir', '"""feathr_config.yaml"""'], {}), "(test_workspace_dir, 'feathr_config.yaml')\n", (639, 681), False, 'import os\n'), ((200, 245), 'os.environ.get', 'os.environ.get', (['"""SPARK_CONFIG__SPARK_CLUSTER"""'], {}), "('SPARK_CONFIG__SPARK_CLUSTER')\n", (214, 245), False, 'import os\n'), ((540, 554), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (544, 554), False, 'from pathlib import Path\n')] |
import os
from datetime import datetime, timedelta
from pathlib import Path
from click.testing import CliRunner
from feathr import BOOLEAN, FLOAT, INT32, ValueType
from feathr.client import FeathrClient
from feathr import ValueType
from feathr.job_utils import get_result_df
from feathr import (BackfillTime, MaterializationSettings)
from feathr import FeatureQuery
from feathr import ObservationSettings
from feathr import RedisSink
from feathr import TypedKey
from feathrcli.cli import init
import pytest
from test_fixture import basic_test_setup
# make sure you have run the upload feature script before running these tests
# the feature configs are from feathr_project/data/feathr_user_workspace
def test_feathr_online_store_agg_features():
"""
Test FeathrClient() get_online_features and batch_get can get data correctly.
"""
# use different time for testing to avoid write conflicts
now = datetime.now()
online_test_table = ''.join(['nycTaxiCITable','_', str(now.minute), '_', str(now.second)])
test_workspace_dir = Path(
__file__).parent.resolve() / "test_user_workspace"
# os.chdir(test_workspace_dir)
client = basic_test_setup(os.path.join(test_workspace_dir, "feathr_config.yaml"))
backfill_time = BackfillTime(start=datetime(
2020, 5, 20), end=datetime(2020, 5, 20), step=timedelta(days=1))
redisSink = RedisSink(table_name=online_test_table)
settings = MaterializationSettings("nycTaxiTable",
sinks=[redisSink],
feature_names=[
"f_location_avg_fare", "f_location_max_fare"],
backfill_time=backfill_time)
client.materialize_features(settings)
# just assume the job is successful without validating the actual result in Redis. Might need to consolidate
# this part with the test_feathr_online_store test case
client.wait_job_to_finish(timeout_sec=900)
res = client.get_online_features(online_test_table, '265', [
'f_location_avg_fare', 'f_location_max_fare'])
# just assme there are values. We don't hard code the values for now for testing
# the correctness of the feature generation should be garunteed by feathr runtime.
# ID 239 and 265 are available in the `DOLocationID` column in this file:
# https://s3.amazonaws.com/nyc-tlc/trip+data/green_tripdata_2020-04.csv
# View more detials on this dataset: https://www1.nyc.gov/site/tlc/about/tlc-trip-record-data.page
assert len(res) == 2
assert res[0] != None
assert res[1] != None
res = client.multi_get_online_features(online_test_table,
['239', '265'],
['f_location_avg_fare', 'f_location_max_fare'])
assert res['239'][0] != None
assert res['239'][1] != None
assert res['265'][0] != None
assert res['265'][1] != None
@pytest.mark.skip(reason="Add back when complex types are supported in python API")
def test_feathr_online_store_non_agg_features():
"""
Test FeathrClient() online_get_features and batch_get can get data correctly.
"""
test_workspace_dir = Path(
__file__).parent.resolve() / "test_user_workspace"
client = basic_test_setup(os.path.join(test_workspace_dir, "feathr_config.yaml"))
now = datetime.now()
online_test_table = ''.join(['nycTaxiCITable','_', str(now.minute), '_', str(now.second)])
backfill_time = BackfillTime(start=datetime(
2020, 5, 20), end=datetime(2020, 5, 20), step=timedelta(days=1))
redisSink = RedisSink(table_name=online_test_table)
settings = MaterializationSettings("nycTaxiTable",
sinks=[redisSink],
feature_names=["f_gen_trip_distance", "f_gen_is_long_trip_distance", "f1", "f2", "f3", "f4", "f5", "f6"],
backfill_time=backfill_time)
client.materialize_features(settings)
# just assume the job is successful without validating the actual result in Redis. Might need to consolidate
# this part with the test_feathr_online_store test case
client.wait_job_to_finish(timeout_sec=900)
res = client.get_online_features(online_test_table, '111', ['f_gen_trip_distance', 'f_gen_is_long_trip_distance',
'f1', 'f2', 'f3', 'f4', 'f5', 'f6'])
# just assme there are values. We don't hard code the values for now for testing
# the correctness of the feature generation should be garunteed by feathr runtime.
# ID 239 and 265 are available in the `DOLocationID` column in this file:
# https://s3.amazonaws.com/nyc-tlc/trip+data/green_tripdata_2020-04.csv
# View more detials on this dataset: https://www1.nyc.gov/site/tlc/about/tlc-trip-record-data.page
assert len(res) == 8
assert res[0] != None
assert res[1] != None
# assert constant features
_validate_constant_feature(res)
res = client.multi_get_online_features(online_test_table,
['239', '265'],
['f_gen_trip_distance', 'f_gen_is_long_trip_distance', 'f1', 'f2', 'f3', 'f4', 'f5', 'f6'])
_validate_constant_feature(res['239'])
assert res['239'][0] != None
assert res['239'][1] != None
_validate_constant_feature(res['265'])
assert res['265'][0] != None
assert res['265'][1] != None
def _validate_constant_feature(feature):
assert feature[2] == [10.0, 20.0, 30.0]
assert feature[3] == ['a', 'b', 'c']
assert feature[4] == ([1, 2, 3], ['10', '20', '30'])
assert feature[5] == ([1, 2, 3], [True, False, True])
assert feature[6] == ([1, 2, 3], [1.0, 2.0, 3.0])
assert feature[7] == ([1, 2, 3], [1, 2, 3])
def test_dbfs_path():
test_workspace_dir = Path(
__file__).parent.resolve() / "test_user_workspace"
client = basic_test_setup(os.path.join(test_workspace_dir, "feathr_config.yaml"))
if client.spark_runtime.casefold() == "databricks":
# expect this raise an error since the result path is not in dbfs: format
with pytest.raises(RuntimeError):
client.feathr_spark_laucher.download_result(result_path="wasb://res_url", local_folder="/tmp")
def test_feathr_get_offline_features():
"""
Test get_offline_features() can get data correctly.
"""
runner = CliRunner()
with runner.isolated_filesystem():
runner.invoke(init, [])
client = basic_test_setup(
"./feathr_user_workspace/feathr_config.yaml")
location_id = TypedKey(key_column="DOLocationID",
key_column_type=ValueType.INT32,
description="location id in NYC",
full_name="nyc_taxi.location_id")
feature_query = FeatureQuery(
feature_list=["f_location_avg_fare"], key=location_id)
settings = ObservationSettings(
observation_path="wasbs://<EMAIL>/sample_data/green_tripdata_2020-04.csv",
event_timestamp_column="lpep_dropoff_datetime",
timestamp_format="yyyy-MM-dd HH:mm:ss")
now = datetime.now()
# set output folder based on different runtime
if client.spark_runtime == 'databricks':
output_path = ''.join(['dbfs:/feathrazure_cijob','_', str(now.minute), '_', str(now.second), ".avro"])
else:
output_path = ''.join(['abfss://feathrazuretest3fs@feathrazuretest3storage.dfs.core.windows.net/demo_data/output','_', str(now.minute), '_', str(now.second), ".avro"])
client.get_offline_features(observation_settings=settings,
feature_query=feature_query,
output_path=output_path)
# assuming the job can successfully run; otherwise it will throw exception
client.wait_job_to_finish(timeout_sec=900)
# download result and just assert the returned result is not empty
res_df = get_result_df(client)
assert res_df.shape[0] > 0
| [
"feathr.RedisSink",
"feathr.FeatureQuery",
"feathr.job_utils.get_result_df",
"feathr.ObservationSettings",
"feathr.TypedKey",
"feathr.MaterializationSettings"
] | [((3010, 3097), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""Add back when complex types are supported in python API"""'}), "(reason=\n 'Add back when complex types are supported in python API')\n", (3026, 3097), False, 'import pytest\n'), ((918, 932), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (930, 932), False, 'from datetime import datetime, timedelta\n'), ((1380, 1419), 'feathr.RedisSink', 'RedisSink', ([], {'table_name': 'online_test_table'}), '(table_name=online_test_table)\n', (1389, 1419), False, 'from feathr import RedisSink\n'), ((1435, 1589), 'feathr.MaterializationSettings', 'MaterializationSettings', (['"""nycTaxiTable"""'], {'sinks': '[redisSink]', 'feature_names': "['f_location_avg_fare', 'f_location_max_fare']", 'backfill_time': 'backfill_time'}), "('nycTaxiTable', sinks=[redisSink], feature_names=[\n 'f_location_avg_fare', 'f_location_max_fare'], backfill_time=backfill_time)\n", (1458, 1589), False, 'from feathr import BackfillTime, MaterializationSettings\n'), ((3426, 3440), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3438, 3440), False, 'from datetime import datetime, timedelta\n'), ((3674, 3713), 'feathr.RedisSink', 'RedisSink', ([], {'table_name': 'online_test_table'}), '(table_name=online_test_table)\n', (3683, 3713), False, 'from feathr import RedisSink\n'), ((3729, 3931), 'feathr.MaterializationSettings', 'MaterializationSettings', (['"""nycTaxiTable"""'], {'sinks': '[redisSink]', 'feature_names': "['f_gen_trip_distance', 'f_gen_is_long_trip_distance', 'f1', 'f2', 'f3',\n 'f4', 'f5', 'f6']", 'backfill_time': 'backfill_time'}), "('nycTaxiTable', sinks=[redisSink], feature_names=[\n 'f_gen_trip_distance', 'f_gen_is_long_trip_distance', 'f1', 'f2', 'f3',\n 'f4', 'f5', 'f6'], backfill_time=backfill_time)\n", (3752, 3931), False, 'from feathr import BackfillTime, MaterializationSettings\n'), ((6533, 6544), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (6542, 6544), False, 'from 
click.testing import CliRunner\n'), ((1185, 1239), 'os.path.join', 'os.path.join', (['test_workspace_dir', '"""feathr_config.yaml"""'], {}), "(test_workspace_dir, 'feathr_config.yaml')\n", (1197, 1239), False, 'import os\n'), ((3360, 3414), 'os.path.join', 'os.path.join', (['test_workspace_dir', '"""feathr_config.yaml"""'], {}), "(test_workspace_dir, 'feathr_config.yaml')\n", (3372, 3414), False, 'import os\n'), ((6063, 6117), 'os.path.join', 'os.path.join', (['test_workspace_dir', '"""feathr_config.yaml"""'], {}), "(test_workspace_dir, 'feathr_config.yaml')\n", (6075, 6117), False, 'import os\n'), ((6633, 6695), 'test_fixture.basic_test_setup', 'basic_test_setup', (['"""./feathr_user_workspace/feathr_config.yaml"""'], {}), "('./feathr_user_workspace/feathr_config.yaml')\n", (6649, 6695), False, 'from test_fixture import basic_test_setup\n'), ((6732, 6872), 'feathr.TypedKey', 'TypedKey', ([], {'key_column': '"""DOLocationID"""', 'key_column_type': 'ValueType.INT32', 'description': '"""location id in NYC"""', 'full_name': '"""nyc_taxi.location_id"""'}), "(key_column='DOLocationID', key_column_type=ValueType.INT32,\n description='location id in NYC', full_name='nyc_taxi.location_id')\n", (6740, 6872), False, 'from feathr import TypedKey\n'), ((6987, 7054), 'feathr.FeatureQuery', 'FeatureQuery', ([], {'feature_list': "['f_location_avg_fare']", 'key': 'location_id'}), "(feature_list=['f_location_avg_fare'], key=location_id)\n", (6999, 7054), False, 'from feathr import FeatureQuery\n'), ((7087, 7283), 'feathr.ObservationSettings', 'ObservationSettings', ([], {'observation_path': '"""wasbs://<EMAIL>/sample_data/green_tripdata_2020-04.csv"""', 'event_timestamp_column': '"""lpep_dropoff_datetime"""', 'timestamp_format': '"""yyyy-MM-dd HH:mm:ss"""'}), "(observation_path=\n 'wasbs://<EMAIL>/sample_data/green_tripdata_2020-04.csv',\n event_timestamp_column='lpep_dropoff_datetime', timestamp_format=\n 'yyyy-MM-dd HH:mm:ss')\n", (7106, 7283), False, 'from feathr import 
ObservationSettings\n'), ((7322, 7336), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7334, 7336), False, 'from datetime import datetime, timedelta\n'), ((8173, 8194), 'feathr.job_utils.get_result_df', 'get_result_df', (['client'], {}), '(client)\n', (8186, 8194), False, 'from feathr.job_utils import get_result_df\n'), ((1281, 1302), 'datetime.datetime', 'datetime', (['(2020)', '(5)', '(20)'], {}), '(2020, 5, 20)\n', (1289, 1302), False, 'from datetime import datetime, timedelta\n'), ((1317, 1338), 'datetime.datetime', 'datetime', (['(2020)', '(5)', '(20)'], {}), '(2020, 5, 20)\n', (1325, 1338), False, 'from datetime import datetime, timedelta\n'), ((1345, 1362), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (1354, 1362), False, 'from datetime import datetime, timedelta\n'), ((3575, 3596), 'datetime.datetime', 'datetime', (['(2020)', '(5)', '(20)'], {}), '(2020, 5, 20)\n', (3583, 3596), False, 'from datetime import datetime, timedelta\n'), ((3611, 3632), 'datetime.datetime', 'datetime', (['(2020)', '(5)', '(20)'], {}), '(2020, 5, 20)\n', (3619, 3632), False, 'from datetime import datetime, timedelta\n'), ((3639, 3656), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (3648, 3656), False, 'from datetime import datetime, timedelta\n'), ((6270, 6297), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (6283, 6297), False, 'import pytest\n'), ((1054, 1068), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (1058, 1068), False, 'from pathlib import Path\n'), ((3265, 3279), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (3269, 3279), False, 'from pathlib import Path\n'), ((5968, 5982), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (5972, 5982), False, 'from pathlib import Path\n')] |
from feathr import AvroJsonSchema
from feathr import KafKaSource
from feathr import KafkaConfig
from typing import List
import os
import random
from datetime import datetime, timedelta
from feathr import (BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING,
DerivedFeature, Feature, FeatureAnchor, HdfsSource,
TypedKey, ValueType, WindowAggTransformation)
from feathr.client import FeathrClient
from pyspark.sql import DataFrame
def basic_test_setup(config_path: str):
now = datetime.now()
# set workspace folder by time; make sure we don't have write conflict if there are many CI tests running
os.environ['SPARK_CONFIG__DATABRICKS__WORK_DIR'] = ''.join(['dbfs:/feathrazure_cijob','_', str(now.minute), '_', str(now.second), '_', str(now.microsecond)])
os.environ['SPARK_CONFIG__AZURE_SYNAPSE__WORKSPACE_DIR'] = ''.join(['abfss://feathrazuretest3fs@feathrazuretest3storage.dfs.core.windows.net/feathr_github_ci','_', str(now.minute), '_', str(now.second) ,'_', str(now.microsecond)])
client = FeathrClient(config_path=config_path)
batch_source = HdfsSource(name="nycTaxiBatchSource",
path="wasbs://<EMAIL>/sample_data/green_tripdata_2020-04.csv",
event_timestamp_column="lpep_dropoff_datetime",
timestamp_format="yyyy-MM-dd HH:mm:ss")
f_trip_distance = Feature(name="f_trip_distance",
feature_type=FLOAT, transform="trip_distance")
f_trip_time_duration = Feature(name="f_trip_time_duration",
feature_type=INT32,
transform="(to_unix_timestamp(lpep_dropoff_datetime) - to_unix_timestamp(lpep_pickup_datetime))/60")
features = [
f_trip_distance,
f_trip_time_duration,
Feature(name="f_is_long_trip_distance",
feature_type=BOOLEAN,
transform="cast_float(trip_distance)>30"),
Feature(name="f_day_of_week",
feature_type=INT32,
transform="dayofweek(lpep_dropoff_datetime)"),
]
request_anchor = FeatureAnchor(name="request_features",
source=INPUT_CONTEXT,
features=features)
f_trip_time_distance = DerivedFeature(name="f_trip_time_distance",
feature_type=FLOAT,
input_features=[
f_trip_distance, f_trip_time_duration],
transform="f_trip_distance * f_trip_time_duration")
f_trip_time_rounded = DerivedFeature(name="f_trip_time_rounded",
feature_type=INT32,
input_features=[f_trip_time_duration],
transform="f_trip_time_duration % 10")
location_id = TypedKey(key_column="DOLocationID",
key_column_type=ValueType.INT32,
description="location id in NYC",
full_name="nyc_taxi.location_id")
agg_features = [Feature(name="f_location_avg_fare",
key=location_id,
feature_type=FLOAT,
transform=WindowAggTransformation(agg_expr="cast_float(fare_amount)",
agg_func="AVG",
window="90d")),
Feature(name="f_location_max_fare",
key=location_id,
feature_type=FLOAT,
transform=WindowAggTransformation(agg_expr="cast_float(fare_amount)",
agg_func="MAX",
window="90d"))
]
agg_anchor = FeatureAnchor(name="aggregationFeatures",
source=batch_source,
features=agg_features)
client.build_features(anchor_list=[agg_anchor, request_anchor], derived_feature_list=[
f_trip_time_distance, f_trip_time_rounded])
return client
def snowflake_test_setup(config_path: str):
now = datetime.now()
# set workspace folder by time; make sure we don't have write conflict if there are many CI tests running
os.environ['SPARK_CONFIG__DATABRICKS__WORK_DIR'] = ''.join(['dbfs:/feathrazure_cijob_snowflake','_', str(now.minute), '_', str(now.second), '_', str(now.microsecond)])
os.environ['SPARK_CONFIG__AZURE_SYNAPSE__WORKSPACE_DIR'] = ''.join(['abfss://feathrazuretest3fs@feathrazuretest3storage.dfs.core.windows.net/feathr_github_ci_snowflake','_', str(now.minute), '_', str(now.second), '_', str(now.microsecond)])
client = FeathrClient(config_path=config_path)
batch_source = HdfsSource(name="snowflakeSampleBatchSource",
path="jdbc:snowflake://dqllago-ol19457.snowflakecomputing.com/?user=feathrintegration&sfWarehouse=COMPUTE_WH&dbtable=CALL_CENTER&sfDatabase=SNOWFLAKE_SAMPLE_DATA&sfSchema=TPCDS_SF10TCL",
)
call_sk_id = TypedKey(key_column="CC_CALL_CENTER_SK",
key_column_type=ValueType.INT32,
description="call center sk",
full_name="snowflake.CC_CALL_CENTER_SK")
f_snowflake_call_center_division_name = Feature(name="f_snowflake_call_center_division_name",feature_type=STRING, transform="CC_DIVISION_NAME", key=call_sk_id)
f_snowflake_call_center_zipcode = Feature(name="f_snowflake_call_center_zipcode",feature_type=STRING, transform="CC_ZIP", key=call_sk_id)
features = [f_snowflake_call_center_division_name, f_snowflake_call_center_zipcode ]
snowflakeFeatures = FeatureAnchor(name="snowflakeFeatures",
source=batch_source,
features=features)
client.build_features(anchor_list=[snowflakeFeatures])
return client
def kafka_test_setup(config_path: str):
client = FeathrClient(config_path=config_path)
schema = AvroJsonSchema(schemaStr="""
{
"type": "record",
"name": "DriverTrips",
"fields": [
{"name": "driver_id", "type": "long"},
{"name": "trips_today", "type": "int"},
{
"name": "datetime",
"type": {"type": "long", "logicalType": "timestamp-micros"}
}
]
}
""")
stream_source = KafKaSource(name="kafkaStreamingSource",
kafkaConfig=KafkaConfig(brokers=["feathrazureci.servicebus.windows.net:9093"],
topics=["feathrcieventhub"],
schema=schema)
)
driver_id = TypedKey(key_column="driver_id",
key_column_type=ValueType.INT64,
description="driver id",
full_name="nyc driver id")
kafkaAnchor = FeatureAnchor(name="kafkaAnchor",
source=stream_source,
features=[Feature(name="f_modified_streaming_count",
feature_type=INT32,
transform="trips_today + 1",
key=driver_id),
Feature(name="f_modified_streaming_count2",
feature_type=INT32,
transform="trips_today + 2",
key=driver_id)]
)
client.build_features(anchor_list=[kafkaAnchor])
return client
def registry_test_setup(config_path: str):
# use a new project name every time to make sure all features are registered correctly
now = datetime.now()
os.environ["project_config__project_name"] = ''.join(['feathr_ci_registry','_', str(now.minute), '_', str(now.second), '_', str(now.microsecond)])
client = FeathrClient(config_path=config_path, project_registry_tag={"for_test_purpose":"true"})
def add_new_dropoff_and_fare_amount_column(df: DataFrame):
df = df.withColumn("new_lpep_dropoff_datetime", col("lpep_dropoff_datetime"))
df = df.withColumn("new_fare_amount", col("fare_amount") + 1000000)
return df
batch_source = HdfsSource(name="nycTaxiBatchSource",
path="wasbs://public<EMAIL>athr<EMAIL>.blob.core.windows.net/sample_data/green_tripdata_2020-04.csv",
event_timestamp_column="lpep_dropoff_datetime",
timestamp_format="yyyy-MM-dd HH:mm:ss",
preprocessing=add_new_dropoff_and_fare_amount_column,
registry_tags={"for_test_purpose":"true"}
)
f_trip_distance = Feature(name="f_trip_distance",
feature_type=FLOAT, transform="trip_distance",
registry_tags={"for_test_purpose":"true"}
)
f_trip_time_duration = Feature(name="f_trip_time_duration",
feature_type=INT32,
transform="(to_unix_timestamp(lpep_dropoff_datetime) - to_unix_timestamp(lpep_pickup_datetime))/60")
features = [
f_trip_distance,
f_trip_time_duration,
Feature(name="f_is_long_trip_distance",
feature_type=BOOLEAN,
transform="cast_float(trip_distance)>30"),
Feature(name="f_day_of_week",
feature_type=INT32,
transform="dayofweek(lpep_dropoff_datetime)"),
]
request_anchor = FeatureAnchor(name="request_features",
source=INPUT_CONTEXT,
features=features,
registry_tags={"for_test_purpose":"true"}
)
f_trip_time_distance = DerivedFeature(name="f_trip_time_distance",
feature_type=FLOAT,
input_features=[
f_trip_distance, f_trip_time_duration],
transform="f_trip_distance * f_trip_time_duration")
f_trip_time_rounded = DerivedFeature(name="f_trip_time_rounded",
feature_type=INT32,
input_features=[f_trip_time_duration],
transform="f_trip_time_duration % 10")
f_trip_time_rounded_plus = DerivedFeature(name="f_trip_time_rounded_plus",
feature_type=INT32,
input_features=[f_trip_time_rounded],
transform="f_trip_time_rounded + 100")
location_id = TypedKey(key_column="DOLocationID",
key_column_type=ValueType.INT32,
description="location id in NYC",
full_name="nyc_taxi.location_id")
agg_features = [Feature(name="f_location_avg_fare",
key=location_id,
feature_type=FLOAT,
transform=WindowAggTransformation(agg_expr="cast_float(fare_amount)",
agg_func="AVG",
window="90d")),
Feature(name="f_location_max_fare",
key=location_id,
feature_type=FLOAT,
transform=WindowAggTransformation(agg_expr="cast_float(fare_amount)",
agg_func="MAX",
window="90d"))
]
agg_anchor = FeatureAnchor(name="aggregationFeatures",
source=batch_source,
features=agg_features)
derived_feature_list = [
f_trip_time_distance, f_trip_time_rounded, f_trip_time_rounded_plus]
# shuffule the order to make sure they can be parsed correctly
# Those input derived features can be in arbitrary order, but in order to parse the right dependencies, we need to reorder them internally in a certain order.
# This shuffle is to make sure that each time we have random shuffle for the input and make sure the internal sorting algorithm works (we are using topological sort).
random.shuffle(derived_feature_list)
client.build_features(anchor_list=[agg_anchor, request_anchor], derived_feature_list=derived_feature_list)
return client
def get_online_test_table_name(table_name: str):
# use different time for testing to avoid write conflicts
now = datetime.now()
return '_'.join([table_name, str(now.minute), str(now.second)]) | [
"feathr.AvroJsonSchema",
"feathr.Feature",
"feathr.HdfsSource",
"feathr.KafkaConfig",
"feathr.client.FeathrClient",
"feathr.DerivedFeature",
"feathr.FeatureAnchor",
"feathr.TypedKey",
"feathr.WindowAggTransformation"
] | [((516, 530), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (528, 530), False, 'from datetime import datetime, timedelta\n'), ((1058, 1095), 'feathr.client.FeathrClient', 'FeathrClient', ([], {'config_path': 'config_path'}), '(config_path=config_path)\n', (1070, 1095), False, 'from feathr.client import FeathrClient\n'), ((1115, 1317), 'feathr.HdfsSource', 'HdfsSource', ([], {'name': '"""nycTaxiBatchSource"""', 'path': '"""wasbs://<EMAIL>/sample_data/green_tripdata_2020-04.csv"""', 'event_timestamp_column': '"""lpep_dropoff_datetime"""', 'timestamp_format': '"""yyyy-MM-dd HH:mm:ss"""'}), "(name='nycTaxiBatchSource', path=\n 'wasbs://<EMAIL>/sample_data/green_tripdata_2020-04.csv',\n event_timestamp_column='lpep_dropoff_datetime', timestamp_format=\n 'yyyy-MM-dd HH:mm:ss')\n", (1125, 1317), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((1417, 1495), 'feathr.Feature', 'Feature', ([], {'name': '"""f_trip_distance"""', 'feature_type': 'FLOAT', 'transform': '"""trip_distance"""'}), "(name='f_trip_distance', feature_type=FLOAT, transform='trip_distance')\n", (1424, 1495), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((1553, 1720), 'feathr.Feature', 'Feature', ([], {'name': '"""f_trip_time_duration"""', 'feature_type': 'INT32', 'transform': '"""(to_unix_timestamp(lpep_dropoff_datetime) - to_unix_timestamp(lpep_pickup_datetime))/60"""'}), "(name='f_trip_time_duration', feature_type=INT32, transform=\n '(to_unix_timestamp(lpep_dropoff_datetime) - to_unix_timestamp(lpep_pickup_datetime))/60'\n )\n", (1560, 1720), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((2165, 2244), 
'feathr.FeatureAnchor', 'FeatureAnchor', ([], {'name': '"""request_features"""', 'source': 'INPUT_CONTEXT', 'features': 'features'}), "(name='request_features', source=INPUT_CONTEXT, features=features)\n", (2178, 2244), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((2343, 2523), 'feathr.DerivedFeature', 'DerivedFeature', ([], {'name': '"""f_trip_time_distance"""', 'feature_type': 'FLOAT', 'input_features': '[f_trip_distance, f_trip_time_duration]', 'transform': '"""f_trip_distance * f_trip_time_duration"""'}), "(name='f_trip_time_distance', feature_type=FLOAT,\n input_features=[f_trip_distance, f_trip_time_duration], transform=\n 'f_trip_distance * f_trip_time_duration')\n", (2357, 2523), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((2715, 2864), 'feathr.DerivedFeature', 'DerivedFeature', ([], {'name': '"""f_trip_time_rounded"""', 'feature_type': 'INT32', 'input_features': '[f_trip_time_duration]', 'transform': '"""f_trip_time_duration % 10"""'}), "(name='f_trip_time_rounded', feature_type=INT32,\n input_features=[f_trip_time_duration], transform=\n 'f_trip_time_duration % 10')\n", (2729, 2864), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((2998, 3138), 'feathr.TypedKey', 'TypedKey', ([], {'key_column': '"""DOLocationID"""', 'key_column_type': 'ValueType.INT32', 'description': '"""location id in NYC"""', 'full_name': '"""nyc_taxi.location_id"""'}), "(key_column='DOLocationID', key_column_type=ValueType.INT32,\n description='location id in NYC', full_name='nyc_taxi.location_id')\n", (3006, 3138), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, 
DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((4061, 4151), 'feathr.FeatureAnchor', 'FeatureAnchor', ([], {'name': '"""aggregationFeatures"""', 'source': 'batch_source', 'features': 'agg_features'}), "(name='aggregationFeatures', source=batch_source, features=\n agg_features)\n", (4074, 4151), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((4428, 4442), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4440, 4442), False, 'from datetime import datetime, timedelta\n'), ((4986, 5023), 'feathr.client.FeathrClient', 'FeathrClient', ([], {'config_path': 'config_path'}), '(config_path=config_path)\n', (4998, 5023), False, 'from feathr.client import FeathrClient\n'), ((5043, 5285), 'feathr.HdfsSource', 'HdfsSource', ([], {'name': '"""snowflakeSampleBatchSource"""', 'path': '"""jdbc:snowflake://dqllago-ol19457.snowflakecomputing.com/?user=feathrintegration&sfWarehouse=COMPUTE_WH&dbtable=CALL_CENTER&sfDatabase=SNOWFLAKE_SAMPLE_DATA&sfSchema=TPCDS_SF10TCL"""'}), "(name='snowflakeSampleBatchSource', path=\n 'jdbc:snowflake://dqllago-ol19457.snowflakecomputing.com/?user=feathrintegration&sfWarehouse=COMPUTE_WH&dbtable=CALL_CENTER&sfDatabase=SNOWFLAKE_SAMPLE_DATA&sfSchema=TPCDS_SF10TCL'\n )\n", (5053, 5285), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((5355, 5503), 'feathr.TypedKey', 'TypedKey', ([], {'key_column': '"""CC_CALL_CENTER_SK"""', 'key_column_type': 'ValueType.INT32', 'description': '"""call center sk"""', 'full_name': '"""snowflake.CC_CALL_CENTER_SK"""'}), "(key_column='CC_CALL_CENTER_SK', key_column_type=ValueType.INT32,\n description='call center sk', full_name='snowflake.CC_CALL_CENTER_SK')\n", (5363, 5503), False, 'from feathr import 
BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((5623, 5747), 'feathr.Feature', 'Feature', ([], {'name': '"""f_snowflake_call_center_division_name"""', 'feature_type': 'STRING', 'transform': '"""CC_DIVISION_NAME"""', 'key': 'call_sk_id'}), "(name='f_snowflake_call_center_division_name', feature_type=STRING,\n transform='CC_DIVISION_NAME', key=call_sk_id)\n", (5630, 5747), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((5782, 5890), 'feathr.Feature', 'Feature', ([], {'name': '"""f_snowflake_call_center_zipcode"""', 'feature_type': 'STRING', 'transform': '"""CC_ZIP"""', 'key': 'call_sk_id'}), "(name='f_snowflake_call_center_zipcode', feature_type=STRING,\n transform='CC_ZIP', key=call_sk_id)\n", (5789, 5890), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((6004, 6083), 'feathr.FeatureAnchor', 'FeatureAnchor', ([], {'name': '"""snowflakeFeatures"""', 'source': 'batch_source', 'features': 'features'}), "(name='snowflakeFeatures', source=batch_source, features=features)\n", (6017, 6083), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((6285, 6322), 'feathr.client.FeathrClient', 'FeathrClient', ([], {'config_path': 'config_path'}), '(config_path=config_path)\n', (6297, 6322), False, 'from feathr.client import FeathrClient\n'), ((6336, 6725), 'feathr.AvroJsonSchema', 'AvroJsonSchema', ([], {'schemaStr': '"""\n {\n "type": "record",\n "name": "DriverTrips",\n "fields": [\n {"name": "driver_id", "type": "long"},\n {"name": "trips_today", "type": "int"},\n {\n "name": "datetime",\n "type": 
{"type": "long", "logicalType": "timestamp-micros"}\n }\n ]\n }\n """'}), '(schemaStr=\n """\n {\n "type": "record",\n "name": "DriverTrips",\n "fields": [\n {"name": "driver_id", "type": "long"},\n {"name": "trips_today", "type": "int"},\n {\n "name": "datetime",\n "type": {"type": "long", "logicalType": "timestamp-micros"}\n }\n ]\n }\n """\n )\n', (6350, 6725), False, 'from feathr import AvroJsonSchema\n'), ((7087, 7208), 'feathr.TypedKey', 'TypedKey', ([], {'key_column': '"""driver_id"""', 'key_column_type': 'ValueType.INT64', 'description': '"""driver id"""', 'full_name': '"""nyc driver id"""'}), "(key_column='driver_id', key_column_type=ValueType.INT64,\n description='driver id', full_name='nyc driver id')\n", (7095, 7208), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((8303, 8317), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (8315, 8317), False, 'from datetime import datetime, timedelta\n'), ((8485, 8578), 'feathr.client.FeathrClient', 'FeathrClient', ([], {'config_path': 'config_path', 'project_registry_tag': "{'for_test_purpose': 'true'}"}), "(config_path=config_path, project_registry_tag={\n 'for_test_purpose': 'true'})\n", (8497, 8578), False, 'from feathr.client import FeathrClient\n'), ((8837, 9187), 'feathr.HdfsSource', 'HdfsSource', ([], {'name': '"""nycTaxiBatchSource"""', 'path': '"""wasbs://public<EMAIL>athr<EMAIL>.blob.core.windows.net/sample_data/green_tripdata_2020-04.csv"""', 'event_timestamp_column': '"""lpep_dropoff_datetime"""', 'timestamp_format': '"""yyyy-MM-dd HH:mm:ss"""', 'preprocessing': 'add_new_dropoff_and_fare_amount_column', 'registry_tags': "{'for_test_purpose': 'true'}"}), "(name='nycTaxiBatchSource', path=\n 'wasbs://public<EMAIL>athr<EMAIL>.blob.core.windows.net/sample_data/green_tripdata_2020-04.csv'\n , event_timestamp_column='lpep_dropoff_datetime', timestamp_format=\n 'yyyy-MM-dd 
HH:mm:ss', preprocessing=\n add_new_dropoff_and_fare_amount_column, registry_tags={\n 'for_test_purpose': 'true'})\n", (8847, 9187), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((9366, 9493), 'feathr.Feature', 'Feature', ([], {'name': '"""f_trip_distance"""', 'feature_type': 'FLOAT', 'transform': '"""trip_distance"""', 'registry_tags': "{'for_test_purpose': 'true'}"}), "(name='f_trip_distance', feature_type=FLOAT, transform=\n 'trip_distance', registry_tags={'for_test_purpose': 'true'})\n", (9373, 9493), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((9606, 9773), 'feathr.Feature', 'Feature', ([], {'name': '"""f_trip_time_duration"""', 'feature_type': 'INT32', 'transform': '"""(to_unix_timestamp(lpep_dropoff_datetime) - to_unix_timestamp(lpep_pickup_datetime))/60"""'}), "(name='f_trip_time_duration', feature_type=INT32, transform=\n '(to_unix_timestamp(lpep_dropoff_datetime) - to_unix_timestamp(lpep_pickup_datetime))/60'\n )\n", (9613, 9773), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((10211, 10339), 'feathr.FeatureAnchor', 'FeatureAnchor', ([], {'name': '"""request_features"""', 'source': 'INPUT_CONTEXT', 'features': 'features', 'registry_tags': "{'for_test_purpose': 'true'}"}), "(name='request_features', source=INPUT_CONTEXT, features=\n features, registry_tags={'for_test_purpose': 'true'})\n", (10224, 10339), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((10503, 10683), 'feathr.DerivedFeature', 'DerivedFeature', ([], {'name': 
'"""f_trip_time_distance"""', 'feature_type': 'FLOAT', 'input_features': '[f_trip_distance, f_trip_time_duration]', 'transform': '"""f_trip_distance * f_trip_time_duration"""'}), "(name='f_trip_time_distance', feature_type=FLOAT,\n input_features=[f_trip_distance, f_trip_time_duration], transform=\n 'f_trip_distance * f_trip_time_duration')\n", (10517, 10683), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((10875, 11024), 'feathr.DerivedFeature', 'DerivedFeature', ([], {'name': '"""f_trip_time_rounded"""', 'feature_type': 'INT32', 'input_features': '[f_trip_time_duration]', 'transform': '"""f_trip_time_duration % 10"""'}), "(name='f_trip_time_rounded', feature_type=INT32,\n input_features=[f_trip_time_duration], transform=\n 'f_trip_time_duration % 10')\n", (10889, 11024), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((11170, 11323), 'feathr.DerivedFeature', 'DerivedFeature', ([], {'name': '"""f_trip_time_rounded_plus"""', 'feature_type': 'INT32', 'input_features': '[f_trip_time_rounded]', 'transform': '"""f_trip_time_rounded + 100"""'}), "(name='f_trip_time_rounded_plus', feature_type=INT32,\n input_features=[f_trip_time_rounded], transform='f_trip_time_rounded + 100'\n )\n", (11184, 11323), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((11457, 11597), 'feathr.TypedKey', 'TypedKey', ([], {'key_column': '"""DOLocationID"""', 'key_column_type': 'ValueType.INT32', 'description': '"""location id in NYC"""', 'full_name': '"""nyc_taxi.location_id"""'}), "(key_column='DOLocationID', key_column_type=ValueType.INT32,\n description='location id in NYC', full_name='nyc_taxi.location_id')\n", 
(11465, 11597), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((12520, 12610), 'feathr.FeatureAnchor', 'FeatureAnchor', ([], {'name': '"""aggregationFeatures"""', 'source': 'batch_source', 'features': 'agg_features'}), "(name='aggregationFeatures', source=batch_source, features=\n agg_features)\n", (12533, 12610), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((13206, 13242), 'random.shuffle', 'random.shuffle', (['derived_feature_list'], {}), '(derived_feature_list)\n', (13220, 13242), False, 'import random\n'), ((13494, 13508), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (13506, 13508), False, 'from datetime import datetime, timedelta\n'), ((1862, 1970), 'feathr.Feature', 'Feature', ([], {'name': '"""f_is_long_trip_distance"""', 'feature_type': 'BOOLEAN', 'transform': '"""cast_float(trip_distance)>30"""'}), "(name='f_is_long_trip_distance', feature_type=BOOLEAN, transform=\n 'cast_float(trip_distance)>30')\n", (1869, 1970), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((2007, 2107), 'feathr.Feature', 'Feature', ([], {'name': '"""f_day_of_week"""', 'feature_type': 'INT32', 'transform': '"""dayofweek(lpep_dropoff_datetime)"""'}), "(name='f_day_of_week', feature_type=INT32, transform=\n 'dayofweek(lpep_dropoff_datetime)')\n", (2014, 2107), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((9908, 10016), 'feathr.Feature', 'Feature', ([], {'name': '"""f_is_long_trip_distance"""', 'feature_type': 'BOOLEAN', 'transform': 
'"""cast_float(trip_distance)>30"""'}), "(name='f_is_long_trip_distance', feature_type=BOOLEAN, transform=\n 'cast_float(trip_distance)>30')\n", (9915, 10016), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((10053, 10153), 'feathr.Feature', 'Feature', ([], {'name': '"""f_day_of_week"""', 'feature_type': 'INT32', 'transform': '"""dayofweek(lpep_dropoff_datetime)"""'}), "(name='f_day_of_week', feature_type=INT32, transform=\n 'dayofweek(lpep_dropoff_datetime)')\n", (10060, 10153), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((6819, 6934), 'feathr.KafkaConfig', 'KafkaConfig', ([], {'brokers': "['feathrazureci.servicebus.windows.net:9093']", 'topics': "['feathrcieventhub']", 'schema': 'schema'}), "(brokers=['feathrazureci.servicebus.windows.net:9093'], topics=[\n 'feathrcieventhub'], schema=schema)\n", (6830, 6934), False, 'from feathr import KafkaConfig\n'), ((3403, 3496), 'feathr.WindowAggTransformation', 'WindowAggTransformation', ([], {'agg_expr': '"""cast_float(fare_amount)"""', 'agg_func': '"""AVG"""', 'window': '"""90d"""'}), "(agg_expr='cast_float(fare_amount)', agg_func='AVG',\n window='90d')\n", (3426, 3496), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((3806, 3899), 'feathr.WindowAggTransformation', 'WindowAggTransformation', ([], {'agg_expr': '"""cast_float(fare_amount)"""', 'agg_func': '"""MAX"""', 'window': '"""90d"""'}), "(agg_expr='cast_float(fare_amount)', agg_func='MAX',\n window='90d')\n", (3829, 3899), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, 
WindowAggTransformation\n'), ((7444, 7555), 'feathr.Feature', 'Feature', ([], {'name': '"""f_modified_streaming_count"""', 'feature_type': 'INT32', 'transform': '"""trips_today + 1"""', 'key': 'driver_id'}), "(name='f_modified_streaming_count', feature_type=INT32, transform=\n 'trips_today + 1', key=driver_id)\n", (7451, 7555), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((7768, 7880), 'feathr.Feature', 'Feature', ([], {'name': '"""f_modified_streaming_count2"""', 'feature_type': 'INT32', 'transform': '"""trips_today + 2"""', 'key': 'driver_id'}), "(name='f_modified_streaming_count2', feature_type=INT32, transform=\n 'trips_today + 2', key=driver_id)\n", (7775, 7880), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((11862, 11955), 'feathr.WindowAggTransformation', 'WindowAggTransformation', ([], {'agg_expr': '"""cast_float(fare_amount)"""', 'agg_func': '"""AVG"""', 'window': '"""90d"""'}), "(agg_expr='cast_float(fare_amount)', agg_func='AVG',\n window='90d')\n", (11885, 11955), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n'), ((12265, 12358), 'feathr.WindowAggTransformation', 'WindowAggTransformation', ([], {'agg_expr': '"""cast_float(fare_amount)"""', 'agg_func': '"""MAX"""', 'window': '"""90d"""'}), "(agg_expr='cast_float(fare_amount)', agg_func='MAX',\n window='90d')\n", (12288, 12358), False, 'from feathr import BOOLEAN, FLOAT, INPUT_CONTEXT, INT32, STRING, DerivedFeature, Feature, FeatureAnchor, HdfsSource, TypedKey, ValueType, WindowAggTransformation\n')] |
from feathrcli.cli import init
from click.testing import CliRunner
import os
from feathr.client import FeathrClient
def test_configuration_loading():
    """
    Verify that ``feathr init`` scaffolds a workspace and that the Spark
    runtime-location environment variables take precedence over the values
    in the generated yaml configuration.
    """
    cli_runner = CliRunner()
    with cli_runner.isolated_filesystem():
        outcome = cli_runner.invoke(init, [])
        assert outcome.exit_code == 0
        assert os.path.isdir("./feathr_user_workspace")

        client = FeathrClient(config_path="./feathr_user_workspace/feathr_config.yaml")
        # The jar path must still resolve even though we are not inside the
        # workspace folder when the client is constructed.
        assert client._FEATHR_JOB_JAR_PATH is not None

        expected_location = '/test_location'
        # Synapse and Databricks runtimes share a single set of spark tests,
        # so both override variables are set; client._FEATHR_JOB_JAR_PATH is
        # populated from whichever runtime the config selects. The point of
        # this test is that envs override the yaml configs.
        os.environ['SPARK_CONFIG__AZURE_SYNAPSE__FEATHR_RUNTIME_LOCATION'] = expected_location
        os.environ['SPARK_CONFIG__DATABRICKS__FEATHR_RUNTIME_LOCATION'] = expected_location
        # A missing jar at this path only produces a user prompt, so the
        # construction below must not raise.
        client = FeathrClient(config_path="./feathr_user_workspace/feathr_config.yaml")
        assert client._FEATHR_JOB_JAR_PATH == expected_location
assert client._FEATHR_JOB_JAR_PATH == TEST_LOCATION | [
"feathr.client.FeathrClient"
] | [((222, 233), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (231, 233), False, 'from click.testing import CliRunner\n'), ((368, 408), 'os.path.isdir', 'os.path.isdir', (['"""./feathr_user_workspace"""'], {}), "('./feathr_user_workspace')\n", (381, 408), False, 'import os\n'), ((427, 497), 'feathr.client.FeathrClient', 'FeathrClient', ([], {'config_path': '"""./feathr_user_workspace/feathr_config.yaml"""'}), "(config_path='./feathr_user_workspace/feathr_config.yaml')\n", (439, 497), False, 'from feathr.client import FeathrClient\n'), ((1794, 1864), 'feathr.client.FeathrClient', 'FeathrClient', ([], {'config_path': '"""./feathr_user_workspace/feathr_config.yaml"""'}), "(config_path='./feathr_user_workspace/feathr_config.yaml')\n", (1806, 1864), False, 'from feathr.client import FeathrClient\n')] |
import distutils.dir_util
import os
import pathlib
import shutil
import subprocess
import urllib.request
from pathlib import Path

import click
from py4j.java_gateway import JavaGateway

from feathr.client import FeathrClient
@click.group()
@click.pass_context
def cli(ctx: click.Context):
    """
    Feathr CLI tool. Visit https://github.com/linkedin/feathr for more details.
    """
    # The group itself does no work; each subcommand below implements
    # one piece of the workflow.
def check_user_at_root():
    """
    Raise a ``click.UsageError`` unless the CLI is being run from the root of
    the user workspace.

    Path-related functionality assumes the current working directory is the
    workspace root, so every command calls this guard first.
    """
    # feathr_config.yaml only exists at the workspace root, so its presence
    # (relative to the current directory) identifies the root.
    workspace_anchor = Path(".") / 'feathr_config.yaml'
    if not workspace_anchor.exists():
        raise click.UsageError('You are NOT at the root of your user workspace("/feathr_user_workspace"). Please '
                               'execute the command under your user workspace root.')
@cli.command()
@click.option('--name', default="feathr_user_workspace", help='Specify the workspace name.')
@click.option('--git/--no-git', default=False, help='When enabled, a git-based workspace will be created.')
def init(name, git):
    """
    Initializes a Feathr project to create and manage features. A team should share a same Feathr project usually via
    git. By default, git is NOT used to manage the workspace.
    """
    # Refuse to clobber an existing workspace.
    workspace_dir = Path.cwd() / name
    if workspace_dir.is_dir():
        raise click.UsageError(f'Feathr workspace ({name}) already exist. Please use a new folder name.')

    output_str = f'Creating workspace {name} with sample config files and mock data ...'
    click.echo(output_str)

    # Template workspace shipped with the package (sample configs + mock data).
    default_workspace = str(Path(Path(__file__).parent / 'data' / 'feathr_user_workspace').absolute())
    # current feathr_user_workspace directory w.r.t. where the init command is executed
    pathlib.Path(name).mkdir(parents=True, exist_ok=True)
    # shutil.copytree(dirs_exist_ok=True) (Python 3.8+) replaces the
    # deprecated distutils.dir_util.copy_tree, which was removed in 3.12.
    shutil.copytree(default_workspace, workspace_dir, dirs_exist_ok=True)

    # Optionally turn the new workspace into a git repository.
    if git:
        os.chdir(workspace_dir)
        process = subprocess.Popen(['git', 'init'], stdout=subprocess.PIPE)
        output = process.communicate()[0]
        click.echo(output)
        click.echo(click.style('Git init completed for your workspace. Please read the '
                               'wiki to learn how to manage '
                               'your workspace with git.', fg='green'))

    click.echo(click.style('Feathr initialization completed.', fg='green'))
@cli.command()
@click.argument('filepath', default='feature_join_conf/feature_join.conf', type=click.Path(exists=True))
def join(filepath):
    """
    Creates the offline training dataset with the requested features.
    """
    check_user_at_root()
    click.echo(click.style('Batch joining features with config: ' + filepath, fg='green'))
    # Echo the config back so the user can confirm what is being submitted.
    # (The previous implementation also collected the lines into an unused
    # list; that dead code has been removed.)
    with open(filepath) as f:
        for line in f:
            click.echo(line, nl=False)
    click.echo()
    click.echo()
    client = FeathrClient()
    client._get_offline_features_with_config(filepath)
    click.echo(click.style('Feathr feature join job submitted. Visit '
                           'https://ms.web.azuresynapse.net/en-us/monitoring/sparkapplication for detailed job '
                           'result.', fg='green'))
@cli.command()
@click.argument('filepath', default='feature_gen_conf/feature_gen.conf', type=click.Path(exists=True))
def deploy(filepath):
    """
    Deploys the features to online store based on the feature generation config.
    """
    check_user_at_root()
    click.echo(click.style('Deploying feature generation config: ' + filepath, fg='green'))
    # Echo the config back so the user can confirm what is being deployed.
    # (The previous implementation also collected the lines into an unused
    # list; that dead code has been removed.)
    with open(filepath) as f:
        for line in f:
            click.echo(line, nl=False)
    click.echo()
    click.echo()
    client = FeathrClient()
    client._materialize_features_with_config(filepath)
    click.echo()
    click.echo(click.style('Feathr feature deployment submitted. Visit '
                           'https://ms.web.azuresynapse.net/en-us/monitoring/sparkapplication for detailed job '
                           'result.', fg='green'))
@cli.command()
@click.option('--git/--no-git', default=False, help='If git-enabled, the new changes will be added and committed.')
@click.option('--msg', help='The feature name.')
def register(git, msg):
    """
    Register your feature metadata to your metadata registry.
    """
    check_user_at_root()
    # The register command is not integrated with Azure Atlas yet.
    click.echo(click.style('Registering your metadata to metadata service...', fg='green'))
    if git:
        def run_git(args):
            # Run a git command, capture its stdout, and echo it back.
            proc = subprocess.Popen(args, stdout=subprocess.PIPE)
            click.echo(proc.communicate()[0])

        click.echo(click.style('Git: adding all files.', fg='green'))
        click.echo(msg)
        run_git(['git', 'add', '-A'])
        click.echo(click.style('Git: committing.', fg='green'))
        run_git(['git', 'commit', '-m', msg])
    client = FeathrClient()
    client.register_features()
    click.echo(click.style('Feathr registration completed successfully!', fg='green'))
@cli.command()
def start():
    """
    Starts a local Feathr engine for local experimentation and testing.
    Feathr local test requires feathr local engine, a java jar, to stay running locally. You can download the jar
    yourself from feathr website or use this command to download. The jar should be placed under the root of the
    feathr_user_workspace. After the jar is downloaded, the command will run this jar. The jar needs to be running(
    don't close the terminal) while you want to use 'feathr test'.
    """
    def run_jar():
        # Stream the engine's stdout straight to the terminal so the user
        # sees the engine logs while it runs.
        cmd = ['java', '-jar', jar_name]
        with subprocess.Popen(cmd, stdout=subprocess.PIPE, bufsize=1, universal_newlines=True) as p:
            # Need to continuously pump the results from jar to terminal
            for line in p.stdout:
                print(line, end='')

    check_user_at_root()
    # The jar should be placed under the root of the user workspace
    jar_name = 'feathr_local_engine.jar'
    # Download the jar if it doesn't exist
    if not os.path.isfile(jar_name):
        url = 'https://azurefeathrstorage.blob.core.windows.net/public/' + jar_name
        file_name = url.split('/')[-1]
        # Context managers guarantee the HTTP connection and the output file
        # are closed even if the download fails part-way through (the old
        # code leaked the urlopen handle and leaked the file on error).
        with urllib.request.urlopen(url) as u, open(file_name, 'wb') as f:
            meta = u.info()
            file_size = int(meta.get('Content-Length'))
            click.echo(click.style('There is no local feathr engine(jar) in the workspace. Will download the feathr jar.',
                                   fg='green'))
            click.echo('Downloading feathr jar for local testing: %s Bytes: %s from %s' % (file_name, file_size, url))
            block_sz = 8192
            with click.progressbar(length=file_size,
                                   label='Download feathr local engine jar') as bar:
                while True:
                    buffer = u.read(block_sz)
                    if not buffer:
                        break
                    f.write(buffer)
                    # Advance by the bytes actually read so the bar does not
                    # overshoot on the final (partial) chunk.
                    bar.update(len(buffer))

    click.echo(click.style(f'Starting the local feathr engine: {jar_name}.'))
    click.echo(click.style(f'Please keep this open and start another terminal to run feathr test. This terminal shows '
                           f'the debug message.', fg='green'))
    run_jar()
@cli.command()
@click.option('--features', prompt='Your feature names, separated by comma', help='The feature name.')
def test(features):
    """
    Tests a single feature definition locally via local spark mode with mock data. Mock data has to be provided by the
    users. Please execute "feathr start" before "feathr test" to setup the local engine.
    """
    check_user_at_root()
    click.echo('\nProducing feature values for requested features ... ')
    # py4j always registers the server object under the name "entry_point";
    # the engine started by `feathr start` is expected to be listening.
    gateway = JavaGateway()
    # This command must be run from the user workspace root.
    workspace_root = os.path.abspath(".")
    result = gateway.entry_point.getResult(workspace_root, features)
    click.echo('\nFeature computation completed.')
    click.echo(result)
| [
"feathr.client.FeathrClient"
] | [((213, 226), 'click.group', 'click.group', ([], {}), '()\n', (224, 226), False, 'import click\n'), ((1106, 1202), 'click.option', 'click.option', (['"""--name"""'], {'default': '"""feathr_user_workspace"""', 'help': '"""Specify the workspace name."""'}), "('--name', default='feathr_user_workspace', help=\n 'Specify the workspace name.')\n", (1118, 1202), False, 'import click\n'), ((1199, 1310), 'click.option', 'click.option', (['"""--git/--no-git"""'], {'default': '(False)', 'help': '"""When enabled, a git-based workspace will be created."""'}), "('--git/--no-git', default=False, help=\n 'When enabled, a git-based workspace will be created.')\n", (1211, 1310), False, 'import click\n'), ((4608, 4727), 'click.option', 'click.option', (['"""--git/--no-git"""'], {'default': '(False)', 'help': '"""If git-enabled, the new changes will be added and committed."""'}), "('--git/--no-git', default=False, help=\n 'If git-enabled, the new changes will be added and committed.')\n", (4620, 4727), False, 'import click\n'), ((4724, 4771), 'click.option', 'click.option', (['"""--msg"""'], {'help': '"""The feature name."""'}), "('--msg', help='The feature name.')\n", (4736, 4771), False, 'import click\n'), ((8022, 8127), 'click.option', 'click.option', (['"""--features"""'], {'prompt': '"""Your feature names, separated by comma"""', 'help': '"""The feature name."""'}), "('--features', prompt='Your feature names, separated by comma',\n help='The feature name.')\n", (8034, 8127), False, 'import click\n'), ((783, 792), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (787, 792), False, 'from pathlib import Path\n'), ((1642, 1670), 'os.path.isdir', 'os.path.isdir', (['workspace_dir'], {}), '(workspace_dir)\n', (1655, 1670), False, 'import os\n'), ((1941, 1963), 'click.echo', 'click.echo', (['output_str'], {}), '(output_str)\n', (1951, 1963), False, 'import click\n'), ((3371, 3383), 'click.echo', 'click.echo', ([], {}), '()\n', (3381, 3383), False, 'import click\n'), ((3388, 
3400), 'click.echo', 'click.echo', ([], {}), '()\n', (3398, 3400), False, 'import click\n'), ((3415, 3429), 'feathr.client.FeathrClient', 'FeathrClient', ([], {}), '()\n', (3427, 3429), False, 'from feathr.client import FeathrClient\n'), ((4222, 4234), 'click.echo', 'click.echo', ([], {}), '()\n', (4232, 4234), False, 'import click\n'), ((4239, 4251), 'click.echo', 'click.echo', ([], {}), '()\n', (4249, 4251), False, 'import click\n'), ((4266, 4280), 'feathr.client.FeathrClient', 'FeathrClient', ([], {}), '()\n', (4278, 4280), False, 'from feathr.client import FeathrClient\n'), ((4340, 4352), 'click.echo', 'click.echo', ([], {}), '()\n', (4350, 4352), False, 'import click\n'), ((5557, 5571), 'feathr.client.FeathrClient', 'FeathrClient', ([], {}), '()\n', (5569, 5571), False, 'from feathr.client import FeathrClient\n'), ((8397, 8468), 'click.echo', 'click.echo', (['"""\nProducing feature values for requested features ... """'], {}), '("""\nProducing feature values for requested features ... """)\n', (8407, 8468), False, 'import click\n'), ((8481, 8494), 'py4j.java_gateway.JavaGateway', 'JavaGateway', ([], {}), '()\n', (8492, 8494), False, 'from py4j.java_gateway import JavaGateway\n'), ((8582, 8602), 'os.path.abspath', 'os.path.abspath', (['"""."""'], {}), "('.')\n", (8597, 8602), False, 'import os\n'), ((8749, 8798), 'click.echo', 'click.echo', (['"""\nFeature computation completed."""'], {}), '("""\nFeature computation completed.""")\n', (8759, 8798), False, 'import click\n'), ((8800, 8836), 'click.echo', 'click.echo', (['stack_entry_point_result'], {}), '(stack_entry_point_result)\n', (8810, 8836), False, 'import click\n'), ((901, 1063), 'click.UsageError', 'click.UsageError', (['"""You are NOT at the root of your user workspace("/feathr_user_workspace"). Please execute the command under your user workspace root."""'], {}), '(\n \'You are NOT at the root of your user workspace("/feathr_user_workspace"). 
Please execute the command under your user workspace root.\'\n )\n', (917, 1063), False, 'import click\n'), ((1755, 1851), 'click.UsageError', 'click.UsageError', (['f"""Feathr workspace ({name}) already exist. Please use a new folder name."""'], {}), "(\n f'Feathr workspace ({name}) already exist. Please use a new folder name.')\n", (1771, 1851), False, 'import click\n'), ((2411, 2434), 'os.chdir', 'os.chdir', (['workspace_dir'], {}), '(workspace_dir)\n', (2419, 2434), False, 'import os\n'), ((2453, 2510), 'subprocess.Popen', 'subprocess.Popen', (["['git', 'init']"], {'stdout': 'subprocess.PIPE'}), "(['git', 'init'], stdout=subprocess.PIPE)\n", (2469, 2510), False, 'import subprocess\n'), ((2561, 2579), 'click.echo', 'click.echo', (['output'], {}), '(output)\n', (2571, 2579), False, 'import click\n'), ((2818, 2877), 'click.style', 'click.style', (['"""Feathr initialization completed."""'], {'fg': '"""green"""'}), "('Feathr initialization completed.', fg='green')\n", (2829, 2877), False, 'import click\n'), ((3149, 3223), 'click.style', 'click.style', (["('Batch joining features with config: ' + filepath)"], {'fg': '"""green"""'}), "('Batch joining features with config: ' + filepath, fg='green')\n", (3160, 3223), False, 'import click\n'), ((3500, 3668), 'click.style', 'click.style', (['"""Feathr feature join job submitted. Visit https://ms.web.azuresynapse.net/en-us/monitoring/sparkapplication for detailed job result."""'], {'fg': '"""green"""'}), "(\n 'Feathr feature join job submitted. 
Visit https://ms.web.azuresynapse.net/en-us/monitoring/sparkapplication for detailed job result.'\n , fg='green')\n", (3511, 3668), False, 'import click\n'), ((2976, 2999), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (2986, 2999), False, 'import click\n'), ((3999, 4074), 'click.style', 'click.style', (["('Deploying feature generation config: ' + filepath)"], {'fg': '"""green"""'}), "('Deploying feature generation config: ' + filepath, fg='green')\n", (4010, 4074), False, 'import click\n'), ((4368, 4538), 'click.style', 'click.style', (['"""Feathr feature deployment submitted. Visit https://ms.web.azuresynapse.net/en-us/monitoring/sparkapplication for detailed job result."""'], {'fg': '"""green"""'}), "(\n 'Feathr feature deployment submitted. Visit https://ms.web.azuresynapse.net/en-us/monitoring/sparkapplication for detailed job result.'\n , fg='green')\n", (4379, 4538), False, 'import click\n'), ((3814, 3837), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (3824, 3837), False, 'import click\n'), ((4983, 5058), 'click.style', 'click.style', (['"""Registering your metadata to metadata service..."""'], {'fg': '"""green"""'}), "('Registering your metadata to metadata service...', fg='green')\n", (4994, 5058), False, 'import click\n'), ((5151, 5166), 'click.echo', 'click.echo', (['msg'], {}), '(msg)\n', (5161, 5166), False, 'import click\n'), ((5185, 5247), 'subprocess.Popen', 'subprocess.Popen', (["['git', 'add', '-A']"], {'stdout': 'subprocess.PIPE'}), "(['git', 'add', '-A'], stdout=subprocess.PIPE)\n", (5201, 5247), False, 'import subprocess\n'), ((5298, 5316), 'click.echo', 'click.echo', (['output'], {}), '(output)\n', (5308, 5316), False, 'import click\n'), ((5401, 5471), 'subprocess.Popen', 'subprocess.Popen', (["['git', 'commit', '-m', msg]"], {'stdout': 'subprocess.PIPE'}), "(['git', 'commit', '-m', msg], stdout=subprocess.PIPE)\n", (5417, 5471), False, 'import subprocess\n'), ((5524, 5543), 
'click.echo', 'click.echo', (['output2'], {}), '(output2)\n', (5534, 5543), False, 'import click\n'), ((5618, 5688), 'click.style', 'click.style', (['"""Feathr registration completed successfully!"""'], {'fg': '"""green"""'}), "('Feathr registration completed successfully!', fg='green')\n", (5629, 5688), False, 'import click\n'), ((6714, 6738), 'os.path.isfile', 'os.path.isfile', (['jar_name'], {}), '(jar_name)\n', (6728, 6738), False, 'import os\n'), ((7184, 7294), 'click.echo', 'click.echo', (["('Downloading feathr jar for local testing: %s Bytes: %s from %s' % (\n file_name, file_size, url))"], {}), "('Downloading feathr jar for local testing: %s Bytes: %s from %s' %\n (file_name, file_size, url))\n", (7194, 7294), False, 'import click\n'), ((7743, 7804), 'click.style', 'click.style', (['f"""Starting the local feathr engine: {jar_name}."""'], {}), "(f'Starting the local feathr engine: {jar_name}.')\n", (7754, 7804), False, 'import click\n'), ((7821, 7966), 'click.style', 'click.style', (['f"""Please keep this open and start another terminal to run feathr test. This terminal shows the debug message."""'], {'fg': '"""green"""'}), "(\n f'Please keep this open and start another terminal to run feathr test. This terminal shows the debug message.'\n , fg='green')\n", (7832, 7966), False, 'import click\n'), ((1601, 1611), 'pathlib.Path.cwd', 'Path.cwd', ([], {}), '()\n', (1609, 1611), False, 'from pathlib import Path\n'), ((2160, 2178), 'pathlib.Path', 'pathlib.Path', (['name'], {}), '(name)\n', (2172, 2178), False, 'import pathlib\n'), ((2599, 2743), 'click.style', 'click.style', (['"""Git init completed for your workspace. Please read the wiki to learn how to manage your workspace with git."""'], {'fg': '"""green"""'}), "(\n 'Git init completed for your workspace. 
Please read the wiki to learn how to manage your workspace with git.'\n , fg='green')\n", (2610, 2743), False, 'import click\n'), ((3309, 3335), 'click.echo', 'click.echo', (['line'], {'nl': '(False)'}), '(line, nl=False)\n', (3319, 3335), False, 'import click\n'), ((4160, 4186), 'click.echo', 'click.echo', (['line'], {'nl': '(False)'}), '(line, nl=False)\n', (4170, 4186), False, 'import click\n'), ((5092, 5141), 'click.style', 'click.style', (['"""Git: adding all files."""'], {'fg': '"""green"""'}), "('Git: adding all files.', fg='green')\n", (5103, 5141), False, 'import click\n'), ((5337, 5380), 'click.style', 'click.style', (['"""Git: committing."""'], {'fg': '"""green"""'}), "('Git: committing.', fg='green')\n", (5348, 5380), False, 'import click\n'), ((6292, 6378), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'stdout': 'subprocess.PIPE', 'bufsize': '(1)', 'universal_newlines': '(True)'}), '(cmd, stdout=subprocess.PIPE, bufsize=1, universal_newlines\n =True)\n', (6308, 6378), False, 'import subprocess\n'), ((7032, 7153), 'click.style', 'click.style', (['"""There is no local feathr engine(jar) in the workspace. Will download the feathr jar."""'], {'fg': '"""green"""'}), "(\n 'There is no local feathr engine(jar) in the workspace. Will download the feathr jar.'\n , fg='green')\n", (7043, 7153), False, 'import click\n'), ((7354, 7431), 'click.progressbar', 'click.progressbar', ([], {'length': 'file_size', 'label': '"""Download feathr local engine jar"""'}), "(length=file_size, label='Download feathr local engine jar')\n", (7371, 7431), False, 'import click\n'), ((1997, 2011), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (2001, 2011), False, 'from pathlib import Path\n')] |
from feathrcli.cli import init
from click.testing import CliRunner
import os
from feathr.client import FeathrClient
def test_configuration_loading():
    """
    Verify that environment variables take precedence over the values in the
    generated yaml configuration.
    """
    cli_runner = CliRunner()
    with cli_runner.isolated_filesystem():
        outcome = cli_runner.invoke(init, [])
        assert outcome.exit_code == 0
        assert os.path.isdir('./feathr_user_workspace')

        client = FeathrClient(config_path='./feathr_user_workspace/feathr_config.yaml')
        # Loading works even though we are outside the workspace folder.
        assert client._FEATHR_JOB_JAR_PATH is not None

        expected_parts = '4'
        # A low-impact setting is used for the override because setting envs
        # leaks into every other test running in this process.
        os.environ['SPARK_CONFIG__SPARK_RESULT_OUTPUT_PARTS'] = expected_parts
        # A missing config only produces a user prompt, so this must not raise.
        client = FeathrClient(config_path='./feathr_user_workspace/feathr_config.yaml')
        assert client.output_num_parts == expected_parts
| [
"feathr.client.FeathrClient"
] | [((235, 246), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (244, 246), False, 'from click.testing import CliRunner\n'), ((381, 421), 'os.path.isdir', 'os.path.isdir', (['"""./feathr_user_workspace"""'], {}), "('./feathr_user_workspace')\n", (394, 421), False, 'import os\n'), ((440, 510), 'feathr.client.FeathrClient', 'FeathrClient', ([], {'config_path': '"""./feathr_user_workspace/feathr_config.yaml"""'}), "(config_path='./feathr_user_workspace/feathr_config.yaml')\n", (452, 510), False, 'from feathr.client import FeathrClient\n'), ((1029, 1099), 'feathr.client.FeathrClient', 'FeathrClient', ([], {'config_path': '"""./feathr_user_workspace/feathr_config.yaml"""'}), "(config_path='./feathr_user_workspace/feathr_config.yaml')\n", (1041, 1099), False, 'from feathr.client import FeathrClient\n')] |
README.md exists but its content is empty.
- Downloads last month: 4