text stringlengths 81 112k |
|---|
auto send blocking function, when the interval or the message size has been reached, publish
:return:
def _auto_send(self):
    """
    Blocking loop that publishes the tx queue whenever the configured
    time interval elapses or the queue reaches the configured size.

    :return: None (never returns; intended to run on a background thread)
    """
    while True:
        # Config interval is in milliseconds but time.time() is in
        # seconds -- convert the elapsed time before comparing.  The old
        # code compared seconds directly against millis, so the interval
        # trigger fired far too rarely.
        elapsed_millis = (time.time() - self.last_send_time) * 1000
        if elapsed_millis > self.config.async_auto_send_interval_millis or \
                len(self._tx_queue) >= self.config.async_auto_send_amount:
            self.publish_queue()
        # Yield the CPU briefly so this loop does not busy-spin.
        time.sleep(0.001)
generate the headers for the connection to event hub service based on the provided config
:return: {} headers
def _generate_publish_headers(self):
"""
generate the headers for the connection to event hub service based on the provided config
:return: {} headers
"""
headers = {
'predix-zone-id': self.eventhub_client.zone_id
}
token = self.eventhub_client.service._get_bearer_token()
if self.config.is_grpc():
headers['authorization'] = token[(token.index(' ') + 1):]
else:
headers['authorization'] = token
if self.config.topic == '':
headers['topic'] = self.eventhub_client.zone_id + '_topic'
else:
headers['topic'] = self.config.topic
if self.config.publish_type == self.config.Type.SYNC:
headers['sync-acks'] = 'true'
else:
headers['sync-acks'] = 'false'
headers['send-acks-interval'] = str(self.config.async_cache_ack_interval_millis)
headers['acks'] = str(self.config.async_enable_acks).lower()
headers['nacks'] = str(self.config.async_enable_nacks_only).lower()
headers['cache-acks'] = str(self.config.async_cache_acks_and_nacks).lower()
return headers |
publisher callback that grpc and web socket can pass messages to
address the received message onto the queue
:param publish_ack: EventHub_pb2.Ack the ack received from either wss or grpc
:return: None
def _publisher_callback(self, publish_ack):
"""
publisher callback that grpc and web socket can pass messages to
address the received message onto the queue
:param publish_ack: EventHub_pb2.Ack the ack received from either wss or grpc
:return: None
"""
logging.debug("ack received: " + str(publish_ack).replace('\n', ' '))
self._rx_queue.append(publish_ack) |
initialize the grpc publisher, builds the stub and then starts the grpc manager
:return: None
def _init_grpc_publisher(self):
    """
    Initialize grpc publishing: build the publisher stub on the existing
    channel, then start a GrpcManager that drives it with the publish
    headers as metadata.

    :return: None
    """
    self._stub = EventHub_pb2_grpc.PublisherStub(channel=self._channel)
    metadata = self._generate_publish_headers().items()
    self.grpc_manager = Eventhub.GrpcManager(
        stub_call=self._stub.send,
        on_msg_callback=self._publisher_callback,
        metadata=metadata)
send the messages in the tx queue to the GRPC manager
:return: None
def _publish_queue_grpc(self):
    """
    Wrap the queued messages in a PublishRequest and hand them to the
    grpc manager for transmission.

    :return: None
    """
    request = EventHub_pb2.PublishRequest(
        messages=EventHub_pb2.Messages(msg=self._tx_queue))
    self.grpc_manager.send_message(request)
send the messages down the web socket connection as a json object
:return: None
def _publish_queue_wss(self):
    """
    Serialize the queued messages to a json array and send it down the
    web socket connection as a single binary frame.

    :return: None
    """
    payload = [{'id': m.id, 'body': m.body, 'zone_id': m.zone_id}
               for m in self._tx_queue]
    self._ws.send(json.dumps(payload), opcode=websocket.ABNF.OPCODE_BINARY)
Create a new web socket connection with proper headers.
def _init_publisher_ws(self):
    """
    Open a new web socket connection for publishing with the proper
    headers, and run it on a background daemon thread.
    """
    logging.debug("Initializing new web socket connection.")
    endpoint = 'wss://%s/v1/stream/messages/' % self.eventhub_client.host
    publish_headers = self._generate_publish_headers()
    logging.debug("URL=" + str(endpoint))
    logging.debug("HEADERS=" + str(publish_headers))
    websocket.enableTrace(False)
    self._ws = websocket.WebSocketApp(endpoint,
                                      header=publish_headers,
                                      on_message=self._on_ws_message,
                                      on_open=self._on_ws_open,
                                      on_close=self._on_ws_close)
    # run_forever blocks, so it lives on a daemon thread; pinging every
    # 30s keeps intermediaries from dropping an idle connection.
    self._ws_thread = threading.Thread(target=self._ws.run_forever,
                                       kwargs={'ping_interval': 30})
    self._ws_thread.daemon = True
    self._ws_thread.start()
    # crude wait for the handshake to settle before callers publish
    time.sleep(1)
on_message callback of websocket class, load the message into a dict and then
update an Ack Object with the results
:param ws: web socket connection that the message was received on
:param message: web socket message in text form
:return: None
def _on_ws_message(self, ws, message):
    """
    websocket on_message callback: parse the json text frame into ack
    dicts, rebuild each as an EventHub_pb2.Ack, and forward every ack to
    the publisher callback.

    :param ws: web socket connection that the message was received on
    :param message: web socket message in text form
    :return: None
    """
    logging.debug(message)
    for rx_ack in json.loads(message):
        ack = EventHub_pb2.Ack()
        for field, value in rx_ack.items():
            setattr(ack, field, value)
        self._publisher_callback(ack)
Create an instance of the Parking Planning Service with the
typical starting settings.
def create(self):
"""
Create an instance of the Parking Planning Service with the
typical starting settings.
"""
self.service.create()
os.environ[self.__module__ + '.uri'] = self.service.settings.data['url']
os.environ[self.__module__ + '.zone_id'] = self.get_predix_zone_id() |
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: An predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app.
def add_to_manifest(self, manifest):
"""
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: An predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app.
"""
# Add this service to list of services
manifest.add_service(self.service.name)
# Add environment variables
manifest.add_env_var(self.__module__ + '.uri',
self.service.settings.data['url'])
manifest.add_env_var(self.__module__ + '.zone_id',
self.get_predix_zone_id())
manifest.write_manifest() |
Read an existing manifest.
def read_manifest(self, encrypted=None):
    """
    Read an existing manifest from disk into self.manifest, decrypting
    environment values when the manifest was written encrypted.

    :param encrypted: force decryption even when the manifest does not
        carry the PREDIXPY_ENCRYPTED marker
    """
    with open(self.manifest_path, 'r') as input_file:
        self.manifest = yaml.safe_load(input_file)
        if 'env' not in self.manifest:
            self.manifest['env'] = {}
        if 'services' not in self.manifest:
            self.manifest['services'] = []

        # If manifest is encrypted, use manifest key to
        # decrypt each value before storing in memory.
        if 'PREDIXPY_ENCRYPTED' in self.manifest['env']:
            self.encrypted = True
        if encrypted or self.encrypted:
            key = predix.config.get_crypt_key(self.manifest_key)
            f = Fernet(key)
            for var in self.manifest['env'].keys():
                value = f.decrypt(bytes(self.manifest['env'][var], 'utf-8'))
                self.manifest['env'][var] = value.decode('utf-8')

        self.app_name = self.manifest['applications'][0]['name']
    # NOTE: removed the redundant explicit input_file.close() -- the
    # with statement already closes the file on exit.
Create a new manifest and write it to
disk.
def create_manifest(self):
    """
    Initialize a fresh in-memory manifest (app name, empty services list,
    PREDIXPY_VERSION marker) and write it to disk.
    """
    self.manifest = {
        'applications': [{'name': self.app_name}],
        'services': [],
        'env': {'PREDIXPY_VERSION': str(predix.version)},
    }
    self.write_manifest()
Returns contents of the manifest where environment variables
that are secret will be encrypted without modifying the existing
state in memory which will remain unencrypted.
def _get_encrypted_manifest(self):
    """
    Return a deep copy of the manifest whose environment variable values
    have been encrypted; the manifest held in memory stays unencrypted.
    """
    f = Fernet(predix.config.get_crypt_key(self.manifest_key))
    manifest = copy.deepcopy(self.manifest)
    for var in self.manifest['env'].keys():
        plaintext = str(self.manifest['env'][var])
        manifest['env'][var] = f.encrypt(bytes(plaintext, 'utf-8')).decode('utf-8')
    return manifest
Write manifest to disk.
:param manifest_path: write to a different location
:param encrypted: write with env data encrypted
def write_manifest(self, manifest_path=None, encrypted=None):
    """
    Write the manifest to disk.

    :param manifest_path: write to a different location
    :param encrypted: write with env data encrypted
    """
    manifest_path = manifest_path or self.manifest_path
    self.manifest['env']['PREDIXPY_VERSION'] = str(predix.version)

    with open(manifest_path, 'w') as output_file:
        if encrypted or self.encrypted:
            self.manifest['env']['PREDIXPY_ENCRYPTED'] = self.manifest_key
            content = self._get_encrypted_manifest()
        else:
            # content is a shallow reference, so dropping the marker here
            # also removes any stale PREDIXPY_ENCRYPTED from self.manifest.
            content = self.manifest
            if 'PREDIXPY_ENCRYPTED' in content['env']:
                del content['env']['PREDIXPY_ENCRYPTED']
        yaml.safe_dump(content, output_file,
                       default_flow_style=False, explicit_start=True)
    # NOTE: removed the redundant explicit output_file.close() -- the
    # with statement already closes the file on exit.
Add the given key / value as another environment
variable.
def add_env_var(self, key, value):
    """
    Store key/value as an environment variable in the manifest and export
    it into the current process environment as well.
    """
    env = self.manifest['env']
    env[key] = value
    os.environ[key] = str(value)
Add the given service to the manifest.
def add_service(self, service_name):
    """
    Add the given service to the manifest's service list, skipping
    duplicates.
    """
    services = self.manifest['services']
    if service_name not in services:
        services.append(service_name)
Will load any environment variables found in the
manifest file into the current process for use
by applications.
When apps run in cloud foundry this would happen
automatically.
def set_os_environ(self):
    """
    Load every environment variable found in the manifest into the
    current process for use by applications -- mimicking what cloud
    foundry does automatically for deployed apps.
    """
    for key, value in self.manifest['env'].items():
        os.environ[key] = str(value)
Return the client id that should have all the
needed scopes and authorities for the services
in this manifest.
def get_client_id(self):
    """
    Return the client id expected to hold all the scopes and authorities
    needed for the services in this manifest, sourced from the
    environment.
    """
    self._client_id = predix.config.get_env_value(predix.app.Manifest,
                                                  'client_id')
    return self._client_id
Return the client secret that should correspond with
the client id.
def get_client_secret(self):
    """
    Return the client secret corresponding with the client id, sourced
    from the environment.
    """
    self._client_secret = predix.config.get_env_value(predix.app.Manifest,
                                                      'client_secret')
    return self._client_secret
Returns an instance of the Time Series Service.
def get_timeseries(self, *args, **kwargs):
    """
    Return an instance of the Time Series Service.
    """
    import predix.data.timeseries
    return predix.data.timeseries.TimeSeries(*args, **kwargs)
Returns an instance of the Asset Service.
def get_asset(self):
    """
    Return an instance of the Asset Service.
    """
    import predix.data.asset
    return predix.data.asset.Asset()
Returns an instance of the UAA Service.
def get_uaa(self):
    """
    Return an instance of the UAA Service.
    """
    import predix.security.uaa
    return predix.security.uaa.UserAccountAuthentication()
Returns an instance of the Access Control Service.
def get_acs(self):
    """
    Return an instance of the Access Control Service.
    """
    import predix.security.acs
    return predix.security.acs.AccessControl()
Returns an instance of the Weather Service.
def get_weather(self):
    """
    Return an instance of the Weather Service.
    """
    import predix.data.weather
    return predix.data.weather.WeatherForecast()
Return the weather forecast for a given location.
::
results = ws.get_weather_forecast_days(lat, long)
for w in results['hits']:
print w['start_datetime_local']
print w['reading_type'], w['reading_value']
For description of reading types:
https://graphical.weather.gov/xml/docs/elementInputNames.php
def get_weather_forecast_days(self, latitude, longitude,
                              days=1, frequency=1, reading_type=None):
    """
    Return the weather forecast for a given location.

    ::

        results = ws.get_weather_forecast_days(lat, long)
        for w in results['hits']:
            print(w['start_datetime_local'])
            print(w['reading_type'], w['reading_value'])

    For description of reading types:
    https://graphical.weather.gov/xml/docs/elementInputNames.php

    :param latitude: latitude of the location
    :param longitude: longitude of the location
    :param days: number of forecast days requested
    :param frequency: reading interval in hours, must be 1 or 3
    :param reading_type: optionally restrict to one reading type
    :raises ValueError: if frequency is not 1 or 3
    """
    # Can get data from NWS1 or NWS3 representing 1-hr and 3-hr
    # intervals.
    if frequency not in (1, 3):
        raise ValueError("Reading frequency must be 1 or 3")

    params = {
        'days': days,
        'source': 'NWS' + str(frequency),
        'latitude': latitude,
        'longitude': longitude,
    }
    if reading_type:
        # Encode spaces as %20 ourselves: quote_plus would turn them
        # into '+', which the service interprets as an "and" search.
        # (The old code additionally ran urllib.quote_plus over the
        # result, double-encoding '%' into '%25'.)
        params['reading_type'] = reading_type.replace(' ', '%20')

    url = self.uri + '/v1/weather-forecast-days/'
    return self.service._get(url, params=params)
Return the weather forecast for a given location for specific
datetime specified in UTC format.
::
results = ws.get_weather_forecast(lat, long, start, end)
for w in results['hits']:
print w['start_datetime_local']
print w['reading_type'], '=', w['reading_value']
For description of reading types:
https://graphical.weather.gov/xml/docs/elementInputNames.php
def get_weather_forecast(self, latitude, longitude, start, end,
                         frequency=1, reading_type=None):
    """
    Return the weather forecast for a given location for a specific
    datetime range specified in UTC format.

    ::

        results = ws.get_weather_forecast(lat, long, start, end)
        for w in results['hits']:
            print(w['start_datetime_local'])
            print(w['reading_type'], '=', w['reading_value'])

    For description of reading types:
    https://graphical.weather.gov/xml/docs/elementInputNames.php

    :raises ValueError: if frequency is not 1 or 3
    """
    # Can get data from NWS1 or NWS3 representing 1-hr and 3-hr
    # intervals.
    if frequency not in (1, 3):
        raise ValueError("Reading frequency must be 1 or 3")

    params = {
        'source': 'NWS' + str(frequency),
        'latitude': latitude,
        'longitude': longitude,
        'start_datetime_utc': start,
        'end_datetime_utc': end,
    }
    if reading_type:
        # Not using urllib.quote_plus() because its '+' is interpreted
        # by the service as an "and" instead of a space.
        params['reading_type'] = reading_type.replace(' ', '%20')

    url = self.uri + '/v1/weather-forecast-datetime/'
    return self.service._get(url, params=params)
Can generate a name based on the space, service name and plan.
def _generate_name(self, space, service_name, plan_name):
"""
Can generate a name based on the space, service name and plan.
"""
return str.join('-', [space, service_name, plan_name]).lower() |
Return a sensible configuration path for caching config
settings.
def _get_config_path(self):
"""
Return a sensible configuration path for caching config
settings.
"""
org = self.service.space.org.name
space = self.service.space.name
name = self.name
return "~/.predix/%s/%s/%s.json" % (org, space, name) |
Create a Cloud Foundry service that has custom parameters.
def _create_service(self, parameters={}, **kwargs):
"""
Create a Cloud Foundry service that has custom parameters.
"""
logging.debug("_create_service()")
logging.debug(str.join(',', [self.service_name, self.plan_name,
self.name, str(parameters)]))
return self.service.create_service(self.service_name, self.plan_name,
self.name, parameters, **kwargs) |
Delete a Cloud Foundry service and any associations.
def _delete_service(self, service_only=False):
"""
Delete a Cloud Foundry service and any associations.
"""
logging.debug('_delete_service()')
return self.service.delete_service(self.service_name) |
Get a service key or create one if needed.
def _get_or_create_service_key(self):
"""
Get a service key or create one if needed.
"""
keys = self.service._get_service_keys(self.name)
for key in keys['resources']:
if key['entity']['name'] == self.service_name:
return self.service.get_service_key(self.name,
self.service_name)
self.service.create_service_key(self.name, self.service_name)
return self.service.get_service_key(self.name, self.service_name) |
Will get configuration for the service from a service key.
def _get_service_config(self):
"""
Will get configuration for the service from a service key.
"""
key = self._get_or_create_service_key()
config = {}
config['service_key'] = [{'name': self.name}]
config.update(key['entity']['credentials'])
return config |
Create the service.
def create(self, parameters=None, create_keys=True, **kwargs):
    """
    Create the service, then optionally create a service key and cache
    its configuration locally.

    :param parameters: dict of creation parameters; a copy is taken so
        the caller's dict is never mutated (the previous implementation
        used a mutable default and updated the caller's dict in place)
    :param create_keys: also create a service key and persist settings
    """
    parameters = dict(parameters) if parameters else {}

    # Create the service
    self._create_service(parameters=parameters, **kwargs)

    # Create the service key to get config details and
    # store in local cache file.
    if create_keys:
        cfg = dict(parameters)
        cfg.update(self._get_service_config())
        self.settings.save(cfg)
Returns a valid UAA instance for performing administrative functions
on services.
def _get_or_create_uaa(self, uaa):
    """
    Return a valid UAA instance for performing administrative functions
    on services, reusing the one provided when it is already the right
    type.
    """
    if isinstance(uaa, predix.admin.uaa.UserAccountAuthentication):
        return uaa
    logging.debug("Initializing a new UAA")
    return predix.admin.uaa.UserAccountAuthentication()
Create an instance of the US Weather Forecast Service with
typical starting settings.
def create(self, parameters=None, **kwargs):
    """
    Create an instance of the US Weather Forecast Service with typical
    starting settings.

    :param parameters: dict of creation parameters; a copy is taken so
        neither the caller's dict nor a shared default is mutated when
        the UAA trusted issuer is added (the previous ``parameters={}``
        default was modified in place on every call)
    """
    parameters = dict(parameters) if parameters else {}
    # Add parameter during create for UAA issuer
    uri = self.uaa.service.settings.data['uri'] + '/oauth/token'
    parameters["trustedIssuerIds"] = [uri]
    super(PredixService, self).create(parameters=parameters, **kwargs)
Create an instance of the Time Series Service with the typical
starting settings.
def create(self):
    """
    Create an instance of the Event Hub service with the typical
    starting settings and export its connection details into the
    process environment.
    """
    self.service.create()
    exports = {
        'host': self.get_eventhub_host(),
        'port': self.get_eventhub_grpc_port(),
        'wss_publish_uri': self.get_publish_wss_uri(),
        'zone_id': self.get_zone_id(),
    }
    for attr, value in exports.items():
        os.environ[predix.config.get_env_key(self.use_class, attr)] = value
Grant the given client id all the scopes and authorities
needed to work with the eventhub service.
def grant_client(self, client_id, publish=False, subscribe=False,
                 publish_protocol=None, publish_topics=None,
                 subscribe_topics=None, scope_prefix='predix-event-hub',
                 **kwargs):
    """
    Grant the given client id all the scopes and authorities needed to
    work with the eventhub service.

    :param client_id: uaa client to update
    :param publish: grant publish scopes/authorities
    :param subscribe: grant subscribe scopes/authorities
    :param publish_protocol: restrict publish grants to one protocol
        ('grpc' or 'wss'); None grants both
    :param publish_topics: custom topics are not supported yet -- raises
    :param subscribe_topics: custom topics are not supported yet -- raises
    :param scope_prefix: prefix used when composing scope names
    :raises Exception: if custom topics are requested
    """
    zone_id = self.get_zone_id()
    scopes = ['openid']
    authorities = ['uaa.resource']

    # always must be part of base user scope
    user_scope = '%s.zones.%s.user' % (scope_prefix, zone_id)
    scopes.append(user_scope)
    authorities.append(user_scope)

    if publish_topics is not None or subscribe_topics is not None:
        # fixed typo in the old message ("preidx-py")
        raise Exception("multiple topics are not currently available in predix-py")

    # only the single default topic is supported for now
    publish_topics = ['topic']
    subscribe_topics = ['topic']

    if publish:
        # grants for the default topic
        protocols = ['grpc', 'wss'] if publish_protocol is None else [publish_protocol]
        for proto in protocols:
            grant = '%s.zones.%s.%s.publish' % (scope_prefix, zone_id, proto)
            scopes.append(grant)
            authorities.append(grant)
        # grants for each requested topic
        for topic in publish_topics:
            if publish_protocol is None:
                topic_grants = [
                    '%s.zones.%s.%s.grpc.publish' % (scope_prefix, zone_id, topic),
                    '%s.zones.%s.%s.wss.publish' % (scope_prefix, zone_id, topic),
                    '%s.zones.%s.%s.user' % (scope_prefix, zone_id, topic),
                ]
            else:
                topic_grants = [
                    '%s.zones.%s.%s.%s.publish' % (scope_prefix, zone_id,
                                                   topic, publish_protocol),
                ]
            scopes.extend(topic_grants)
            authorities.extend(topic_grants)

    if subscribe:
        # default topic
        grant = '%s.zones.%s.grpc.subscribe' % (scope_prefix, zone_id)
        scopes.append(grant)
        authorities.append(grant)
        # per-topic grants
        for topic in subscribe_topics:
            grant = '%s.zones.%s.%s.grpc.subscribe' % (scope_prefix, zone_id, topic)
            scopes.append(grant)
            authorities.append(grant)

    self.service.uaa.uaac.update_client_grants(client_id, scope=scopes,
                                               authorities=authorities)
    return self.service.uaa.uaac.get_client(client_id)
returns the publish grpc endpoint for ingestion.
def get_eventhub_host(self):
    """
    Return the host portion of the grpc publish endpoint for ingestion,
    or None when no grpc protocol entry is present.
    """
    details = self.service.settings.data['publish']['protocol_details']
    for protocol in details:
        if protocol['protocol'] == 'grpc':
            uri = protocol['uri']
            return uri[:uri.index(':')]
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: An predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app.
def add_to_manifest(self, manifest):
    """
    Record everything an application needs to use this eventhub service
    in its manifest: the service binding plus host/port/wss/zone-id
    environment variables.

    :param manifest: A predix.admin.app.Manifest object instance that
        manages reading/writing manifest config for a cloud foundry app.
    """
    manifest.add_service(self.service.name)
    env_vars = {
        'host': self.get_eventhub_host(),
        'port': self.get_eventhub_grpc_port(),
        'wss_publish_uri': self.get_publish_wss_uri(),
        'zone_id': self.get_zone_id(),
    }
    for attr, value in env_vars.items():
        manifest.add_env_var(predix.config.get_env_key(self.use_class, attr),
                             value)
    manifest.write_manifest()
Returns the host address for an instance of Blob Store service from
environment inspection.
def _get_host(self):
"""
Returns the host address for an instance of Blob Store service from
environment inspection.
"""
if 'VCAP_SERVICES' in os.environ:
services = json.loads(os.getenv('VCAP_SERVICES'))
host = services['predix-blobstore'][0]['credentials']['host']
else:
host = predix.config.get_env_value(self, 'host')
# Protocol may not always be included in host setting
if 'https://' not in host:
host = 'https://' + host
return host |
Returns the access key for an instance of Blob Store service from
environment inspection.
def _get_access_key_id(self):
"""
Returns the access key for an instance of Blob Store service from
environment inspection.
"""
if 'VCAP_SERVICES' in os.environ:
services = json.loads(os.getenv('VCAP_SERVICES'))
return services['predix-blobstore'][0]['credentials']['access_key_id']
else:
return predix.config.get_env_value(self, 'access_key_id') |
This method is primarily for illustration and just calls the
boto3 client implementation of list_objects but is a common task
for first time Predix BlobStore users.
def list_objects(self, bucket_name=None, **kwargs):
    """
    Illustrative pass-through to the boto3 client's list_objects -- a
    common first task for Predix BlobStore users.

    :param bucket_name: bucket to list; defaults to this instance's
        bucket_name
    """
    bucket = bucket_name or self.bucket_name
    return self.client.list_objects(Bucket=bucket, **kwargs)
This method is primarily for illustration and just calls the
boto3 client implementation of upload_file but is a common task
for first time Predix BlobStore users.
def upload_file(self, src_filepath, dest_filename=None, bucket_name=None,
                **kwargs):
    """
    Illustrative pass-through to the boto3 client's upload_file -- a
    common first task for Predix BlobStore users.

    :param src_filepath: local path of the file to upload
    :param dest_filename: destination object name; defaults to
        src_filepath
    :param bucket_name: target bucket; defaults to this instance's
        bucket_name
    """
    bucket = bucket_name or self.bucket_name
    destination = dest_filename or src_filepath
    return self.client.upload_file(src_filepath, bucket, destination,
                                   **kwargs)
Reads the local cf CLI cache stored in the users
home directory.
def _get_cloud_foundry_config(self):
"""
Reads the local cf CLI cache stored in the users
home directory.
"""
config = os.path.expanduser(self.config_file)
if not os.path.exists(config):
raise CloudFoundryLoginError('You must run `cf login` to authenticate')
with open(config, "r") as data:
return json.load(data) |
Returns the GUID for the organization currently targeted.
def get_organization_guid(self):
    """
    Return the GUID for the organization currently targeted, preferring
    the PREDIX_ORGANIZATION_GUID environment variable.

    :raises ValueError: if the guid cannot be determined
    """
    guid = os.environ.get('PREDIX_ORGANIZATION_GUID')
    if guid is not None:
        return guid
    info = self._get_organization_info()
    for key in ('Guid', 'GUID'):
        if key in info:
            return info[key]
    raise ValueError('Unable to determine cf organization guid')
Returns the GUID for the space currently targeted.
Can be set by environment variable with PREDIX_SPACE_GUID.
Can be determined by ~/.cf/config.json.
def get_space_guid(self):
    """
    Return the GUID for the space currently targeted.

    Can be set by environment variable with PREDIX_SPACE_GUID, otherwise
    determined from ~/.cf/config.json.

    :raises ValueError: if the guid cannot be determined
    """
    guid = os.environ.get('PREDIX_SPACE_GUID')
    if guid is not None:
        return guid
    info = self._get_space_info()
    for key in ('Guid', 'GUID'):
        if key in info:
            return info[key]
    raise ValueError('Unable to determine cf space guid')
Get the user's PredixPy manifest key. Generate and store one if not
yet generated.
def get_crypt_key(key_path):
    """
    Get the user's PredixPy manifest key, generating and storing one if
    not yet generated.

    :param key_path: path of the key file (~ is expanded)
    :return: the key as bytes, as expected by cryptography.Fernet
    """
    key_path = os.path.expanduser(key_path)
    if os.path.exists(key_path):
        # Read in binary mode: Fernet keys are bytes and the file is
        # written by Fernet.generate_key() below.
        with open(key_path, 'rb') as data:
            return data.read()

    key = Fernet.generate_key()
    # generate_key() returns bytes; the previous text-mode 'w' write
    # raised TypeError on Python 3.
    with open(key_path, 'wb') as output:
        output.write(key)
    return key
Return environment variable key to use for lookups within a
namespace represented by the package name.
For example, any variables for predix.security.uaa are stored
as PREDIX_SECURITY_UAA_KEY
def get_env_key(obj, key=None):
    """
    Return the environment variable key to use for lookups within the
    namespace represented by the object's package name.

    For example, any variables for predix.security.uaa are stored as
    PREDIX_SECURITY_UAA_KEY.

    :param obj: object whose __module__ provides the namespace
    :param key: variable name within that namespace
    :raises ValueError: if key is not given (previously this raised an
        obscure AttributeError from None.upper())
    """
    if key is None:
        raise ValueError("key is required to compose an env var name")
    namespace = obj.__module__.replace('.', '_').upper()
    return '_'.join([namespace, key.upper()])
Returns the environment variable value for the attribute of
the given object.
For example `get_env_value(predix.security.uaa, 'uri')` will
return value of environment variable PREDIX_SECURITY_UAA_URI.
def get_env_value(obj, attribute):
    """
    Return the environment variable value for the attribute of the given
    object.

    For example `get_env_value(predix.security.uaa, 'uri')` will return
    the value of environment variable PREDIX_SECURITY_UAA_URI.

    :raises ValueError: if the variable is unset or empty
    """
    varname = get_env_key(obj, attribute)
    value = os.environ.get(varname)
    if not value:
        raise ValueError("%s must be set in your environment." % varname)
    return value
Set the environment variable value for the attribute of the
given object.
For example, `set_env_value(predix.security.uaa, 'uri', 'http://...')`
will set the environment variable PREDIX_SECURITY_UAA_URI to the given
uri.
def set_env_value(obj, attribute, value):
    """
    Set the environment variable for the attribute of the given object
    and return the variable name used.

    For example, `set_env_value(predix.security.uaa, 'uri', 'http://...')`
    will set the environment variable PREDIX_SECURITY_UAA_URI to the
    given uri.
    """
    varname = get_env_key(obj, attribute)
    os.environ[varname] = value
    return varname
Returns the GUID for the service instance with
the given name.
def get_instance_guid(self, service_name):
    """
    Return the GUID for the service instance with the given name.

    :raises ValueError: if no such service exists in the space
    """
    summary = self.space.get_space_summary()
    _missing = object()
    guid = next((s['guid'] for s in summary['services']
                 if s['name'] == service_name), _missing)
    if guid is _missing:
        raise ValueError("No service with name '%s' found." % (service_name))
    return guid
Return the service bindings for the service instance.
def _get_service_bindings(self, service_name):
"""
Return the service bindings for the service instance.
"""
instance = self.get_instance(service_name)
return self.api.get(instance['service_bindings_url']) |
Remove service bindings to applications.
def delete_service_bindings(self, service_name):
    """
    Remove the service instance's bindings to applications.
    """
    bindings_url = self.get_instance(service_name)['service_bindings_url']
    return self.api.delete(bindings_url)
Return the service keys for the given service.
def _get_service_keys(self, service_name):
"""
Return the service keys for the given service.
"""
guid = self.get_instance_guid(service_name)
uri = "/v2/service_instances/%s/service_keys" % (guid)
return self.api.get(uri) |
Returns a flat list of the names of the service keys
for the given service.
def get_service_keys(self, service_name):
    """
    Return a flat list of the names of the service keys for the given
    service.
    """
    resources = self._get_service_keys(service_name)['resources']
    return [resource['entity']['name'] for resource in resources]
Returns the service key details.
Similar to `cf service-key`.
def get_service_key(self, service_name, key_name):
    """
    Return the service key details (similar to `cf service-key`), or
    None when no key with that name exists.
    """
    for key in self._get_service_keys(service_name)['resources']:
        if key['entity']['name'] != key_name:
            continue
        guid = key['metadata']['guid']
        return self.api.get("/v2/service_keys/%s" % (guid))
    return None
Create a service key for the given service.
def create_service_key(self, service_name, key_name):
    """
    Create a service key for the given service, reusing any existing key
    with the same name.
    """
    if self.has_key(service_name, key_name):
        logging.warning("Reusing existing service key %s" % (key_name))
        return self.get_service_key(service_name, key_name)
    body = {
        'name': key_name,
        'service_instance_guid': self.get_instance_guid(service_name),
    }
    return self.api.post('/v2/service_keys', body)
Delete a service key for the given service.
def delete_service_key(self, service_name, key_name):
    """
    Delete the named service key for the given service.
    """
    key = self.get_service_key(service_name, key_name)
    logging.info("Deleting service key %s for service %s" % (key, service_name))
    return self.api.delete(key['metadata']['url'])
Retrieves a service instance with the given name.
def get_instance(self, service_name):
    """
    Return the entity of the service instance with the given name, or
    None when no instance matches.
    """
    for resource in self.space._get_instances():
        entity = resource['entity']
        if entity['name'] == service_name:
            return entity
Return the service plans available for a given service.
def get_service_plan_for_service(self, service_name):
    """
    Return the service plans available for a given service, or None when
    the service is unknown.
    """
    for service in self.get_services()['resources']:
        if service['entity']['label'] != service_name:
            continue
        plans = self.api.get(service['entity']['service_plans_url'])
        return plans['resources']
Return the service plan GUID for the given service / plan.
def get_service_plan_guid(self, service_name, plan_name):
    """
    Return the service plan GUID for the given service / plan, or None
    when no plan matches.
    """
    plans = self.get_service_plan_for_service(service_name)
    matches = (plan['metadata']['guid'] for plan in plans
               if plan['entity']['name'] == plan_name)
    return next(matches, None)
Create a service instance.
def create_service(self, service_type, plan_name, service_name, params,
                   async_=False, **kwargs):
    """
    Create a service instance.

    :param service_type: cf marketplace label of the service
    :param plan_name: plan to provision
    :param service_name: name for the new instance
    :param params: service-specific creation parameters
    :param async_: request asynchronous provisioning.  Renamed from
        ``async``, which is a reserved word and therefore a SyntaxError
        on Python 3.7+.
    :raises ValueError: if the plan name is unknown
    """
    if self.space.has_service_with_name(service_name):
        logging.warning("Service already exists with that name.")
        return self.get_instance(service_name)
    if self.space.has_service_of_type(service_type):
        logging.warning("Service type already exists.")

    guid = self.get_service_plan_guid(service_type, plan_name)
    if not guid:
        raise ValueError("No service plan named: %s" % (plan_name))

    body = {
        'name': service_name,
        'space_guid': self.space.guid,
        'service_plan_guid': guid,
        'parameters': params
    }
    uri = '/v2/service_instances?accepts_incomplete=true'
    if async_:
        uri += '&async=true'
    return self.api.post(uri, body)
Delete the service of the given name. It may fail if there are
any service keys or app bindings. Use purge() if you want
to delete it all.
def delete_service(self, service_name, params=None):
    """
    Delete the service of the given name. It may fail if there are
    any service keys or app bindings. Use purge() if you want
    to delete it all.
    """
    if not self.space.has_service_with_name(service_name):
        logging.warning("Service not found so... succeeded?")
        return True
    guid = self.get_instance_guid(service_name)
    logging.info("Deleting service %s with guid %s" % (service_name, guid))
    # MAINT: this endpoint changes in newer version of api
    uri = "/v2/service_instances/%s?accepts_incomplete=true" % (guid)
    return self.api.delete(uri, params=params)
Returns the URI endpoint for performing queries of a
Predix Time Series instance from environment inspection.
def _get_query_uri(self):
"""
Returns the URI endpoint for performing queries of a
Predix Time Series instance from environment inspection.
"""
if 'VCAP_SERVICES' in os.environ:
services = json.loads(os.getenv('VCAP_SERVICES'))
predix_timeseries = services['predix-timeseries'][0]['credentials']
return predix_timeseries['query']['uri'].partition('/v1')[0]
else:
return predix.config.get_env_value(self, 'query_uri') |
Returns the ZoneId for performing queries of a Predix
Time Series instance from environment inspection.
def _get_query_zone_id(self):
"""
Returns the ZoneId for performing queries of a Predix
Time Series instance from environment inspection.
"""
if 'VCAP_SERVICES' in os.environ:
services = json.loads(os.getenv('VCAP_SERVICES'))
predix_timeseries = services['predix-timeseries'][0]['credentials']
return predix_timeseries['query']['zone-http-header-value']
else:
return predix.config.get_env_value(self, 'query_zone_id') |
Will make a direct REST call with the given json body payload to
get datapoints.
def _get_datapoints(self, params):
"""
Will make a direct REST call with the given json body payload to
get datapoints.
"""
url = self.query_uri + '/v1/datapoints'
return self.service._get(url, params=params) |
Convenience method that for simple single tag queries will
return just the values to be iterated on.
def get_values(self, *args, **kwargs):
    """
    Convenience method that for simple single tag queries will
    return just the values to be iterated on.

    Yields [datetime, value, quality] triples for the first tag's
    first resultset.
    """
    if isinstance(args[0], list):
        raise ValueError("Can only get_values() for a single tag.")
    response = self.get_datapoints(*args, **kwargs)
    for epoch_ms, reading, quality in \
            response['tags'][0]['results'][0]['values']:
        yield [datetime.datetime.utcfromtimestamp(epoch_ms / 1000),
               reading,
               quality]
Returns all of the datapoints that match the given query.
- tags: list or string identifying the name/tag (ie. "temp")
- start: data after this, absolute or relative (ie. '1w-ago' or
1494015972386)
- end: data before this value
- order: ascending (asc) or descending (desc)
- limit: only return a few values (ie. 25)
- qualities: data quality value (ie. [ts.GOOD, ts.UNCERTAIN])
- attributes: dictionary of key-values (ie. {'unit': 'mph'})
- measurement: tuple of operation and value (ie. ('gt', 30))
- aggregations: summary statistics on data results (ie. 'avg')
- post: POST query instead of GET (caching implication)
A few additional observations:
- allow service to do most data validation
- order is applied before limit so resultset will differ
The returned results match what the service response is so you'll
need to unpack it as appropriate. Oftentimes what you want for
a simple single tag query will be:
response['tags'][0]['results'][0]['values']
def get_datapoints(self, tags, start=None, end=None, order=None,
        limit=None, qualities=None, attributes=None, measurement=None,
        aggregations=None, post=False):
    """
    Returns all of the datapoints that match the given query.

    - tags: list or string identifying the name/tag (ie. "temp")
    - start: data after this, absolute or relative (ie. '1w-ago' or
      1494015972386)
    - end: data before this value
    - order: ascending (asc) or descending (desc)
    - limit: only return a few values (ie. 25)
    - qualities: data quality value (ie. [ts.GOOD, ts.UNCERTAIN])
    - attributes: dictionary of key-values (ie. {'unit': 'mph'})
    - measurement: tuple of operation and value (ie. ('gt', 30))
    - aggregations: summary statistics on data results (ie. 'avg')
    - post: POST query instead of GET (caching implication)

    A few additional observations:
    - allow service to do most data validation
    - order is applied before limit so resultset will differ

    The returned results match what the service response is so you'll
    need to unpack it as appropriate.  Oftentimes what you want for
    a simple single tag query will be:

        response['tags'][0]['results'][0]['values']
    """
    params = {}

    # Documentation says start is required for GET but not POST, but
    # seems to be required all the time, so using sensible default.
    if not start:
        start = '1w-ago'
        logging.warning("Defaulting query for data with start date %s" % (start))

    # Start date can be absolute or relative, only certain legal values
    # but service will throw error if used improperly. (ms, s, mi, h, d,
    # w, mm, y). Relative dates must end in -ago.
    params['start'] = start

    # Docs say when making POST with a start that end must also be
    # specified, but this does not seem to be the case.
    if end:
        # MAINT: error when end < start which is handled by service
        params['end'] = end

    params['tags'] = []
    if not isinstance(tags, list):
        tags = [tags]

    for tag in tags:
        query = {}
        query['name'] = tag

        # Limit resultset with an integer value
        if limit:
            query['limit'] = int(limit)

        # Order must be 'asc' or 'desc' but will get sensible error
        # from service.
        if order:
            query['order'] = order

        # Filters are complex and support filtering by
        # quality, measurement, and attributes.
        filters = {}

        # Check for the quality of the datapoints
        if qualities is not None:
            if isinstance(qualities, (int, str)):
                qualities = [qualities]

            # Timeseries expects quality to be a string, not integer.
            # BUG FIX: build a new list of strings instead of mutating
            # the caller's list in place.
            filters['qualities'] = {
                "values": [str(quality) for quality in qualities]
            }

        # Check for attributes on the datapoints, expected to be
        # a dictionary of key / value pairs that datapoints must match.
        if attributes is not None:
            if not isinstance(attributes, dict):
                raise ValueError("Attribute filters must be dictionary.")
            filters['attributes'] = attributes

        # Check for measurements that meets a given comparison operation
        # such as ge, gt, eq, ne, le, lt
        if measurement is not None:
            filters['measurements'] = {
                'condition': measurement[0],
                'values': measurement[1]
            }

        # If we found any filters add them to the query
        if filters:
            query['filters'] = filters

        # Handle any additional aggregations of dataset
        if aggregations is not None:
            if not isinstance(aggregations, list):
                aggregations = [aggregations]
            query['aggregations'] = [
                {'sampling': {'datapoints': 1}, 'type': aggregation}
                for aggregation in aggregations
            ]

        params['tags'].append(query)

    if post:
        return self._post_datapoints(params)
    else:
        return self._get_datapoints({"query": json.dumps(params)})
Create a new websocket connection with proper headers.
def _create_connection(self):
    """
    Create a new websocket connection with proper headers.
    """
    logging.debug("Initializing new websocket connection.")
    headers = {
        'Authorization': self.service._get_bearer_token(),
        'Predix-Zone-Id': self.ingest_zone_id,
        'Content-Type': 'application/json',
    }
    logging.debug("URL=" + str(self.ingest_uri))
    logging.debug("HEADERS=" + str(headers))
    # Should consider connection pooling and longer timeouts
    return websocket.create_connection(self.ingest_uri, header=headers)
Reuse existing connection or create a new connection.
def _get_websocket(self, reuse=True):
"""
Reuse existing connection or create a new connection.
"""
# Check if still connected
if self.ws and reuse:
if self.ws.connected:
return self.ws
logging.debug("Stale connection, reconnecting.")
self.ws = self._create_connection()
return self.ws |
Establish or reuse socket connection and send
the given message to the timeseries service.
def _send_to_timeseries(self, message):
"""
Establish or reuse socket connection and send
the given message to the timeseries service.
"""
logging.debug("MESSAGE=" + str(message))
result = None
try:
ws = self._get_websocket()
ws.send(json.dumps(message))
result = ws.recv()
except (websocket.WebSocketConnectionClosedException, Exception) as e:
logging.debug("Connection failed, will try again.")
logging.debug(e)
ws = self._get_websocket(reuse=False)
ws.send(json.dumps(message))
result = ws.recv()
logging.debug("RESULT=" + str(result))
return result |
To reduce network traffic, you can buffer datapoints and
then flush() anything in the queue.
:param name: the name / label / tag for sensor data
:param value: the sensor reading or value to record
:param quality: the quality value, use the constants BAD, GOOD, etc.
(optional and defaults to UNCERTAIN)
:param timestamp: the time the reading was recorded in epoch
milliseconds (optional and defaults to now)
:param attributes: dictionary for any key-value pairs to store with the
reading (optional)
def queue(self, name, value, quality=None, timestamp=None,
        attributes=None):
    """
    To reduce network traffic, you can buffer datapoints and
    then flush() anything in the queue.

    :param name: the name / label / tag for sensor data
    :param value: the sensor reading or value to record
    :param quality: the quality value, use the constants BAD, GOOD, etc.
        (optional and defaults to UNCERTAIN)
    :param timestamp: the time the reading was recorded in epoch
        milliseconds (optional and defaults to now)
    :param attributes: dictionary for any key-value pairs to store with the
        reading (optional)
    :raises ValueError: if attributes are not a dict, have empty values,
        contain illegal characters, or have non-alphanumeric-ish keys
    """
    # Get timestamp first in case delay opening websocket connection
    # and it must have millisecond accuracy
    if not timestamp:
        timestamp = int(round(time.time() * 1000))
    elif isinstance(timestamp, datetime.datetime):
        # Coerce datetime objects to epoch milliseconds.
        # BUG FIX: strftime('%s') is a non-portable platform extension
        # (absent on Windows); datetime.timestamp() is the supported
        # equivalent, truncated to whole seconds as before.
        timestamp = int(timestamp.timestamp()) * 1000
    # Only specific quality values supported
    if quality not in (self.BAD, self.GOOD, self.NA, self.UNCERTAIN):
        quality = self.UNCERTAIN
    # Check if adding to queue of an existing tag and add second datapoint
    for point in self._queue:
        if point['name'] == name:
            point['datapoints'].append([timestamp, value, quality])
            return
    # If adding new tag, initialize and set any attributes
    datapoint = {
        "name": name,
        "datapoints": [[timestamp, value, quality]]
    }
    # Attributes are extra details for a datapoint
    if attributes is not None:
        if not isinstance(attributes, dict):
            raise ValueError("Attributes are expected to be a dictionary.")
        # Validate rules for attribute keys to provide guidance.
        invalid_value = ':;= '
        has_invalid_value = re.compile(r'[%s]' % (invalid_value)).search
        has_valid_key = re.compile(r'^[\w\.\/\-]+$').search
        for (key, val) in list(attributes.items()):
            # Values cannot be empty
            if (val == '') or (val is None):
                raise ValueError("Attribute (%s) must have a non-empty value." % (key))
            # Values should be treated as a string for regex validation
            val = str(val)
            # Values cannot contain certain arbitrary characters
            if bool(has_invalid_value(val)):
                raise ValueError("Attribute (%s) cannot contain (%s)." %
                        (key, invalid_value))
            # Attributes have to be alphanumeric-ish
            # BUG FIX: previously tested bool(has_valid_key) -- the
            # truthiness of the bound regex method itself, which is
            # always True -- so invalid keys were never rejected.
            if not has_valid_key(key):
                raise ValueError("Key (%s) not alphanumeric-ish." % (key))
        datapoint['attributes'] = attributes
    self._queue.append(datapoint)
    logging.debug("QUEUE: " + str(len(self._queue)))
Can accept a name/tag and value to be queued and then send anything in
the queue to the time series service. Optional parameters include
setting quality, timestamp, or attributes.
See spec for queue() for complete list of options.
Example of sending a batch of values:
queue('temp', 70.1)
queue('humidity', 20.4)
send()
Example of sending one and flushing queue immediately
send('temp', 70.3)
send('temp', 70.4, quality=ts.GOOD, attributes={'unit': 'F'})
def send(self, name=None, value=None, **kwargs):
    """
    Can accept a name/tag and value to be queued and then send anything in
    the queue to the time series service. Optional parameters include
    setting quality, timestamp, or attributes.

    See spec for queue() for complete list of options.

    Example of sending a batch of values:

        queue('temp', 70.1)
        queue('humidity', 20.4)
        send()

    Example of sending one and flushing queue immediately

        send('temp', 70.3)
        send('temp', 70.4, quality=ts.GOOD, attributes={'unit': 'F'})
    """
    if name and value:
        self.queue(name, value, **kwargs)
    # The label "name" or "tag" is sometimes used ambiguously
    message = {
        "messageId": int(round(time.time() * 1000)),
        "body": self._queue,
    }
    self._queue = []
    return self._send_to_timeseries(message)
This convenience method will execute the query passed in as is. For
more complex functionality you may want to use the sqlalchemy engine
directly, but this serves as an example implementation.
:param select_query: SQL statement to execute that will identify the
resultset of interest.
def execute(self, statement, *args, **kwargs):
    """
    This convenience method will execute the query passed in as is. For
    more complex functionality you may want to use the sqlalchemy engine
    directly, but this serves as an example implementation.

    :param statement: SQL statement to execute that will identify the
        resultset of interest.
    :param kwargs: forwarded to conn.execute() as bind parameters.

    NOTE(review): positional *args are accepted but never forwarded to
    conn.execute() -- only **kwargs are passed through.  Confirm whether
    any caller relies on positional bind parameters before changing.
    """
    # Connection is scoped to the with-block so it is returned/closed
    # even if execution raises.
    with self.engine.connect() as conn:
        s = sqlalchemy.sql.text(statement)
        return conn.execute(s, **kwargs)
Shutdown the client, shutdown the sub clients and stop the health checker
:return: None
def shutdown(self):
    """
    Shutdown the client, shutdown the sub clients and stop the health checker
    :return: None
    """
    # Stop the health-check loop first, then cascade shutdown to any
    # sub-clients that were created.
    self._run_health_checker = False
    for client in (self.publisher, self.subscriber):
        if client is not None:
            client.shutdown()
Get a env variable as defined by the service admin
:param key: the base of the key to use
:return: the env if it exists
def get_service_env_value(self, key):
    """
    Get a env variable as defined by the service admin

    :param key: the base of the key to use
    :return: the env value if it exists
    :raises ValueError: when the environment variable is unset or empty
    """
    service_key = predix.config.get_env_key(self, key)
    # BUG FIX: os.environ[service_key] raised KeyError for a missing
    # variable before the `if not value` guard could ever run; .get()
    # lets both the missing and the empty case raise our ValueError.
    value = os.environ.get(service_key)
    if not value:
        raise ValueError("%s env unset" % key)
    return value
build the grpc channel used for both publisher and subscriber
:return: None
def _init_channel(self):
    """
    build the grpc channel used for both publisher and subscriber
    :return: None
    """
    target = self._get_host() + ":" + self._get_grpc_port()
    if 'TLS_PEM_FILE' in os.environ:
        # b is important -> binary
        with open(os.environ['TLS_PEM_FILE'], mode='rb') as f:
            credentials = grpc.ssl_channel_credentials(
                root_certificates=f.read())
    else:
        credentials = grpc.ssl_channel_credentials()
    self._channel = grpc.secure_channel(target, credentials=credentials)
    self._init_health_checker()
start the health checker stub and start a thread to ping it every 30 seconds
:return: None
def _init_health_checker(self):
    """
    start the health checker stub and start a thread to ping it every 30 seconds
    :return: None
    """
    # Keep only the Check RPC callable from the stub.
    self._health_check = Health_pb2_grpc.HealthStub(
        channel=self._channel).Check
    checker = threading.Thread(target=self._health_check_thread)
    # Daemon thread so it never blocks interpreter shutdown.
    checker.daemon = True
    checker.start()
Health checker thread that pings the service every 30 seconds
:return: None
def _health_check_thread(self):
"""
Health checker thread that pings the service every 30 seconds
:return: None
"""
while self._run_health_checker:
response = self._health_check(Health_pb2.HealthCheckRequest(service='predix-event-hub.grpc.health'))
logging.debug('received health check: ' + str(response))
time.sleep(30)
return |
Create a new temporary cloud foundry space for
a project.
def create_temp_space():
    """
    Create a new temporary cloud foundry space for
    a project.
    """
    # Truncating uuid to just take final 12 characters since space name
    # is used to name services and there is a 50 character limit on instance
    # names.
    # MAINT: hacky with possible collisions
    unique_name = str(uuid.uuid4()).rsplit('-', 1)[-1]
    admin = predix.admin.cf.spaces.Space()
    created = admin.create_space(unique_name)
    space = predix.admin.cf.spaces.Space(
        guid=created['metadata']['guid'],
        name=created['entity']['name'])
    space.target()
    return space
Get the marketplace services.
def _get_spaces(self):
    """
    Get the spaces in the currently targeted organization.

    Returns the raw /v2/organizations/{guid}/spaces API response.
    (The previous docstring incorrectly described this as fetching
    marketplace services.)
    """
    guid = self.api.config.get_organization_guid()
    uri = '/v2/organizations/%s/spaces' % (guid)
    return self.api.get(uri)
Target the current space for any forthcoming Cloud Foundry
operations.
def target(self):
    """
    Target the current space for any forthcoming Cloud Foundry
    operations.
    """
    # MAINT: I don't like this, but will deal later
    os.environ.update({
        'PREDIX_SPACE_GUID': self.guid,
        'PREDIX_SPACE_NAME': self.name,
        'PREDIX_ORGANIZATION_GUID': self.org.guid,
        'PREDIX_ORGANIZATION_NAME': self.org.name,
    })
Return a flat list of the names for spaces in the organization.
def get_spaces(self):
    """
    Return a flat list of the names for spaces in the organization.

    Also caches the result on self.spaces.
    """
    self.spaces = [resource['entity']['name']
                   for resource in self._get_spaces()['resources']]
    return self.spaces
Create a new space with the given name in the current target
organization.
def create_space(self, space_name, add_users=True):
    """
    Create a new space with the given name in the current target
    organization.

    :param add_users: when True, all org users become managers and
        developers of the new space
    """
    body = {
        'name': space_name,
        'organization_guid': self.api.config.get_organization_guid(),
    }
    # MAINT: may need to do this more generally later
    if add_users:
        members = [user['metadata']['guid']
                   for user in self.org.get_users()['resources']]
        body['manager_guids'] = members
        body['developer_guids'] = members
    return self.api.post('/v2/spaces', body)
Delete the current space, or a space with the given name
or guid.
def delete_space(self, name=None, guid=None):
    """
    Delete the current space, or a space with the given name
    or guid.

    :raises ValueError: when a name is given but no space matches
    """
    if not guid:
        if name:
            # Resolve the guid by scanning the organization's spaces.
            for space in self._get_spaces()['resources']:
                if space['entity']['name'] == name:
                    guid = space['metadata']['guid']
                    break
            if not guid:
                raise ValueError("Space with name %s not found." % (name))
        else:
            guid = self.guid
    logging.warning("Deleting space (%s) and all services." % (guid))
    return self.api.delete("/v2/spaces/%s" % (guid),
                           params={'recursive': 'true'})
Returns a flat list of the service names available
from the marketplace for this space.
def get_services(self):
    """
    Returns a flat list of the service names available
    from the marketplace for this space.
    """
    return [resource['entity']['label']
            for resource in self._get_services()['resources']]
Returns the service instances activated in this space.
def _get_instances(self, page_number=None):
"""
Returns the service instances activated in this space.
"""
instances = []
uri = '/v2/spaces/%s/service_instances' % self.guid
json_response = self.api.get(uri)
instances += json_response['resources']
while json_response['next_url'] is not None:
json_response = self.api.get(json_response['next_url'])
instances += json_response['resources']
return instances |
Returns a flat list of the names of services created
in this space.
def get_instances(self):
    """
    Returns a flat list of the names of services created
    in this space.
    """
    return [resource['entity']['name']
            for resource in self._get_instances()]
Tests whether a service instance exists for the given
service.
def has_service_of_type(self, service_type):
    """
    Tests whether a service instance exists for the given
    service.
    """
    summary = self.get_space_summary()
    for instance in summary['services']:
        # Entries without a service_plan (e.g. user-provided services)
        # cannot match a marketplace service type.
        if 'service_plan' not in instance:
            continue
        if instance['service_plan']['service']['label'] == service_type:
            return True
    return False
Remove all services and apps from the space.
Will leave the space itself, call delete_space() if you
want to remove that too.
Similar to `cf delete-space -f <space-name>`.
def purge(self):
    """
    Remove all services and apps from the space.

    Will leave the space itself, call delete_space() if you
    want to remove that too.

    Similar to `cf delete-space -f <space-name>`.
    """
    logging.warning("Purging all services from space %s" % (self.name))
    service_admin = predix.admin.cf.services.Service()
    for instance_name in self.get_instances():
        service_admin.purge(instance_name)
    app_admin = predix.admin.cf.apps.App()
    for app_name in self.get_apps():
        app_admin.delete_app(app_name)
Create an instance of the PostgreSQL service with the typical starting
settings.
:param max_wait: service is created asynchronously, so will only wait
this number of seconds before giving up.
:param allocated_storage: int for GBs to be allocated for storage
:param encryption_at_rest: boolean for encrypting data that is stored
:param restore_to_time: UTC date within recovery period for db
backup to be used when initiating
def create(self, max_wait=300, allocated_storage=None,
        encryption_at_rest=None, restore_to_time=None, **kwargs):
    """
    Create an instance of the PostgreSQL service with the typical starting
    settings.

    :param max_wait: service is created asynchronously, so will only wait
        this number of seconds before giving up.
    :param allocated_storage: int for GBs to be allocated for storage
    :param encryption_at_rest: boolean for encrypting data that is stored
    :param restore_to_time: UTC date within recovery period for db
        backup to be used when initiating
    """
    # MAINT: Add these if there is demand for it and validated
    if allocated_storage or encryption_at_rest or restore_to_time:
        raise NotImplementedError()

    # Will need to wait for the service to be provisioned before can add
    # service keys and get env details.
    # BUG FIX: `async=True` is a SyntaxError in Python 3.7+ because
    # `async` became a reserved word; pass it by keyword expansion so
    # the call stays valid and compatible with the service API.
    self.service.create(create_keys=False, **{'async': True})
    while self._create_in_progress() and max_wait > 0:
        if max_wait % 5 == 0:
            logging.warning('Can take {}s for create to finish.'.format(max_wait))
        time.sleep(1)
        max_wait -= 1

    # Now get the service env (via service keys)
    cfg = self.service._get_service_config()
    self.service.settings.save(cfg)

    # Export connection details into the environment so the database
    # can be used immediately without re-reading the settings file.
    for key in ('hostname', 'password', 'port', 'username', 'uri'):
        env_key = predix.config.get_env_key(self.use_class, key)
        os.environ[env_key] = str(self.service.settings.data[key])
Add useful details to the manifest about this service so
that it can be used in an application.
:param manifest: A predix.admin.app.Manifest object instance
that manages reading/writing manifest config for a
cloud foundry app.
def add_to_manifest(self, manifest):
    """
    Add useful details to the manifest about this service so
    that it can be used in an application.

    :param manifest: A predix.admin.app.Manifest object instance
        that manages reading/writing manifest config for a
        cloud foundry app.
    """
    manifest.add_service(self.service.name)
    # Persist each connection detail as an environment variable in
    # the manifest.
    for key in ('hostname', 'password', 'port', 'username', 'uri'):
        env_key = predix.config.get_env_key(self.use_class, key)
        manifest.add_env_var(env_key, self.service.settings.data[key])
    manifest.write_manifest()
Create an instance of the Analytics Framework Service with the
typical starting settings.
If not provided, will reuse the runtime client for the ui
as well.
def create(self, asset, timeseries, client_id, client_secret,
        ui_client_id=None, ui_client_secret=None):
    """
    Create an instance of the Analytics Framework Service with the
    typical starting settings.

    If not provided, will reuse the runtime client for the ui
    as well.
    """
    assert isinstance(asset, predix.admin.asset.Asset), \
        "Require an existing predix.admin.asset.Asset instance"
    assert isinstance(timeseries, predix.admin.timeseries.TimeSeries), \
        "Require an existing predix.admin.timeseries.TimeSeries instance"

    self.service.create(parameters={
        'predixAssetZoneId': asset.get_zone_id(),
        'predixTimeseriesZoneId': timeseries.get_query_zone_id(),
        'runtimeClientId': client_id,
        'runtimeClientSecret': client_secret,
        # The UI client falls back to the runtime client when not given.
        'uiClientId': ui_client_id or client_id,
        'uiClientSecret': ui_client_secret or client_secret,
        'uiDomainPrefix': self.service.name,
    })
For cloud operations there is support for multiple pools of resources
dedicated to logstash. The service name as a result follows the
pattern logstash-{n} where n is some number. We can find it from the
service marketplace.
def _find_service_name(self):
    """
    For cloud operations there is support for multiple pools of resources
    dedicated to logstash. The service name as a result follows the
    pattern logstash-{n} where n is some number. We can find it from the
    service marketplace.
    """
    space = predix.admin.cf.spaces.Space()
    # Use the first logstash pool found; fall back to a known pool
    # name if the marketplace has none.
    return next((svc for svc in space.get_services()
                 if svc.startswith('logstash')), 'logstash-3')
Add to the manifest to make sure it is bound to the
application.
def add_to_manifest(self, manifest):
    """
    Add to the manifest to make sure it is bound to the
    application.

    :param manifest: A predix.admin.app.Manifest object instance
        that manages reading/writing manifest config for a
        cloud foundry app.
    """
    manifest.add_service(self.service.name)
    manifest.write_manifest()
Returns the Predix Zone Id for the service that is a required
header in service calls.
def _get_zone_id(self):
"""
Returns the Predix Zone Id for the service that is a required
header in service calls.
"""
if 'VCAP_SERVICES' in os.environ:
services = json.loads(os.getenv('VCAP_SERVICES'))
predix_asset = services['predix-asset'][0]['credentials']
return predix_asset['zone']['http-header-value']
else:
return predix.config.get_env_value(self, 'zone_id') |
Returns a flat list of the names of collections in the asset
service.
..
['wind-turbines', 'jet-engines']
def get_collections(self):
    """
    Returns a flat list of the names of collections in the asset
    service.

    ..

        ['wind-turbines', 'jet-engines']
    """
    return [entry['collection'] for entry in self._get_collections()]
Returns a specific collection from the asset service with
the given collection endpoint.
Supports passing through parameters such as...
- filters such as "name=Vesuvius" following GEL spec
- fields such as "uri,description" comma delimited
- page_size such as "100" (the default)
def get_collection(self, collection, filter=None, fields=None,
        page_size=None):
    """
    Returns a specific collection from the asset service with
    the given collection endpoint.

    Supports passing through parameters such as...

    - filters such as "name=Vesuvius" following GEL spec
    - fields such as "uri,description" comma delimited
    - page_size such as "100" (the default)
    """
    # Only forward parameters the caller actually provided.
    params = {}
    if filter:
        params['filter'] = filter
    if fields:
        params['fields'] = fields
    if page_size:
        params['pageSize'] = page_size
    return self.service._get(self.uri + '/v1' + collection, params=params)
Returns a new guid for use in posting a new asset to a collection.
def create_guid(self, collection=None):
    """
    Returns a new guid for use in posting a new asset to a collection.

    When a collection is given the guid is prefixed with it, joined
    by '/'.
    """
    guid = str(uuid.uuid4())
    return '/'.join([collection, guid]) if collection else guid
Creates a new collection. This is mostly just transport layer
and passes collection and body along. It presumes the body
already has generated.
The collection is *not* expected to have the id.
def post_collection(self, collection, body):
    """
    Creates a new collection. This is mostly just transport layer
    and passes collection and body along. It presumes the body
    already has generated.

    The collection is *not* expected to have the id.
    """
    assert isinstance(body, list), "POST requires body to be a list"
    assert collection.startswith('/'), "Collections must start with /"
    return self.service._post(self.uri + '/v1' + collection, body)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.