text stringlengths 81 112k |
|---|
Updates an existing collection.
The collection being updated *is* expected to include the id.
def put_collection(self, collection, body):
    """
    Update an existing collection on the service.

    The collection being updated *is* expected to include the id.

    :param collection: collection path, including the id.
    :param body: dict payload describing the collection.
    """
    endpoint = '{0}/v1{1}'.format(self.uri, collection)
    return self.service._put(endpoint, body)
Deletes an existing collection.
The collection being updated *is* expected to include the id.
def delete_collection(self, collection):
    """
    Delete an existing collection.

    The collection being deleted *is* expected to include the id.

    :param collection: collection identifier to remove.
    """
    endpoint = '/'.join([self.uri, collection])
    return self.service._delete(endpoint)
Will make specific updates to a record based on JSON Patch
documentation.
https://tools.ietf.org/html/rfc6902
the format of changes is something like::
[{
'op': 'add',
'path': '/newfield',
'value': 'just added'
}]
def patch_collection(self, collection, changes):
    """
    Make specific updates to a record based on JSON Patch (RFC 6902).

    https://tools.ietf.org/html/rfc6902

    The format of changes is something like::

        [{
            'op': 'add',
            'path': '/newfield',
            'value': 'just added'
        }]

    :param collection: collection identifier to patch.
    :param changes: list of JSON Patch operations.
    """
    endpoint = '/'.join([self.uri, collection])
    return self.service._patch(endpoint, changes)
Save an asset collection to the service.
def save(self, collection):
    """
    Persist an asset collection to the service.

    :param collection: a predix.data.asset.AssetCollection instance;
        validated before being written.
    """
    assert isinstance(collection, predix.data.asset.AssetCollection), "Expected AssetCollection"
    # Validate before we write anything to the service.
    collection.validate()
    body = collection.__dict__
    self.put_collection(collection.uri, body)
Populate a manifest file generated from details from the
cloud foundry space environment.
def create_manifest_from_space(self):
    """
    Populate a manifest file from the service instances found in the
    current cloud foundry space.
    """
    summary = predix.admin.cf.spaces.Space().get_space_summary()
    for instance in summary['services']:
        name = instance['name']
        label = instance['service_plan']['service']['label']
        if label in self.supported:
            self.supported[label](name=name).add_to_manifest(self)
        elif label == 'us-weather-forecast':
            # Weather is handled specially since it is not in the
            # supported-service map.
            predix.admin.weather.WeatherForecast(name=name).add_to_manifest(self)
        else:
            logging.warning("Unsupported service type: %s" % label)
Lock the manifest to the current organization and space regardless of
Cloud Foundry target.
def lock_to_org_space(self):
    """
    Pin the manifest to the current organization and space regardless
    of the Cloud Foundry target.
    """
    env_vars = (
        ('PREDIX_ORGANIZATION_GUID', self.space.org.guid),
        ('PREDIX_ORGANIZATION_NAME', self.space.org.name),
        ('PREDIX_SPACE_GUID', self.space.guid),
        ('PREDIX_SPACE_NAME', self.space.name),
    )
    for key, value in env_vars:
        self.add_env_var(key, value)
    self.write_manifest()
Creates an instance of UAA Service.
:param admin_secret: The secret password for administering the service
such as adding clients and users.
def create_uaa(self, admin_secret, **kwargs):
    """
    Create an instance of the UAA Service and register it in the
    manifest.

    :param admin_secret: The secret password for administering the
        service, such as adding clients and users.
    """
    uaa = predix.admin.uaa.UserAccountAuthentication(**kwargs)
    # Only provision the service when it does not already exist.
    if not uaa.exists():
        uaa.create(admin_secret, **kwargs)
    uaa.add_to_manifest(self)
    return uaa
Create a client and add it to the manifest.
:param client_id: The client id used to authenticate as a client
in UAA.
:param client_secret: The secret password used by a client to
authenticate and generate a UAA token.
:param uaa: The UAA to create client with
def create_client(self, client_id=None, client_secret=None, uaa=None):
    """
    Create a client and add it to the manifest.

    :param client_id: The client id used to authenticate as a client
        in UAA.
    :param client_secret: The secret password used by a client to
        authenticate and generate a UAA token.
    :param uaa: The UAA to create the client with.
    """
    if not uaa:
        uaa = predix.admin.uaa.UserAccountAuthentication()
    # Credentials are generated when not supplied by the caller.
    client_id = client_id or uaa._create_id()
    client_secret = client_secret or uaa._create_secret()
    uaa.create_client(client_id, client_secret)
    uaa.add_client_to_manifest(client_id, client_secret, self)
Creates an instance of the Time Series Service.
def create_timeseries(self, **kwargs):
    """
    Create an instance of the Time Series Service, grant the manifest
    client access, and register it in the manifest.
    """
    service = predix.admin.timeseries.TimeSeries(**kwargs)
    service.create()
    client = self.get_client_id()
    if client:
        service.grant_client(client)
    service.add_to_manifest(self)
    return service
Creates an instance of the Asset Service.
def create_asset(self, **kwargs):
    """
    Create an instance of the Asset Service, grant the manifest
    client access, and register it in the manifest.
    """
    service = predix.admin.asset.Asset(**kwargs)
    service.create()
    client = self.get_client_id()
    if client:
        service.grant_client(client)
    service.add_to_manifest(self)
    return service
Creates an instance of the Asset Service.
def create_acs(self, **kwargs):
    """
    Create an instance of the Access Control Service (ACS), grant the
    manifest client access, and register it in the manifest.
    """
    acs = predix.admin.acs.AccessControl(**kwargs)
    acs.create()
    client_id = self.get_client_id()
    if client_id:
        # Granting once is sufficient; the original repeated this
        # call, which was redundant copy-paste.
        acs.grant_client(client_id)
    acs.add_to_manifest(self)
    return acs
Creates an instance of the Asset Service.
def create_weather(self, **kwargs):
    """
    Create an instance of the US Weather Forecast Service, grant the
    manifest client access, and register it in the manifest.
    """
    weather = predix.admin.weather.WeatherForecast(**kwargs)
    weather.create()
    client_id = self.get_client_id()
    if client_id:
        # Granting once is sufficient; the original repeated this
        # call, which was redundant copy-paste.
        weather.grant_client(client_id)
    weather.add_to_manifest(self)
    return weather
Creates an instance of the BlobStore Service.
def create_blobstore(self, **kwargs):
    """
    Create an instance of the BlobStore Service and register it in
    the manifest.
    """
    service = predix.admin.blobstore.BlobStore(**kwargs)
    service.create()
    service.add_to_manifest(self)
    return service
Creates an instance of the Logging Service.
def create_logstash(self, **kwargs):
    """
    Create an instance of the Logging Service and register it in the
    manifest.
    """
    service = predix.admin.logstash.Logging(**kwargs)
    service.create()
    service.add_to_manifest(self)
    # Point the operator at the companion log viewer application.
    logging.info('Install Kibana-Me-Logs application by following GitHub instructions')
    logging.info('git clone https://github.com/cloudfoundry-community/kibana-me-logs.git')
    return service
Creates an instance of the Cache Service.
def create_cache(self, **kwargs):
    """
    Create an instance of the Cache Service and register it in the
    manifest.
    """
    service = predix.admin.cache.Cache(**kwargs)
    service.create(**kwargs)
    service.add_to_manifest(self)
    return service
todo make it so the client can be customised to publish/subscribe
Creates an instance of eventhub service
def create_eventhub(self, **kwargs):
    """
    Create an instance of the Event Hub service, grant the manifest
    client access, and register it in the manifest.

    TODO: make it so the client can be customised to publish/subscribe.
    """
    eventhub = predix.admin.eventhub.EventHub(**kwargs)
    eventhub.create()
    eventhub.grant_client(client_id=self.get_client_id(), **kwargs)
    # add_to_manifest() was called twice in the original; once is
    # enough to record the service and its environment variables.
    eventhub.add_to_manifest(self)
    return eventhub
Returns a list of service names. Can return all services, just
those supported by PredixPy, or just those not yet supported by
PredixPy.
:param available: Return the services that are
available in PredixPy. (Defaults to True)
:param unavailable: Return the services that are not yet
supported by PredixPy. (Defaults to False)
:param deprecated: Return the services that are
supported by PredixPy but no longer available. (True)
def get_service_marketplace(self, available=True, unavailable=False,
                            deprecated=False):
    """
    Return a list of service names. Can return all services, just
    those supported by PredixPy, or just those not yet supported by
    PredixPy.

    :param available: Return the services that are available in
        PredixPy. (Defaults to True)
    :param unavailable: Return the services that are not yet
        supported by PredixPy. (Defaults to False)
    :param deprecated: Return the services that are supported by
        PredixPy but no longer available. (Defaults to False)
    """
    supported = set(self.supported.keys())
    marketplace = set(self.space.get_services())
    names = set()
    if available:
        names |= supported
    if unavailable:
        names |= marketplace - supported
    if deprecated:
        names |= supported - marketplace
    return list(names)
If we are in an app context we can authenticate immediately.
def _auto_authenticate(self):
    """
    If we are in an app context we can authenticate immediately using
    credentials found in the environment.
    """
    client_id = predix.config.get_env_value(predix.app.Manifest, 'client_id')
    client_secret = predix.config.get_env_value(predix.app.Manifest,
                                                'client_secret')
    # Only authenticate when both credentials are present.
    if not (client_id and client_secret):
        return
    logging.info("Automatically authenticated as %s" % (client_id))
    self.uaa.authenticate(client_id, client_secret)
Simple GET request for a given path.
def _get(self, uri, params=None, headers=None):
    """
    Simple GET request for a given path.

    Returns the decoded JSON body on HTTP 200; otherwise logs the
    error body and raises for status.
    """
    if not headers:
        headers = self._get_headers()
    logging.debug("URI=" + str(uri))
    logging.debug("HEADERS=" + str(headers))
    response = self.session.get(uri, headers=headers, params=params)
    logging.debug("STATUS=" + str(response.status_code))
    if response.status_code == 200:
        return response.json()
    logging.error(b"ERROR=" + response.content)
    response.raise_for_status()
Simple POST request for a given path.
def _post(self, uri, data):
    """
    Simple POST request for a given path.

    Returns the decoded JSON body (or the literal string "{}" when
    the body is empty) on 200/204; otherwise logs and raises.
    """
    headers = self._get_headers()
    payload = json.dumps(data)
    logging.debug("URI=" + str(uri))
    logging.debug("BODY=" + payload)
    response = self.session.post(uri, headers=headers, data=payload)
    if response.status_code in [200, 204]:
        try:
            return response.json()
        except ValueError:
            # A 204 has no body; keep the historical "{}" sentinel.
            return "{}"
    logging.error(response.content)
    response.raise_for_status()
Simple PUT operation for a given path.
def _put(self, uri, data):
    """
    Simple PUT operation for a given path.

    Returns the request payload on 201/204; otherwise logs and
    raises.
    """
    headers = self._get_headers()
    payload = json.dumps(data)
    logging.debug("URI=" + str(uri))
    logging.debug("BODY=" + payload)
    response = self.session.put(uri, headers=headers, data=payload)
    if response.status_code in [201, 204]:
        return data
    logging.error(response.content)
    response.raise_for_status()
Simple DELETE operation for a given path.
def _delete(self, uri):
    """
    Simple DELETE operation for a given path.

    Returns the response object on a 204 (successful delete);
    otherwise logs and raises.
    """
    response = self.session.delete(uri, headers=self._get_headers())
    # A successful delete returns 204 No Content.
    if response.status_code == 204:
        return response
    logging.error(response.content)
    response.raise_for_status()
Simple PATCH operation for a given path.
The body is expected to list operations to perform to update
the data. Operations include:
- add
- remove
- replace
- move
- copy
- test
[
{ "op": "test", "path": "/a/b/c", "value": "foo" },
]
def _patch(self, uri, data):
    """
    Simple PATCH operation for a given path.

    The body is expected to list JSON Patch operations to perform on
    the data. Operations include:

    - add
    - remove
    - replace
    - move
    - copy
    - test

    ::

        [
            { "op": "test", "path": "/a/b/c", "value": "foo" },
        ]
    """
    response = self.session.patch(uri, headers=self._get_headers(),
                                  data=json.dumps(data))
    # A successful patch returns 204 No Content.
    if response.status_code == 204:
        return response
    logging.error(response.content)
    response.raise_for_status()
Returns the full path that uniquely identifies
the resource endpoint.
def _get_resource_uri(self, guid=None):
    """
    Return the full path that uniquely identifies the resource
    endpoint, optionally for a single resource.

    :param guid: optional resource identifier; it may itself be a
        path (e.g. '/asset/123') so it is URL-quoted.
    """
    uri = self.uri + '/v1/resource'
    if guid:
        # urllib.quote_plus only exists on Python 2; on Python 3 it
        # moved to urllib.parse.quote_plus.
        try:
            from urllib.parse import quote_plus  # Python 3
        except ImportError:
            from urllib import quote_plus  # Python 2
        uri += '/' + quote_plus(guid)
    return uri
Returns a specific resource by resource id.
def get_resource(self, resource_id):
    """
    Return a specific resource by resource id.

    :param resource_id: may itself be a path such as '/asset/123',
        so it is quoted when building the URI.
    """
    return self.service._get(self._get_resource_uri(guid=resource_id))
Create new resources and associated attributes.
Example:
acs.post_resource([
{
"resourceIdentifier": "masaya",
"parents": [],
"attributes": [
{
"issuer": "default",
"name": "country",
"value": "Nicaragua"
}
],
}
])
The issuer is effectively a namespace, and in policy evaluations you
identify an attribute by a specific namespace. Many examples provide
a URL but it could be any arbitrary string.
The body is a list, so many resources can be added at the same time.
def _post_resource(self, body):
    """
    Create new resources and associated attributes.

    Example::

        acs.post_resource([
            {
                "resourceIdentifier": "masaya",
                "parents": [],
                "attributes": [
                    {
                        "issuer": "default",
                        "name": "country",
                        "value": "Nicaragua"
                    }
                ],
            }
        ])

    The issuer is effectively a namespace, and in policy evaluations
    you identify an attribute by a specific namespace. Many examples
    provide a URL but it could be any arbitrary string.

    The body is a list, so many resources can be added at the same
    time.
    """
    # Message fixed: was the garbled "POST for requires body to be a
    # list"; now matches _post_subject().
    assert isinstance(body, (list)), "POST requires body to be a list"
    uri = self._get_resource_uri()
    return self.service._post(uri, body)
Remove a specific resource by its identifier.
def delete_resource(self, resource_id):
    """
    Remove a specific resource by its identifier.

    :param resource_id: may itself be a path such as '/asset/123',
        so it is quoted when building the URI.
    """
    return self.service._delete(self._get_resource_uri(guid=resource_id))
Update a resource for the given resource id. The body is not
a list but a dictionary of a single resource.
def _put_resource(self, resource_id, body):
    """
    Update a resource for the given resource id. The body is not a
    list but a dictionary describing a single resource.

    :param resource_id: may itself be a path such as '/asset/123',
        so it is quoted when building the URI.
    :param body: dict describing the resource.
    """
    assert isinstance(body, (dict)), "PUT requires body to be a dict."
    return self.service._put(self._get_resource_uri(guid=resource_id), body)
Will add the given resource with a given identifier and attribute
dictionary.
example/
add_resource('/asset/12', {'id': 12, 'manufacturer': 'GE'})
def add_resource(self, resource_id, attributes, parents=None,
                 issuer='default'):
    """
    Add the given resource with a given identifier and attribute
    dictionary.

    example::

        add_resource('/asset/12', {'id': 12, 'manufacturer': 'GE'})

    :param resource_id: unique identifier; may itself be a path.
    :param attributes: dict of attribute name/value pairs.
    :param parents: optional list of parent resources (defaults to
        an empty list).
    :param issuer: namespace the attributes are registered under.
    """
    # MAINT: consider test to avoid adding duplicate resource id
    assert isinstance(attributes, (dict)), "attributes expected to be dict"
    # A mutable default argument ([]) would be shared across calls,
    # so the empty list is created per-call instead.
    if parents is None:
        parents = []
    attrs = [{'issuer': issuer, 'name': name, 'value': value}
             for name, value in attributes.items()]
    body = {
        "resourceIdentifier": resource_id,
        "parents": parents,
        "attributes": attrs,
    }
    return self._put_resource(resource_id, body)
Returns the full path that uniquely identifies
the subject endpoint.
def _get_subject_uri(self, guid=None):
    """
    Return the full path that uniquely identifies the subject
    endpoint, optionally for a single subject.

    :param guid: optional subject identifier; it may itself be a
        path (e.g. '/user/j12y') so it is URL-quoted.
    """
    uri = self.uri + '/v1/subject'
    if guid:
        # urllib.quote_plus only exists on Python 2; on Python 3 it
        # moved to urllib.parse.quote_plus.
        try:
            from urllib.parse import quote_plus  # Python 3
        except ImportError:
            from urllib import quote_plus  # Python 2
        uri += '/' + quote_plus(guid)
    return uri
Returns a specific subject by subject id.
def get_subject(self, subject_id):
    """
    Return a specific subject by subject id.

    :param subject_id: may itself be a path such as '/user/j12y',
        so it is quoted when building the URI.
    """
    return self.service._get(self._get_subject_uri(guid=subject_id))
Create new subjects and associated attributes.
Example:
acs.post_subject([
{
"subjectIdentifier": "/role/evangelist",
"parents": [],
"attributes": [
{
"issuer": "default",
"name": "role",
"value": "developer evangelist",
}
]
}
])
The issuer is effectively a namespace, and in policy evaluations
you identify an attribute by a specific namespace. Many examples
provide a URL but it could be any arbitrary string.
The body is a list, so many subjects can be added at the same time.
def _post_subject(self, body):
    """
    Create new subjects and associated attributes.

    Example::

        acs.post_subject([
            {
                "subjectIdentifier": "/role/evangelist",
                "parents": [],
                "attributes": [
                    {
                        "issuer": "default",
                        "name": "role",
                        "value": "developer evangelist",
                    }
                ]
            }
        ])

    The issuer is effectively a namespace, and in policy evaluations
    you identify an attribute by a specific namespace. Many examples
    provide a URL but it could be any arbitrary string.

    The body is a list, so many subjects can be added at the same
    time.
    """
    assert isinstance(body, (list)), "POST requires body to be a list"
    return self.service._post(self._get_subject_uri(), body)
Remove a specific subject by its identifier.
def delete_subject(self, subject_id):
    """
    Remove a specific subject by its identifier.

    :param subject_id: may itself be a path such as '/role/analyst',
        so it is quoted when building the URI.
    """
    return self.service._delete(self._get_subject_uri(guid=subject_id))
Update a subject for the given subject id. The body is not
a list but a dictionary of a single resource.
def _put_subject(self, subject_id, body):
    """
    Update a subject for the given subject id. The body is not a
    list but a dictionary describing a single subject.

    :param subject_id: may itself be a path such as '/user/j12y',
        so it is quoted when building the URI.
    :param body: dict describing the subject.
    """
    assert isinstance(body, (dict)), "PUT requires body to be dict."
    return self.service._put(self._get_subject_uri(guid=subject_id), body)
Will add the given subject with a given identifier and attribute
dictionary.
example/
add_subject('/user/j12y', {'username': 'j12y'})
def add_subject(self, subject_id, attributes, parents=None,
                issuer='default'):
    """
    Add the given subject with a given identifier and attribute
    dictionary.

    example::

        add_subject('/user/j12y', {'username': 'j12y'})

    :param subject_id: unique identifier; may itself be a path.
    :param attributes: dict of attribute name/value pairs.
    :param parents: optional list of parent subjects (defaults to an
        empty list).
    :param issuer: namespace the attributes are registered under.
    """
    # MAINT: consider test to avoid adding duplicate subject id
    assert isinstance(attributes, (dict)), "attributes expected to be dict"
    # A mutable default argument ([]) would be shared across calls,
    # so the empty list is created per-call instead.
    if parents is None:
        parents = []
    attrs = [{'issuer': issuer, 'name': name, 'value': value}
             for name, value in attributes.items()]
    body = {
        "subjectIdentifier": subject_id,
        "parents": parents,
        "attributes": attrs,
    }
    return self._put_subject(subject_id, body)
Tests whether or not the ACS service being monitored is alive.
def _get_monitoring_heartbeat(self):
    """
    Query the monitoring endpoint to test whether the ACS service is
    alive, returning the raw response.
    """
    endpoint = self.uri + '/monitoring/heartbeat'
    return self.session.get(endpoint)
Will test whether the ACS service is up and alive.
def is_alive(self):
    """
    Test whether the ACS service is up and alive.

    :returns: True only when the heartbeat endpoint answers 200 with
        the body 'alive'.
    """
    # The original called self.get_monitoring_heartbeat(), but the
    # method is defined as _get_monitoring_heartbeat().
    response = self._get_monitoring_heartbeat()
    if response.status_code != 200:
        return False
    body = response.content
    # requests returns bytes on Python 3; normalize before comparing
    # so the 'alive' check is not always False.
    if isinstance(body, bytes):
        body = body.decode('utf-8')
    return body == 'alive'
Returns the full path that uniquely identifies
the subject endpoint.
def _get_policy_set_uri(self, guid=None):
    """
    Return the full path that uniquely identifies the policy-set
    endpoint, optionally for a single policy set.

    (The original docstring said "subject endpoint" -- copy-paste.)

    :param guid: optional policy set identifier; URL-quoted in case
        it contains path separators.
    """
    uri = self.uri + '/v1/policy-set'
    if guid:
        # urllib.quote_plus only exists on Python 2; on Python 3 it
        # moved to urllib.parse.quote_plus.
        try:
            from urllib.parse import quote_plus  # Python 3
        except ImportError:
            from urllib import quote_plus  # Python 2
        uri += '/' + quote_plus(guid)
    return uri
Will create or update a policy set for the given path.
def _put_policy_set(self, policy_set_id, body):
    """
    Create or update a policy set for the given path.

    :param policy_set_id: identifier of the policy set.
    :param body: dict describing the policy set.
    """
    assert isinstance(body, (dict)), "PUT requires body to be a dict."
    return self.service._put(self._get_policy_set_uri(guid=policy_set_id), body)
Get a specific policy set by id.
def _get_policy_set(self, policy_set_id):
    """
    Get a specific policy set by id.
    """
    return self.service._get(self._get_policy_set_uri(guid=policy_set_id))
Delete a specific policy set by id. Method is idempotent.
def delete_policy_set(self, policy_set_id):
    """
    Delete a specific policy set by id. This method is idempotent.
    """
    return self.service._delete(self._get_policy_set_uri(guid=policy_set_id))
Will create a new policy set to enforce the given policy details.
The name is just a helpful descriptor for the policy.
The action maps to a HTTP verb.
Policies are evaluated against resources and subjects. They are
identified by matching a uriTemplate or attributes.
Examples::
resource = {
"uriTemplate": "/asset/{id}"
}
subject: {
"attributes": [{
"issuer": "default",
"name": "role"
}]
}
The condition is expected to be a string that defines a groovy
operation that can be evaluated.
Examples::
condition = "match.single(subject.attributes('default', 'role'),
'admin')
def add_policy(self, name, action, resource, subject, condition,
               policy_set_id=None, effect='PERMIT'):
    """
    Create a new policy set enforcing the given policy details.

    The name is just a helpful descriptor for the policy; the action
    maps to an HTTP verb.

    Policies are evaluated against resources and subjects, identified
    by matching a uriTemplate or attributes::

        resource = {
            "uriTemplate": "/asset/{id}"
        }

        subject = {
            "attributes": [{
                "issuer": "default",
                "name": "role"
            }]
        }

    The condition is expected to be a string defining a groovy
    expression that can be evaluated, e.g.::

        condition = "match.single(subject.attributes('default', 'role'),
            'admin')"

    :param policy_set_id: optional id; a uuid4 is generated when
        omitted.
    :param effect: decision to apply when the policy matches
        (defaults to 'PERMIT').
    """
    # Generate a policy set id when the caller does not supply one.
    policy_set_id = policy_set_id or str(uuid.uuid4())
    # Only a handful of operations are supported in policy
    # definitions.
    if action not in ['GET', 'PUT', 'POST', 'DELETE']:
        raise ValueError("Invalid action")
    # A single policy wrapped in a one-element policy set.
    policy = {
        "name": name,
        "target": {
            "resource": resource,
            "subject": subject,
            "action": action,
        },
        "conditions": [{
            "name": "",
            "condition": condition,
        }],
        "effect": effect,
    }
    body = {
        "name": policy_set_id,
        "policies": [policy],
    }
    return self._put_policy_set(policy_set_id, body)
Evaluate a policy-set against a subject and resource.
example/
is_allowed('/user/j12y', 'GET', '/asset/12')
def is_allowed(self, subject_id, action, resource_id, policy_sets=[]):
    """
    Evaluate a policy-set against a subject and resource.

    example::

        is_allowed('/user/j12y', 'GET', '/asset/12')

    :param policy_sets: optional evaluation order of policy sets.
    :returns: True when the decision is NOT_APPLICABLE or PERMIT.
    """
    body = {
        "action": action,
        "subjectIdentifier": subject_id,
        "resourceIdentifier": resource_id,
    }
    if policy_sets:
        body['policySetsEvaluationOrder'] = policy_sets
    uri = self.uri + '/v1/policy-evaluation'
    logging.debug("URI=" + str(uri))
    logging.debug("BODY=" + str(body))
    # A 200 response carries the policy decision.
    response = self.service._post(uri, body)
    if 'effect' in response:
        return response['effect'] in ['NOT_APPLICABLE', 'PERMIT']
    return False
Main download function
def download(url, path=None, headers=None, session=None, show_progress=True,
             resume=True, auto_retry=True, max_rst_retries=5,
             pass_through_opts=None, cainfo=None, user_agent=None, auth=None):
    """Main download function: build a Homura downloader and run it."""
    downloader = Homura(url, path, headers, session, show_progress,
                        resume, auto_retry, max_rst_retries,
                        pass_through_opts, cainfo, user_agent, auth)
    downloader.start()
Fill in the path of the PEM file containing the CA certificate.
The priority is: 1. user provided path, 2. path to the cacert.pem
bundle provided by certifi (if installed), 3. let pycurl use the
system path where libcurl's cacert bundle is assumed to be stored,
as established at libcurl build time.
def _fill_in_cainfo(self):
    """Fill in the path of the PEM file containing the CA certificate.

    The priority is: 1. user provided path, 2. path to the cacert.pem
    bundle provided by certifi (if installed), 3. let pycurl use the
    system path where libcurl's cacert bundle is assumed to be stored,
    as established at libcurl build time.
    """
    cainfo = self.cainfo
    if not cainfo:
        try:
            cainfo = certifi.where()
        except AttributeError:
            # certifi stub without where(); fall back to libcurl's
            # built-in default.
            cainfo = None
    if cainfo:
        self._pycurl.setopt(pycurl.CAINFO, cainfo)
Sending a single cURL request to download
def curl(self):
    """Sending a single cURL request to download"""
    c = self._pycurl
    # Resume download
    # If the target already exists and resume is enabled, open in
    # append mode and tell libcurl to continue from the current size.
    if os.path.exists(self.path) and self.resume:
        mode = 'ab'
        self.downloaded = os.path.getsize(self.path)
        c.setopt(pycurl.RESUME_FROM, self.downloaded)
    else:
        mode = 'wb'
    with open(self.path, mode) as f:
        c.setopt(c.URL, utf8_encode(self.url))
        if self.auth:
            # NOTE(review): self.auth looks like a (user, password)
            # 2-tuple -- confirm against the constructor.
            c.setopt(c.USERPWD, '%s:%s' % self.auth)
        c.setopt(c.USERAGENT, self._user_agent)
        # Stream response bytes straight into the open file handle.
        c.setopt(c.WRITEDATA, f)
        h = self._get_pycurl_headers()
        if h is not None:
            c.setopt(pycurl.HTTPHEADER, h)
        # Enable the progress callback and follow HTTP redirects.
        c.setopt(c.NOPROGRESS, 0)
        c.setopt(pycurl.FOLLOWLOCATION, 1)
        c.setopt(c.PROGRESSFUNCTION, self.progress)
        self._fill_in_cainfo()
        if self._pass_through_opts:
            # Caller-supplied raw pycurl options are applied last so
            # they can override the defaults set above.
            for key, value in self._pass_through_opts.items():
                c.setopt(key, value)
        c.perform()
Start downloading, handling auto retry, download resume and path
moving
def start(self):
    """
    Start downloading, handling auto retry, download resume and path
    moving
    """
    # Without auto retry, make a single attempt and let any pycurl
    # error propagate to the caller.
    if not self.auto_retry:
        self.curl()
        return
    while not self.is_finished:
        try:
            self.curl()
        except pycurl.error as e:
            # transfer closed with n bytes remaining to read
            if e.args[0] == pycurl.E_PARTIAL_FILE:
                pass
            # HTTP server doesn't seem to support byte ranges.
            # Cannot resume.
            elif e.args[0] == pycurl.E_HTTP_RANGE_ERROR:
                break
            # Recv failure: Connection reset by peer
            elif e.args[0] == pycurl.E_RECV_ERROR:
                if self._rst_retries < self.max_rst_retries:
                    pass
                else:
                    raise e
                # Only reached when we chose to retry above.
                self._rst_retries += 1
            else:
                raise e
    self._move_path()
    self._done()
Move the downloaded file to the authentic path (identified by
effective URL)
def _move_path(self):
    """
    Move the downloaded file to the authentic path (identified by
    effective URL)
    """
    # Only act on temporary paths with an active curl handle.
    if not is_temp_path(self._path) or self._pycurl is None:
        return
    effective_url = self._pycurl.getinfo(pycurl.EFFECTIVE_URL)
    effective_name = get_resource_name(effective_url)
    requested_name = get_resource_name(self.url)
    if effective_name != requested_name and os.path.exists(self.path):
        new_path = self._get_path(self._path, effective_url)
        shutil.move(self.path, new_path)
        self.path = new_path
Create a new instance of the UAA service. Requires a
secret password for the 'admin' user account.
def create(self, secret, **kwargs):
    """
    Create a new instance of the UAA service. Requires a secret
    password for the 'admin' user account.
    """
    self.service.create(parameters={"adminClientSecret": secret})
    # Publish the service URI through an environment variable for
    # later use.
    predix.config.set_env_value(self.use_class, 'uri', self._get_uri())
    # Once created we can immediately log in as the admin.
    self.authenticate()
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: An predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app.
def add_to_manifest(self, manifest):
    """
    Add useful details about this service to the manifest so it can
    be used in an application.

    :param manifest: A predix.admin.app.Manifest object instance
        that manages reading/writing manifest config for a cloud
        foundry app.
    """
    # Record the service binding itself.
    manifest.add_service(self.service.name)
    # Expose the service URI through an environment variable.
    varname = predix.config.set_env_value(self.use_class, 'uri',
                                          self._get_uri())
    manifest.add_env_var(varname, self._get_uri())
    manifest.write_manifest()
Authenticate into the UAA instance as the admin user.
def authenticate(self):
    """
    Authenticate into the UAA instance as the admin user.
    """
    # Make sure the uri is stored in the environment first.
    predix.config.set_env_value(self.use_class, 'uri', self._get_uri())
    self.uaac = predix.security.uaa.UserAccountAuthentication()
    # Admin tokens are never cached.
    self.uaac.authenticate('admin', self._get_admin_secret(),
                           use_cache=False)
    self.is_admin = True
Use a cryptograhically-secure Pseudorandom number generator for picking
a combination of letters, digits, and punctuation to be our secret.
:param length: how long to make the secret (12 seems ok most of the time)
def _create_secret(self, length=12):
    """
    Use a cryptographically-secure pseudorandom number generator to
    pick a combination of letters, digits and '+'/'-' for the secret.

    :param length: how long to make the secret (12 is usually fine).
    """
    # The alphabet has 64 characters: digits, letters, plus, minus.
    alphabet = string.digits + string.ascii_letters + '+-'
    rng = random.SystemRandom()
    return "".join(rng.choice(alphabet) for _ in range(length))
Create a new client for use by applications.
def create_client(self, client_id, client_secret):
    """
    Create a new client for use by applications.
    """
    # Only an authenticated admin may create clients.
    assert self.is_admin, "Must authenticate() as admin to create client"
    return self.uaac.create_client(client_id, client_secret)
Add the client credentials to the specified manifest.
def add_client_to_manifest(self, client_id, client_secret, manifest):
    """
    Add the client credentials to the specified manifest.
    """
    # Only an authenticated admin may manage clients.
    assert self.is_admin, "Must authenticate() as admin to create client"
    return self.uaac.add_client_to_manifest(client_id, client_secret,
                                            manifest)
Returns the URI endpoint for an instance of a UAA
service instance from environment inspection.
def _get_uaa_uri(self):
    """
    Return the URI endpoint for a UAA service instance found by
    inspecting the environment.
    """
    # Prefer cloud foundry service bindings when running in an app.
    if 'VCAP_SERVICES' in os.environ:
        services = json.loads(os.getenv('VCAP_SERVICES'))
        return services['predix-uaa'][0]['credentials']['uri']
    return predix.config.get_env_value(self, 'uri')
Returns response of authenticating with the given client and
secret.
def _authenticate_client(self, client, secret):
    """
    Return the response of authenticating with the given client and
    secret (client_credentials grant).
    """
    raw = ':'.join([client, secret])
    credentials = base64.b64encode(raw.encode('utf-8')).decode('utf-8')
    headers = {
        'Content-Type': 'application/x-www-form-urlencoded',
        'Cache-Control': 'no-cache',
        'Authorization': 'Basic ' + credentials
    }
    params = {
        'client_id': client,
        'grant_type': 'client_credentials'
    }
    uri = self.uri + '/oauth/token'
    logging.debug("URI=" + str(uri))
    logging.debug("HEADERS=" + str(headers))
    logging.debug("BODY=" + str(params))
    response = requests.post(uri, headers=headers, params=params)
    if response.status_code == 200:
        logging.debug("RESPONSE=" + str(response.json()))
        return response.json()
    logging.warning("Failed to authenticate as %s" % (client))
    response.raise_for_status()
Returns the response of authenticating with the given
user and password.
def _authenticate_user(self, user, password):
    """
    Return the response of authenticating with the given user and
    password (password grant).
    """
    headers = self._get_headers()
    params = {
        'username': user,
        'password': password,
        'grant_type': 'password',
    }
    uri = self.uri + '/oauth/token'
    logging.debug("URI=" + str(uri))
    logging.debug("HEADERS=" + str(headers))
    logging.debug("BODY=" + str(params))
    response = requests.post(uri, headers=headers, params=params)
    if response.status_code == 200:
        logging.debug("RESPONSE=" + str(response.json()))
        return response.json()
    logging.warning("Failed to authenticate %s" % (user))
    response.raise_for_status()
For a given client will test whether or not the token
has expired.
This is for testing a client object and does not look up
from client_id. You can use _get_client_from_cache() to
lookup a client from client_id.
def is_expired_token(self, client):
    """
    For a given client, test whether or not its token has expired.

    This tests a client object directly and does not look one up
    from a client_id -- use _get_client_from_cache() for that.
    """
    # A record without an expiry is treated as expired.
    if 'expires' not in client:
        return True
    expiry = dateutil.parser.parse(client['expires'])
    return expiry < datetime.datetime.now()
If we don't yet have a uaa cache we need to
initialize it. As there may be more than one
UAA instance we index by issuer and then store
any clients, users, etc.
def _initialize_uaa_cache(self):
    """
    Initialize the uaa cache structure. As there may be more than one
    UAA instance, entries are indexed by issuer, with clients, users,
    etc. stored under each.
    """
    cache_dir = os.path.dirname(self._cache_path)
    try:
        os.makedirs(cache_dir)
    except OSError as exc:
        # An already-existing directory is fine; anything else is a
        # real error.
        if exc.errno != errno.EEXIST:
            raise
    return {self.uri: []}
Read cache of UAA client/user details.
def _read_uaa_cache(self):
    """
    Read the cache of UAA client/user details from disk,
    initializing it when missing.
    """
    self._cache_path = os.path.expanduser('~/.predix/uaa.json')
    if not os.path.exists(self._cache_path):
        return self._initialize_uaa_cache()
    with open(self._cache_path, 'r') as cache_file:
        return json.load(cache_file)
For the given client_id return what is
cached.
def _get_client_from_cache(self, client_id):
    """
    Return the cached record for the given client_id, or None when
    nothing is cached for it.
    """
    data = self._read_uaa_cache()
    # Nothing may be cached for this issuer yet.
    for client in data.get(self.uri, []):
        if client['id'] == client_id:
            return client
    return None
Cache the client details into a cached file on disk.
def _write_to_uaa_cache(self, new_item):
"""
Cache the client details into a cached file on disk.
"""
data = self._read_uaa_cache()
# Initialize client list if first time
if self.uri not in data:
data[self.uri] = []
# Remove existing client record and any expired tokens
for client in data[self.uri]:
if new_item['id'] == client['id']:
data[self.uri].remove(client)
continue
# May have old tokens laying around to be cleaned up
if 'expires' in client:
expires = dateutil.parser.parse(client['expires'])
if expires < datetime.datetime.now():
data[self.uri].remove(client)
continue
data[self.uri].append(new_item)
with open(self._cache_path, 'w') as output:
output.write(json.dumps(data, sort_keys=True, indent=4)) |
Authenticate the given client against UAA. The resulting token
will be cached for reuse.
def authenticate(self, client_id, client_secret, use_cache=True):
    """
    Authenticate the given client against UAA and cache the resulting
    token for reuse.

    :param client_id: the client to authenticate as
    :param client_secret: the client's secret
    :param use_cache: when True, reuse a cached, unexpired token
        instead of re-authenticating
    """
    # Reuse a cached token for as long as it hasn't expired.
    if use_cache:
        cached = self._get_client_from_cache(client_id)
        if cached and not self.is_expired_token(cached):
            self.authenticated = True
            self.client = cached
            return

    # Authenticate afresh and record when the new token will expire.
    client = {'id': client_id, 'secret': client_secret}
    client.update(self._authenticate_client(client_id, client_secret))
    expiry = datetime.datetime.now() + datetime.timedelta(
            seconds=client['expires_in'])
    client['expires'] = expiry.isoformat()

    # Cache it for repeated use until expired.
    self._write_to_uaa_cache(client)
    self.client = client
    self.authenticated = True
Log currently authenticated user out, invalidating any existing tokens.
def logout(self):
    """
    Log the currently authenticated user out by removing the cached
    token from the local cache.

    MAINT: the token is only removed locally; it is not yet
    expired/revoked on the server.
    """
    data = self._read_uaa_cache()
    if self.uri in data:
        # Filter instead of list.remove() inside the iteration --
        # removing while iterating skips the element that follows each
        # removal, which could leave a duplicate record for this
        # client behind.
        data[self.uri] = [
            client for client in data[self.uri]
            if client['id'] != self.client['id']
        ]

    with open(self._cache_path, 'w') as output:
        output.write(json.dumps(data, sort_keys=True, indent=4))
Simple POST request for a given uri path.
def _post(self, uri, data, headers=None):
"""
Simple POST request for a given uri path.
"""
if not headers:
headers = self._get_headers()
logging.debug("URI=" + str(uri))
logging.debug("HEADERS=" + str(headers))
logging.debug("BODY=" + str(data))
response = self.session.post(uri, headers=headers,
data=json.dumps(data))
logging.debug("STATUS=" + str(response.status_code))
if response.status_code in [200, 201]:
return response.json()
else:
logging.error(b"ERROR=" + response.content)
response.raise_for_status() |
Returns the bare access token for the authorized client.
def get_token(self):
    """
    Return the bare access token for the authorized client.

    Re-authenticates automatically (bypassing the cache) when the
    current token has expired.

    :raises ValueError: if authenticate() has not been called yet
    """
    if not self.authenticated:
        raise ValueError("Must authenticate() as a client first.")

    # Expired token means we need a fresh client credential.
    if self.is_expired_token(self.client):
        logging.info("client token expired, will need to refresh token")
        self.authenticate(self.client['id'], self.client['secret'],
                use_cache=False)

    return self.client['access_token']
Returns the scopes for the authenticated client.
def get_scopes(self):
    """
    Return the list of scopes granted to the authenticated client.

    :raises ValueError: if authenticate() has not been called yet
    """
    if not self.authenticated:
        raise ValueError("Must authenticate() as a client first.")
    # Scopes are stored as a single space-separated string.
    return self.client['scope'].split()
Warn that the required scope is not found in the scopes
granted to the currently authenticated user.
::
# The admin user should have client admin permissions
uaa.assert_has_permission('admin', 'clients.admin')
def assert_has_permission(self, scope_required):
    """
    Assert that the required scope was granted to the currently
    authenticated client, logging warnings and raising otherwise.

    ::

        # The authenticated client should have client admin permissions
        uaa.assert_has_permission('clients.admin')

    :param scope_required: the scope string that must be present
    :raises ValueError: if not authenticated, or if the scope is
        missing from the authenticated client's scopes
    :returns: True when the scope is present
    """
    if not self.authenticated:
        raise ValueError("Must first authenticate()")
    if scope_required not in self.get_scopes():
        # Log enough context to debug which client/scopes were in play.
        logging.warning("Authenticated as %s" % (self.client['id']))
        logging.warning("Have scopes: %s" % (str.join(',', self.get_scopes())))
        logging.warning("Insufficient scope %s for operation" % (scope_required))
        raise ValueError("Client does not have permission.")
    return True
Grant the given client_id permissions for managing clients.
- clients.admin: super user scope to create, modify, delete
- clients.write: scope ot create and modify clients
- clients.read: scope to read info about clients
- clients.secret: scope to change password of a client
def grant_client_permissions(self, client_id, admin=False, write=False,
        read=False, secret=False):
    """
    Grant the given client_id permissions for managing clients.

    - clients.admin: super user scope to create, modify, delete
    - clients.write: scope to create and modify clients
    - clients.read: scope to read info about clients
    - clients.secret: scope to change password of a client

    Granting admin implies write, read, and secret as well.
    """
    self.assert_has_permission('clients.admin')

    grants = []
    if admin:
        grants.append('clients.admin')
    # admin implies every lesser permission
    for wanted, permission in ((write, 'clients.write'),
                               (read, 'clients.read'),
                               (secret, 'clients.secret')):
        if wanted or admin:
            grants.append(permission)

    if grants:
        self.update_client_grants(client_id, scope=grants,
                authorities=grants)
Returns the clients stored in the instance of UAA.
def get_clients(self):
    """
    Return the list of clients stored in this instance of UAA.

    Requires the 'clients.read' scope.
    """
    self.assert_has_permission('clients.read')
    response = requests.get(self.uri + '/oauth/clients',
            headers=self.get_authorization_headers())
    return response.json()['resources']
Returns details about a specific client by the client_id.
def get_client(self, client_id):
    """
    Return details about the given client_id, or None if not found.

    Requires the 'clients.read' scope.
    """
    self.assert_has_permission('clients.read')
    response = requests.get(self.uri + '/oauth/clients/' + client_id,
            headers=self.get_authorization_headers())
    # Not found (or any other failure) is not raised -- return nothing.
    if response.status_code != 200:
        return None
    return response.json()
Will extend the client with additional scopes or
authorities. Any existing scopes and authorities will be left
as is unless asked to replace entirely.
def update_client_grants(self, client_id, scope=[], authorities=[],
        grant_types=[], redirect_uri=[], replace=False):
    """
    Extend (or replace) a client's scopes, authorities, grant types,
    and redirect URIs.

    By default existing scopes and authorities are kept and the given
    values appended; pass replace=True to overwrite scope and
    authorities entirely.

    :param client_id: client to modify (must already exist)
    :param scope: scopes to add (or to use verbatim when replace=True)
    :param authorities: authorities to add (or use verbatim)
    :param grant_types: OAuth grant types to add
    :param redirect_uri: redirect URIs to add (needed when adding the
        'authorization_code' grant type)
    :param replace: when True, scope/authorities replace the existing
        values instead of extending them
    :raises ValueError: if the client does not exist yet
    """
    self.assert_has_permission('clients.write')
    client = self.get_client(client_id)
    if not client:
        raise ValueError("Must first create client: '%s'" % (client_id))

    if replace:
        changes = {
            'client_id': client_id,
            'scope': scope,
            'authorities': authorities,
            }
    else:
        changes = {'client_id': client_id}
        # NOTE: extends the lists from the fetched client record;
        # duplicates are not filtered out here.
        if scope:
            changes['scope'] = client['scope']
            changes['scope'].extend(scope)

        if authorities:
            changes['authorities'] = client['authorities']
            changes['authorities'].extend(authorities)

        if grant_types:
            if 'authorization_code' in grant_types and not redirect_uri:
                logging.warning("A redirect_uri is required for authorization_code.")

            changes['authorized_grant_types'] = client['authorized_grant_types']
            changes['authorized_grant_types'].extend(grant_types)

        if redirect_uri:
            if 'redirect_uri' in client:
                changes['redirect_uri'] = client['redirect_uri']
                changes['redirect_uri'].extend(redirect_uri)
            else:
                changes['redirect_uri'] = redirect_uri

    uri = self.uri + '/oauth/clients/' + client_id
    headers = {
        "pragma": "no-cache",
        "Cache-Control": "no-cache",
        "Content-Type": "application/json",
        "Accepts": "application/json",
        "Authorization": "Bearer " + self.get_token()
        }
    logging.debug("URI=" + str(uri))
    logging.debug("HEADERS=" + str(headers))
    logging.debug("BODY=" + json.dumps(changes))

    response = requests.put(uri, headers=headers, data=json.dumps(changes))
    logging.debug("STATUS=" + str(response.status_code))
    if response.status_code == 200:
        return response
    else:
        logging.error(response.content)
        response.raise_for_status()
Grant the given client_id permissions for managing users. System
for Cross-domain Identity Management (SCIM) are required for accessing
/Users and /Groups endpoints of UAA.
- scim.read: scope for read access to all SCIM endpoints
- scim.write: scope for write access to all SCIM endpoints
- scim.create: scope to create/invite users and verify an account only
- scim.userids: scope for id and username+origin conversion
- scim.zones: scope for group management of users only
- scim.invite: scope to participate in invitations
- openid: scope to access /userinfo
def grant_scim_permissions(self, client_id, read=False, write=False,
        create=False, userids=False, zones=False, invite=False,
        openid=False):
    """
    Grant the given client_id permissions for managing users.

    System for Cross-domain Identity Management (SCIM) scopes are
    required for accessing /Users and /Groups endpoints of UAA.

    - scim.read: scope for read access to all SCIM endpoints
    - scim.write: scope for write access to all SCIM endpoints
    - scim.create: scope to create/invite users and verify an account only
    - scim.userids: scope for id and username+origin conversion
    - scim.zones: scope for group management of users only
    - scim.invite: scope to participate in invitations
    - openid: scope to access /userinfo
    """
    self.assert_has_permission('clients.admin')

    # Map each flag onto the scope it grants, preserving order.
    flag_scopes = (
        (read, 'scim.read'),
        (write, 'scim.write'),
        (create, 'scim.create'),
        (userids, 'scim.userids'),
        (zones, 'scim.zones'),
        (invite, 'scim.invite'),
        (openid, 'openid'),
    )
    perms = [perm for wanted, perm in flag_scopes if wanted]

    if perms:
        self.update_client_grants(client_id, scope=perms, authorities=perms)
Will create a new client for your application use.
- client_credentials: allows client to get access token
- refresh_token: can be used to get new access token when expired
without re-authenticating
- authorization_code: redirection-based flow for user authentication
More details about Grant types:
- https://github.com/cloudfoundry/uaa/blob/master/docs/UAA-Security.md
- https://tools.ietf.org/html/rfc6749
A redirect_uri is required when using authorization_code. See:
https://www.predix.io/support/article/KB0013026
def create_client(self, client_id, client_secret, manifest=None,
        client_credentials=True, refresh_token=True,
        authorization_code=False, redirect_uri=[]):
    """
    Create a new client for your application use.

    - client_credentials: allows client to get access token
    - refresh_token: can be used to get new access token when expired
      without re-authenticating
    - authorization_code: redirection-based flow for user authentication

    More details about Grant types:
    - https://github.com/cloudfoundry/uaa/blob/master/docs/UAA-Security.md
    - https://tools.ietf.org/html/rfc6749

    A redirect_uri is required when using authorization_code. See:
    https://www.predix.io/support/article/KB0013026

    :param manifest: optional manifest object to record the new
        client id/secret in as environment variables
    :returns: the existing client record if the client already exists,
        otherwise the raw POST response
    """
    self.assert_has_permission('clients.admin')
    if authorization_code and not redirect_uri:
        raise ValueError("Must provide a redirect_uri for clients used with authorization_code")

    # Check if client already exists
    client = self.get_client(client_id)
    if client:
        return client

    uri = self.uri + '/oauth/clients'
    headers = {
        "pragma": "no-cache",
        "Cache-Control": "no-cache",
        "Content-Type": "application/json",
        "Accepts": "application/json",
        "Authorization": "Bearer " + self.get_token()
        }

    grant_types = []
    if client_credentials:
        grant_types.append('client_credentials')
    if refresh_token:
        grant_types.append('refresh_token')
    if authorization_code:
        grant_types.append('authorization_code')

    params = {
        "client_id": client_id,
        "client_secret": client_secret,
        "scope": ["uaa.none"],
        "authorized_grant_types": grant_types,
        "authorities": ["uaa.none"],
        "autoapprove": []
        }
    if redirect_uri:
        # BUG FIX: params is a dict, so the original
        # `params.append(redirect_uri)` raised AttributeError and the
        # redirect_uri was never sent to UAA.
        params['redirect_uri'] = redirect_uri

    response = requests.post(uri, headers=headers, data=json.dumps(params))
    if response.status_code == 201:
        if manifest:
            self.add_client_to_manifest(client_id, client_secret, manifest)

        client = {
            'id': client_id,
            'secret': client_secret
        }
        self._write_to_uaa_cache(client)

        return response
    else:
        logging.error(response.content)
        response.raise_for_status()
Add the given client / secret to the manifest for use in
the application.
def add_client_to_manifest(self, client_id, client_secret, manifest):
    """
    Record the given client id / secret in the manifest as environment
    variables for use in the application.
    """
    manifest.add_env_var('PREDIX_APP_CLIENT_ID', client_id)
    manifest.add_env_var('PREDIX_APP_CLIENT_SECRET', client_secret)
    manifest.write_manifest()
Creates a new user account with the required details.
::
create_user('j12y', 'my-secret', 'Delancey', 'Jayson', 'volcano@ge.com')
def create_user(self, username, password, family_name, given_name, primary_email,
        details={}):
    """
    Create a new user account with the required details.

    ::

        create_user('j12y', 'my-secret', 'Delancey', 'Jayson', 'volcano@ge.com')

    :param details: optional dict of additional SCIM attributes merged
        into the request body (caller-supplied keys win)
    """
    self.assert_has_permission('scim.write')

    user = {
        'userName': username,
        'password': password,
        'name': {
            'familyName': family_name,
            'givenName': given_name,
        },
        'emails': [{
            'value': primary_email,
            'primary': True,
        }],
    }
    user.update(details or {})

    return self._post_user(user)
Delete user with given id.
def delete_user(self, id):
    """
    Delete the user with the given id.

    Requires the 'scim.write' scope; raises for any error response.
    """
    self.assert_has_permission('scim.write')

    uri = self.uri + '/Users/%s' % id
    headers = self._get_headers()
    logging.debug("URI=" + str(uri))
    logging.debug("HEADERS=" + str(headers))

    response = self.session.delete(uri, headers=headers)
    logging.debug("STATUS=" + str(response.status_code))

    if response.status_code == 200:
        return response

    logging.error(response.content)
    response.raise_for_status()
Returns users accounts stored in UAA.
See https://docs.cloudfoundry.org/api/uaa/#list63
For filtering help, see:
http://www.simplecloud.info/specs/draft-scim-api-01.html#query-resources
def get_users(self, filter=None, sortBy=None, sortOrder=None,
        startIndex=None, count=None):
    """
    Return user accounts stored in UAA.

    See https://docs.cloudfoundry.org/api/uaa/#list63

    For filtering help, see:
    http://www.simplecloud.info/specs/draft-scim-api-01.html#query-resources
    """
    self.assert_has_permission('scim.read')

    # Only pass along the query parameters the caller actually set.
    candidates = {
        'filter': filter,
        'sortBy': sortBy,
        'sortOrder': sortOrder,
        'startIndex': startIndex,
        'count': count,
    }
    params = {key: value for key, value in candidates.items() if value}

    return self._get(self.uri + '/Users', params=params)
Returns details for user of the given username.
If there is more than one match will only return the first. Use
get_users() for full result set.
def get_user_by_username(self, username):
    """
    Return details for the user with the given username.

    If there is more than one match only the first is returned; use
    get_users() for the full result set. Returns None when there is
    no match.
    """
    results = self.get_users(filter='username eq "%s"' % (username))
    total = results['totalResults']

    if not total:
        logging.warning("Found no matches for given username.")
        return None
    if total > 1:
        logging.warning("Found %s matches for username %s" %
                (total, username))

    return results['resources'][0]
Returns details for user with the given email address.
If there is more than one match will only return the first. Use
get_users() for full result set.
def get_user_by_email(self, email):
    """
    Return details for the user with the given email address.

    If there is more than one match only the first is returned; use
    get_users() for the full result set. Returns None when there is
    no match.
    """
    results = self.get_users(filter='email eq "%s"' % (email))
    total = results['totalResults']

    if not total:
        logging.warning("Found no matches for given email.")
        return None
    if total > 1:
        logging.warning("Found %s matches for email %s" %
                (total, email))

    return results['resources'][0]
Returns details about the user for the given id.
Use get_user_by_email() or get_user_by_username() for help
identifiying the id.
def get_user(self, id):
    """
    Return details about the user with the given id.

    Use get_user_by_email() or get_user_by_username() for help
    identifying the id.
    """
    self.assert_has_permission('scim.read')
    return self._get('%s/Users/%s' % (self.uri, id))
Create an instance of the Time Series Service with the typical
starting settings.
def create(self):
    """
    Create an instance of the Time Series Service with the typical
    starting settings, exporting the ingest/query endpoints and zone
    ids into the environment.
    """
    self.service.create()

    # Make each endpoint / zone id available to the client class.
    for attribute, value in (
            ('ingest_uri', self.get_ingest_uri()),
            ('ingest_zone_id', self.get_ingest_zone_id()),
            ('query_uri', self.get_query_uri()),
            ('query_zone_id', self.get_query_zone_id())):
        predix.config.set_env_value(self.use_class, attribute, value)
Grant the given client id all the scopes and authorities
needed to work with the timeseries service.
def grant_client(self, client_id, read=True, write=True):
    """
    Grant the given client id all the scopes and authorities needed to
    work with the timeseries service, then return the updated client
    record.
    """
    scopes = ['openid']
    authorities = ['uaa.resource']

    # Ingest (write) and query (read) zones each contribute their
    # token scopes to both lists.
    zone_scopes = []
    if write:
        zone_scopes.extend(
            self.service.settings.data['ingest']['zone-token-scopes'])
    if read:
        zone_scopes.extend(
            self.service.settings.data['query']['zone-token-scopes'])

    scopes.extend(zone_scopes)
    authorities.extend(zone_scopes)

    self.service.uaa.uaac.update_client_grants(client_id, scope=scopes,
            authorities=authorities)

    return self.service.uaa.uaac.get_client(client_id)
Return the uri used for queries on time series data.
def get_query_uri(self):
    """
    Return the base uri used for queries on time series data.

    The configured query uri includes an extra path component that is
    not wanted, so it is stripped off here.
    """
    parts = urlparse(self.service.settings.data['query']['uri'])
    return '%s://%s' % (parts.scheme, parts.netloc)
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: An predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app.
def add_to_manifest(self, manifest):
    """
    Add useful details about this service to the manifest so that it
    can be used in an application.

    :param manifest: a predix.admin.app.Manifest object instance that
        manages reading/writing manifest config for a cloud foundry app
    """
    # Add this service to list of services
    manifest.add_service(self.service.name)

    # Export each endpoint / zone id as an environment variable.
    for attribute, value in (
            ('ingest_uri', self.get_ingest_uri()),
            ('ingest_zone_id', self.get_ingest_zone_id()),
            ('query_uri', self.get_query_uri()),
            ('query_zone_id', self.get_query_zone_id())):
        key = predix.config.get_env_key(self.use_class, attribute)
        manifest.add_env_var(key, value)

    manifest.write_manifest()
This method tries to determine the requirements of a particular project
by inspecting the possible places that they could be defined.
It will attempt, in order:
1) to parse setup.py in the root for an install_requires value
2) to read a requirements.txt file or a requirements.pip in the root
3) to read all .txt files in a folder called 'requirements' in the root
4) to read files matching "*requirements*.txt" and "*reqs*.txt" in the root,
excluding any starting or ending with 'test'
If one of these succeeds, then a list of pkg_resources.Requirement's
will be returned. If none can be found, then a RequirementsNotFound
will be raised
def find_requirements(path):
    """
    Determine the requirements of a particular project by inspecting
    the possible places that they could be defined.

    It will attempt, in order:

    1) to parse setup.py in the root for an install_requires value
    2) to read a requirements.txt file or a requirements.pip in the root
    3) to read all .txt files in a folder called 'requirements' in the root
    4) to read files matching "*requirements*.txt" and "*reqs*.txt" in
       the root, excluding any starting or ending with 'test'

    If one of these succeeds, a sorted list of pkg_resources
    Requirement's is returned. If none can be found, a
    RequirementsNotFound is raised.
    """
    # A parsable setup.py wins outright.
    setup_py = os.path.join(path, 'setup.py')
    if os.path.isfile(setup_py):
        try:
            found = from_setup_py(setup_py)
            found.sort()
            return found
        except CouldNotParseRequirements:
            pass

    requirements = []

    for reqfile_name in ('requirements.txt', 'requirements.pip'):
        reqfile_path = os.path.join(path, reqfile_name)
        if os.path.isfile(reqfile_path):
            try:
                requirements += from_requirements_txt(reqfile_path)
            except CouldNotParseRequirements:
                pass

    requirements_dir = os.path.join(path, 'requirements')
    if os.path.isdir(requirements_dir):
        from_dir = from_requirements_dir(requirements_dir)
        if from_dir is not None:
            requirements += from_dir

    from_blob = from_requirements_blob(path)
    if from_blob is not None:
        requirements += from_blob

    # De-duplicate and sort whatever was collected.
    requirements = sorted(set(requirements))
    if requirements:
        return requirements

    raise RequirementsNotFound
Returns the GUID for the app instance with
the given name.
def get_app_guid(self, app_name):
    """
    Return the GUID for the app instance with the given name, or None
    if no such app exists in the space.
    """
    summary = self.space.get_space_summary()
    matches = (app['guid'] for app in summary['apps']
               if app['name'] == app_name)
    return next(matches, None)
Delete the given app.
Will fail intentionally if there are any service
bindings. You must delete those first.
def delete_app(self, app_name):
    """
    Delete the given app.

    Will fail intentionally if there are any service bindings; those
    must be deleted first. Returns True immediately when the app does
    not exist.
    """
    if app_name not in self.space.get_apps():
        logging.warning("App not found so... succeeded?")
        return True

    self.api.delete("/v2/apps/%s" % (self.get_app_guid(app_name)))
Reads in config file of UAA credential information
or generates one as a side-effect if not yet
initialized.
def _get_service_config(self):
"""
Reads in config file of UAA credential information
or generates one as a side-effect if not yet
initialized.
"""
# Should work for windows, osx, and linux environments
if not os.path.exists(self.config_path):
try:
os.makedirs(os.path.dirname(self.config_path))
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
return {}
with open(self.config_path, 'r') as data:
return json.load(data) |
Will write the config out to disk.
def _write_service_config(self):
"""
Will write the config out to disk.
"""
with open(self.config_path, 'w') as output:
output.write(json.dumps(self.data, sort_keys=True, indent=4)) |
Create an instance of the Blob Store Service with the typical
starting settings.
def create(self, **kwargs):
    """
    Create an instance of the Blob Store Service with the typical
    starting settings, exporting its credentials into the environment.
    """
    self.service.create(**kwargs)

    # Expose each credential setting to the client class.
    for key in ('url', 'access_key_id', 'bucket_name', 'host',
                'secret_access_key'):
        predix.config.set_env_value(self.use_class, key,
                self.service.settings.data[key])
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: An predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app.
def add_to_manifest(self, manifest):
    """
    Add useful details about this service to the manifest so that it
    can be used in an application.

    :param manifest: a predix.admin.app.Manifest object instance that
        manages reading/writing manifest config for a cloud foundry app
    """
    # Add this service to the list of services
    manifest.add_service(self.service.name)

    # Export each credential setting as an environment variable.
    for key in ('url', 'access_key_id', 'bucket_name', 'host',
                'secret_access_key'):
        env_key = predix.config.get_env_key(self.use_class, key)
        manifest.add_env_var(env_key, self.service.settings.data[key])

    manifest.write_manifest()
return a generator for all subscribe messages
:return: None
def subscribe(self):
    """
    Generator yielding received subscription messages, one at a time,
    for as long as run_subscribe_generator remains set.

    NOTE(review): spins without sleeping while the receive queue is
    empty -- callers should be aware this is a busy-wait.

    :return: None
    """
    while self.run_subscribe_generator:
        if self._rx_messages:
            yield self._rx_messages.pop(0)
    return
send acks to the service
:param message: EventHub_pb2.Message
:return: None
def send_acks(self, message):
    """
    Send ack(s) for the given message back to the service.

    :param message: an EventHub_pb2.Message (single ack) or an
        EventHub_pb2.SubscriptionMessage (acks each contained message)
    :return: None
    """
    if isinstance(message, EventHub_pb2.Message):
        ack = EventHub_pb2.Ack(partition=message.partition,
                               offset=message.offset)
        self.grpc_manager.send_message(
            EventHub_pb2.SubscriptionResponse(ack=ack))
    elif isinstance(message, EventHub_pb2.SubscriptionMessage):
        # BUG FIX: the keyword was misspelled 'parition', which made
        # protobuf reject the Ack constructor when acking batches.
        acks = [EventHub_pb2.Ack(partition=m.partition, offset=m.offset)
                for m in message.messages]
        self.grpc_manager.send_message(
            EventHub_pb2.SubscriptionAcks(ack=acks))
generate the subscribe stub headers based on the supplied config
:return: i
def _generate_subscribe_headers(self):
"""
generate the subscribe stub headers based on the supplied config
:return: i
"""
headers =[]
headers.append(('predix-zone-id', self.eventhub_client.zone_id))
token = self.eventhub_client.service._get_bearer_token()
headers.append(('subscribername', self._config.subscriber_name))
headers.append(('authorization', token[(token.index(' ') + 1):]))
if self._config.topics is []:
headers.append(('topic', self.eventhub_client.zone_id + '_topic'))
else:
for topic in self._config.topics:
headers.append(('topic', topic))
headers.append(('offset-newest', str(self._config.recency == self._config.Recency.NEWEST).lower()))
headers.append(('acks', str(self._config.acks_enabled).lower()))
if self._config.acks_enabled:
headers.append(('max-retries', str(self._config.ack_max_retries)))
headers.append(('retry-interval', str(self._config.ack_retry_interval_seconds) + 's'))
headers.append(('duration-before-retry', str(self._config.ack_duration_before_retry_seconds) + 's'))
if self._config.batching_enabled:
headers.append(('batch-size', str(self._config.batch_size)))
headers.append(('batch-interval', str(self._config.batch_interval_millis) + 'ms'))
return headers |
Returns the raw results of an asset search for a given bounding
box.
def _get_assets(self, bbox, size=None, page=None, asset_type=None,
device_type=None, event_type=None, media_type=None):
"""
Returns the raw results of an asset search for a given bounding
box.
"""
uri = self.uri + '/v1/assets/search'
headers = self._get_headers()
params = {
'bbox': bbox,
}
# Query parameters
params['q'] = []
if device_type:
if isinstance(device_type, str):
device_type = [device_type]
for device in device_type:
if device not in self.DEVICE_TYPES:
logging.warning("Invalid device type: %s" % device)
params['q'].append("device-type:%s" % device)
if asset_type:
if isinstance(asset_type, str):
asset_type = [asset_type]
for asset in asset_type:
if asset not in self.ASSET_TYPES:
logging.warning("Invalid asset type: %s" % asset)
params['q'].append("assetType:%s" % asset)
if media_type:
if isinstance(media_type, str):
media_type = [media_type]
for media in media_type:
if media not in self.MEDIA_TYPES:
logging.warning("Invalid media type: %s" % media)
params['q'].append("mediaType:%s" % media)
if event_type:
if isinstance(event_type, str):
event_type = [event_type]
for event in event_type:
if event not in self.EVENT_TYPES:
logging.warning("Invalid event type: %s" % event)
params['q'].append("eventTypes:%s" % event)
# Pagination parameters
if size:
params['size'] = size
if page:
params['page'] = page
return self.service._get(uri, params=params, headers=headers) |
Query the assets stored in the intelligent environment for a given
bounding box and query.
Assets can be filtered by type of asset, event, or media available.
- device_type=['DATASIM']
- asset_type=['CAMERA']
- event_type=['PKIN']
- media_type=['IMAGE']
Pagination can be controlled with keyword parameters
- page=2
- size=100
Returns a list of assets stored in a dictionary that describe their:
- asset-id
- device-type
- device-id
- media-type
- coordinates
- event-type
Additionally there are some _links for additional information.
def get_assets(self, bbox, **kwargs):
    """
    Query the assets stored in the intelligent environment for a given
    bounding box and query.

    Assets can be filtered by type of asset, event, or media available:

    - device_type=['DATASIM']
    - asset_type=['CAMERA']
    - event_type=['PKIN']
    - media_type=['IMAGE']

    Pagination can be controlled with keyword parameters:

    - page=2
    - size=100

    Returns a list of asset dicts describing asset-id, device-type,
    device-id, media-type, coordinates, event-type -- plus a 'uid' key
    derived from each asset's self link (the broken HATEOAS _links
    block itself is removed).
    """
    response = self._get_assets(bbox, **kwargs)

    assets = []
    for asset in response['_embedded']['assets']:
        # Identify the asset uid from the self link before dropping
        # the _links block entirely.
        href = asset['_links']['self']['href']
        asset['uid'] = href.rsplit('/', 1)[-1]
        del asset['_links']
        assets.append(asset)

    return assets
Returns raw response for an given asset by its unique id.
def _get_asset(self, asset_uid):
"""
Returns raw response for an given asset by its unique id.
"""
uri = self.uri + '/v2/assets/' + asset_uid
headers = self._get_headers()
return self.service._get(uri, headers=headers) |
Label input grid with hysteresis method.
Args:
input_grid: 2D array of values.
Returns:
Labeled output grid.
def label(self, input_grid):
    """
    Label input grid with hysteresis method.

    Regions are seeded where values exceed the high threshold and are
    then grown (flood-filled) through 8-connected neighbors exceeding
    the low threshold. Seed regions are processed in descending order
    of their maximum value, so stronger regions claim contested cells
    first.

    Args:
        input_grid: 2D array of values.
    Returns:
        Labeled output grid (2D int array; 0 means unlabeled).
    """
    unset = 0
    # Seed regions: connected components above the high threshold,
    # ranked by each component's maximum value (strongest first).
    high_labels, num_labels = label(input_grid > self.high_thresh)
    region_ranking = np.argsort(maximum(input_grid, high_labels, index=np.arange(1, num_labels + 1)))[::-1]
    output_grid = np.zeros(input_grid.shape, dtype=int)
    stack = []
    for rank in region_ranking:
        label_num = rank + 1
        label_i, label_j = np.where(high_labels == label_num)
        # Push every seed cell of this region not yet claimed by a
        # stronger (earlier-ranked) region.
        for i in range(label_i.size):
            if output_grid[label_i[i], label_j[i]] == unset:
                stack.append((label_i[i], label_j[i]))
        # Depth-first flood fill from the collected seed cells.
        while len(stack) > 0:
            index = stack.pop()
            output_grid[index] = label_num
            # Examine the 8-connected neighborhood (bounds-checked).
            for i in range(index[0] - 1, index[0] + 2):
                for j in range(index[1] - 1, index[1] + 2):
                    if 0 <= i < output_grid.shape[0] and 0 <= j < output_grid.shape[1]:
                        # Grow into cells above the low threshold that
                        # no region has claimed yet.
                        if (input_grid[i, j] > self.low_thresh) and (output_grid[i, j] == unset):
                            stack.append((i, j))
    return output_grid
Remove labeled objects that do not meet size threshold criteria.
Args:
labeled_grid: 2D output from label method.
min_size: minimum size of object in pixels.
Returns:
labeled grid with smaller objects removed.
def size_filter(labeled_grid, min_size):
    """
    Remove labeled objects that do not meet size threshold criteria.

    Objects must contain at least min_size pixels and span more than
    one row and one column; surviving objects are renumbered
    consecutively starting from 1.

    Args:
        labeled_grid: 2D output from label method.
        min_size: minimum size of object in pixels.
    Returns:
        labeled grid with smaller objects removed.
    """
    out_grid = np.zeros(labeled_grid.shape, dtype=int)
    next_label = 1
    for index, bbox in enumerate(find_objects(labeled_grid)):
        old_label = index + 1
        region = labeled_grid[bbox]
        n_pixels = np.count_nonzero(region.ravel() == old_label)
        # Keep only regions that are big enough and not degenerate
        # (single-row or single-column bounding boxes are dropped).
        if n_pixels >= min_size and region.shape[0] > 1 and region.shape[1] > 1:
            out_grid[labeled_grid == old_label] = next_label
            next_label += 1
    return out_grid
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.