# Dataset-export artifact (preserved as comments so the module stays parseable):
# text
# stringlengths 29
# 850k
# |
# |---|
# coding: utf-8
"""
Wavefront REST API
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from wavefront_api_client.api_client import ApiClient
class SearchApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def search_account_entities(self, **kwargs):  # noqa: E501
    """Search over a customer's accounts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_account_entities(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param SortableSearchRequest body:
    :return: ResponseContainerPagedAccount
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the payload, never the
    # (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async path: hand the request thread straight back.
        return self.search_account_entities_with_http_info(**kwargs)  # noqa: E501
    # Sync path: return the deserialized response body.
    return self.search_account_entities_with_http_info(**kwargs)  # noqa: E501
def search_account_entities_with_http_info(self, **kwargs):  # noqa: E501
    """Search over a customer's accounts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_account_entities_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param SortableSearchRequest body:
    :return: ResponseContainerPagedAccount
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands, including the client options.
    accepted = {'body', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    params = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_account_entities" % name
            )
        params[name] = value

    collection_formats = {}
    path_params = {}
    query_params = []
    form_params = []
    local_var_files = {}
    # The search criteria travel in the JSON request body.
    body_params = params.get('body')

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    # Authenticate with the customer's API token.
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/search/account', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainerPagedAccount',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_account_for_facet(self, facet, **kwargs):  # noqa: E501
    """List the values of a specific facet over the customer's accounts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_account_for_facet(facet, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str facet: (required)
    :param FacetSearchRequestContainer body:
    :return: ResponseContainerFacetResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async path: hand the request thread straight back.
        return self.search_account_for_facet_with_http_info(facet, **kwargs)  # noqa: E501
    # Sync path: return the deserialized response body.
    return self.search_account_for_facet_with_http_info(facet, **kwargs)  # noqa: E501
def search_account_for_facet_with_http_info(self, facet, **kwargs):  # noqa: E501
    """List the values of a specific facet over the customer's accounts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_account_for_facet_with_http_info(facet, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str facet: (required)
    :param FacetSearchRequestContainer body:
    :return: ResponseContainerFacetResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands, including the client options.
    accepted = {'facet', 'body', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    params = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_account_for_facet" % name
            )
        params[name] = value

    # 'facet' is a required path parameter; reject an explicit None.
    if facet is None:
        raise ValueError("Missing the required parameter `facet` when calling `search_account_for_facet`")  # noqa: E501

    collection_formats = {}
    path_params = {'facet': facet}  # noqa: E501
    query_params = []
    form_params = []
    local_var_files = {}
    # The facet search criteria travel in the JSON request body.
    body_params = params.get('body')

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    # Authenticate with the customer's API token.
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/search/account/{facet}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainerFacetResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_account_for_facets(self, **kwargs):  # noqa: E501
    """List the values of one or more facets over the customer's accounts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_account_for_facets(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param FacetsSearchRequestContainer body:
    :return: ResponseContainerFacetsResponseContainer
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async path: hand the request thread straight back.
        return self.search_account_for_facets_with_http_info(**kwargs)  # noqa: E501
    # Sync path: return the deserialized response body.
    return self.search_account_for_facets_with_http_info(**kwargs)  # noqa: E501
def search_account_for_facets_with_http_info(self, **kwargs):  # noqa: E501
    """List the values of one or more facets over the customer's accounts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_account_for_facets_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param FacetsSearchRequestContainer body:
    :return: ResponseContainerFacetsResponseContainer
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands, including the client options.
    accepted = {'body', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    params = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_account_for_facets" % name
            )
        params[name] = value

    collection_formats = {}
    path_params = {}
    query_params = []
    form_params = []
    local_var_files = {}
    # The multi-facet search criteria travel in the JSON request body.
    body_params = params.get('body')

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    # Authenticate with the customer's API token.
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/search/account/facets', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainerFacetsResponseContainer',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_alert_deleted_entities(self, **kwargs):  # noqa: E501
    """Search over a customer's deleted alerts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_alert_deleted_entities(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param SortableSearchRequest body:
    :return: ResponseContainerPagedAlert
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async path: hand the request thread straight back.
        return self.search_alert_deleted_entities_with_http_info(**kwargs)  # noqa: E501
    # Sync path: return the deserialized response body.
    return self.search_alert_deleted_entities_with_http_info(**kwargs)  # noqa: E501
def search_alert_deleted_entities_with_http_info(self, **kwargs):  # noqa: E501
    """Search over a customer's deleted alerts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_alert_deleted_entities_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param SortableSearchRequest body:
    :return: ResponseContainerPagedAlert
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands, including the client options.
    accepted = {'body', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    params = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_alert_deleted_entities" % name
            )
        params[name] = value

    collection_formats = {}
    path_params = {}
    query_params = []
    form_params = []
    local_var_files = {}
    # The search criteria travel in the JSON request body.
    body_params = params.get('body')

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    # Authenticate with the customer's API token.
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/search/alert/deleted', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainerPagedAlert',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_alert_deleted_for_facet(self, facet, **kwargs):  # noqa: E501
    """List the values of a specific facet over the customer's deleted alerts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_alert_deleted_for_facet(facet, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str facet: (required)
    :param FacetSearchRequestContainer body:
    :return: ResponseContainerFacetResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async path: hand the request thread straight back.
        return self.search_alert_deleted_for_facet_with_http_info(facet, **kwargs)  # noqa: E501
    # Sync path: return the deserialized response body.
    return self.search_alert_deleted_for_facet_with_http_info(facet, **kwargs)  # noqa: E501
def search_alert_deleted_for_facet_with_http_info(self, facet, **kwargs):  # noqa: E501
    """List the values of a specific facet over the customer's deleted alerts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_alert_deleted_for_facet_with_http_info(facet, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str facet: (required)
    :param FacetSearchRequestContainer body:
    :return: ResponseContainerFacetResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands, including the client options.
    accepted = {'facet', 'body', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    params = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_alert_deleted_for_facet" % name
            )
        params[name] = value

    # 'facet' is a required path parameter; reject an explicit None.
    if facet is None:
        raise ValueError("Missing the required parameter `facet` when calling `search_alert_deleted_for_facet`")  # noqa: E501

    collection_formats = {}
    path_params = {'facet': facet}  # noqa: E501
    query_params = []
    form_params = []
    local_var_files = {}
    # The facet search criteria travel in the JSON request body.
    body_params = params.get('body')

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    # Authenticate with the customer's API token.
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/search/alert/deleted/{facet}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainerFacetResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_alert_deleted_for_facets(self, **kwargs):  # noqa: E501
    """List the values of one or more facets over the customer's deleted alerts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_alert_deleted_for_facets(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param FacetsSearchRequestContainer body:
    :return: ResponseContainerFacetsResponseContainer
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async path: hand the request thread straight back.
        return self.search_alert_deleted_for_facets_with_http_info(**kwargs)  # noqa: E501
    # Sync path: return the deserialized response body.
    return self.search_alert_deleted_for_facets_with_http_info(**kwargs)  # noqa: E501
def search_alert_deleted_for_facets_with_http_info(self, **kwargs):  # noqa: E501
    """List the values of one or more facets over the customer's deleted alerts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_alert_deleted_for_facets_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param FacetsSearchRequestContainer body:
    :return: ResponseContainerFacetsResponseContainer
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands, including the client options.
    accepted = {'body', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    params = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_alert_deleted_for_facets" % name
            )
        params[name] = value

    collection_formats = {}
    path_params = {}
    query_params = []
    form_params = []
    local_var_files = {}
    # The multi-facet search criteria travel in the JSON request body.
    body_params = params.get('body')

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    # Authenticate with the customer's API token.
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/search/alert/deleted/facets', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainerFacetsResponseContainer',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_alert_entities(self, **kwargs):  # noqa: E501
    """Search over a customer's non-deleted alerts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_alert_entities(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param SortableSearchRequest body:
    :return: ResponseContainerPagedAlertWithStats
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async path: hand the request thread straight back.
        return self.search_alert_entities_with_http_info(**kwargs)  # noqa: E501
    # Sync path: return the deserialized response body.
    return self.search_alert_entities_with_http_info(**kwargs)  # noqa: E501
def search_alert_entities_with_http_info(self, **kwargs):  # noqa: E501
    """Search over a customer's non-deleted alerts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_alert_entities_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param SortableSearchRequest body:
    :return: ResponseContainerPagedAlertWithStats
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands, including the client options.
    accepted = {'body', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    params = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_alert_entities" % name
            )
        params[name] = value

    collection_formats = {}
    path_params = {}
    query_params = []
    form_params = []
    local_var_files = {}
    # The search criteria travel in the JSON request body.
    body_params = params.get('body')

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    # Authenticate with the customer's API token.
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/search/alert', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainerPagedAlertWithStats',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_alert_for_facet(self, facet, **kwargs):  # noqa: E501
    """List the values of a specific facet over the customer's non-deleted alerts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_alert_for_facet(facet, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str facet: (required)
    :param FacetSearchRequestContainer body:
    :return: ResponseContainerFacetResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async path: hand the request thread straight back.
        return self.search_alert_for_facet_with_http_info(facet, **kwargs)  # noqa: E501
    # Sync path: return the deserialized response body.
    return self.search_alert_for_facet_with_http_info(facet, **kwargs)  # noqa: E501
def search_alert_for_facet_with_http_info(self, facet, **kwargs):  # noqa: E501
    """List the values of a specific facet over the customer's non-deleted alerts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_alert_for_facet_with_http_info(facet, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str facet: (required)
    :param FacetSearchRequestContainer body:
    :return: ResponseContainerFacetResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands, including the client options.
    accepted = {'facet', 'body', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    params = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_alert_for_facet" % name
            )
        params[name] = value

    # 'facet' is a required path parameter; reject an explicit None.
    if facet is None:
        raise ValueError("Missing the required parameter `facet` when calling `search_alert_for_facet`")  # noqa: E501

    collection_formats = {}
    path_params = {'facet': facet}  # noqa: E501
    query_params = []
    form_params = []
    local_var_files = {}
    # The facet search criteria travel in the JSON request body.
    body_params = params.get('body')

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    # Authenticate with the customer's API token.
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/search/alert/{facet}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainerFacetResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_alert_for_facets(self, **kwargs):  # noqa: E501
    """List the values of one or more facets over the customer's non-deleted alerts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_alert_for_facets(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param FacetsSearchRequestContainer body:
    :return: ResponseContainerFacetsResponseContainer
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async path: hand the request thread straight back.
        return self.search_alert_for_facets_with_http_info(**kwargs)  # noqa: E501
    # Sync path: return the deserialized response body.
    return self.search_alert_for_facets_with_http_info(**kwargs)  # noqa: E501
def search_alert_for_facets_with_http_info(self, **kwargs):  # noqa: E501
    """List the values of one or more facets over the customer's non-deleted alerts.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_alert_for_facets_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param FacetsSearchRequestContainer body:
    :return: ResponseContainerFacetsResponseContainer
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands, including the client options.
    accepted = {'body', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    params = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_alert_for_facets" % name
            )
        params[name] = value

    collection_formats = {}
    path_params = {}
    query_params = []
    form_params = []
    local_var_files = {}
    # The multi-facet search criteria travel in the JSON request body.
    body_params = params.get('body')

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    # Authenticate with the customer's API token.
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/search/alert/facets', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainerFacetsResponseContainer',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_cloud_integration_deleted_entities(self, **kwargs):  # noqa: E501
    """Search over a customer's deleted cloud integrations.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_cloud_integration_deleted_entities(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param SortableSearchRequest body:
    :return: ResponseContainerPagedCloudIntegration
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async path: hand the request thread straight back.
        return self.search_cloud_integration_deleted_entities_with_http_info(**kwargs)  # noqa: E501
    # Sync path: return the deserialized response body.
    return self.search_cloud_integration_deleted_entities_with_http_info(**kwargs)  # noqa: E501
def search_cloud_integration_deleted_entities_with_http_info(self, **kwargs):  # noqa: E501
    """Search over a customer's deleted cloud integrations.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_cloud_integration_deleted_entities_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param SortableSearchRequest body:
    :return: ResponseContainerPagedCloudIntegration
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keywords this endpoint understands, including the client options.
    accepted = {'body', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    params = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_cloud_integration_deleted_entities" % name
            )
        params[name] = value

    collection_formats = {}
    path_params = {}
    query_params = []
    form_params = []
    local_var_files = {}
    # The search criteria travel in the JSON request body.
    body_params = params.get('body')

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    # Authenticate with the customer's API token.
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/search/cloudintegration/deleted', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseContainerPagedCloudIntegration',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_cloud_integration_deleted_for_facet(self, facet, **kwargs):  # noqa: E501
    """List the values of a specific facet over the customer's deleted cloud integrations.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an async call:

    >>> thread = api.search_cloud_integration_deleted_for_facet(facet, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str facet: (required)
    :param FacetSearchRequestContainer body:
    :return: ResponseContainerFacetResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async path: hand the request thread straight back.
        return self.search_cloud_integration_deleted_for_facet_with_http_info(facet, **kwargs)  # noqa: E501
    # Sync path: return the deserialized response body.
    return self.search_cloud_integration_deleted_for_facet_with_http_info(facet, **kwargs)  # noqa: E501
    def search_cloud_integration_deleted_for_facet_with_http_info(self, facet, **kwargs):  # noqa: E501
        """Lists the values of a specific facet over the customer's deleted cloud integrations  # noqa: E501

        # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_cloud_integration_deleted_for_facet_with_http_info(facet, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str facet: (required)
        :param FacetSearchRequestContainer body:
        :return: ResponseContainerFacetResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # per-request options understood by ApiClient.call_api.
        all_params = ['facet', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the local namespace ('self', 'facet', 'kwargs'), then fold
        # the validated kwargs into it so `params` holds every argument by name.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                # Reject unknown keywords early instead of silently ignoring
                # them.
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_cloud_integration_deleted_for_facet" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'facet' is set
        if ('facet' not in params or
                params['facet'] is None):
            raise ValueError("Missing the required parameter `facet` when calling `search_cloud_integration_deleted_for_facet`")  # noqa: E501

        collection_formats = {}

        # `facet` is substituted into the {facet} placeholder of the URL path.
        path_params = {}
        if 'facet' in params:
            path_params['facet'] = params['facet']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The optional search request container is sent as the JSON body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        # Delegate the actual HTTP POST (and optional async dispatch) to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/api/v2/search/cloudintegration/deleted/{facet}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerFacetResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_cloud_integration_deleted_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's deleted cloud integrations # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_cloud_integration_deleted_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_cloud_integration_deleted_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_cloud_integration_deleted_for_facets_with_http_info(**kwargs) # noqa: E501
return data
    def search_cloud_integration_deleted_for_facets_with_http_info(self, **kwargs):  # noqa: E501
        """Lists the values of one or more facets over the customer's deleted cloud integrations  # noqa: E501

        # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_cloud_integration_deleted_for_facets_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param FacetsSearchRequestContainer body:
        :return: ResponseContainerFacetsResponseContainer
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # per-request options understood by ApiClient.call_api.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the local namespace ('self', 'kwargs'), then fold the
        # validated kwargs into it so `params` holds every argument by name.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                # Reject unknown keywords early instead of silently ignoring
                # them.
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_cloud_integration_deleted_for_facets" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        # This endpoint takes no path or query parameters.
        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The optional search request container is sent as the JSON body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        # Delegate the actual HTTP POST (and optional async dispatch) to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/api/v2/search/cloudintegration/deleted/facets', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerFacetsResponseContainer',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_cloud_integration_entities(self, **kwargs): # noqa: E501
"""Search over a customer's non-deleted cloud integrations # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_cloud_integration_entities(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedCloudIntegration
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_cloud_integration_entities_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_cloud_integration_entities_with_http_info(**kwargs) # noqa: E501
return data
    def search_cloud_integration_entities_with_http_info(self, **kwargs):  # noqa: E501
        """Search over a customer's non-deleted cloud integrations  # noqa: E501

        # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_cloud_integration_entities_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param SortableSearchRequest body:
        :return: ResponseContainerPagedCloudIntegration
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # per-request options understood by ApiClient.call_api.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the local namespace ('self', 'kwargs'), then fold the
        # validated kwargs into it so `params` holds every argument by name.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                # Reject unknown keywords early instead of silently ignoring
                # them.
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_cloud_integration_entities" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        # This endpoint takes no path or query parameters.
        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The optional search request is sent as the JSON body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        # Delegate the actual HTTP POST (and optional async dispatch) to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/api/v2/search/cloudintegration', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerPagedCloudIntegration',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_cloud_integration_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's non-deleted cloud integrations # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_cloud_integration_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_cloud_integration_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_cloud_integration_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data
    def search_cloud_integration_for_facet_with_http_info(self, facet, **kwargs):  # noqa: E501
        """Lists the values of a specific facet over the customer's non-deleted cloud integrations  # noqa: E501

        # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_cloud_integration_for_facet_with_http_info(facet, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str facet: (required)
        :param FacetSearchRequestContainer body:
        :return: ResponseContainerFacetResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # per-request options understood by ApiClient.call_api.
        all_params = ['facet', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the local namespace ('self', 'facet', 'kwargs'), then fold
        # the validated kwargs into it so `params` holds every argument by name.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                # Reject unknown keywords early instead of silently ignoring
                # them.
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_cloud_integration_for_facet" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'facet' is set
        if ('facet' not in params or
                params['facet'] is None):
            raise ValueError("Missing the required parameter `facet` when calling `search_cloud_integration_for_facet`")  # noqa: E501

        collection_formats = {}

        # `facet` is substituted into the {facet} placeholder of the URL path.
        path_params = {}
        if 'facet' in params:
            path_params['facet'] = params['facet']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The optional search request container is sent as the JSON body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        # Delegate the actual HTTP POST (and optional async dispatch) to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/api/v2/search/cloudintegration/{facet}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerFacetResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_cloud_integration_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's non-deleted cloud integrations # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_cloud_integration_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_cloud_integration_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_cloud_integration_for_facets_with_http_info(**kwargs) # noqa: E501
return data
    def search_cloud_integration_for_facets_with_http_info(self, **kwargs):  # noqa: E501
        """Lists the values of one or more facets over the customer's non-deleted cloud integrations  # noqa: E501

        # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_cloud_integration_for_facets_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param FacetsSearchRequestContainer body:
        :return: ResponseContainerFacetsResponseContainer
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # per-request options understood by ApiClient.call_api.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the local namespace ('self', 'kwargs'), then fold the
        # validated kwargs into it so `params` holds every argument by name.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                # Reject unknown keywords early instead of silently ignoring
                # them.
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_cloud_integration_for_facets" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        # This endpoint takes no path or query parameters.
        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The optional search request container is sent as the JSON body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        # Delegate the actual HTTP POST (and optional async dispatch) to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/api/v2/search/cloudintegration/facets', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerFacetsResponseContainer',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_dashboard_deleted_entities(self, **kwargs): # noqa: E501
"""Search over a customer's deleted dashboards # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_dashboard_deleted_entities(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedDashboard
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_dashboard_deleted_entities_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_dashboard_deleted_entities_with_http_info(**kwargs) # noqa: E501
return data
    def search_dashboard_deleted_entities_with_http_info(self, **kwargs):  # noqa: E501
        """Search over a customer's deleted dashboards  # noqa: E501

        # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_dashboard_deleted_entities_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param SortableSearchRequest body:
        :return: ResponseContainerPagedDashboard
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # per-request options understood by ApiClient.call_api.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the local namespace ('self', 'kwargs'), then fold the
        # validated kwargs into it so `params` holds every argument by name.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                # Reject unknown keywords early instead of silently ignoring
                # them.
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_dashboard_deleted_entities" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        # This endpoint takes no path or query parameters.
        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The optional search request is sent as the JSON body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        # Delegate the actual HTTP POST (and optional async dispatch) to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/api/v2/search/dashboard/deleted', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerPagedDashboard',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_dashboard_deleted_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's deleted dashboards # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_dashboard_deleted_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_dashboard_deleted_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_dashboard_deleted_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data
    def search_dashboard_deleted_for_facet_with_http_info(self, facet, **kwargs):  # noqa: E501
        """Lists the values of a specific facet over the customer's deleted dashboards  # noqa: E501

        # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_dashboard_deleted_for_facet_with_http_info(facet, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str facet: (required)
        :param FacetSearchRequestContainer body:
        :return: ResponseContainerFacetResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # per-request options understood by ApiClient.call_api.
        all_params = ['facet', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the local namespace ('self', 'facet', 'kwargs'), then fold
        # the validated kwargs into it so `params` holds every argument by name.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                # Reject unknown keywords early instead of silently ignoring
                # them.
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_dashboard_deleted_for_facet" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'facet' is set
        if ('facet' not in params or
                params['facet'] is None):
            raise ValueError("Missing the required parameter `facet` when calling `search_dashboard_deleted_for_facet`")  # noqa: E501

        collection_formats = {}

        # `facet` is substituted into the {facet} placeholder of the URL path.
        path_params = {}
        if 'facet' in params:
            path_params['facet'] = params['facet']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The optional search request container is sent as the JSON body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        # Delegate the actual HTTP POST (and optional async dispatch) to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/api/v2/search/dashboard/deleted/{facet}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerFacetResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_dashboard_deleted_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's deleted dashboards # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_dashboard_deleted_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_dashboard_deleted_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_dashboard_deleted_for_facets_with_http_info(**kwargs) # noqa: E501
return data
    def search_dashboard_deleted_for_facets_with_http_info(self, **kwargs):  # noqa: E501
        """Lists the values of one or more facets over the customer's deleted dashboards  # noqa: E501

        # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_dashboard_deleted_for_facets_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param FacetsSearchRequestContainer body:
        :return: ResponseContainerFacetsResponseContainer
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # per-request options understood by ApiClient.call_api.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the local namespace ('self', 'kwargs'), then fold the
        # validated kwargs into it so `params` holds every argument by name.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                # Reject unknown keywords early instead of silently ignoring
                # them.
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_dashboard_deleted_for_facets" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        # This endpoint takes no path or query parameters.
        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The optional search request container is sent as the JSON body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        # Delegate the actual HTTP POST (and optional async dispatch) to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/api/v2/search/dashboard/deleted/facets', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerFacetsResponseContainer',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_dashboard_entities(self, **kwargs): # noqa: E501
"""Search over a customer's non-deleted dashboards # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_dashboard_entities(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedDashboard
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_dashboard_entities_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_dashboard_entities_with_http_info(**kwargs) # noqa: E501
return data
    def search_dashboard_entities_with_http_info(self, **kwargs):  # noqa: E501
        """Search over a customer's non-deleted dashboards  # noqa: E501

        # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_dashboard_entities_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param SortableSearchRequest body:
        :return: ResponseContainerPagedDashboard
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # per-request options understood by ApiClient.call_api.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the local namespace ('self', 'kwargs'), then fold the
        # validated kwargs into it so `params` holds every argument by name.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                # Reject unknown keywords early instead of silently ignoring
                # them.
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_dashboard_entities" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        # This endpoint takes no path or query parameters.
        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The optional search request is sent as the JSON body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        # Delegate the actual HTTP POST (and optional async dispatch) to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/api/v2/search/dashboard', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerPagedDashboard',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_dashboard_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's non-deleted dashboards # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_dashboard_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_dashboard_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_dashboard_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data
    def search_dashboard_for_facet_with_http_info(self, facet, **kwargs):  # noqa: E501
        """Lists the values of a specific facet over the customer's non-deleted dashboards  # noqa: E501

        # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_dashboard_for_facet_with_http_info(facet, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str facet: (required)
        :param FacetSearchRequestContainer body:
        :return: ResponseContainerFacetResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # per-request options understood by ApiClient.call_api.
        all_params = ['facet', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the local namespace ('self', 'facet', 'kwargs'), then fold
        # the validated kwargs into it so `params` holds every argument by name.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                # Reject unknown keywords early instead of silently ignoring
                # them.
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_dashboard_for_facet" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'facet' is set
        if ('facet' not in params or
                params['facet'] is None):
            raise ValueError("Missing the required parameter `facet` when calling `search_dashboard_for_facet`")  # noqa: E501

        collection_formats = {}

        # `facet` is substituted into the {facet} placeholder of the URL path.
        path_params = {}
        if 'facet' in params:
            path_params['facet'] = params['facet']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The optional search request container is sent as the JSON body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        # Delegate the actual HTTP POST (and optional async dispatch) to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/api/v2/search/dashboard/{facet}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerFacetResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_dashboard_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's non-deleted dashboards # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_dashboard_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_dashboard_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_dashboard_for_facets_with_http_info(**kwargs) # noqa: E501
return data
    def search_dashboard_for_facets_with_http_info(self, **kwargs):  # noqa: E501
        """Lists the values of one or more facets over the customer's non-deleted dashboards  # noqa: E501

        # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_dashboard_for_facets_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param FacetsSearchRequestContainer body:
        :return: ResponseContainerFacetsResponseContainer
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the generic
        # per-request options understood by ApiClient.call_api.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the local namespace ('self', 'kwargs'), then fold the
        # validated kwargs into it so `params` holds every argument by name.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                # Reject unknown keywords early instead of silently ignoring
                # them.
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_dashboard_for_facets" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        # This endpoint takes no path or query parameters.
        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The optional search request container is sent as the JSON body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        # Delegate the actual HTTP POST (and optional async dispatch) to the
        # shared ApiClient.
        return self.api_client.call_api(
            '/api/v2/search/dashboard/facets', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerFacetsResponseContainer',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_external_link_entities(self, **kwargs): # noqa: E501
"""Search over a customer's external links # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_external_link_entities(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedExternalLink
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_external_link_entities_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_external_link_entities_with_http_info(**kwargs) # noqa: E501
return data
def search_external_link_entities_with_http_info(self, **kwargs): # noqa: E501
"""Search over a customer's external links # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_external_link_entities_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedExternalLink
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_external_link_entities" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/extlink', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedExternalLink', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_external_links_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's external links # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_external_links_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_external_links_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_external_links_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data
def search_external_links_for_facet_with_http_info(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's external links # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_external_links_for_facet_with_http_info(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['facet', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_external_links_for_facet" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'facet' is set
if ('facet' not in params or
params['facet'] is None):
raise ValueError("Missing the required parameter `facet` when calling `search_external_links_for_facet`") # noqa: E501
collection_formats = {}
path_params = {}
if 'facet' in params:
path_params['facet'] = params['facet'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/extlink/{facet}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_external_links_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's external links # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_external_links_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_external_links_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_external_links_for_facets_with_http_info(**kwargs) # noqa: E501
return data
def search_external_links_for_facets_with_http_info(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's external links # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_external_links_for_facets_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_external_links_for_facets" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/extlink/facets', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetsResponseContainer', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_ingestion_policy_entities(self, **kwargs): # noqa: E501
"""Search over a customer's ingestion policies # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_ingestion_policy_entities(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_ingestion_policy_entities_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_ingestion_policy_entities_with_http_info(**kwargs) # noqa: E501
return data
def search_ingestion_policy_entities_with_http_info(self, **kwargs): # noqa: E501
"""Search over a customer's ingestion policies # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_ingestion_policy_entities_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedIngestionPolicy
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_ingestion_policy_entities" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/ingestionpolicy', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedIngestionPolicy', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_ingestion_policy_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's ingestion policies # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_ingestion_policy_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_ingestion_policy_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_ingestion_policy_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data
def search_ingestion_policy_for_facet_with_http_info(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's ingestion policies # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_ingestion_policy_for_facet_with_http_info(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['facet', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_ingestion_policy_for_facet" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'facet' is set
if ('facet' not in params or
params['facet'] is None):
raise ValueError("Missing the required parameter `facet` when calling `search_ingestion_policy_for_facet`") # noqa: E501
collection_formats = {}
path_params = {}
if 'facet' in params:
path_params['facet'] = params['facet'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/ingestionpolicy/{facet}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_ingestion_policy_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's ingestion policies # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_ingestion_policy_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_ingestion_policy_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_ingestion_policy_for_facets_with_http_info(**kwargs) # noqa: E501
return data
def search_ingestion_policy_for_facets_with_http_info(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's ingestion policies # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_ingestion_policy_for_facets_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_ingestion_policy_for_facets" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/ingestionpolicy/facets', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetsResponseContainer', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_maintenance_window_entities(self, **kwargs): # noqa: E501
"""Search over a customer's maintenance windows # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_maintenance_window_entities(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedMaintenanceWindow
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_maintenance_window_entities_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_maintenance_window_entities_with_http_info(**kwargs) # noqa: E501
return data
def search_maintenance_window_entities_with_http_info(self, **kwargs): # noqa: E501
"""Search over a customer's maintenance windows # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_maintenance_window_entities_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedMaintenanceWindow
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_maintenance_window_entities" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/maintenancewindow', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedMaintenanceWindow', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_maintenance_window_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's maintenance windows # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_maintenance_window_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_maintenance_window_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_maintenance_window_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data
def search_maintenance_window_for_facet_with_http_info(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's maintenance windows # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_maintenance_window_for_facet_with_http_info(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['facet', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_maintenance_window_for_facet" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'facet' is set
if ('facet' not in params or
params['facet'] is None):
raise ValueError("Missing the required parameter `facet` when calling `search_maintenance_window_for_facet`") # noqa: E501
collection_formats = {}
path_params = {}
if 'facet' in params:
path_params['facet'] = params['facet'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/maintenancewindow/{facet}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_maintenance_window_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's maintenance windows # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_maintenance_window_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_maintenance_window_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_maintenance_window_for_facets_with_http_info(**kwargs) # noqa: E501
return data
def search_maintenance_window_for_facets_with_http_info(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's maintenance windows # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_maintenance_window_for_facets_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_maintenance_window_for_facets" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/maintenancewindow/facets', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetsResponseContainer', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_monitored_application_entities(self, **kwargs): # noqa: E501
"""Search over all the customer's non-deleted monitored applications # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_monitored_application_entities(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedMonitoredApplicationDTO
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_monitored_application_entities_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_monitored_application_entities_with_http_info(**kwargs) # noqa: E501
return data
def search_monitored_application_entities_with_http_info(self, **kwargs): # noqa: E501
"""Search over all the customer's non-deleted monitored applications # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_monitored_application_entities_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedMonitoredApplicationDTO
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_monitored_application_entities" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/monitoredapplication', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedMonitoredApplicationDTO', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_monitored_application_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's non-deleted monitored application # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_monitored_application_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_monitored_application_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_monitored_application_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data
    def search_monitored_application_for_facet_with_http_info(self, facet, **kwargs):  # noqa: E501
        """Lists the values of a specific facet over the customer's non-deleted monitored applications  # noqa: E501

        POSTs to /api/v2/search/monitoredapplication/{facet}.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_monitored_application_for_facet_with_http_info(facet, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str facet: (required) name of the facet whose values are listed
        :param FacetSearchRequestContainer body: facet search criteria
        :return: ResponseContainerFacetResponse
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unsupported keyword argument is passed
        :raises ValueError: if `facet` is missing or None
        """
        # Keyword arguments accepted by this endpoint plus the common
        # transport options understood by ApiClient.call_api.
        all_params = ['facet', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names ('self', 'facet', 'kwargs'); validated kwargs
        # are merged in below, so `params` holds the full call configuration.
        # NOTE: do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_monitored_application_for_facet" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'facet' is set
        if ('facet' not in params or
                params['facet'] is None):
            raise ValueError("Missing the required parameter `facet` when calling `search_monitored_application_for_facet`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        # `facet` is substituted into the {facet} placeholder of the URL path.
        if 'facet' in params:
            path_params['facet'] = params['facet']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v2/search/monitoredapplication/{facet}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerFacetResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_monitored_application_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's non-deleted monitored clusters # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_monitored_application_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_monitored_application_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_monitored_application_for_facets_with_http_info(**kwargs) # noqa: E501
return data
    def search_monitored_application_for_facets_with_http_info(self, **kwargs):  # noqa: E501
        """Lists the values of one or more facets over the customer's non-deleted monitored applications  # noqa: E501

        POSTs to /api/v2/search/monitoredapplication/facets.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_monitored_application_for_facets_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param FacetsSearchRequestContainer body: multi-facet search criteria
        :return: ResponseContainerFacetsResponseContainer
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unsupported keyword argument is passed
        """
        # Keyword arguments accepted by this endpoint plus the common
        # transport options understood by ApiClient.call_api.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names ('self', 'kwargs'); validated kwargs are
        # merged in below, so `params` holds the full call configuration.
        # NOTE: do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_monitored_application_for_facets" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v2/search/monitoredapplication/facets', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerFacetsResponseContainer',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_monitored_service_entities(self, **kwargs): # noqa: E501
"""Search over all the customer's non-deleted monitored services # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_monitored_service_entities(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedMonitoredServiceDTO
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_monitored_service_entities_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_monitored_service_entities_with_http_info(**kwargs) # noqa: E501
return data
    def search_monitored_service_entities_with_http_info(self, **kwargs):  # noqa: E501
        """Search over all the customer's non-deleted monitored services  # noqa: E501

        POSTs to /api/v2/search/monitoredservice.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_monitored_service_entities_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param SortableSearchRequest body: search query, sorting and paging options
        :return: ResponseContainerPagedMonitoredServiceDTO
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unsupported keyword argument is passed
        """
        # Keyword arguments accepted by this endpoint plus the common
        # transport options understood by ApiClient.call_api.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names ('self', 'kwargs'); validated kwargs are
        # merged in below, so `params` holds the full call configuration.
        # NOTE: do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_monitored_service_entities" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v2/search/monitoredservice', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerPagedMonitoredServiceDTO',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_monitored_service_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's non-deleted monitored application # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_monitored_service_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_monitored_service_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_monitored_service_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data
    def search_monitored_service_for_facet_with_http_info(self, facet, **kwargs):  # noqa: E501
        """Lists the values of a specific facet over the customer's non-deleted monitored services  # noqa: E501

        POSTs to /api/v2/search/monitoredservice/{facet}.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_monitored_service_for_facet_with_http_info(facet, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str facet: (required) name of the facet whose values are listed
        :param FacetSearchRequestContainer body: facet search criteria
        :return: ResponseContainerFacetResponse
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unsupported keyword argument is passed
        :raises ValueError: if `facet` is missing or None
        """
        # Keyword arguments accepted by this endpoint plus the common
        # transport options understood by ApiClient.call_api.
        all_params = ['facet', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names ('self', 'facet', 'kwargs'); validated kwargs
        # are merged in below, so `params` holds the full call configuration.
        # NOTE: do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_monitored_service_for_facet" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'facet' is set
        if ('facet' not in params or
                params['facet'] is None):
            raise ValueError("Missing the required parameter `facet` when calling `search_monitored_service_for_facet`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        # `facet` is substituted into the {facet} placeholder of the URL path.
        if 'facet' in params:
            path_params['facet'] = params['facet']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v2/search/monitoredservice/{facet}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerFacetResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_monitored_service_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's non-deleted monitored clusters # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_monitored_service_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_monitored_service_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_monitored_service_for_facets_with_http_info(**kwargs) # noqa: E501
return data
    def search_monitored_service_for_facets_with_http_info(self, **kwargs):  # noqa: E501
        """Lists the values of one or more facets over the customer's non-deleted monitored services  # noqa: E501

        POSTs to /api/v2/search/monitoredservice/facets.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_monitored_service_for_facets_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param FacetsSearchRequestContainer body: multi-facet search criteria
        :return: ResponseContainerFacetsResponseContainer
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unsupported keyword argument is passed
        """
        # Keyword arguments accepted by this endpoint plus the common
        # transport options understood by ApiClient.call_api.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names ('self', 'kwargs'); validated kwargs are
        # merged in below, so `params` holds the full call configuration.
        # NOTE: do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_monitored_service_for_facets" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v2/search/monitoredservice/facets', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerFacetsResponseContainer',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_notficant_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's notificants # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_notficant_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_notficant_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_notficant_for_facets_with_http_info(**kwargs) # noqa: E501
return data
    def search_notficant_for_facets_with_http_info(self, **kwargs):  # noqa: E501
        """Lists the values of one or more facets over the customer's notificants  # noqa: E501

        POSTs to /api/v2/search/notificant/facets.  The method name
        (generated with a "notficant" typo) is kept for compatibility.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_notficant_for_facets_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param FacetsSearchRequestContainer body: multi-facet search criteria
        :return: ResponseContainerFacetsResponseContainer
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unsupported keyword argument is passed
        """
        # Keyword arguments accepted by this endpoint plus the common
        # transport options understood by ApiClient.call_api.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names ('self', 'kwargs'); validated kwargs are
        # merged in below, so `params` holds the full call configuration.
        # NOTE: do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_notficant_for_facets" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v2/search/notificant/facets', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerFacetsResponseContainer',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_notificant_entities(self, **kwargs): # noqa: E501
"""Search over a customer's notificants # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_notificant_entities(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedNotificant
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_notificant_entities_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_notificant_entities_with_http_info(**kwargs) # noqa: E501
return data
    def search_notificant_entities_with_http_info(self, **kwargs):  # noqa: E501
        """Search over a customer's notificants  # noqa: E501

        POSTs to /api/v2/search/notificant.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_notificant_entities_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param SortableSearchRequest body: search query, sorting and paging options
        :return: ResponseContainerPagedNotificant
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unsupported keyword argument is passed
        """
        # Keyword arguments accepted by this endpoint plus the common
        # transport options understood by ApiClient.call_api.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names ('self', 'kwargs'); validated kwargs are
        # merged in below, so `params` holds the full call configuration.
        # NOTE: do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_notificant_entities" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v2/search/notificant', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerPagedNotificant',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_notificant_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's notificants # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_notificant_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_notificant_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_notificant_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data
    def search_notificant_for_facet_with_http_info(self, facet, **kwargs):  # noqa: E501
        """Lists the values of a specific facet over the customer's notificants  # noqa: E501

        POSTs to /api/v2/search/notificant/{facet}.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_notificant_for_facet_with_http_info(facet, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str facet: (required) name of the facet whose values are listed
        :param FacetSearchRequestContainer body: facet search criteria
        :return: ResponseContainerFacetResponse
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unsupported keyword argument is passed
        :raises ValueError: if `facet` is missing or None
        """
        # Keyword arguments accepted by this endpoint plus the common
        # transport options understood by ApiClient.call_api.
        all_params = ['facet', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names ('self', 'facet', 'kwargs'); validated kwargs
        # are merged in below, so `params` holds the full call configuration.
        # NOTE: do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_notificant_for_facet" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'facet' is set
        if ('facet' not in params or
                params['facet'] is None):
            raise ValueError("Missing the required parameter `facet` when calling `search_notificant_for_facet`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        # `facet` is substituted into the {facet} placeholder of the URL path.
        if 'facet' in params:
            path_params['facet'] = params['facet']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v2/search/notificant/{facet}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerFacetResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_proxy_deleted_entities(self, **kwargs): # noqa: E501
"""Search over a customer's deleted proxies # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_proxy_deleted_entities(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedProxy
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_proxy_deleted_entities_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_proxy_deleted_entities_with_http_info(**kwargs) # noqa: E501
return data
    def search_proxy_deleted_entities_with_http_info(self, **kwargs):  # noqa: E501
        """Search over a customer's deleted proxies  # noqa: E501

        POSTs to /api/v2/search/proxy/deleted.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_proxy_deleted_entities_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param SortableSearchRequest body: search query, sorting and paging options
        :return: ResponseContainerPagedProxy
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unsupported keyword argument is passed
        """
        # Keyword arguments accepted by this endpoint plus the common
        # transport options understood by ApiClient.call_api.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names ('self', 'kwargs'); validated kwargs are
        # merged in below, so `params` holds the full call configuration.
        # NOTE: do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_proxy_deleted_entities" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v2/search/proxy/deleted', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerPagedProxy',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_proxy_deleted_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's deleted proxies # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_proxy_deleted_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_proxy_deleted_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_proxy_deleted_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data
    def search_proxy_deleted_for_facet_with_http_info(self, facet, **kwargs):  # noqa: E501
        """Lists the values of a specific facet over the customer's deleted proxies  # noqa: E501

        POSTs to /api/v2/search/proxy/deleted/{facet}.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_proxy_deleted_for_facet_with_http_info(facet, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str facet: (required) name of the facet whose values are listed
        :param FacetSearchRequestContainer body: facet search criteria
        :return: ResponseContainerFacetResponse
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unsupported keyword argument is passed
        :raises ValueError: if `facet` is missing or None
        """
        # Keyword arguments accepted by this endpoint plus the common
        # transport options understood by ApiClient.call_api.
        all_params = ['facet', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names ('self', 'facet', 'kwargs'); validated kwargs
        # are merged in below, so `params` holds the full call configuration.
        # NOTE: do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_proxy_deleted_for_facet" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'facet' is set
        if ('facet' not in params or
                params['facet'] is None):
            raise ValueError("Missing the required parameter `facet` when calling `search_proxy_deleted_for_facet`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        # `facet` is substituted into the {facet} placeholder of the URL path.
        if 'facet' in params:
            path_params['facet'] = params['facet']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v2/search/proxy/deleted/{facet}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerFacetResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_proxy_deleted_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's deleted proxies # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_proxy_deleted_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_proxy_deleted_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_proxy_deleted_for_facets_with_http_info(**kwargs) # noqa: E501
return data
    def search_proxy_deleted_for_facets_with_http_info(self, **kwargs):  # noqa: E501
        """Lists the values of one or more facets over the customer's deleted proxies  # noqa: E501

        POSTs to /api/v2/search/proxy/deleted/facets.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_proxy_deleted_for_facets_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param FacetsSearchRequestContainer body: multi-facet search criteria
        :return: ResponseContainerFacetsResponseContainer
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unsupported keyword argument is passed
        """
        # Keyword arguments accepted by this endpoint plus the common
        # transport options understood by ApiClient.call_api.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names ('self', 'kwargs'); validated kwargs are
        # merged in below, so `params` holds the full call configuration.
        # NOTE: do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_proxy_deleted_for_facets" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api/v2/search/proxy/deleted/facets', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ResponseContainerFacetsResponseContainer',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def search_proxy_entities(self, **kwargs):  # noqa: E501
    """Search over a customer's non-deleted proxies.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param SortableSearchRequest body:
    :return: ResponseContainerPagedProxy (or the request thread when
        async_req is True)
    """
    # Public wrapper: callers get just the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns a thread when async_req
    # is truthy and plain data otherwise, so one call covers both modes.
    return self.search_proxy_entities_with_http_info(**kwargs)  # noqa: E501
def search_proxy_entities_with_http_info(self, **kwargs):  # noqa: E501
    """Search over a customer's non-deleted proxies.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param SortableSearchRequest body:
    :return: ResponseContainerPagedProxy
    """
    recognized = ['body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = locals()
    # Fold the keyword arguments into ``params``, rejecting anything the
    # endpoint does not understand.
    for key, value in params.pop('kwargs').items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_proxy_entities" % key
            )
        params[key] = value

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/search/proxy', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='ResponseContainerPagedProxy',  # noqa: E501
        auth_settings=['api_key'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def search_proxy_for_facet(self, facet, **kwargs):  # noqa: E501
    """Lists the values of a specific facet over the customer's non-deleted proxies.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param str facet: (required)
    :param FacetSearchRequestContainer body:
    :return: ResponseContainerFacetResponse (or the request thread when
        async_req is True)
    """
    # Public wrapper: callers get just the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns a thread when async_req
    # is truthy and plain data otherwise, so one call covers both modes.
    return self.search_proxy_for_facet_with_http_info(facet, **kwargs)  # noqa: E501
def search_proxy_for_facet_with_http_info(self, facet, **kwargs):  # noqa: E501
    """Lists the values of a specific facet over the customer's non-deleted proxies.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param str facet: (required)
    :param FacetSearchRequestContainer body:
    :return: ResponseContainerFacetResponse
    """
    recognized = ['facet', 'body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = locals()
    # Fold the keyword arguments into ``params``, rejecting anything the
    # endpoint does not understand.
    for key, value in params.pop('kwargs').items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_proxy_for_facet" % key
            )
        params[key] = value

    # ``facet`` is interpolated into the URL and therefore mandatory.
    if params.get('facet') is None:
        raise ValueError("Missing the required parameter `facet` when calling `search_proxy_for_facet`")  # noqa: E501
    path_params = {'facet': params['facet']}

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/search/proxy/{facet}', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='ResponseContainerFacetResponse',  # noqa: E501
        auth_settings=['api_key'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def search_proxy_for_facets(self, **kwargs):  # noqa: E501
    """Lists the values of one or more facets over the customer's non-deleted proxies.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param FacetsSearchRequestContainer body:
    :return: ResponseContainerFacetsResponseContainer (or the request
        thread when async_req is True)
    """
    # Public wrapper: callers get just the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns a thread when async_req
    # is truthy and plain data otherwise, so one call covers both modes.
    return self.search_proxy_for_facets_with_http_info(**kwargs)  # noqa: E501
def search_proxy_for_facets_with_http_info(self, **kwargs):  # noqa: E501
    """Lists the values of one or more facets over the customer's non-deleted proxies.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param FacetsSearchRequestContainer body:
    :return: ResponseContainerFacetsResponseContainer
    """
    recognized = ['body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = locals()
    # Fold the keyword arguments into ``params``, rejecting anything the
    # endpoint does not understand.
    for key, value in params.pop('kwargs').items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_proxy_for_facets" % key
            )
        params[key] = value

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/search/proxy/facets', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='ResponseContainerFacetsResponseContainer',  # noqa: E501
        auth_settings=['api_key'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def search_registered_query_deleted_entities(self, **kwargs):  # noqa: E501
    """Search over a customer's deleted derived metric definitions.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param SortableSearchRequest body:
    :return: ResponseContainerPagedDerivedMetricDefinition (or the request
        thread when async_req is True)
    """
    # Public wrapper: callers get just the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns a thread when async_req
    # is truthy and plain data otherwise, so one call covers both modes.
    return self.search_registered_query_deleted_entities_with_http_info(**kwargs)  # noqa: E501
def search_registered_query_deleted_entities_with_http_info(self, **kwargs):  # noqa: E501
    """Search over a customer's deleted derived metric definitions.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param SortableSearchRequest body:
    :return: ResponseContainerPagedDerivedMetricDefinition
    """
    recognized = ['body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = locals()
    # Fold the keyword arguments into ``params``, rejecting anything the
    # endpoint does not understand.
    for key, value in params.pop('kwargs').items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_registered_query_deleted_entities" % key
            )
        params[key] = value

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/search/derivedmetric/deleted', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='ResponseContainerPagedDerivedMetricDefinition',  # noqa: E501
        auth_settings=['api_key'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def search_registered_query_deleted_for_facet(self, facet, **kwargs):  # noqa: E501
    """Lists the values of a specific facet over the customer's deleted derived metric definitions.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param str facet: (required)
    :param FacetSearchRequestContainer body:
    :return: ResponseContainerFacetResponse (or the request thread when
        async_req is True)
    """
    # Public wrapper: callers get just the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns a thread when async_req
    # is truthy and plain data otherwise, so one call covers both modes.
    return self.search_registered_query_deleted_for_facet_with_http_info(facet, **kwargs)  # noqa: E501
def search_registered_query_deleted_for_facet_with_http_info(self, facet, **kwargs):  # noqa: E501
    """Lists the values of a specific facet over the customer's deleted derived metric definitions.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param str facet: (required)
    :param FacetSearchRequestContainer body:
    :return: ResponseContainerFacetResponse
    """
    recognized = ['facet', 'body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = locals()
    # Fold the keyword arguments into ``params``, rejecting anything the
    # endpoint does not understand.
    for key, value in params.pop('kwargs').items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_registered_query_deleted_for_facet" % key
            )
        params[key] = value

    # ``facet`` is interpolated into the URL and therefore mandatory.
    if params.get('facet') is None:
        raise ValueError("Missing the required parameter `facet` when calling `search_registered_query_deleted_for_facet`")  # noqa: E501
    path_params = {'facet': params['facet']}

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/search/derivedmetric/deleted/{facet}', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='ResponseContainerFacetResponse',  # noqa: E501
        auth_settings=['api_key'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def search_registered_query_deleted_for_facets(self, **kwargs):  # noqa: E501
    """Lists the values of one or more facets over the customer's deleted derived metric definitions.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param FacetsSearchRequestContainer body:
    :return: ResponseContainerFacetsResponseContainer (or the request
        thread when async_req is True)
    """
    # Public wrapper: callers get just the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns a thread when async_req
    # is truthy and plain data otherwise, so one call covers both modes.
    return self.search_registered_query_deleted_for_facets_with_http_info(**kwargs)  # noqa: E501
def search_registered_query_deleted_for_facets_with_http_info(self, **kwargs):  # noqa: E501
    """Lists the values of one or more facets over the customer's deleted derived metric definitions.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param FacetsSearchRequestContainer body:
    :return: ResponseContainerFacetsResponseContainer
    """
    recognized = ['body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = locals()
    # Fold the keyword arguments into ``params``, rejecting anything the
    # endpoint does not understand.
    for key, value in params.pop('kwargs').items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_registered_query_deleted_for_facets" % key
            )
        params[key] = value

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/search/derivedmetric/deleted/facets', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='ResponseContainerFacetsResponseContainer',  # noqa: E501
        auth_settings=['api_key'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def search_registered_query_entities(self, **kwargs):  # noqa: E501
    """Search over a customer's non-deleted derived metric definitions.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param SortableSearchRequest body:
    :return: ResponseContainerPagedDerivedMetricDefinitionWithStats (or
        the request thread when async_req is True)
    """
    # Public wrapper: callers get just the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns a thread when async_req
    # is truthy and plain data otherwise, so one call covers both modes.
    return self.search_registered_query_entities_with_http_info(**kwargs)  # noqa: E501
def search_registered_query_entities_with_http_info(self, **kwargs):  # noqa: E501
    """Search over a customer's non-deleted derived metric definitions.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param SortableSearchRequest body:
    :return: ResponseContainerPagedDerivedMetricDefinitionWithStats
    """
    recognized = ['body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = locals()
    # Fold the keyword arguments into ``params``, rejecting anything the
    # endpoint does not understand.
    for key, value in params.pop('kwargs').items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_registered_query_entities" % key
            )
        params[key] = value

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/search/derivedmetric', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='ResponseContainerPagedDerivedMetricDefinitionWithStats',  # noqa: E501
        auth_settings=['api_key'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def search_registered_query_for_facet(self, facet, **kwargs):  # noqa: E501
    """Lists the values of a specific facet over the customer's non-deleted derived metric definitions.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param str facet: (required)
    :param FacetSearchRequestContainer body:
    :return: ResponseContainerFacetResponse (or the request thread when
        async_req is True)
    """
    # Public wrapper: callers get just the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns a thread when async_req
    # is truthy and plain data otherwise, so one call covers both modes.
    return self.search_registered_query_for_facet_with_http_info(facet, **kwargs)  # noqa: E501
def search_registered_query_for_facet_with_http_info(self, facet, **kwargs):  # noqa: E501
    """Lists the values of a specific facet over the customer's non-deleted derived metric definitions.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param str facet: (required)
    :param FacetSearchRequestContainer body:
    :return: ResponseContainerFacetResponse
    """
    recognized = ['facet', 'body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = locals()
    # Fold the keyword arguments into ``params``, rejecting anything the
    # endpoint does not understand.
    for key, value in params.pop('kwargs').items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_registered_query_for_facet" % key
            )
        params[key] = value

    # ``facet`` is interpolated into the URL and therefore mandatory.
    if params.get('facet') is None:
        raise ValueError("Missing the required parameter `facet` when calling `search_registered_query_for_facet`")  # noqa: E501
    path_params = {'facet': params['facet']}

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/search/derivedmetric/{facet}', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='ResponseContainerFacetResponse',  # noqa: E501
        auth_settings=['api_key'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def search_registered_query_for_facets(self, **kwargs):  # noqa: E501
    """Lists the values of one or more facets over the customer's non-deleted derived metric definition.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param FacetsSearchRequestContainer body:
    :return: ResponseContainerFacetsResponseContainer (or the request
        thread when async_req is True)
    """
    # Public wrapper: callers get just the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns a thread when async_req
    # is truthy and plain data otherwise, so one call covers both modes.
    return self.search_registered_query_for_facets_with_http_info(**kwargs)  # noqa: E501
def search_registered_query_for_facets_with_http_info(self, **kwargs):  # noqa: E501
    """Lists the values of one or more facets over the customer's non-deleted derived metric definition.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param FacetsSearchRequestContainer body:
    :return: ResponseContainerFacetsResponseContainer
    """
    recognized = ['body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = locals()
    # Fold the keyword arguments into ``params``, rejecting anything the
    # endpoint does not understand.
    for key, value in params.pop('kwargs').items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_registered_query_for_facets" % key
            )
        params[key] = value

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/search/derivedmetric/facets', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='ResponseContainerFacetsResponseContainer',  # noqa: E501
        auth_settings=['api_key'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def search_related_report_event_anomaly_entities(self, event_id, **kwargs):  # noqa: E501
    """List the related events and anomalies over a firing event.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param str event_id: (required)
    :param EventSearchRequest body:
    :return: ResponseContainerPagedReportEventAnomalyDTO (or the request
        thread when async_req is True)
    """
    # Public wrapper: callers get just the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns a thread when async_req
    # is truthy and plain data otherwise, so one call covers both modes.
    return self.search_related_report_event_anomaly_entities_with_http_info(event_id, **kwargs)  # noqa: E501
def search_related_report_event_anomaly_entities_with_http_info(self, event_id, **kwargs):  # noqa: E501
    """List the related events and anomalies over a firing event.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param str event_id: (required)
    :param EventSearchRequest body:
    :return: ResponseContainerPagedReportEventAnomalyDTO
    """
    recognized = ['event_id', 'body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = locals()
    # Fold the keyword arguments into ``params``, rejecting anything the
    # endpoint does not understand.
    for key, value in params.pop('kwargs').items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_related_report_event_anomaly_entities" % key
            )
        params[key] = value

    # ``event_id`` is interpolated into the URL and therefore mandatory.
    if params.get('event_id') is None:
        raise ValueError("Missing the required parameter `event_id` when calling `search_related_report_event_anomaly_entities`")  # noqa: E501
    path_params = {'eventId': params['event_id']}

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/search/event/related/{eventId}/withAnomalies', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='ResponseContainerPagedReportEventAnomalyDTO',  # noqa: E501
        auth_settings=['api_key'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def search_related_report_event_entities(self, event_id, **kwargs):  # noqa: E501
    """List the related events over a firing event.

    Synchronous by default; pass ``async_req=True`` to receive a thread
    whose ``get()`` returns the result.

    :param async_req bool
    :param str event_id: (required)
    :param EventSearchRequest body:
    :return: ResponseContainerPagedRelatedEvent (or the request thread
        when async_req is True)
    """
    # Public wrapper: callers get just the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns a thread when async_req
    # is truthy and plain data otherwise, so one call covers both modes.
    return self.search_related_report_event_entities_with_http_info(event_id, **kwargs)  # noqa: E501
def search_related_report_event_entities_with_http_info(self, event_id, **kwargs): # noqa: E501
"""List the related events over a firing event # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_related_report_event_entities_with_http_info(event_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str event_id: (required)
:param EventSearchRequest body:
:return: ResponseContainerPagedRelatedEvent
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['event_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_related_report_event_entities" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'event_id' is set
if ('event_id' not in params or
params['event_id'] is None):
raise ValueError("Missing the required parameter `event_id` when calling `search_related_report_event_entities`") # noqa: E501
collection_formats = {}
path_params = {}
if 'event_id' in params:
path_params['eventId'] = params['event_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/event/related/{eventId}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedRelatedEvent', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_report_event_entities(self, **kwargs): # noqa: E501
"""Search over a customer's events # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_report_event_entities(async_req=True)
>>> result = thread.get()
:param async_req bool
:param EventSearchRequest body:
:return: ResponseContainerPagedEvent
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_report_event_entities_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_report_event_entities_with_http_info(**kwargs) # noqa: E501
return data
def search_report_event_entities_with_http_info(self, **kwargs): # noqa: E501
"""Search over a customer's events # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_report_event_entities_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param EventSearchRequest body:
:return: ResponseContainerPagedEvent
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_report_event_entities" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/event', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedEvent', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_report_event_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's events # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_report_event_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_report_event_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_report_event_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data
def search_report_event_for_facet_with_http_info(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's events # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_report_event_for_facet_with_http_info(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['facet', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_report_event_for_facet" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'facet' is set
if ('facet' not in params or
params['facet'] is None):
raise ValueError("Missing the required parameter `facet` when calling `search_report_event_for_facet`") # noqa: E501
collection_formats = {}
path_params = {}
if 'facet' in params:
path_params['facet'] = params['facet'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/event/{facet}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_report_event_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's events # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_report_event_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_report_event_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_report_event_for_facets_with_http_info(**kwargs) # noqa: E501
return data
def search_report_event_for_facets_with_http_info(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's events # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_report_event_for_facets_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_report_event_for_facets" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/event/facets', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetsResponseContainer', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_role_entities(self, **kwargs): # noqa: E501
"""Search over a customer's roles # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_role_entities(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedRoleDTO
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_role_entities_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_role_entities_with_http_info(**kwargs) # noqa: E501
return data
def search_role_entities_with_http_info(self, **kwargs): # noqa: E501
"""Search over a customer's roles # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_role_entities_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedRoleDTO
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_role_entities" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/role', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedRoleDTO', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_role_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's roles # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_role_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_role_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_role_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data
def search_role_for_facet_with_http_info(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's roles # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_role_for_facet_with_http_info(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['facet', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_role_for_facet" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'facet' is set
if ('facet' not in params or
params['facet'] is None):
raise ValueError("Missing the required parameter `facet` when calling `search_role_for_facet`") # noqa: E501
collection_formats = {}
path_params = {}
if 'facet' in params:
path_params['facet'] = params['facet'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/role/{facet}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_role_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's roles # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_role_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_role_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_role_for_facets_with_http_info(**kwargs) # noqa: E501
return data
def search_role_for_facets_with_http_info(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's roles # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_role_for_facets_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_role_for_facets" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/role/facets', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetsResponseContainer', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_service_account_entities(self, **kwargs): # noqa: E501
"""Search over a customer's service accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_service_account_entities(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedServiceAccount
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_service_account_entities_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_service_account_entities_with_http_info(**kwargs) # noqa: E501
return data
def search_service_account_entities_with_http_info(self, **kwargs): # noqa: E501
"""Search over a customer's service accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_service_account_entities_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedServiceAccount
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_service_account_entities" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/serviceaccount', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedServiceAccount', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_service_account_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's service accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_service_account_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_service_account_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_service_account_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data
def search_service_account_for_facet_with_http_info(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's service accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_service_account_for_facet_with_http_info(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['facet', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_service_account_for_facet" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'facet' is set
if ('facet' not in params or
params['facet'] is None):
raise ValueError("Missing the required parameter `facet` when calling `search_service_account_for_facet`") # noqa: E501
collection_formats = {}
path_params = {}
if 'facet' in params:
path_params['facet'] = params['facet'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/serviceaccount/{facet}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_service_account_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's service accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_service_account_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_service_account_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_service_account_for_facets_with_http_info(**kwargs) # noqa: E501
return data
def search_service_account_for_facets_with_http_info(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's service accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_service_account_for_facets_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_service_account_for_facets" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/serviceaccount/facets', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetsResponseContainer', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_tagged_source_entities(self, **kwargs): # noqa: E501
"""Search over a customer's sources # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_tagged_source_entities(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SourceSearchRequestContainer body:
:return: ResponseContainerPagedSource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_tagged_source_entities_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_tagged_source_entities_with_http_info(**kwargs) # noqa: E501
return data
def search_tagged_source_entities_with_http_info(self, **kwargs): # noqa: E501
"""Search over a customer's sources # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_tagged_source_entities_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SourceSearchRequestContainer body:
:return: ResponseContainerPagedSource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_tagged_source_entities" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/source', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedSource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_tagged_source_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's sources # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_tagged_source_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_tagged_source_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_tagged_source_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data
def search_tagged_source_for_facet_with_http_info(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's sources # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_tagged_source_for_facet_with_http_info(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['facet', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_tagged_source_for_facet" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'facet' is set
if ('facet' not in params or
params['facet'] is None):
raise ValueError("Missing the required parameter `facet` when calling `search_tagged_source_for_facet`") # noqa: E501
collection_formats = {}
path_params = {}
if 'facet' in params:
path_params['facet'] = params['facet'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/source/{facet}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_tagged_source_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's sources # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_tagged_source_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_tagged_source_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_tagged_source_for_facets_with_http_info(**kwargs) # noqa: E501
return data
def search_tagged_source_for_facets_with_http_info(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's sources # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_tagged_source_for_facets_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_tagged_source_for_facets" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/source/facets', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetsResponseContainer', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_user_entities(self, **kwargs): # noqa: E501
"""Search over a customer's users # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_user_entities(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedCustomerFacingUserObject
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_user_entities_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_user_entities_with_http_info(**kwargs) # noqa: E501
return data
def search_user_entities_with_http_info(self, **kwargs): # noqa: E501
"""Search over a customer's users # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_user_entities_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedCustomerFacingUserObject
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_user_entities" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/user', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedCustomerFacingUserObject', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_user_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's users # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_user_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_user_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_user_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data
def search_user_for_facet_with_http_info(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's users # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_user_for_facet_with_http_info(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['facet', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_user_for_facet" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'facet' is set
if ('facet' not in params or
params['facet'] is None):
raise ValueError("Missing the required parameter `facet` when calling `search_user_for_facet`") # noqa: E501
collection_formats = {}
path_params = {}
if 'facet' in params:
path_params['facet'] = params['facet'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/user/{facet}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_user_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's users # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_user_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_user_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_user_for_facets_with_http_info(**kwargs) # noqa: E501
return data
def search_user_for_facets_with_http_info(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's users # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_user_for_facets_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_user_for_facets" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/user/facets', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetsResponseContainer', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_user_group_entities(self, **kwargs): # noqa: E501
"""Search over a customer's user groups # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_user_group_entities(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedUserGroupModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_user_group_entities_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_user_group_entities_with_http_info(**kwargs) # noqa: E501
return data
def search_user_group_entities_with_http_info(self, **kwargs): # noqa: E501
"""Search over a customer's user groups # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_user_group_entities_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedUserGroupModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_user_group_entities" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/usergroup', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedUserGroupModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_user_group_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's user groups # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_user_group_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_user_group_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_user_group_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data
def search_user_group_for_facet_with_http_info(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's user groups # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_user_group_for_facet_with_http_info(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['facet', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_user_group_for_facet" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'facet' is set
if ('facet' not in params or
params['facet'] is None):
raise ValueError("Missing the required parameter `facet` when calling `search_user_group_for_facet`") # noqa: E501
collection_formats = {}
path_params = {}
if 'facet' in params:
path_params['facet'] = params['facet'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/usergroup/{facet}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_user_group_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's user groups # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_user_group_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_user_group_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_user_group_for_facets_with_http_info(**kwargs) # noqa: E501
return data
def search_user_group_for_facets_with_http_info(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's user groups # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_user_group_for_facets_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_user_group_for_facets" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/usergroup/facets', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetsResponseContainer', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_web_hook_entities(self, **kwargs): # noqa: E501
"""Search over a customer's webhooks # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_web_hook_entities(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedNotificant
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_web_hook_entities_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_web_hook_entities_with_http_info(**kwargs) # noqa: E501
return data
def search_web_hook_entities_with_http_info(self, **kwargs): # noqa: E501
"""Search over a customer's webhooks # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_web_hook_entities_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param SortableSearchRequest body:
:return: ResponseContainerPagedNotificant
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_web_hook_entities" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/webhook', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedNotificant', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_web_hook_for_facet(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's webhooks # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_web_hook_for_facet(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_web_hook_for_facet_with_http_info(facet, **kwargs) # noqa: E501
else:
(data) = self.search_web_hook_for_facet_with_http_info(facet, **kwargs) # noqa: E501
return data
def search_web_hook_for_facet_with_http_info(self, facet, **kwargs): # noqa: E501
"""Lists the values of a specific facet over the customer's webhooks # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_web_hook_for_facet_with_http_info(facet, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str facet: (required)
:param FacetSearchRequestContainer body:
:return: ResponseContainerFacetResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['facet', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_web_hook_for_facet" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'facet' is set
if ('facet' not in params or
params['facet'] is None):
raise ValueError("Missing the required parameter `facet` when calling `search_web_hook_for_facet`") # noqa: E501
collection_formats = {}
path_params = {}
if 'facet' in params:
path_params['facet'] = params['facet'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/webhook/{facet}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_webhook_for_facets(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's webhooks # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_webhook_for_facets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_webhook_for_facets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.search_webhook_for_facets_with_http_info(**kwargs) # noqa: E501
return data
def search_webhook_for_facets_with_http_info(self, **kwargs): # noqa: E501
"""Lists the values of one or more facets over the customer's webhooks # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_webhook_for_facets_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param FacetsSearchRequestContainer body:
:return: ResponseContainerFacetsResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_webhook_for_facets" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/search/webhook/facets', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerFacetsResponseContainer', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
|
Our US contributor, Thomas P. Turner, holds an airline transport pilot certificate with instructor, CFII and MEI ratings, and a Master's degree in Aviation Safety. He is a 2015 inductee to the Flight Instructor Hall of Fame. He has been lead instructor for Beechcraft’s Bonanza pilot training program; a production test pilot for engine modifications; an aviation insurance underwriter; a corporate pilot; a captain in the US Air Force; and the author of several general aviation pilot training books. He directs the education and safety arm of a 9000-member pilots’ organisation. Tom Turner lives in Wichita, Kansas, USA and is a regular visitor to Australia.
|
# -*- coding: utf-8 -*-
"""
Local settings
- Run in Debug mode
- Use console backend for emails
- Add Django Debug Toolbar
- Add django-extensions as app
"""
import socket
import os
# Pull in every shared setting, then override the dev-specific ones below.
from .base import * # noqa
# DEBUG
# ------------------------------------------------------------------------------
# `env` comes from the base settings (star import above); defaults to debug on
# since this module is only used for local development.
DEBUG = env.bool('DJANGO_DEBUG', default=True)
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key only used for development and testing.
SECRET_KEY = env('DJANGO_SECRET_KEY', default='=SuS&AJ-F7KxV^lYdC^l%Uxn`CV{5<RHMAgwCA>J)xa:6O_Q@c')
# Mail settings
# ------------------------------------------------------------------------------
# Console backend by default: outgoing mail is printed, not delivered.
# Port 1025 matches local SMTP debug servers (e.g. `python -m smtpd`).
EMAIL_PORT = 1025
EMAIL_HOST = 'localhost'
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND',
                    default='django.core.mail.backends.console.EmailBackend')
# CACHING
# ------------------------------------------------------------------------------
# In-process memory cache; no external cache service needed for development.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': ''
    }
}
# django-debug-toolbar
# ------------------------------------------------------------------------------
MIDDLEWARE += ['debug_toolbar.middleware.DebugToolbarMiddleware', ]
INSTALLED_APPS += ['debug_toolbar', ]
# 10.0.2.2 is the host machine as seen from a VirtualBox/Vagrant guest.
INTERNAL_IPS = ['127.0.0.1', '10.0.2.2', ]
# tricks to have debug toolbar when developing with docker
if os.environ.get('USE_DOCKER') == 'yes':
    # The container's gateway (x.y.z.1) is where browser requests appear to
    # originate, so add it to INTERNAL_IPS for the toolbar to show up.
    ip = socket.gethostbyname(socket.gethostname())
    INTERNAL_IPS += [ip[:-1] + '1']
DEBUG_TOOLBAR_CONFIG = {
    'DISABLE_PANELS': [
        'debug_toolbar.panels.redirects.RedirectsPanel',
    ],
    'SHOW_TEMPLATE_CONTEXT': True,
}
# django-extensions
# ------------------------------------------------------------------------------
INSTALLED_APPS += ['django_extensions', ]
# TESTING
# ------------------------------------------------------------------------------
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# Your local stuff: Below this line define 3rd party library settings
# ------------------------------------------------------------------------------
|
How cats say, ‘I love you’ – Cinema Veterinary Centre - 661-253-9300- A full-service veterinary hospital servicing Valencia, Santa Clarita, Stevenson Ranch, Saugus, Castaic, Newhall and Canyon Country.
Because, come on, cats aren’t just going to outright say those three little words. Here are the telltale signs that your feline friend is giving you loving eyes.
Cats demonstrate their devotion to their humans in several important and obvious ways, if you know what to look for. There is even some reason to think that we misunderstand cats when they are asking for attention and mistake this for a desire to be fed. Here are just a few of the silent but sure ways your cat tells you that he/she loves you.
Territorial tenderness: Watch your cat move around in his/her territory (a.k.a., your house). He’ll walk with his tail straight up in the air, perpendicular to his body, perhaps with a slight kink at the tip. This upright tail is a sign of affection between cats and, now, between us and our cats. Similarly, cats who like each other a lot will rub on one another mostly using their faces. When your cat rubs his face against your ankles, he’s expressing the same level of affection for you that he has for a trusted cat. Given the choice, he’d rub against your face but you’re usually a bit too high up for that. We’re often irritated by cats who jump on the counter, but they’re often trying to get closer to us for a good face rub. They’ll rub their faces on inanimate things: doorways, chair legs or other furniture more often when you’re close by.
Friendly fondness: Cats who have been raised together have a few other ways of expressing their affection for each other. They may be found curled up together sleeping in a heap—even when it’s warm and they don’t need each other’s heat. So, when your cat hits your lap and curls up contentedly, you should know that this is love as well. The bond between cats is further strengthened by grooming each other. Of course, doing that keeps them cleaner than they might otherwise perhaps be. More important, though, it’s a mark of the power of friendship. Your stroking is the same powerful bonding. They love that you have focused your attention on them, moving your hands along their fur (particularly around their head and neck). It means that you two are family. Some cats will try to groom you back, but it may be annoying, given the barbs on their tongue that are helpful for efficient fur care. If you act as if it’s unpleasant, he’ll stop.
|
#!/usr/bin/env python3
import sys, os.path, os, time, stat, struct, ctypes, io, subprocess, math, random, difflib
import ply, re, traceback
import argparse, base64, json
from js_global import AbstractGlob
from js_cc import concat_smaps
#source map concatenator, for
#browsers that don't support
#index maps
class LocalGlob(AbstractGlob):
    # Tool-specific CLI state, populated in main() from parsed arguments.
    g_file = ""  # path of the input list file (one source-map path per line)
    g_outfile = ""  # path the concatenated source map is written to
glob = LocalGlob()  # module-level singleton shared by main()
def main():
    """Parse CLI arguments, read the list of source-map files, concatenate
    them with concat_smaps(), and write the resulting JSON to the outfile.

    Returns 0 on success, -1 when no input file was given.
    """
    cparse = argparse.ArgumentParser(add_help=False)
    glob.add_args(cparse)
    cparse.add_argument("--help", action="help", help="Print this message")

    args = cparse.parse_args()
    glob.parse_args(cparse, args)
    glob.g_outfile = args.outfile
    glob.g_file = args.infile

    if args.infile is None:
        print("js_smcat.py: no input files")
        return -1

    # The input file lists one source-map path per line; normalize each
    # entry to an absolute path.
    with open(args.infile, "r") as f:
        files = [os.path.abspath(os.path.normpath(line.strip()))
                 for line in f.readlines()]

    ret = json.dumps(concat_smaps(files))
    with open(args.outfile, "w") as f:
        f.write(ret)
    return 0
if __name__ == "__main__":
    # traceback is already imported at module level; no local re-import needed.
    try:
        ret = main()
    except SystemExit as e:
        # argparse exits via SystemExit (e.g. for --help); preserve its code
        # instead of always reporting failure.
        ret = e.code if isinstance(e.code, int) else -1
    except Exception:
        # Print the exception's own traceback (print_stack() would show the
        # handler's stack, not the failure's).
        traceback.print_exc()
        ret = -1
    sys.exit(ret)
|
In our age full of digital color photos, the simplest way to give a picture a strong contrast is through colors. When seeking high-contrast colors, you can take advantage of the primary color diagram, where complementary colors are located opposite each other. So for example the color that contrasts with red is sky blue.
A basic diagram of primary and complementary colors. This is a great tool to have around when you’re seeking high-contrast colors.
The red fern leaf contrasts well with the green leaves.
The abstract subject (the reflection of the yellow-orange building) contrasts with the cool shade of the car hood.
One very striking kind of contrast to use is when a colorful subject stands against a monochrome background. This kind of contrast is relatively difficult to find, and so sometimes selective desaturation in a photo editor is used to achieve it. And in fact, you can even find this kind of function in most of the digital cameras sold today.
But even though selective desaturation is fairly popular among amateurs, advanced photographers avoid it. This technique was popular in the 1990s, but today it’s practically taboo among professional photographers. But a colorful subject located naturally, in real life, against a monochrome background really is eye-pleasing. And actually, that’s also why gray backgrounds in studios are so universal.
An example of a colorful subject against a monochrome background (the sepia tinting was added in post-production).
The striking color contrast of the rusty steel piping and the chipped concrete wall makes for an interesting abstract composition.
But wait, what if you’re doing monochromatic (black-and-white) photography? There’s still a great kind of contrast to go after here, and that’s tonal contrast. After you’ve desaturated a color photo, its different colors no longer contrast—but their shades still do. Photographers who worked with actual black-and-white film had a keen ability to see the world tonally and were able to guess which objects would be dark in a final picture, and which ones light. They altered the tonalities of the things they photographed using color filters placed in front of the lens.
In the digital era monochrome pictures are, paradoxically, easier than ever to make. The ideal route here is to shoot to RAW and use a monochrome preview. That gives you a full-color image, while still showing the desaturated version of it on the camera display. You can later go on and apply a variety of digital color filters to the color RAW file to very precisely alter the tonality of individual elements in the picture.
Tonal contrast in a color photo. This photo is strongly underexposed, and so the illuminated clock really stands out against its dark background.
A graphic composition that makes use of tonal contrast—a bright subject on a dark background.
Photographs containing contrasts of meaning are very popular and can often be amusing—or can force you to think about unexpected connections. One frequent contrast of meaning is old vs. new. You’ll find it often in any city rich in history. You’ll also find it disguised as smooth (clean) vs. beaten-up. Fashion photographs of beautiful models and clothing in an industrial milieu are a good example of this. Some other good examples of contrasts of meaning are civilization in the wild and elements of nature in urban areas.
I enhanced the contrast of meaning here (baroque-era statues looking at a modern building) by converting the photo to black-and-white and applying selective coloring (warm vs. cool colors).
A classic contrast of old and new construction. Many such examples can be found in any city that’s rich in history.
Photos containing contrasts of meaning are more interesting for your audience, because these contrasts force them to think about the photo. These photos can contain an unexpected message or a joke. Tonal and color contrasts, meanwhile, make for pictures that are pleasing to the eye. These three types of contrast are not the only ones that exist, so think up and use your own types of contrast. Photographs containing contrasting shapes, for example, are also interesting!
|
#!/usr/bin/env python
import json
import sys
import os
from bottle import route, run, get
import time
import httplib
server = "127.0.0.1"
statport = "18082"  # port this script's own /status endpoint listens on
host = "%s:18001" % server  # monitoring daemon accepting check registrations
staturl = "http://%s:%s/status" % (server,statport)
# Register this script's /status URL as a named check with the daemon.
blob = {"id": "foo", "url": staturl}
data = json.dumps(blob)
connection = httplib.HTTPConnection(host)
connection.request('POST', '/checks', data)
result = connection.getresponse()
print "RESULT: %s - %s" % (result.status, result.reason)
def usage():
    # Show the accepted command-line status values for this script.
    sys.stdout.write("%s [status: OK,Unknown,Warning,Critical]\n" % (sys.argv[0]))
# Canned status messages keyed by the status string given on the command line.
msgs = {
    "OK": "Everything is groovy!",
    "Unknown": "Unknown error!",
    "Warning": "Houston, I think we have a problem.",
    "Critical": "Danger Will Rogers! Danger!"
}
t = len(sys.argv)
if t < 2:
    usage()
    sys.exit(1)
else:
    statusm = sys.argv[1]
# NOTE: `t` is reused here for the timestamp struct after the argv check.
t = time.localtime()
ts = time.strftime('%Y-%m-%dT%H:%M:%S%Z', t)
rootdir = "./"
# Change working directory so relative paths (and template lookup) work again
root = os.path.join(os.path.dirname(__file__))
sys.path.insert(0, root)
# generate nested python dictionaries, copied from here:
# http://stackoverflow.com/questions/635483/what-is-the-best-way-to-implement-nested-dictionaries-in-python
class AutoVivification(dict):
    """Dict that creates nested dicts of its own type on missing-key access,
    mimicking Perl's autovivification."""
    def __missing__(self, key):
        # dict.__getitem__ calls this hook on a miss; store and return a
        # fresh nested AutoVivification so chained indexing always works.
        child = self[key] = type(self)()
        return child
@get('/status')
def status():
    """Serve the health-check document read by the monitoring daemon."""
    payload = AutoVivification()
    payload['id'] = "bar"
    payload['status'] = statusm
    payload['date'] = ts
    payload['message'] = msgs[statusm]
    payload['version'] = "1.0.0"
    return payload
run(host='localhost', port=statport, debug=True)
|
Today was the toughest workout yet on my current marathon training schedule. The plan called for 12 miles at sub 7:36 pace, which is actually faster than my previous half marathon PR from 2 years ago. Yes, it has been indeed two years since my last half marathon. I have been running at least one ultra marathon per month over the last two years, but road half marathons and marathons have been few and far between.
I put off the session until after work. I really didn't have much confidence in my ability to hold this pace. I hadn't worked on speed until the last 3 weeks and even though I broke my 5K PR, I felt I was a long way from my sub 3:15 marathon training goal pace. However, I didn't have a choice, there was no way to skip this one. At 5:15 PM I left my house with the goal to be back in 90 minutes. The weather was getting cooler by the minute but I knew I would be huffing and puffing very shortly, so I went with shorts and a short sleeve shirt. Turns out that was definitely the right decision. As soon as I hit the start button on my Garmin, I took off running. I didn't want to start off in a hole.
After one mile, I was right at 7:30 pace. I started my first loop around the UAH campus determined to get the 4 loops done as required, but bartering with myself that I could always cut it short if I felt tired or cold or "insert any reasonable or unreasonable excuse here". I continued to run and keep the pace as I entered my second loop. I kept doing the math. Alright, after this loop I would be halfway done, and so on.
I completed loop 3 without any problems and as I started loop 4, I actually felt pretty good. Don't get me wrong, I was tired, but I still had plenty of leg left. I was still in control of my breathing (although it had gotten just a little shorter) and my legs felt absolutely fine. So I decided to pick it up ever so slightly to see what would happen. When I made my final turn onto my street, I had lowered my average pace to around 7:26, still feeling good. When I finally arrived at my house, I had completed 11.5 miles in 1 hour 25 minutes, almost 15 seconds faster per mile than my current half marathon PR. With the Huntsville Half marathon on my training schedule for next weekend, I am now starting to believe that I might actually be capable of running the training plan required 6:58-7:18 pace (obviously at the slower end of this scale). Hopefully that's not too ambitious, but you gotta have goals, right?
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorflow/core/framework/tensor_description.proto
import sys
# _b() normalizes the serialized descriptor literal below to bytes on both
# Python 2 (latin1-encode) and Python 3 (identity on the str literal).
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorflow.core.framework import types_pb2 as tensorflow_dot_core_dot_framework_dot_types__pb2
from tensorflow.core.framework import tensor_shape_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2
from tensorflow.core.framework import allocation_description_pb2 as tensorflow_dot_core_dot_framework_dot_allocation__description__pb2
# File descriptor carrying the serialized tensor_description.proto schema.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='tensorflow/core/framework/tensor_description.proto',
  package='tensorflow',
  syntax='proto3',
  serialized_pb=_b('\n2tensorflow/core/framework/tensor_description.proto\x12\ntensorflow\x1a%tensorflow/core/framework/types.proto\x1a,tensorflow/core/framework/tensor_shape.proto\x1a\x36tensorflow/core/framework/allocation_description.proto\"\xa8\x01\n\x11TensorDescription\x12#\n\x05\x64type\x18\x01 \x01(\x0e\x32\x14.tensorflow.DataType\x12+\n\x05shape\x18\x02 \x01(\x0b\x32\x1c.tensorflow.TensorShapeProto\x12\x41\n\x16\x61llocation_description\x18\x04 \x01(\x0b\x32!.tensorflow.AllocationDescriptionB8\n\x18org.tensorflow.frameworkB\x17TensorDescriptionProtosP\x01\xf8\x01\x01\x62\x06proto3')
  ,
  dependencies=[tensorflow_dot_core_dot_framework_dot_types__pb2.DESCRIPTOR,tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2.DESCRIPTOR,tensorflow_dot_core_dot_framework_dot_allocation__description__pb2.DESCRIPTOR,])
# Message descriptor for tensorflow.TensorDescription (dtype, shape,
# allocation_description fields).
_TENSORDESCRIPTION = _descriptor.Descriptor(
  name='TensorDescription',
  full_name='tensorflow.TensorDescription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='dtype', full_name='tensorflow.TensorDescription.dtype', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='shape', full_name='tensorflow.TensorDescription.shape', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='allocation_description', full_name='tensorflow.TensorDescription.allocation_description', index=2,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=208,
  serialized_end=376,
)
# Link cross-file field types now that the dependency descriptors exist.
_TENSORDESCRIPTION.fields_by_name['dtype'].enum_type = tensorflow_dot_core_dot_framework_dot_types__pb2._DATATYPE
_TENSORDESCRIPTION.fields_by_name['shape'].message_type = tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2._TENSORSHAPEPROTO
_TENSORDESCRIPTION.fields_by_name['allocation_description'].message_type = tensorflow_dot_core_dot_framework_dot_allocation__description__pb2._ALLOCATIONDESCRIPTION
DESCRIPTOR.message_types_by_name['TensorDescription'] = _TENSORDESCRIPTION
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Concrete message class is synthesized by the reflection metaclass.
TensorDescription = _reflection.GeneratedProtocolMessageType('TensorDescription', (_message.Message,), dict(
  DESCRIPTOR = _TENSORDESCRIPTION,
  __module__ = 'tensorflow.core.framework.tensor_description_pb2'
  # @@protoc_insertion_point(class_scope:tensorflow.TensorDescription)
  ))
_sym_db.RegisterMessage(TensorDescription)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030org.tensorflow.frameworkB\027TensorDescriptionProtosP\001\370\001\001'))
try:
  # THESE ELEMENTS WILL BE DEPRECATED.
  # Please use the generated *_pb2_grpc.py files instead.
  import grpc
  from grpc.beta import implementations as beta_implementations
  from grpc.beta import interfaces as beta_interfaces
  from grpc.framework.common import cardinality
  from grpc.framework.interfaces.face import utilities as face_utilities
except ImportError:
  pass
# @@protoc_insertion_point(module_scope)
|
Is it only the 10th of April? It already feels like it's been a really long month. Today, I'm writing about inspiration. I is for Inspiration, but can I digress for a minute or two? I know this is about the A to Z of Nanowrimo, but I have some other inspiration themed stuff to share.
I'll let you into a secret. When I started writing my A to Z Blogging Challenge posts, because I was doing two challenges on two sites, I had a preference. At the start, I preferred writing about my wish list. It was fun and let's be honest, it's basically just me writing about my daydreams. However, my site broke and I was affected by the whole Pipdig drama. If you haven't heard about that, feel free to type the word into a Twitter search and you'll see what's been going on. I had their themes on both my sites and was hosted by them over on Rock Paper Spirit, which in itself was a problem, but then my hosting plan just...broke! Suddenly, it became clear why I love Blogger so much as a blogging platform. It's so easy to use and easy to fix. Suddenly, I felt inspired to write on Blogger far more than I was inspired to write on my self-hosted Wordpress site.
So, that's all I have to say about inspiration for blogging and for this challenge. Let's crack on with the Nanowrimo inspiration.
It can be really hard to get inspired and it can be even harder to stay inspired. We've all been there. You start off all enthusiasm and caffeine-fuelled writing sprints. Then you get to about half-way through the challenge and your enthusiasm wanes. You push yourself to keep going but you find you have nothing to write about. Your story was finished a couple of thousand words ago and how did you ever think you were going to squeeze 50,000 words out of that plot?
I'm currently a creative writing student and one of the first things we learned as part of our degree course was how to get inspired. There were some tips and tricks they taught us, such as freewriting and clustering. Freewriting is something I'd like to write more about in the next few weeks, after this challenge is over, so you might be able to see a link over the word if you're reading it after April. If you haven't heard of this technique before, it's essentially just putting the pen to paper and letting your thoughts run out. You don't worry about grammar or coherence. You just let your imagination flow onto the page. The point is not to write something of publishable standard. The point is to write something. That's pretty much the whole point of Nanowrimo!
The other technique I mentioned is clustering, which again I would like to write about in more detail soon. It's like a spider diagram. You take a word or concept or idea and write it in the centre of the page. You draw lines out from there and put a word that links to that word on the end of each line. Then you do the same for that word. Before long, you're eight legs out on the diagram and you've come up with words, phrases or ideas that have no direct link to the original word and it can draw out some surprising links. If you haven't tried this as a technique for unblocking, I'd give it a go.
How do you deal with lack of inspiration?
Tomorrow I'm going to be writing about J for July, which might sound like it isn't linked to Nanowrimo, but I assure you, it is! Over on Rock Paper Spirit today I'm writing about Interlagos, which is something that will probably forever stay on the wish list since it's impossible for me to go there. Head over to find out why.
I have to say that I have an uncanny feeling I came upon exactly what I needed.
|
from django.core.management.base import BaseCommand
from faceDB.face_db import FaceDB
from faceDB.face import FaceCluster
from faceDB.util import * # only required for saving cluster images
from carnie_helper import RudeCarnie
from query.models import *
import random
import json
class Command(BaseCommand):
    help = 'Find genders for all the detected faces'

    def add_arguments(self, parser):
        # Path to a text file listing one video path per line.
        parser.add_argument('path')

    def handle_video(self, path, rude_carnie):
        """Classify and persist gender for every unlabeled face of one video.

        Faces with gender '0' (not yet classified) are run through the
        RudeCarnie gender model in a single batch, then bulk-updated.
        """
        # Use print() consistently: the original mixed the Python-2-only
        # statement form (`print path`) with the function form.
        print(path)
        video = Video.objects.filter(path=path).get()
        labelset = video.detected_labelset()
        faces = Face.objects.filter(frame__labelset=labelset).all()
        # Gender '0' marks faces that have not been classified yet.
        faces = [f for f in faces if f.gender == '0']
        print(len(faces))
        if not faces:
            return
        # Thumbnails were written as <labelset_id>_<face_id>.png by the
        # detection step -- TODO confirm against the thumbnail writer.
        imgs = ['./assets/thumbnails/{}_{}.png'.format(labelset.id, f.id) for f in faces]
        male_ids = []
        female_ids = []
        genders = rude_carnie.get_gender_batch(imgs)
        for face, gender in zip(faces, genders):
            if gender[0] == 'M':
                male_ids.append(face.id)
            elif gender[0] == 'F':
                female_ids.append(face.id)
        # Two bulk updates instead of saving each Face row individually.
        Face.objects.filter(id__in=male_ids).update(gender='M')
        Face.objects.filter(id__in=female_ids).update(gender='F')

    def handle(self, *args, **options):
        """Entry point: run gender classification for every listed video."""
        with open(options['path']) as f:
            paths = [s.strip() for s in f.readlines()]
        model_dir = '/app/deps/rude-carnie/inception_gender_checkpoint'
        rc = RudeCarnie(model_dir=model_dir)
        for path in paths:
            self.handle_video(path, rc)
|
Hypnosis (hip NO sis) is a state of increased awareness. It happens when a person intensely focuses his or her attention. Everyone moves into and out of this intense focus daily. Focus allows us to do things like read a book, play a video game, engage in imaginative play, play sports or watch a good show on TV. Hypnosis is a way of using imagination to help the mind and body communicate.
Hypnosis is a tool that can help you get control over how you think, feel, and behave. A trained health care professional can help you learn to use hypnosis to help with your problems. It lets you be more open to suggestion to solve problems (but you will always still be in charge of yourself). Hypnosis is a therapy that is used with or instead of traditional therapies.
Achieve behavior changes, such as improving sleep, easing fear of procedures, and reducing stress.
Help you come up with creative ways to achieve your goals.
You will learn to use breathing skills and your imagination, and to relax your body. This will help you to gain control over your mind and body to reach your goals.
With practice, you will be able to do self-hypnosis. You can use this skill as you need it: for instance, to be less anxious before a procedure or when you have increased pain.
|
# Copyright (C) 2016 Antoine Carme <Antoine.Carme@Laposte.net>
# All rights reserved.
# This file is part of the Python Automatic Forecasting (PyAF) library and is made available under
# the terms of the 3 Clause BSD license
import pandas as pd
import numpy as np
from . import SignalDecomposition as sigdec
from . import Perf as tsperf
from . import Utils as tsutil
class cPredictionIntervalsEstimator:
    """Estimates per-horizon forecast error statistics for a fitted model.

    For each horizon step 1..mHorizon, performance objects (cPerf) are
    computed separately on the fit, forecast and test splits; these are
    later used to derive prediction intervals around forecasts.
    """
    def __init__(self):
        self.mModel = None;
        self.mSignalFrame = pd.DataFrame()
        self.mHorizon = -1;
        # Per-horizon cPerf objects keyed by "<signal>_Forecast_<h>".
        self.mFitPerformances = {}
        self.mForecastPerformances = {}
        self.mTestPerformances = {}
    def computePerformances(self):
        """Compute fit/forecast/test performances for every horizon step.

        Forecasts are produced iteratively: step h's forecast column is fed
        back as the signal for step h+1 (multi-step-ahead by recursion).
        """
        self.mTime = self.mModel.mTime;
        self.mSignal = self.mModel.mOriginalSignal;
        self.mHorizon = self.mModel.mTimeInfo.mHorizon;
        lTimeColumn = self.mTime;
        lSignalColumn = self.mSignal;
        lForecastColumn = str(self.mSignal) + "_Forecast";
        df = self.mModel.mTrend.mSignalFrame.reset_index();
        N = df.shape[0];
        # Ground-truth splits of the original signal, compared against below.
        (lOriginalFit, lOriginalForecast, lOriginalTest) = self.mModel.mTimeInfo.mSplit.cutFrame(df);
        df1 = df;
        for h in range(0 , self.mHorizon):
            df2 = None;
            df2 = self.mModel.forecastOneStepAhead(df1, horizon_index = h+1, perf_mode = True);
            # Truncate back to the original row count before splitting.
            df2 = df2.head(N);
            lHorizonName = lForecastColumn + "_" + str(h + 1);
            (lFrameFit, lFrameForecast, lFrameTest) = self.mModel.mTimeInfo.mSplit.cutFrame(df2);
            self.mFitPerformances[lHorizonName] = tsperf.cPerf();
            self.mFitPerformances[lHorizonName].compute(lOriginalFit[lSignalColumn], lFrameFit[lForecastColumn], lHorizonName);
            self.mForecastPerformances[lHorizonName] = tsperf.cPerf();
            self.mForecastPerformances[lHorizonName].compute(lOriginalForecast[lSignalColumn], lFrameForecast[lForecastColumn], lHorizonName);
            self.mTestPerformances[lHorizonName] = tsperf.cPerf();
            # The test split can be empty (no test data configured).
            if(lOriginalTest.shape[0] > 0):
                self.mTestPerformances[lHorizonName].compute(lOriginalTest[lSignalColumn], lFrameTest[lForecastColumn], lHorizonName);
            # Feed this step's forecast back in as the next step's signal:
            # the forecast column is renamed to the signal column name.
            df1 = df2[[lTimeColumn , lForecastColumn,
                       self.mModel.mTimeInfo.mRowNumberColumn,
                       self.mModel.mTimeInfo.mNormalizedTimeColumn]];
            df1.columns = [lTimeColumn , lSignalColumn, self.mModel.mTimeInfo.mRowNumberColumn,
                           self.mModel.mTimeInfo.mNormalizedTimeColumn]
        # self.dump_detailed();
    def dump_detailed(self):
        """Log L2 and MAPE per horizon for fit, forecast and test splits."""
        logger = tsutil.get_pyaf_logger();
        lForecastColumn = str(self.mSignal) + "_Forecast";
        for h in range(0 , self.mHorizon):
            lHorizonName = lForecastColumn + "_" + str(h + 1);
            hn = lHorizonName;
            logger.info("CONFIDENCE_INTERVAL_DUMP_FIT " +str(hn) + " " + str(self.mFitPerformances[hn].mL2) + " " + str(self.mFitPerformances[hn].mMAPE));
            logger.info("CONFIDENCE_INTERVAL_DUMP_FORECAST " +str(hn) + " " + str(self.mForecastPerformances[hn].mL2) + " " + str(self.mForecastPerformances[hn].mMAPE));
            logger.info("CONFIDENCE_INTERVAL_DUMP_TEST " +str(hn) + " " + str(self.mTestPerformances[hn].mL2) + " " + str(self.mTestPerformances[hn].mMAPE));
    def dump(self):
        """Log only the forecast-split L2 per horizon (compact variant)."""
        logger = tsutil.get_pyaf_logger();
        lForecastColumn = str(self.mSignal) + "_Forecast";
        for h in range(0 , self.mHorizon):
            lHorizonName = lForecastColumn + "_" + str(h + 1);
            hn = lHorizonName;
            logger.info("CONFIDENCE_INTERVAL_DUMP_FORECAST " + str(hn) + " " + str(self.mForecastPerformances[hn].mL2));
|
As one of Europe’s leading banks, UniCredit needs to provide competitive, innovative yet reliable products to their clients. With SEPA regulations in place, certain new problems surfaced. UniCredit was challenged by the fact that customers were not always providing BIC codes and in some cases BIC codes provided were incorrect. UniCredit aimed to provide a service that allowed customers to automatically retrieve and check BIC codes from any given IBAN.
|
#!/usr/bin/python3
# generate a 20x20 matrix
import os
def collectimps(Mods, Edges, maxbudlevel):
    """Collect the per-index improvement values into impoverss.dat.

    For each budget level 1..maxbudlevel, reads every result file named
    '<mod>_<edge>_AvgOverIndex.txt' (one per (Mods[i], Edges[i]) pair) and
    writes one '<level>\\t<index>\\t<imp>' record per index, with a blank
    line between levels (gnuplot-friendly layout).

    Generalized from the original hard-coded 20 indices: now handles any
    number of (mod, edge) pairs, i.e. len(Mods) files per level.
    """
    filedir = './'
    outfilename = os.path.join('./', 'impoverss.dat')
    with open(outfilename, 'w') as outfile:
        for level in range(1, maxbudlevel + 1):
            impoverssallindex = []
            # Collect the imp value of each (mod, edge) index at this level.
            for mod, edge in zip(Mods, Edges):
                filename = os.path.join(filedir,
                                        '{}_{}_AvgOverIndex.txt'.format(mod, edge))
                with open(filename, 'r') as imps:
                    for line in imps:
                        items = line.split()
                        # Skip blank and header lines (the original indexed
                        # items[0] before checking emptiness -> IndexError).
                        if not items or not items[0].isdigit():
                            continue
                        # Column 0 is the budget level, column 2 the imp value.
                        if int(items[0]) == level:
                            impoverssallindex.append(float(items[2]))
            for idx, imp in enumerate(impoverssallindex):
                outfile.write('%d\t%d\t%.2f\n' % (level, idx, imp))
            outfile.write('\n')
if __name__=='__main__':
    # Module counts and edge counts of the 20 benchmark graphs, index-aligned:
    # graph i has Mods[i] modules and Edges[i] edges.
    Mods = [5, 10, 15, 20, 25, 30, 35, 40, 45, 50,
            55, 60, 65, 70, 75, 80, 85, 90, 95, 100]
    Edges = [6, 15, 60, 80, 200, 300, 500, 500, 580, 500,
            800, 900, 950, 950, 1000, 1200, 1200, 1600, 1600, 2000]
    # 20 bud levels
    collectimps(Mods, Edges, 20)
|
Need more information about booking Home 7328284 ?
Dar Jnane is a beautiful 17th century traditional house located in the heart of the Fez Medina – built around a lovely courtyard filled with light, in a very calm neighborhood. Dar Jnane was restored by the best craftsmen of Fez, under the supervision of the restoration company Restore Plus. The restoration was completed with traditional materials such as mosaic (zellige), natural cedar wood, plaster, and simple hand-made furniture. Dar Jnane’s terrace has an amazing panoramic view to the south and east of Fes. With three double bedrooms with three bathrooms, Dar Jnane is air-conditioned and can accommodate six people comfortably, or eight with the extra single bed and an additional banquette in the downstairs salon or upstairs bedroom. Dar Jnane also has a lovely kitchen on the courtyard level; it is a very calm house and highly recommended for families.
-Do not bring other guests into the house without the agreement of the owner.
You can get in touch with Hafid (the owner) through the contact form on this page.
Send Hafid (the owner) an email through the contact form on this page. Alternatively, if there’s a phone number on this page, you can call them.
How do I contact Hafid (the owner)?
If you’ve already booked, go to Manage your holiday booking to view Hafid the owner's contact details or send them a message.
Or send Hafid (the owner) an enquiry through the Contact the owner/manager form if you’d like a quote or more info first.
When you pay online through the Book your stay form, a booking request is sent to Hafid (the owner). You’ll find out within 24 hours whether they’ve accepted - no money will leave your account before then.
You can also request a quote from Hafid (the owner) through the contact form. The owner will email you the total cost for your dates and a link to book and pay online.
Use the Book your stay form to pay provisionally online. You can pay by credit/debit card or PayPal. Hafid (the owner) will have 24 hours to accept the booking and no money will leave your account before then.
Why can't I call Hafid (the owner) before booking?
You’ll need to contact Hafid (the owner) to do this. Send them a message in Manage your holiday booking or through the contact form on this page.
You can request to change the dates or number of people on your booking via the Manage your holiday booking page. To change your holiday dates use the Edit booking button. If you need to change the number of guests staying then send Hafid (the owner) a message.
If Hafid (the owner) agrees, they’ll send you a revised quote outlining the changes. You can review and accept/decline this from the Manage your holiday booking page. You’ll have 48 hours to take action before the revised quote expires.
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Copyright 2005 Dan Williams <dcbw@redhat.com> and Red Hat, Inc.
import os, sys
from OpenSSL import SSL
import SSLConnection
import httplib
import socket
import SocketServer
def our_verify(connection, x509, errNum, errDepth, preverifyOK):
    """OpenSSL certificate-verification callback: defer to the library.

    By the time this callback fires, OpenSSL has already validated the
    peer certificate against the configured CA chain; ``preverifyOK``
    carries that verdict, so we simply pass it through unchanged.
    """
    return preverifyOK
def CreateSSLContext(certs):
    """Build a pyOpenSSL SSL.Context for mutually-authenticated TLS.

    certs is a dict with keys:
      key_and_cert -- PEM file holding both our private key and certificate
      ca_cert      -- CA certificate advertised to connecting clients
      peer_ca_cert -- CA used to verify the peer's certificate
    Raises StandardError if any configured file is unreadable.
    """
    key_and_cert = certs['key_and_cert']
    ca_cert = certs['ca_cert']
    peer_ca_cert = certs['peer_ca_cert']
    # Fail early with a readable error instead of an opaque OpenSSL one.
    for f in key_and_cert, ca_cert, peer_ca_cert:
        if f and not os.access(f, os.R_OK):
            raise StandardError, "%s does not exist or is not readable" % f
    ctx = SSL.Context(SSL.SSLv23_METHOD)  # negotiates best available TLS method
    ctx.use_certificate_file(key_and_cert)
    ctx.use_privatekey_file(key_and_cert)
    ctx.load_client_ca(ca_cert)
    ctx.load_verify_locations(peer_ca_cert)
    # Require a peer certificate and abort the handshake if none is sent.
    verify = SSL.VERIFY_PEER | SSL.VERIFY_FAIL_IF_NO_PEER_CERT
    ctx.set_verify(verify, our_verify)
    ctx.set_verify_depth(10)
    ctx.set_options(SSL.OP_NO_SSLv3 | SSL.OP_NO_SSLv2)  # disable SSLv2 and SSLv3
    return ctx
class PlgBaseServer(SocketServer.ThreadingTCPServer):
    """Threading TCP server with a cooperative stop() switch."""

    allow_reuse_address = 1

    def __init__(self, server_addr, req_handler):
        # Set by stop(); polled between requests in serve_forever().
        self._quit = False
        self.allow_reuse_address = 1
        SocketServer.ThreadingTCPServer.__init__(self, server_addr, req_handler)

    def stop(self):
        """Ask serve_forever() to exit after the current request."""
        self._quit = True

    def serve_forever(self):
        """Handle one request at a time until stop() is called."""
        while True:
            if self._quit:
                break
            self.handle_request()
        self.server_close()
class PlgBaseSSLServer(PlgBaseServer):
    """ SSL-enabled variant """
    def __init__(self, server_address, req_handler, certs, timeout=None):
        # certs: dict of certificate paths, see CreateSSLContext().
        self._timeout = timeout
        self.ssl_ctx = CreateSSLContext(certs)
        # Let the base class initialise request handling first; we then
        # replace the plain socket it created with an SSL-wrapped one and
        # repeat the bind/activate steps on the new socket.
        PlgBaseServer.__init__(self, server_address, req_handler)
        sock = socket.socket(self.address_family, self.socket_type)
        con = SSL.Connection(self.ssl_ctx, sock)
        self.socket = SSLConnection.SSLConnection(con)
        if sys.version_info[:3] >= (2, 3, 0):
            # socket timeouts only exist from Python 2.3 onwards
            self.socket.settimeout(self._timeout)
        self.server_bind()
        self.server_activate()
        host, port = self.socket.getsockname()[:2]
        self.server_name = socket.getfqdn(host)
        self.server_port = port
class PlgHTTPSConnection(httplib.HTTPConnection):
    "This class allows communication via SSL."
    response_class = httplib.HTTPResponse
    def __init__(self, host, port=None, ssl_context=None, strict=None, timeout=None):
        # ssl_context: a pyOpenSSL SSL.Context used to wrap the socket.
        httplib.HTTPConnection.__init__(self, host, port, strict)
        self.ssl_ctx = ssl_context
        self._timeout = timeout
    def connect(self):
        """Connect to the configured host/port over SSL.

        Tries each address returned by getaddrinfo in turn; the first
        successful connection wins.  Raises socket.error if every
        candidate address fails.
        """
        for res in socket.getaddrinfo(self.host, self.port, 0, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            try:
                sock = socket.socket(af, socktype, proto)
                con = SSL.Connection(self.ssl_ctx, sock)
                self.sock = SSLConnection.SSLConnection(con)
                if sys.version_info[:3] >= (2, 3, 0):
                    # settimeout() is only available from Python 2.3
                    self.sock.settimeout(self._timeout)
                self.sock.connect(sa)
                if self.debuglevel > 0:
                    print "connect: (%s, %s) [ssl]" % (self.host, self.port)
            except socket.error, msg:
                # This address failed; clean up and try the next one.
                if self.debuglevel > 0:
                    print 'connect fail:', (self.host, self.port)
                if self.sock:
                    self.sock.close()
                self.sock = None
                continue
            break
        else:
            # Loop exhausted without a successful connection.
            raise socket.error, "failed to connect"
class PlgHTTPS(httplib.HTTP):
    """HTTPS counterpart to the legacy httplib.HTTP interface.

    Python 1.5.2 had no HTTPS class, but its request-sending interface
    works for https as well, so this reuses it with an SSL-capable
    connection class.
    """

    _http_vsn = 11
    _http_vsn_str = 'HTTP/1.1'
    _connection_class = PlgHTTPSConnection

    def __init__(self, host='', port=None, ssl_context=None, strict=None, timeout=None):
        conn = self._connection_class(host, port, ssl_context, strict, timeout)
        self._setup(conn)
|
that honeysuckle has been spreading rapidly in wooded and fringe areas.
Many farm fields that have been abandoned are filling up with this plant.
It has often been planted as a wildlife cover plant. In some areas it is being viewed as an invasive pest.
bloomed heavily it was ignored by the bees who were busy on the honeysuckle.
not buying it in from who knows where.
|
import os
import logging

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# Prefer the DJANGO_SECRET_KEY environment variable; the hard-coded
# value remains only as a development fallback so existing deployments
# keep working unchanged.
SECRET_KEY = os.environ.get(
    'DJANGO_SECRET_KEY',
    '#l*-2%5rhe+p5@=%!kq*)n7$2sho*yx$$9)c1e(l0%(ohmrxsc')

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'bootstrap3',
    'engine',
]

MIDDLEWARE_CLASSES = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'scoring_engine.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'scoring_engine.wsgi.application'

# Suppress noisy request loggers
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)

# Logging config: the 'engine' app logs DEBUG to debug.log and INFO to
# the console, but only while DEBUG is on (require_debug_true filter).
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'verbose': {
            'format': "[%(asctime)s] %(levelname)s [%(module)s] %(message)s",
            'datefmt': "%d/%b/%Y %H:%M:%S",
        },
    },
    'filters': {
        'require_debug_true': {
            '()': 'django.utils.log.RequireDebugTrue',
        },
    },
    'handlers': {
        'file': {
            'level': 'DEBUG',
            'class': 'logging.FileHandler',
            'filename': 'debug.log',
            'formatter': 'verbose',
        },
        'console': {
            'level': 'INFO',
            'class': 'logging.StreamHandler',
            'formatter': 'verbose',
        }
    },
    'loggers': {
        'engine': {
            'handlers': ['console', 'file'],
            'filters': ['require_debug_true'],
            'level': 'DEBUG',
            'propagate': True,
        },
    },
}

# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Override the default User model
AUTH_USER_MODEL = 'engine.User'

# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'

# Authentication
LOGIN_URL = '/login/'
LOGIN_REDIRECT_URL = '/'
|
Thank you for your interest in World of Keely! You can follow me or like me on any of my social media, links can be found in the menu above. If you would like to contact me, you can email me at worldofkeely@gmail.com.
|
#!/usr/bin/python
#
# vim: tabstop=4 expandtab shiftwidth=4 noautoindent
#
# webcgi.py - CGI file for storing Mimix messages in a Pool
#
# Copyright (C) 2014 Steve Crook <steve@mixmin.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTIBILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
import cgi
import cgitb
import sys
import os
from Crypto import Random
# CGI entry point: read a submitted Mimix message and store it in the
# inbound pool under a fresh random filename.
cgitb.enable()
print "Content-type:text/html\r\n\r\n"
form = cgi.FieldStorage()
content = form.getvalue('mimix')
if content is None:
    # No message field submitted; nothing to store.
    sys.exit(0)
if '-----BEGIN MIMIX MESSAGE-----' in content:
    # Pick a random pool filename that is not already in use.
    while True:
        fn = os.path.join('/home/crooks/mimix/inbound_pool',
                          'm' + Random.new().read(4).encode('hex'))
        if not os.path.isfile(fn):
            break
    with open(fn, 'w') as f:
        f.write(content)
|
In endodontics, one of the most crucial problems that can result in the loss of teeth is the periapical lesion. In the development of periapical lesions in periapical tissues, not only the presence of infected pulp but also toxins, metabolic products, chemical agents, mechanical irritation, foreign substances, trauma and the host defense against existing microorganisms play a role. Radiographic findings, clinical oral examination and differential diagnosis are used in the diagnosis of periapical lesions. Even though endodontic treatment combined with apical surgery can be an option for treating large periapical lesions, conventional root canal treatment should be the first choice, combined with long-term follow-up after treatment. The aim of this case report is to present the complete clinical and radiological healing of a tooth associated with a large cyst-like periradicular lesion treated with root canal treatment alone, without the need for surgical treatment.
Keywords: Healing, large periapical lesion, non-surgical endodontic treatment.
Anahtar Kelimeler: İyileşme, büyük periapikal lezyon, cerrahi olmayan endodontik tedavi.
|
#!/home/paulk/software/bin/python
from sys import argv,stderr,stdout
import argparse
# Replace one column of a TSV file with mapped identifiers, writing the
# result to stdout or to --outfile.  Rows whose key is missing from the
# map are dropped and counted.
parser = argparse.ArgumentParser(description="Script to replace a column in a file with another identifier given a map between identifiers.")
parser.add_argument('infile',help="the file whose column is to be swapped")
parser.add_argument('-m','--map',help="the map of current to required identifiers")
parser.add_argument('-c','--column',type=int,default=0,help="the 0-based index of the column to be swapped")
parser.add_argument('-o','--outfile',help="outfile; optional [default: stdout]")
args = parser.parse_args()
mapfile = args.map
colno = args.column
infile = args.infile
outfile = args.outfile
# Load the identifier map: first column -> second column.
names = dict()
f = open(mapfile)
for row in f:
    l = row.strip().split('\t')
    names[l[0]] = l[1]
f.close()
count = 0
f = open(infile)
if outfile: g = open(outfile,'w')
else: g = stdout
for row in f:
    l = row.strip().split('\t')
    try:
        # print names[l[0]]+"\t"+"\t".join(l[1:])
        # Substitute the mapped identifier at position colno.
        print >> g,"\t".join(l[:colno]+[names[l[colno]]]+l[colno+1:])
    except KeyError:
        # Identifier not in the map: skip the row, count the miss.
        count += 1
f.close()
if outfile: g.close()
print >> stderr,"missing %s" % count
|
The video signal and the quality of picture can be tested.
It provides basic operational tests for PTZ products; functions include pan/tilt, zoom in/out, preset setting and operation, speed adjustment, etc. It supports multiple protocols and baud rates, communicating via RS-232, RS-422 simplex and RS-485 ports. RS-485 protocols include: Pelco D, Pelco P, Samsung, Panasonic and Molynx. Additional protocols may be added per customer request. Baud rates include: 2400, 4800, 9600 and 19200.
The wiring condition (disconnected, short of UTP cable) can be tested and show in the screen clearly.
It can output green, white, black and blue screens to allow a technician to inspect a video monitor or DVR. The generated signal supports either PAL or NTSC format (not both simultaneously).
It can test the RS485 data sent from controlling device, display the hexadecimal data content for engineer to analyze.
|
# Copyright (c) 2013 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Scheduler Utils
"""
import mock
import six
from nova.compute import flavors
from nova.compute import utils as compute_utils
from nova import exception
from nova import objects
from nova import rpc
from nova.scheduler import utils as scheduler_utils
from nova import test
from nova.tests.unit import fake_instance
from nova.tests.unit.objects import test_flavor
from nova.tests import uuidsentinel as uuids
class SchedulerUtilsTestCase(test.NoDBTestCase):
    """Test case for scheduler utils methods."""

    def setUp(self):
        super(SchedulerUtilsTestCase, self).setUp()
        # The utils under test only pass the context through, so a
        # plain string stand-in is sufficient here.
        self.context = 'fake-context'

    def test_build_request_spec_without_image(self):
        # With no image supplied, build_request_spec must default the
        # image entry to an empty dict.
        instance = {'uuid': uuids.instance}
        instance_type = objects.Flavor(**test_flavor.fake_flavor)
        with mock.patch.object(flavors, 'extract_flavor') as mock_extract:
            mock_extract.return_value = instance_type
            request_spec = scheduler_utils.build_request_spec(self.context,
                                                              None,
                                                              [instance])
            mock_extract.assert_called_once_with({'uuid': uuids.instance})
        self.assertEqual({}, request_spec['image'])

    def test_build_request_spec_with_object(self):
        # Instance objects must be converted to plain dicts in the spec.
        instance_type = objects.Flavor()
        instance = fake_instance.fake_instance_obj(self.context)
        with mock.patch.object(instance, 'get_flavor') as mock_get:
            mock_get.return_value = instance_type
            request_spec = scheduler_utils.build_request_spec(self.context,
                                                              None,
                                                              [instance])
            mock_get.assert_called_once_with()
        self.assertIsInstance(request_spec['instance_properties'], dict)

    @mock.patch.object(rpc, 'get_notifier', return_value=mock.Mock())
    @mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
    @mock.patch.object(objects.Instance, 'save')
    def test_set_vm_state_and_notify(self, mock_save, mock_add, mock_get):
        # Verify the instance is saved, a fault is recorded, and an
        # error notification with the expected payload is emitted.
        expected_uuid = uuids.instance
        request_spec = dict(instance_properties=dict(uuid='other-uuid'))
        updates = dict(vm_state='fake-vm-state')
        service = 'fake-service'
        method = 'fake-method'
        exc_info = 'exc_info'
        payload = dict(request_spec=request_spec,
                       instance_properties=request_spec.get(
                           'instance_properties', {}),
                       instance_id=expected_uuid,
                       state='fake-vm-state',
                       method=method,
                       reason=exc_info)
        event_type = '%s.%s' % (service, method)
        scheduler_utils.set_vm_state_and_notify(self.context,
                                                expected_uuid,
                                                service,
                                                method,
                                                updates,
                                                exc_info,
                                                request_spec)
        mock_save.assert_called_once_with()
        mock_add.assert_called_once_with(self.context, mock.ANY,
                                         exc_info, mock.ANY)
        self.assertIsInstance(mock_add.call_args[0][1], objects.Instance)
        self.assertIsInstance(mock_add.call_args[0][3], tuple)
        mock_get.return_value.error.assert_called_once_with(self.context,
                                                            event_type,
                                                            payload)

    def test_build_filter_properties(self):
        # Forced host/node values must be wrapped in single-item lists.
        sched_hints = {'hint': ['over-there']}
        forced_host = 'forced-host1'
        forced_node = 'forced-node1'
        instance_type = objects.Flavor()
        filt_props = scheduler_utils.build_filter_properties(sched_hints,
            forced_host, forced_node, instance_type)
        self.assertEqual(sched_hints, filt_props['scheduler_hints'])
        self.assertEqual([forced_host], filt_props['force_hosts'])
        self.assertEqual([forced_node], filt_props['force_nodes'])
        self.assertEqual(instance_type, filt_props['instance_type'])

    def test_build_filter_properties_no_forced_host_no_force_node(self):
        # Absent forced host/node must not appear in the properties.
        sched_hints = {'hint': ['over-there']}
        forced_host = None
        forced_node = None
        instance_type = objects.Flavor()
        filt_props = scheduler_utils.build_filter_properties(sched_hints,
            forced_host, forced_node, instance_type)
        self.assertEqual(sched_hints, filt_props['scheduler_hints'])
        self.assertEqual(instance_type, filt_props['instance_type'])
        self.assertNotIn('forced_host', filt_props)
        self.assertNotIn('forced_node', filt_props)

    def _test_populate_filter_props(self, host_state_obj=True,
                                    with_retry=True,
                                    force_hosts=None,
                                    force_nodes=None):
        """Drive populate_filter_properties and check retry/limit handling.

        host_state_obj -- pass the host state as an object vs a dict
        with_retry     -- whether filter_properties start with retry info
        force_hosts/force_nodes -- forced scheduling targets, if any
        """
        if force_hosts is None:
            force_hosts = []
        if force_nodes is None:
            force_nodes = []
        # Build initial filter_properties mirroring what the scheduler
        # would hand in for each force/retry combination.
        if with_retry:
            if ((len(force_hosts) == 1 and len(force_nodes) <= 1)
                    or (len(force_nodes) == 1 and len(force_hosts) <= 1)):
                filter_properties = dict(force_hosts=force_hosts,
                                         force_nodes=force_nodes)
            elif len(force_hosts) > 1 or len(force_nodes) > 1:
                filter_properties = dict(retry=dict(hosts=[]),
                                         force_hosts=force_hosts,
                                         force_nodes=force_nodes)
            else:
                filter_properties = dict(retry=dict(hosts=[]))
        else:
            filter_properties = dict()
        if host_state_obj:
            class host_state(object):
                host = 'fake-host'
                nodename = 'fake-node'
                limits = 'fake-limits'
        else:
            host_state = dict(host='fake-host',
                              nodename='fake-node',
                              limits='fake-limits')
        scheduler_utils.populate_filter_properties(filter_properties,
                                                   host_state)
        enable_retry_force_hosts = not force_hosts or len(force_hosts) > 1
        enable_retry_force_nodes = not force_nodes or len(force_nodes) > 1
        if with_retry or enable_retry_force_hosts or enable_retry_force_nodes:
            # So we can check for 2 hosts
            scheduler_utils.populate_filter_properties(filter_properties,
                                                       host_state)
        if force_hosts:
            expected_limits = None
        else:
            expected_limits = 'fake-limits'
        self.assertEqual(expected_limits,
                         filter_properties.get('limits'))
        if (with_retry and enable_retry_force_hosts
                and enable_retry_force_nodes):
            self.assertEqual([['fake-host', 'fake-node'],
                              ['fake-host', 'fake-node']],
                             filter_properties['retry']['hosts'])
        else:
            self.assertNotIn('retry', filter_properties)

    def test_populate_filter_props(self):
        self._test_populate_filter_props()

    def test_populate_filter_props_host_dict(self):
        self._test_populate_filter_props(host_state_obj=False)

    def test_populate_filter_props_no_retry(self):
        self._test_populate_filter_props(with_retry=False)

    def test_populate_filter_props_force_hosts_no_retry(self):
        self._test_populate_filter_props(force_hosts=['force-host'])

    def test_populate_filter_props_force_nodes_no_retry(self):
        self._test_populate_filter_props(force_nodes=['force-node'])

    def test_populate_filter_props_multi_force_hosts_with_retry(self):
        self._test_populate_filter_props(force_hosts=['force-host1',
                                                      'force-host2'])

    def test_populate_filter_props_multi_force_nodes_with_retry(self):
        self._test_populate_filter_props(force_nodes=['force-node1',
                                                      'force-node2'])

    def test_populate_retry_exception_at_max_attempts(self):
        # Once num_attempts reaches the configured maximum, populate_retry
        # must raise and carry the recorded failure reason in its text.
        self.flags(max_attempts=2, group='scheduler')
        msg = 'The exception text was preserved!'
        filter_properties = dict(retry=dict(num_attempts=2, hosts=[],
                                            exc_reason=[msg]))
        nvh = self.assertRaises(exception.MaxRetriesExceeded,
                                scheduler_utils.populate_retry,
                                filter_properties, uuids.instance)
        # make sure 'msg' is a substring of the complete exception text
        self.assertIn(msg, six.text_type(nvh))

    def _check_parse_options(self, opts, sep, converter, expected):
        good = scheduler_utils.parse_options(opts,
                                             sep=sep,
                                             converter=converter)
        for item in expected:
            self.assertIn(item, good)

    def test_parse_options(self):
        # check normal
        self._check_parse_options(['foo=1', 'bar=-2.1'],
                                  '=',
                                  float,
                                  [('foo', 1.0), ('bar', -2.1)])
        # check convert error
        self._check_parse_options(['foo=a1', 'bar=-2.1'],
                                  '=',
                                  float,
                                  [('bar', -2.1)])
        # check separator missing
        self._check_parse_options(['foo', 'bar=-2.1'],
                                  '=',
                                  float,
                                  [('bar', -2.1)])
        # check key missing
        self._check_parse_options(['=5', 'bar=-2.1'],
                                  '=',
                                  float,
                                  [('bar', -2.1)])

    def test_validate_filters_configured(self):
        self.flags(enabled_filters='FakeFilter1,FakeFilter2',
                   group='filter_scheduler')
        self.assertTrue(scheduler_utils.validate_filter('FakeFilter1'))
        self.assertTrue(scheduler_utils.validate_filter('FakeFilter2'))
        self.assertFalse(scheduler_utils.validate_filter('FakeFilter3'))

    def test_validate_weighers_configured(self):
        self.flags(weight_classes=[
            'ServerGroupSoftAntiAffinityWeigher', 'FakeFilter1'],
            group='filter_scheduler')
        self.assertTrue(scheduler_utils.validate_weigher(
            'ServerGroupSoftAntiAffinityWeigher'))
        self.assertTrue(scheduler_utils.validate_weigher('FakeFilter1'))
        self.assertFalse(scheduler_utils.validate_weigher(
            'ServerGroupSoftAffinityWeigher'))

    def test_validate_weighers_configured_all_weighers(self):
        # With the default configuration every weigher validates.
        self.assertTrue(scheduler_utils.validate_weigher(
            'ServerGroupSoftAffinityWeigher'))
        self.assertTrue(scheduler_utils.validate_weigher(
            'ServerGroupSoftAntiAffinityWeigher'))

    def _create_server_group(self, policy='anti-affinity'):
        # Helper: build an InstanceGroup with one member on hostA.
        instance = fake_instance.fake_instance_obj(self.context,
                                                   params={'host': 'hostA'})
        group = objects.InstanceGroup()
        group.name = 'pele'
        group.uuid = uuids.fake
        group.members = [instance.uuid]
        group.policies = [policy]
        return group

    def _get_group_details(self, group, policy=None):
        """Check _get_group_details merges known and group hosts."""
        group_hosts = ['hostB']
        with test.nested(
            mock.patch.object(objects.InstanceGroup, 'get_by_instance_uuid',
                              return_value=group),
            mock.patch.object(objects.InstanceGroup, 'get_hosts',
                              return_value=['hostA']),
        ) as (get_group, get_hosts):
            # Reset the cached filter-support flags so they are
            # re-evaluated for this call.
            scheduler_utils._SUPPORTS_ANTI_AFFINITY = None
            scheduler_utils._SUPPORTS_AFFINITY = None
            group_info = scheduler_utils._get_group_details(
                self.context, 'fake_uuid', group_hosts)
            self.assertEqual(
                (set(['hostA', 'hostB']), [policy], group.members),
                group_info)

    def test_get_group_details(self):
        for policy in ['affinity', 'anti-affinity',
                       'soft-affinity', 'soft-anti-affinity']:
            group = self._create_server_group(policy)
            self._get_group_details(group, policy=policy)

    def test_get_group_details_with_no_instance_uuid(self):
        group_info = scheduler_utils._get_group_details(self.context, None)
        self.assertIsNone(group_info)

    def _get_group_details_with_filter_not_configured(self, policy):
        """_get_group_details must reject policies whose filter/weigher
        is not enabled in configuration."""
        self.flags(enabled_filters=['fake'], group='filter_scheduler')
        self.flags(weight_classes=['fake'], group='filter_scheduler')
        instance = fake_instance.fake_instance_obj(self.context,
                                                   params={'host': 'hostA'})
        group = objects.InstanceGroup()
        group.uuid = uuids.fake
        group.members = [instance.uuid]
        group.policies = [policy]
        with test.nested(
            mock.patch.object(objects.InstanceGroup, 'get_by_instance_uuid',
                              return_value=group),
        ) as (get_group,):
            # Reset all cached support flags before the call.
            scheduler_utils._SUPPORTS_ANTI_AFFINITY = None
            scheduler_utils._SUPPORTS_AFFINITY = None
            scheduler_utils._SUPPORTS_SOFT_AFFINITY = None
            scheduler_utils._SUPPORTS_SOFT_ANTI_AFFINITY = None
            self.assertRaises(exception.UnsupportedPolicyException,
                              scheduler_utils._get_group_details,
                              self.context, uuids.instance)

    def test_get_group_details_with_filter_not_configured(self):
        policies = ['anti-affinity', 'affinity',
                    'soft-affinity', 'soft-anti-affinity']
        for policy in policies:
            self._get_group_details_with_filter_not_configured(policy)

    @mock.patch.object(scheduler_utils, '_get_group_details')
    def test_setup_instance_group_in_filter_properties(self, mock_ggd):
        # Group details returned by _get_group_details must be copied
        # into the filter properties.
        mock_ggd.return_value = scheduler_utils.GroupDetails(
            hosts=set(['hostA', 'hostB']), policies=['policy'],
            members=['instance1'])
        spec = {'instance_properties': {'uuid': uuids.instance}}
        filter_props = {'group_hosts': ['hostC']}
        scheduler_utils.setup_instance_group(self.context, spec, filter_props)
        mock_ggd.assert_called_once_with(self.context, uuids.instance,
                                         ['hostC'])
        expected_filter_props = {'group_updated': True,
                                 'group_hosts': set(['hostA', 'hostB']),
                                 'group_policies': ['policy'],
                                 'group_members': ['instance1']}
        self.assertEqual(expected_filter_props, filter_props)

    @mock.patch.object(scheduler_utils, '_get_group_details')
    def test_setup_instance_group_with_no_group(self, mock_ggd):
        # Without group details the filter properties stay untouched.
        mock_ggd.return_value = None
        spec = {'instance_properties': {'uuid': uuids.instance}}
        filter_props = {'group_hosts': ['hostC']}
        scheduler_utils.setup_instance_group(self.context, spec, filter_props)
        mock_ggd.assert_called_once_with(self.context, uuids.instance,
                                         ['hostC'])
        self.assertNotIn('group_updated', filter_props)
        self.assertNotIn('group_policies', filter_props)
        self.assertEqual(['hostC'], filter_props['group_hosts'])

    @mock.patch.object(scheduler_utils, '_get_group_details')
    def test_setup_instance_group_with_filter_not_configured(self, mock_ggd):
        # Exceptions from _get_group_details must propagate unchanged.
        mock_ggd.side_effect = exception.NoValidHost(reason='whatever')
        spec = {'instance_properties': {'uuid': uuids.instance}}
        filter_props = {'group_hosts': ['hostC']}
        self.assertRaises(exception.NoValidHost,
                          scheduler_utils.setup_instance_group,
                          self.context, spec, filter_props)
|
Wall Street has turned its attention to a new stock market crash warning sign – an impending $1 trillion meltdown in the bond market.
According to estimates from Goldman Sachs Group Inc. (NYSE: GS), bondholders could lose a staggering $1 trillion if Treasury yields suddenly rise by 1%.
That could even surpass the hefty losses realized on mortgage bonds without government backing during the 2008 financial crisis.
"Some investor entities would likely experience significant distress," Goldman analysts wrote last week. "Rising yields should be on the short list of scenarios to be monitored by risk managers."
In the decade-long hunt for yield and safety, money managers and investors have piled into longer-dated securities. And when interest rates rise, which is the U.S. Federal Reserve's aim, bond prices will slip and long-term debt prices will slide.
With a large number of investors now holding long-term debt positions, the potential losses from rising rates have grown dramatically. And even the income investors receive from bonds will do little to shield them from the pain of having principal locked up at lower rates, Goldman Sachs warns.
Goldman still believes there is a 40% chance the Fed will raise rates in July. That is almost double what the bond market projects. But Goldman believes Fed Chair Janet Yellen made it clear this week that the U.S. central bank is intent on raising rates this year.
Speaking Monday at the World Affairs Council in Philadelphia, Yellen gave a largely upbeat assessment of the U.S. economic outlook and said interest rate hikes are coming. Still, Yellen gave little hints as to when the hike will take place.
Yet her comment was enough to send the bond market dropping. Ten-year Treasuries suffered their biggest decline in three weeks following Yellen's speech.
Goldman isn't the only one concerned about the fragile state of the bond market.
Legendary bond investor Bill Gross is also very worried about an implosion in the space. But his concerns stem from the growing use of negative interest rates.
The $10 trillion of negative-yielding global government bonds is a "supernova that will explode one day," Gross said in a tweet Thursday.
The founder of bond giant PIMCO and now a fund manager at Janus, Gross said global yields are at their lowest levels in 500 years of recorded history.
A number of other fund investors have voiced their fears over negative rates and a looming "black swan" event. The black swan theory is a metaphor describing an event that comes as a surprise, has a major effect, and is often inappropriately rationalized after the fact with the benefit of hindsight.
Capital Group, which manages $1.4 trillion, has cautioned that negative interest rates are distorting financial markets and economies and could lead to "potentially dangerous consequences."
DoubleLine's Jeffrey Gundlach recently said negative interest rates "are the stupidest idea I have ever experienced." He warned "the next major event [for markets] will be the moment when central banks in Japan and in Europe give up and cancel the experiment."
Larry Fink of BlackRock warned in his latest investor note that while low borrowing costs give an advantage to many companies and countries, they are a distinct disadvantage to savers. "There has been plenty of discussion about how the extended period of low interest rates has contributed to inflation in asset prices," Fink wrote. "Not nearly enough attention has been paid to the toll these low rates – and now negative rates – are taking on the ability of investors to save and plan for the future."
And if these warning signs lead to a stock market crash in 2016, you don't have to be caught off guard. Here are three ways investors can plan for and protect against a stock market crash.
Short the S&P 500: Take advantage of a pullback in stocks by shorting (betting against) the broader market. You can do this with the ProShares Short S&P 500 ETF (NYSE Arca: SH). This $3.03 billion market cap exchange-traded fund rises when the S&P 500 declines.
Hold cash: In a world awash in negative rates and diminishing currency valuations, the U.S. dollar stands out. Holding cash when stocks retreat means you have the means to buy beaten-down stocks at attractive prices.
|
# Copyright 2012 OpenStack LLC.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import sys
from quantumclient.quantum.v2_0.subnet import CreateSubnet
from quantumclient.quantum.v2_0.subnet import DeleteSubnet
from quantumclient.quantum.v2_0.subnet import ListSubnet
from quantumclient.quantum.v2_0.subnet import ShowSubnet
from quantumclient.quantum.v2_0.subnet import UpdateSubnet
from quantumclient.tests.unit.test_cli20 import CLITestV20Base
from quantumclient.tests.unit.test_cli20 import MyApp
class CLITestV20Subnet(CLITestV20Base):
def test_create_subnet(self):
"""Create subnet: --gateway gateway netid cidr."""
resource = 'subnet'
cmd = CreateSubnet(MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
netid = 'netid'
cidr = 'cidrvalue'
gateway = 'gatewayvalue'
args = ['--gateway', gateway, netid, cidr]
position_names = ['ip_version', 'network_id', 'cidr', 'gateway_ip']
position_values = [4, netid, cidr, gateway]
_str = self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values)
def test_create_subnet_with_no_gateway(self):
"""Create subnet: --no-gateway netid cidr"""
resource = 'subnet'
cmd = CreateSubnet(MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
netid = 'netid'
cidr = 'cidrvalue'
args = ['--no-gateway', netid, cidr]
position_names = ['ip_version', 'network_id', 'cidr', 'gateway_ip']
position_values = [4, netid, cidr, None]
_str = self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values)
def test_create_subnet_with_bad_gateway_option(self):
"""Create sbunet: --no-gateway netid cidr"""
resource = 'subnet'
cmd = CreateSubnet(MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
netid = 'netid'
cidr = 'cidrvalue'
gateway = 'gatewayvalue'
args = ['--gateway', gateway, '--no-gateway', netid, cidr]
position_names = ['ip_version', 'network_id', 'cidr', 'gateway_ip']
position_values = [4, netid, cidr, None]
try:
_str = self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values)
except:
return
self.fail('No exception for bad gateway option')
def test_create_subnet_tenant(self):
"""Create subnet: --tenant_id tenantid netid cidr."""
resource = 'subnet'
cmd = CreateSubnet(MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
netid = 'netid'
cidr = 'prefixvalue'
args = ['--tenant_id', 'tenantid', netid, cidr]
position_names = ['ip_version', 'network_id', 'cidr']
position_values = [4, netid, cidr]
_str = self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values,
tenant_id='tenantid')
def test_create_subnet_tags(self):
    """Create subnet: netid cidr --tags a b."""
    # Tags ride along as a keyword argument next to the positional fields.
    cmd = CreateSubnet(MyApp(sys.stdout), None)
    net_id, cidr_val = 'netid', 'prefixvalue'
    cli_args = [net_id, cidr_val, '--tags', 'a', 'b']
    self._test_create_resource(
        'subnet', cmd, 'myname', 'myid', cli_args,
        ['ip_version', 'network_id', 'cidr'],
        [4, net_id, cidr_val],
        tags=['a', 'b'])
def test_create_subnet_allocation_pool(self):
    """Create subnet: --tenant_id tenantid <allocation_pool> netid cidr.

    The <allocation_pool> is --allocation_pool start=1.1.1.10,end=1.1.1.20
    """
    cmd = CreateSubnet(MyApp(sys.stdout), None)
    net_id, cidr_val = 'netid', 'prefixvalue'
    # A single start/end pair must be parsed into a one-element pool list.
    expected_pool = [{'start': '1.1.1.10', 'end': '1.1.1.20'}]
    cli_args = ['--tenant_id', 'tenantid',
                '--allocation_pool', 'start=1.1.1.10,end=1.1.1.20',
                net_id, cidr_val]
    self._test_create_resource(
        'subnet', cmd, 'myname', 'myid', cli_args,
        ['ip_version', 'allocation_pools', 'network_id', 'cidr'],
        [4, expected_pool, net_id, cidr_val],
        tenant_id='tenantid')
def test_create_subnet_allocation_pools(self):
    """Create subnet: --tenant-id tenantid <pools> netid cidr.

    The <pools> are --allocation_pool start=1.1.1.10,end=1.1.1.20 and
    --allocation_pool start=1.1.1.30,end=1.1.1.40
    """
    cmd = CreateSubnet(MyApp(sys.stdout), None)
    net_id, cidr_val = 'netid', 'prefixvalue'
    # Repeating the option must accumulate into a list of pool dicts.
    expected_pools = [{'start': '1.1.1.10', 'end': '1.1.1.20'},
                      {'start': '1.1.1.30', 'end': '1.1.1.40'}]
    cli_args = ['--tenant_id', 'tenantid',
                '--allocation_pool', 'start=1.1.1.10,end=1.1.1.20',
                '--allocation_pool', 'start=1.1.1.30,end=1.1.1.40',
                net_id, cidr_val]
    self._test_create_resource(
        'subnet', cmd, 'myname', 'myid', cli_args,
        ['ip_version', 'allocation_pools', 'network_id', 'cidr'],
        [4, expected_pools, net_id, cidr_val],
        tenant_id='tenantid')
def test_create_subnet_host_route(self):
    """Create subnet: --tenant_id tenantid <host_route> netid cidr.

    The <host_route> is
    --host-route destination=172.16.1.0/24,nexthop=1.1.1.20
    """
    cmd = CreateSubnet(MyApp(sys.stdout), None)
    net_id, cidr_val = 'netid', 'prefixvalue'
    expected_routes = [{'destination': '172.16.1.0/24',
                        'nexthop': '1.1.1.20'}]
    cli_args = ['--tenant_id', 'tenantid',
                '--host-route', 'destination=172.16.1.0/24,nexthop=1.1.1.20',
                net_id, cidr_val]
    self._test_create_resource(
        'subnet', cmd, 'myname', 'myid', cli_args,
        ['ip_version', 'host_routes', 'network_id', 'cidr'],
        [4, expected_routes, net_id, cidr_val],
        tenant_id='tenantid')
def test_create_subnet_host_routes(self):
    """Create subnet: --tenant-id tenantid <host_routes> netid cidr.

    The <host_routes> are
    --host-route destination=172.16.1.0/24,nexthop=1.1.1.20 and
    --host-route destination=172.17.7.0/24,nexthop=1.1.1.40
    """
    cmd = CreateSubnet(MyApp(sys.stdout), None)
    net_id, cidr_val = 'netid', 'prefixvalue'
    # Two --host-route options must accumulate into one list of routes.
    expected_routes = [{'destination': '172.16.1.0/24',
                        'nexthop': '1.1.1.20'},
                       {'destination': '172.17.7.0/24',
                        'nexthop': '1.1.1.40'}]
    cli_args = ['--tenant_id', 'tenantid',
                '--host-route', 'destination=172.16.1.0/24,nexthop=1.1.1.20',
                '--host-route', 'destination=172.17.7.0/24,nexthop=1.1.1.40',
                net_id, cidr_val]
    self._test_create_resource(
        'subnet', cmd, 'myname', 'myid', cli_args,
        ['ip_version', 'host_routes', 'network_id', 'cidr'],
        [4, expected_routes, net_id, cidr_val],
        tenant_id='tenantid')
def test_create_subnet_dns_nameservers(self):
    """Create subnet: --tenant-id tenantid <dns-nameservers> netid cidr.

    The <dns-nameservers> are
    --dns-nameserver 1.1.1.20 and --dns-nameserver 1.1.1.40
    """
    cmd = CreateSubnet(MyApp(sys.stdout), None)
    net_id, cidr_val = 'netid', 'prefixvalue'
    # Repeated --dns-nameserver values must collect into a flat list.
    expected_servers = ['1.1.1.20', '1.1.1.40']
    cli_args = ['--tenant_id', 'tenantid',
                '--dns-nameserver', '1.1.1.20',
                '--dns-nameserver', '1.1.1.40',
                net_id, cidr_val]
    self._test_create_resource(
        'subnet', cmd, 'myname', 'myid', cli_args,
        ['ip_version', 'dns_nameservers', 'network_id', 'cidr'],
        [4, expected_servers, net_id, cidr_val],
        tenant_id='tenantid')
def test_create_subnet_with_disable_dhcp(self):
    """Create subnet: --tenant-id tenantid --disable-dhcp netid cidr."""
    # --disable-dhcp should map to enable_dhcp=False in the request body.
    cmd = CreateSubnet(MyApp(sys.stdout), None)
    net_id, cidr_val = 'netid', 'prefixvalue'
    cli_args = ['--tenant_id', 'tenantid',
                '--disable-dhcp',
                net_id, cidr_val]
    self._test_create_resource(
        'subnet', cmd, 'myname', 'myid', cli_args,
        ['ip_version', 'enable_dhcp', 'network_id', 'cidr'],
        [4, False, net_id, cidr_val],
        tenant_id='tenantid')
def test_create_subnet_merge_single_plurar(self):
    """Create subnet: mix of singular and plural allocation-pool options.

    A singular --allocation-pool and a plural --allocation-pools must be
    merged into one allocation_pools list.
    """
    cmd = CreateSubnet(MyApp(sys.stdout), None)
    net_id, cidr_val = 'netid', 'prefixvalue'
    cli_args = ['--tenant_id', 'tenantid',
                '--allocation-pool', 'start=1.1.1.10,end=1.1.1.20',
                net_id, cidr_val,
                '--allocation-pools', 'list=true', 'type=dict',
                'start=1.1.1.30,end=1.1.1.40']
    merged_pools = [{'start': '1.1.1.10', 'end': '1.1.1.20'},
                    {'start': '1.1.1.30', 'end': '1.1.1.40'}]
    self._test_create_resource(
        'subnet', cmd, 'myname', 'myid', cli_args,
        ['ip_version', 'allocation_pools', 'network_id', 'cidr'],
        [4, merged_pools, net_id, cidr_val],
        tenant_id='tenantid')
def test_create_subnet_merge_plurar(self):
    """Create subnet: plural --allocation-pools option alone.

    The list=true/type=dict form must produce a one-element pool list.
    """
    cmd = CreateSubnet(MyApp(sys.stdout), None)
    net_id, cidr_val = 'netid', 'prefixvalue'
    cli_args = ['--tenant_id', 'tenantid',
                net_id, cidr_val,
                '--allocation-pools', 'list=true', 'type=dict',
                'start=1.1.1.30,end=1.1.1.40']
    expected_pools = [{'start': '1.1.1.30', 'end': '1.1.1.40'}]
    self._test_create_resource(
        'subnet', cmd, 'myname', 'myid', cli_args,
        ['ip_version', 'allocation_pools', 'network_id', 'cidr'],
        [4, expected_pools, net_id, cidr_val],
        tenant_id='tenantid')
def test_create_subnet_merge_single_single(self):
    """Create subnet: two singular --allocation-pool options.

    Repeating the singular form must merge both entries into one list.
    """
    cmd = CreateSubnet(MyApp(sys.stdout), None)
    net_id, cidr_val = 'netid', 'prefixvalue'
    cli_args = ['--tenant_id', 'tenantid',
                '--allocation-pool', 'start=1.1.1.10,end=1.1.1.20',
                net_id, cidr_val,
                '--allocation-pool',
                'start=1.1.1.30,end=1.1.1.40']
    merged_pools = [{'start': '1.1.1.10', 'end': '1.1.1.20'},
                    {'start': '1.1.1.30', 'end': '1.1.1.40'}]
    self._test_create_resource(
        'subnet', cmd, 'myname', 'myid', cli_args,
        ['ip_version', 'allocation_pools', 'network_id', 'cidr'],
        [4, merged_pools, net_id, cidr_val],
        tenant_id='tenantid')
def test_list_subnets_detail(self):
    """List subnets: -D."""
    list_cmd = ListSubnet(MyApp(sys.stdout), None)
    self._test_list_resources('subnets', list_cmd, True)
def test_list_subnets_tags(self):
    """List subnets: -- --tags a b."""
    list_cmd = ListSubnet(MyApp(sys.stdout), None)
    self._test_list_resources('subnets', list_cmd, tags=['a', 'b'])
def test_list_subnets_detail_tags(self):
    """List subnets: -D -- --tags a b."""
    list_cmd = ListSubnet(MyApp(sys.stdout), None)
    self._test_list_resources('subnets', list_cmd,
                              detail=True, tags=['a', 'b'])
def test_list_subnets_fields(self):
    """List subnets: --fields a --fields b -- --fields c d."""
    list_cmd = ListSubnet(MyApp(sys.stdout), None)
    self._test_list_resources('subnets', list_cmd,
                              fields_1=['a', 'b'],
                              fields_2=['c', 'd'])
def test_update_subnet(self):
    """Update subnet: myid --name myname --tags a b."""
    cmd = UpdateSubnet(MyApp(sys.stdout), None)
    cli_args = ['myid', '--name', 'myname', '--tags', 'a', 'b']
    expected_body = {'name': 'myname', 'tags': ['a', 'b']}
    self._test_update_resource('subnet', cmd, 'myid', cli_args,
                               expected_body)
def test_show_subnet(self):
    """Show subnet: --fields id --fields name myid."""
    cmd = ShowSubnet(MyApp(sys.stdout), None)
    cli_args = ['--fields', 'id', '--fields', 'name', self.test_id]
    self._test_show_resource('subnet', cmd, self.test_id, cli_args,
                             ['id', 'name'])
def test_delete_subnet(self):
    """Delete subnet: subnetid."""
    subnet_id = 'myid'
    cmd = DeleteSubnet(MyApp(sys.stdout), None)
    self._test_delete_resource('subnet', cmd, subnet_id, [subnet_id])
|
Strategic Resources is an international, full-service executive search firm specializing in the recruitment of top performing, upper and mid-level management professionals. We are a boutique style, high-touch, head hunting organization using our network of contacts and the newest technologies to identify and recruit the best of the best talent for our clients. We are celebrating our 20th year of successfully bringing together people, companies and opportunities, always with Integrity, Ethics and Professional Excellence.
using our services and expertise to identify and recruit the best of the best talent for the industry’s top performing companies.
recognized for our professionalism and confidential access to exceptional candidates.
supported by Ethics, Leadership and Professional Excellence.
Our Practice Leaders are specialists, differentiated not only by their industry knowledge, but trained in executive search with an ability to network with and attract candidates who are in the top 10-15 percent of the talent pool. Because these individuals are successful, generally happy and well compensated, effectively recruiting them requires confidentiality, excellent communication and a unique value proposition. Each of our Practice Leaders is trained to deliver on all accounts.
|
# -*- coding:utf-8 -*-
# Adopted from https://github.com/yzhangcs/parser
# MIT License
#
# Copyright (c) 2020 Yu Zhang
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import torch
from torch import nn
from hanlp.components.parsers.constituency.treecrf import CRFConstituency
from hanlp.components.parsers.alg import cky
from hanlp.components.parsers.biaffine.biaffine import Biaffine
from hanlp.components.parsers.biaffine.mlp import MLP
class CRFConstituencyDecoder(nn.Module):
    r"""Decoder of the CRF Constituency Parser,
    also called FANCY (abbr. of Fast and Accurate Neural Crf constituencY) Parser.

    References:
        - Yu Zhang, Houquan Zhou and Zhenghua Li. 2020.
          `Fast and Accurate Neural CRF Constituency Parsing`_.

    Args:
        n_labels (int):
            The number of constituent labels.
        n_hidden (int):
            The size of the encoder hidden states fed to this decoder. Default: 400.
        n_mlp_span (int):
            Span MLP size. Default: 500.
        n_mlp_label (int):
            Label MLP size. Default: 100.
        mlp_dropout (float):
            The dropout ratio of MLP layers. Default: .33.
        **kwargs:
            Extra keyword arguments are accepted for configuration
            compatibility and ignored by this decoder.

    .. _Fast and Accurate Neural CRF Constituency Parsing:
        https://www.ijcai.org/Proceedings/2020/560/
    """

    def __init__(self,
                 n_labels,
                 n_hidden=400,
                 n_mlp_span=500,
                 n_mlp_label=100,
                 mlp_dropout=.33,
                 **kwargs
                 ):
        super().__init__()
        # Left/right boundary projections for span scoring followed by
        # left/right projections for label scoring.  NOTE: the submodules
        # are registered in this exact order on purpose, so parameter
        # ordering (and therefore seeded initialization) stays stable.
        self.mlp_span_l = MLP(n_in=n_hidden, n_out=n_mlp_span, dropout=mlp_dropout)
        self.mlp_span_r = MLP(n_in=n_hidden, n_out=n_mlp_span, dropout=mlp_dropout)
        self.mlp_label_l = MLP(n_in=n_hidden, n_out=n_mlp_label, dropout=mlp_dropout)
        self.mlp_label_r = MLP(n_in=n_hidden, n_out=n_mlp_label, dropout=mlp_dropout)
        # Biaffine scorers: a single score per span, and one score per label.
        self.span_attn = Biaffine(n_in=n_mlp_span, bias_x=True, bias_y=False)
        self.label_attn = Biaffine(n_in=n_mlp_label, n_out=n_labels, bias_x=True, bias_y=True)
        self.crf = CRFConstituency()
        self.criterion = nn.CrossEntropyLoss()

    def forward(self, x, **kwargs):
        r"""
        Args:
            x (~torch.FloatTensor): ``[batch_size, seq_len, hidden_dim]``.
                Hidden states from encoder.

        Returns:
            ~torch.Tensor, ~torch.Tensor:
                The first tensor of shape ``[batch_size, seq_len, seq_len]`` holds scores of all possible spans.
                The second of shape ``[batch_size, seq_len, seq_len, n_labels]`` holds
                scores of all possible labels on each span.
        """
        # Split the (assumed bidirectional) hidden states in half along the
        # feature dim and re-pair them so position i holds a fencepost
        # representation between tokens i and i+1.
        fwd_half, bwd_half = x.chunk(2, -1)
        fencepost = torch.cat((fwd_half[:, :-1], bwd_half[:, 1:]), -1)
        # Project the fencepost states through the four MLP heads.
        left_span = self.mlp_span_l(fencepost)
        right_span = self.mlp_span_r(fencepost)
        left_label = self.mlp_label_l(fencepost)
        right_label = self.mlp_label_r(fencepost)
        # [batch_size, seq_len, seq_len]
        s_span = self.span_attn(left_span, right_span)
        # [batch_size, seq_len, seq_len, n_labels]
        s_label = self.label_attn(left_label, right_label).permute(0, 2, 3, 1)
        return s_span, s_label

    def loss(self, s_span, s_label, charts, mask, mbr=True):
        r"""
        Args:
            s_span (~torch.Tensor): ``[batch_size, seq_len, seq_len]``.
                Scores of all spans
            s_label (~torch.Tensor): ``[batch_size, seq_len, seq_len, n_labels]``.
                Scores of all labels on each span.
            charts (~torch.LongTensor): ``[batch_size, seq_len, seq_len]``.
                The tensor of gold-standard labels, in which positions without labels are filled with -1.
            mask (~torch.BoolTensor): ``[batch_size, seq_len, seq_len]``.
                The mask for covering the unpadded tokens in each chart.
            mbr (bool):
                If ``True``, returns marginals for MBR decoding. Default: ``True``.

        Returns:
            ~torch.Tensor, ~torch.Tensor:
                The training loss and
                original span scores of shape ``[batch_size, seq_len, seq_len]`` if ``mbr=False``, or marginals otherwise.
        """
        # Gold spans are the chart cells that carry a label (>= 0) and are
        # not padding.
        gold_span_mask = charts.ge(0) & mask
        span_loss, span_probs = self.crf(s_span, mask, gold_span_mask, mbr)
        # Label loss is cross-entropy over the gold spans only.
        label_loss = self.criterion(s_label[gold_span_mask], charts[gold_span_mask])
        return span_loss + label_loss, span_probs

    def decode(self, s_span, s_label, mask):
        r"""
        Args:
            s_span (~torch.Tensor): ``[batch_size, seq_len, seq_len]``.
                Scores of all spans.
            s_label (~torch.Tensor): ``[batch_size, seq_len, seq_len, n_labels]``.
                Scores of all labels on each span.
            mask (~torch.BoolTensor): ``[batch_size, seq_len, seq_len]``.
                The mask for covering the unpadded tokens in each chart.

        Returns:
            list[list[tuple]]:
                Sequences of factorized labeled trees traversed in pre-order.
        """
        # CKY gives the best span structure; each span then takes its
        # highest-scoring label.
        span_preds = cky(s_span, mask)
        label_preds = s_label.argmax(-1).tolist()
        trees = []
        for spans, labels in zip(span_preds, label_preds):
            trees.append([(i, j, labels[i][j]) for i, j in spans])
        return trees
class CRFConstituencyModel(nn.Module):
    """Thin wrapper chaining an encoder with a :class:`CRFConstituencyDecoder`."""

    def __init__(self, encoder, decoder: CRFConstituencyDecoder) -> None:
        super().__init__()
        self.encoder = encoder
        self.decoder = decoder

    def forward(self, batch):
        r"""
        Args:
            batch (~dict):
                Batch of input data.

        Returns:
            ~torch.Tensor, ~torch.Tensor:
                The first tensor of shape ``[batch_size, seq_len, seq_len]`` holds scores of all possible spans.
                The second of shape ``[batch_size, seq_len, seq_len, n_labels]`` holds
                scores of all possible labels on each span.
        """
        hidden = self.encoder(batch)
        return self.decoder(hidden)
|
Mysoline (primidone) is an anti-seizure medication that is approved for prevention of seizures in people who have epilepsy. It is also commonly used off-label for treatment of a condition called benign essential tremor.
Mysoline is in the barbiturate class of medications. It becomes metabolized to phenobarbital, a well-known barbiturate. This class of medications suppresses electrical activity in the brain, specifically the sodium channels, which can diminish the erratic electrical activity associated with seizures. Barbiturates are also known to interact with the inhibitory neurotransmitter GABA, slowing down electrical activity in the brain.
The mechanism by which Mysoline helps with tremor is likely due to the same effects on electrical activity and GABA.
This mechanism of action is responsible for the therapeutic effects, as well as the side effects of the medication.
Mysoline is an anticonvulsant (an anti-seizure medication) indicated for generalized tonic-clonic seizures as well as complex partial seizures.
Generalized seizures are seizures that affect the whole brain, typically resulting in impairment of or loss of consciousness.
Tonic-clonic seizures are seizures characterized by shaking and jerking of the body.
Generalized tonic-clonic seizures are characterized by jerking movements with impairment of consciousness.
Partial seizures are seizures that only impact brain activity in a limited region of the brain, and complex seizures affect consciousness. Complex partial seizures are characterized by movements that typically begin in one part of the body and involve impairment of consciousness.
Because Mysoline is indicated for all of these types of seizures, it can be used for a variety of types of epilepsy. Mysoline can be used as monotherapy (as the only anticonvulsant to control seizures) or it can be used in combination with one or more other anticonvulsants.
Mysoline is indicated for adults and children of all ages. It is considered a maintenance medication that you would have to take on a regular basis for the prevention of seizures, and it is not generally used to stop seizures in an emergency situation.
Mysoline is often used to reduce tremors in people who have a condition called benign essential tremor. Benign essential tremor is a condition in which people experience frequent tremors, typically of the hands and/or mouth. Tremors of the mouth, throat or larynx (voice box) can manifest as a shaky voice.
It is not the same as Parkinson’s disease. Parkinson’s disease is characterized by a tremor that occurs at rest, as well as shuffling when walking, and an expressionless face that is described as a masked face. Parkinson’s disease generally worsens over time, is associated with dopamine deficiency, and is treated with dopaminergic medications.
Benign essential tremor, on the other hand, is a fine motor tremor that is worse with action and often worsens with anxiety. It is not typically associated with the other characteristics of Parkinson’s disease. The two conditions are treated with different medications. The cause of essential tremor may be genetic, but often it is not known why someone develops the condition.
Mysoline comes in oral (by mouth) form and generally should be taken with food. It normally comes as a tablet, but it can be given as an oral suspension liquid form for young babies or for people who cannot swallow pills.
The dose of Mysoline is higher when it is used for seizure control than when it is used for control of tremors in benign essential tremor.
Mysoline comes in 50 mg and 250 mg tablets. The usual dose for an adult for seizure prevention is 250 mg, three times a day or four times per day, but lower doses may also be effective, especially if Mysoline is taken along with another anticonvulsant. It should be started at a dose of about 100 mg or 125 mg per day and gradually increased to the target dose over a period of approximately one week.
For children, the target dose for seizure prevention is 10-25 mg/kg/day, and, as with adults, it is usually started at a lower dose and gradually increased over a period of approximately a week.
When you take Mysoline for seizure control, it is important to maintain a steady state of the medication in your body, as variations in the medication concentration can make seizures more likely.
When Mysoline is used for control of benign essential tremors, the recommended dose is lower than it is for seizures, typically 100 mg/ day, and is generally taken as 50 mg twice per day. Benign essential tremor is a condition that normally affects older adults and not children.
While maintaining a steady state of the medication is not as vital for treatment of tremors as it is for seizures, abrupt withdrawal is dangerous in both situations because it can trigger a seizure.
If you are not happy with the effects of the medication, either because it is not controlling your symptoms or because of intolerable side effects, then you need to discuss your concerns with your doctor instead of stopping the medication on your own. If you have epilepsy, your doctor may need to start another anti-seizure medication as you slowly decrease your Mysoline dose.
If you have tremors, you will probably gradually discontinue the Mysoline and may begin another medication to control the tremors after you stop taking Mysoline completely.
There are a number of side effects of Mysoline, including drowsiness, dizziness, and nausea. If you experience these side effects, you should tell your doctor.
People who have certain conditions cannot take Mysoline. These conditions include porphyria, and anemia, which are red blood cell disorders, and disorders of platelets, which are cells that the body uses for blood clotting.
As with many other anticonvulsants, you should not drink alcohol if you take Mysoline. Alcohol can interfere with seizure control, and taking Mysoline and alcohol in combination can make you drowsy, and may dangerously increase your chances of loss of consciousness.
Mysoline can cause birth defects if taken during pregnancy. If you are pregnant or planning to become pregnant, your doctor will carefully consider your anticonvulsant medications, because seizures during pregnancy are also dangerous for the mother and the baby.
Mysoline is an anti-seizure medication and is also used to control tremors for people who have benign essential tremor. Do not be concerned if your doctor prescribes Mysoline for your tremors—this does not mean that you have seizures or will get seizures from the medication.
If you are taking Mysoline for epilepsy, rest assured that epilepsy is a treatable condition, and that medication can control your seizures. In the rare instance that your epilepsy cannot be controlled with medication, you might need to have epilepsy surgery, which is a safe and effective option.
Ondo W. Essential Tremor: What We Can Learn from Current Pharmacotherapy. Tremor Other Hyperkinet Mov (N Y). 2016 Mar 4;6:356. doi: 10.7916/D8K35TC3. eCollection 2016.
|
#!/bin/python
# coding: utf-8
"""
Python API to retrieve Caller ID from phone number using OpenCNAM api.
"""
import requests
from bs4 import BeautifulSoup
import json
import phonenumbers
class OpenCNAMAPI(object):
    """OpenCNAMAPI Main Handler.

    Singleton wrapper around the OpenCNAM v2 REST API that resolves a
    phone number to its Caller ID name.
    """

    # SECURITY: these API credentials were previously hard-coded inside the
    # request URL.  They are kept as class attributes only for backward
    # compatibility -- move them to environment variables or a config file
    # and rotate the exposed keys.
    _ACCOUNT_SID = 'ACc8aa48a044604425ba66940a2f6bdb54'
    _AUTH_TOKEN = 'AUfb0f7a1fd66f489c9f9e6d22426ccaa9'

    _instance = None   # singleton instance shared by all constructions
    _verbose = False   # class-wide verbose flag

    def __init__(self, arg=None):
        pass

    def __new__(cls, *args, **kwargs):
        """Create the singleton on first use, then keep returning it."""
        if not cls._instance:
            # object.__new__ must not be given the constructor arguments:
            # on Python 3 forwarding *args/**kwargs raises TypeError.
            cls._instance = super(OpenCNAMAPI, cls).__new__(cls)
        # Use .get() so a config dict without a 'verbose' key no longer
        # raises KeyError.
        if args and args[0] and args[0].get('verbose'):
            cls._verbose = True
        return cls._instance

    def display_message(self, s):
        # Emit diagnostics only when verbose mode was requested.
        if self._verbose:
            print('[verbose] %s' % s)

    def format_number(self, phone_number):
        """Return *phone_number* normalized to E.164 (US default region)."""
        parsed_number = phonenumbers.parse(phone_number, 'US')
        return phonenumbers.format_number(
            parsed_number, phonenumbers.PhoneNumberFormat.E164)

    def get(self, phone_number):
        """Look up Caller ID for *phone_number*.

        Returns a dict with keys 'dataJson' (raw JSON string),
        'full_name' and 'phone_number'.
        """
        formatted_number = self.format_number(phone_number)
        url = ('https://%s:%s@api.opencnam.com/v2/phone/%s?format=json'
               % (self._ACCOUNT_SID, self._AUTH_TOKEN, formatted_number))
        req = requests.get(url)
        # The endpoint returns JSON; decode it directly instead of routing
        # the payload through BeautifulSoup, which is an HTML parser and
        # can mangle non-HTML text.
        json_result = req.json()
        return {'dataJson': json.dumps(json_result),
                'full_name': json_result['name'],
                'phone_number': json_result['number']}
|
Pours a medium brown color producing a small, fizzy, tan colored head with short duration. Body clarity is brilliant with very little carbonation, and has settled to a slightly lighter shade of brown. Head has completely diminished leaving wispy strings of lacing behind. A yeasty aroma is detected at first, then the beer mellows out a bit and the malty, sweet aroma is dominant. It smells similar to the sweetness of a Scotch Ale with a molasses presence. Initial taste is lightly bittersweet, but the sweetness takes over towards the finish with a gentle alcohol backbone. Light bodied, it starts sweet and finishes sweet with a watery texture and a smooth mouthfeel. The palate is left slightly oily and, as time goes on, slightly dry. Additionally, there is a slight fruity note at the end.
Navigator is crisp and very easy to drink. The hop presence is not detectable, but the malty/fruity presence is appreciated and makes this doppelbock very tasty.
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2013 Dariusz Suchojad <dsuch at zato.io>
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Bunch
from bunch import Bunch
# Zato
from zato.common import zato_namespace
from zato.common.test import rand_bool, rand_int, rand_string, ServiceTestCase
from zato.server.service import Boolean
from zato.server.service.internal.outgoing.ftp import GetList, Create, Edit, Delete, ChangePassword
##############################################################################
class GetListTestCase(ServiceTestCase):
    """SimpleIO contract tests for zato.outgoing.ftp.get-list."""

    def setUp(self):
        self.service_class = GetList
        self.sio = self.service_class.SimpleIO

    def get_request_data(self):
        return {'cluster_id': rand_int()}

    def get_response_data(self):
        return Bunch({
            'id': rand_int(),
            'name': rand_string(),
            'is_active': rand_bool(),
            'host': rand_string(),
            'port': rand_int(),
            'user': rand_string(),
            'acct': rand_string(),
            'timeout': rand_int(),
            'dircache': rand_bool(),
        })

    def test_sio(self):
        sio = self.sio
        self.assertEquals(sio.request_elem, 'zato_outgoing_ftp_get_list_request')
        self.assertEquals(sio.response_elem, 'zato_outgoing_ftp_get_list_response')
        self.assertEquals(sio.input_required, ('cluster_id',))
        self.assertEquals(sio.output_required, ('id', 'name', 'is_active', 'host', 'port'))
        self.assertEquals(sio.output_optional,
                          ('user', 'acct', 'timeout', self.wrap_force_type(Boolean('dircache'))))
        self.assertEquals(sio.namespace, zato_namespace)
        # GetList declares no optional input at all.
        self.assertRaises(AttributeError, getattr, sio, 'input_optional')

    def test_impl(self):
        self.assertEquals(self.service_class.get_name(), 'zato.outgoing.ftp.get-list')
##############################################################################
class CreateTestCase(ServiceTestCase):
    """SimpleIO contract tests for zato.outgoing.ftp.create."""

    def setUp(self):
        self.service_class = Create
        self.sio = self.service_class.SimpleIO

    def get_request_data(self):
        return {'cluster_id':rand_int(), 'name':rand_string(), 'is_active':rand_bool(), 'host':rand_string(),
            'port':rand_int(),'dircache':rand_bool(), 'user':rand_string(), 'acct':rand_string(), 'timeout':rand_int()}

    def get_response_data(self):
        # BUG FIX: previously returned Bunch({'id':self.id, 'name':self.name}),
        # but the test case defines no such attributes, so building the
        # response data raised AttributeError.  Use random values, matching
        # the convention of EditTestCase.get_response_data.
        return Bunch({'id':rand_int(), 'name':rand_string()})

    def test_sio(self):
        self.assertEquals(self.sio.request_elem, 'zato_outgoing_ftp_create_request')
        self.assertEquals(self.sio.response_elem, 'zato_outgoing_ftp_create_response')
        self.assertEquals(self.sio.input_required, ('cluster_id', 'name', 'is_active', 'host', 'port', self.wrap_force_type(Boolean('dircache'))))
        self.assertEquals(self.sio.input_optional, ('user', 'acct', 'timeout'))
        self.assertEquals(self.sio.output_required, ('id', 'name'))
        self.assertEquals(self.sio.namespace, zato_namespace)
        self.assertRaises(AttributeError, getattr, self.sio, 'output_optional')
        self.assertRaises(AttributeError, getattr, self.sio, 'output_repeated')

    def test_impl(self):
        self.assertEquals(self.service_class.get_name(), 'zato.outgoing.ftp.create')
##############################################################################
class EditTestCase(ServiceTestCase):
    """SimpleIO contract tests for zato.outgoing.ftp.edit."""

    def setUp(self):
        self.service_class = Edit
        self.sio = self.service_class.SimpleIO

    def get_request_data(self):
        return {
            'id': rand_int(),
            'cluster_id': rand_int(),
            'name': rand_string(),
            'is_active': rand_bool(),
            'host': rand_string(),
            'port': rand_int(),
            'dircache': rand_bool(),
            'user': rand_string(),
            'acct': rand_string(),
            'timeout': rand_int(),
        }

    def get_response_data(self):
        return Bunch({'id': rand_int(), 'name': rand_string()})

    def test_sio(self):
        sio = self.sio
        self.assertEquals(sio.request_elem, 'zato_outgoing_ftp_edit_request')
        self.assertEquals(sio.response_elem, 'zato_outgoing_ftp_edit_response')
        self.assertEquals(sio.input_required,
                          ('id', 'cluster_id', 'name', 'is_active', 'host', 'port',
                           self.wrap_force_type(Boolean('dircache'))))
        self.assertEquals(sio.input_optional, ('user', 'acct', 'timeout'))
        self.assertEquals(sio.output_required, ('id', 'name'))
        self.assertEquals(sio.namespace, zato_namespace)
        # Edit declares no optional or repeated output.
        self.assertRaises(AttributeError, getattr, sio, 'output_optional')
        self.assertRaises(AttributeError, getattr, sio, 'output_repeated')

    def test_impl(self):
        self.assertEquals(self.service_class.get_name(), 'zato.outgoing.ftp.edit')
##############################################################################
class DeleteTestCase(ServiceTestCase):
    """SimpleIO contract tests for zato.outgoing.ftp.delete."""

    def setUp(self):
        self.service_class = Delete
        self.sio = self.service_class.SimpleIO

    def get_request_data(self):
        return {'id': rand_int()}

    def get_response_data(self):
        # Delete produces an empty response body.
        return Bunch()

    def test_sio(self):
        sio = self.sio
        self.assertEquals(sio.request_elem, 'zato_outgoing_ftp_delete_request')
        self.assertEquals(sio.response_elem, 'zato_outgoing_ftp_delete_response')
        self.assertEquals(sio.input_required, ('id',))
        self.assertEquals(sio.namespace, zato_namespace)
        # Only the required 'id' input exists; everything else is undeclared.
        for attr in ('input_optional', 'output_required',
                     'output_optional', 'output_repeated'):
            self.assertRaises(AttributeError, getattr, sio, attr)

    def test_impl(self):
        self.assertEquals(self.service_class.get_name(), 'zato.outgoing.ftp.delete')
##############################################################################
class ChangePasswordTestCase(ServiceTestCase):
    """SimpleIO contract tests for zato.outgoing.ftp.change-password."""

    def setUp(self):
        self.service_class = ChangePassword
        self.sio = self.service_class.SimpleIO

    def get_request_data(self):
        return {'id': rand_int(),
                'password1': rand_string(),
                'password2': rand_string()}

    def get_response_data(self):
        # ChangePassword produces an empty response body.
        return Bunch()

    def test_sio(self):
        sio = self.sio
        self.assertEquals(sio.request_elem, 'zato_outgoing_ftp_change_password_request')
        self.assertEquals(sio.response_elem, 'zato_outgoing_ftp_change_password_response')
        self.assertEquals(sio.input_required, ('id', 'password1', 'password2'))
        self.assertEquals(sio.namespace, zato_namespace)
        # Beyond the three required inputs, no SIO attributes are declared.
        for attr in ('input_optional', 'output_required',
                     'output_optional', 'output_repeated'):
            self.assertRaises(AttributeError, getattr, sio, attr)

    def test_impl(self):
        self.assertEquals(self.service_class.get_name(), 'zato.outgoing.ftp.change-password')
|
I am a dog trainer myself and have read several of Cesar Millan's books, watched most of his DVDs and I am a regular viewer of the Dog Whisperer TV Show. I have seen several other dog shows on TV as well but prefer Cesar Millan as he teaches us about the true nature of dogs and how to restore their natural balance. His insights have made me better at what I do and have helped me develop a better understanding of the differences between dog rehabilitation and dog training; two concepts many people get confused. His approach of working with nature is in my experience a much better way to help dogs and their owners. He draws his insights from experience with and growing up around dogs. That he doesn't have any formal degrees, which some criticize, works in his favor in my view. He is not constrained by one particular paradigm and doesn't try to reinvent or improve nature like so many dog trainers or behaviorists with degrees. I do appreciate real-life experience over book knowledge when it comes to dogs. Cesar Millan 'gets' dogs and many bookworms just don't. Don't get me wrong. There are many great books on dog behavior worth reading but they don't substitute for 20+ years of hands-on experience, as in Millan's case.
Cesar Millan returns dogs to a calm, stable state of mind by applying nature's principles. Dogs by nature are calm, balanced animals. Only when they live with people do they develop all these issues you see and hear about so much, because most people don't understand how to effectively communicate with their dogs and what their own state of mind does to their companions. Cesar Millan helps them understand how they are affecting their dogs so they can change and, as a result, improve their dog's state of mind and behavior.
I often hear Cesar Millan's critics, praise Victoria Stillwell and I would at some point like to see a compelling argument for Victoria Stillwell's approach of 'positive reinforcement only'. In one of Stilwell's 2009 episodes I saw her advocating—together with a vet—that a particular dog with separation anxiety should be put on anti-anxiety medication. In my view that is true animal cruelty and a complete humanization of an animal that is really not to its benefit. Anyone who thinks that it is a 'positive' approach to drugging a dog your methods don't work on, should really take a look at the Cesar Millan DVD 'Common Canine Misbehaviors', where he shows how to address separation anxiety with calm-assertive leadership. He teaches the owners how to bring their dog back to balance and resolve this issue and he doesn't touch the dog even once. Same issue, different methods, which is healthier for the dog?
Also, in the episode 'Untamed and Untrained' from 2008, Victoria Stillwell diagnosed two dogs—Olivia and Sophia—as never being able to be together in one room, alone, off-leash, as one was attacking the other. She also spoke of the dogs being 'emotional' in each other's presence. I have to say, she is not a very impressive trainer. These dogs can absolutely be returned to a balanced co-existence, and dogs are not emotional about other dogs in ways like 'they don't like each other.' Stillwell did make some progress with Olivia and Sophia but didn't seem to fully understand that this was just the beginning of where this can be taken. If she had only been able to teach the owners how to be true calm-assertive pack leaders and reclaim the position they lost to their dogs, they could have made it all the way. Stillwell doesn't really seem to understand the true nature of dogs and once again humanizes them—although she claims she doesn't. Instead of criticizing Cesar Millan on a regular basis, she would benefit from getting some lessons from him on how to deal with imbalances in dogs properly. She showed some good positive reinforcement techniques—which Cesar Millan uses too where appropriate—but would really do herself and her clients a huge favor by not denouncing things she clearly doesn't understand. If I knew who these poor people were, I might go there myself just to help them, as I feel really sorry for them having to think their dogs can never get along—Stillwell should really be ashamed of herself.
In the western world people have a tendency to view their dogs as another kid of the family. In my work I find that to be one of the biggest problems. People spoil their dogs based on their own desire to shower them with love. And while most people have the best, most loving—from their point of view—intentions, they do not really love their dog in the true meaning of the word. Loving your dog means to accept its nature and provide what it needs—not what we think they should enjoy. This is what Cesar Millan advocates. Based on this, it is not surprising that we have a lot of people that find positive reinforcement training methods better (or more modern) because it is essentially an extension of our tendency to spoil our dogs and not correct them when called for. But just because more people find it 'nicer' doesn't mean they are correct. And just because the true nature of animals is something we don't want to think about, doesn't mean ignoring it delivers better outcomes—quite the opposite.
Cesar’s critics would be well advised to keep an open mind and re-evaluate their myopic views. Cesar Millan does have insights many don't quite get but if you work with dogs like I do, you just see every day that he is spot on—something I can't say for many 'formally' educated trainers and authors who have a made-up degree from a self-proclaimed animal behavior school.
Let's try to understand and admire dog's true nature as animals and dogs first—there is so much we can learn if we just open our eyes.
|
import os
import re
import yaml
from .apps import *
from django.template.defaultfilters import slugify
from pombola.core.logging_filters import skip_unreadable_post
from pombola.hansard.constants import NAME_SUBSTRING_MATCH, NAME_SET_INTERSECTION_MATCH
IN_TEST_MODE = False

# Work out where we are to set up the paths correctly and load config
base_dir = os.path.abspath(os.path.join(os.path.split(__file__)[0], '..', '..'))
root_dir = os.path.abspath(os.path.join(base_dir, '..'))

# Load the mySociety config. Use safe_load - general.yml contains plain
# scalars only and safe_load refuses arbitrary-object YAML tags - and a
# context manager so the file handle is closed promptly rather than leaked.
config_file = os.path.join(base_dir, 'conf', 'general.yml')
with open(config_file, 'r') as config_fh:
    config = yaml.safe_load(config_fh)

# STAGING may be 0/1 (int or string) in the config; default to production
# (0) when the key is absent instead of crashing on int(None).
STAGING = bool(int(config.get('STAGING', 0)))

# switch on all debug when staging
DEBUG = STAGING
TEMPLATE_DEBUG = STAGING
# Who receives error emails from Django.
ADMINS = (
    (config.get('ERRORS_NAME'), config.get('ERRORS_EMAIL')),
)
SLUGGABLE_SLUGIFY_FUNCTION = slugify
DEFAULT_FROM_EMAIL = config.get('FROM_EMAIL')
# This is the From: address used for error emails to ADMINS
SERVER_EMAIL = DEFAULT_FROM_EMAIL
MANAGERS = (
    (config.get('MANAGERS_NAME'), config.get('MANAGERS_EMAIL')),
)
# GeoDjango/PostGIS database; connection details come from general.yml.
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': config.get('POMBOLA_DB_NAME'),
        'USER': config.get('POMBOLA_DB_USER'),
        'PASSWORD': config.get('POMBOLA_DB_PASS'),
        'HOST': config.get('POMBOLA_DB_HOST'),
        'PORT': config.get('POMBOLA_DB_PORT'),
    }
}
# Number of seconds to keep a database connection open for
# in case it can be reused
CONN_MAX_AGE = 0 if STAGING else 300
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = config.get('ALLOWED_HOSTS', [])
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = config.get('TIME_ZONE')
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-GB'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.normpath( os.path.join( root_dir, "media_root/") )
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/media_root/'
# Use django-pipeline for handling static files
STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = os.path.normpath( os.path.join( root_dir, "collected_static/") )
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    os.path.join( base_dir, "web/static/" ),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'pipeline.finders.FileSystemFinder',
    # 'pipeline.finders.AppDirectoriesFinder',
    'pipeline.finders.PipelineFinder',
    'pipeline.finders.CachedFileFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = config.get('DJANGO_SECRET_KEY')
CACHES = {
    # by default use memcached locally. This is what get used by
    # django.core.cache.cache
    'default': {
        'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
        'LOCATION': '127.0.0.1:11211',
        # Keyed on the DB name so staging/production instances on one
        # memcached server do not collide.
        'KEY_PREFIX': config.get('POMBOLA_DB_NAME'),
    },
    # we also have a dummy cache that is used for all the page requests - we want
    # the cache framework to auto-add all the caching headers, but we don't actually
    # want to do the caching ourselves - rather we leave that to Varnish on the
    # servers.
    'dummy': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
    },
}
CACHE_MIDDLEWARE_ALIAS='dummy'
if DEBUG:
    CACHE_MIDDLEWARE_SECONDS = 0
else:
    CACHE_MIDDLEWARE_SECONDS = 60 * 20 # twenty minutes
CACHE_MIDDLEWARE_KEY_PREFIX = config.get('POMBOLA_DB_NAME')
# NOTE(review): CACHE_MIDDLEWARE_ANONYMOUS_ONLY was deprecated in Django 1.6
# and removed in 1.8 - confirm before upgrading past the 1.5 series that the
# ALLOWED_HOSTS comment above suggests this file targets.
CACHE_MIDDLEWARE_ANONYMOUS_ONLY = True
# Always use the TemporaryFileUploadHandler as it allows us to access the
# uploaded file on disk more easily. Currently used by the CSV upload in
# scorecards admin.
FILE_UPLOAD_HANDLERS = (
    # "django.core.files.uploadhandler.MemoryFileUploadHandler",
    "django.core.files.uploadhandler.TemporaryFileUploadHandler",
)
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.app_directories.Loader',
    'django.template.loaders.filesystem.Loader',
    # 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.gzip.GZipMiddleware', # first in list so it is able to act last on response
    'django.middleware.cache.UpdateCacheMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.middleware.cache.FetchFromCacheMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'pagination.middleware.PaginationMiddleware',
)
if config.get('DEBUG_TOOLBAR', True):
    MIDDLEWARE_CLASSES += ( 'debug_toolbar.middleware.DebugToolbarMiddleware', )
ROOT_URLCONF = 'pombola.urls'
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    os.path.join( base_dir, "pombola/templates" ),
)
TEMPLATE_CONTEXT_PROCESSORS = (
    "django.contrib.auth.context_processors.auth",
    "django.core.context_processors.debug",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    "django.core.context_processors.static",
    "django.core.context_processors.request",
    "django.contrib.messages.context_processors.messages",
    "pombola.core.context_processors.add_settings",
)
# WGS84 lat/long coordinates for MapIt areas.
MAPIT_AREA_SRID = 4326
MAPIT_RATE_LIMIT = ['127.0.0.1']
# MAPIT_COUNTRY should be set in the country-specific file
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        },
        # Suppresses noisy errors caused by clients aborting uploads;
        # see pombola.core.logging_filters.skip_unreadable_post.
        'skip_unreadable_posts': {
            '()': 'django.utils.log.CallbackFilter',
            'callback': skip_unreadable_post,
        },
    },
    'handlers': {
        'mail_admins': {
            'filters': ['require_debug_false', 'skip_unreadable_posts'],
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler'
        },
        'stream_to_stderr': {
            'level': 'WARN',
            'class': 'logging.StreamHandler',
        },
    },
    'loggers': {
        'django': {
            'handlers': ['stream_to_stderr'],
            'level': 'INFO',
            'propagate': True,
        },
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
        'elasticsearch': {
            'handlers': ['stream_to_stderr'],
            'level': 'INFO',
            'propagate': True,
        },
        'elasticsearch.trace': {
            'handlers': ['stream_to_stderr'],
            'level': 'INFO',
            'propagate': True,
        },
    }
}
# Configure the Hansard app
# NOTE(review): HANSARD_CACHE is assigned again under "ZA Hansard settings"
# near the end of this file (with a different path), so this value is
# overwritten at import time - confirm which location is intended.
HANSARD_CACHE = os.path.join( base_dir, "../hansard_cache" )
KENYA_PARSER_PDF_TO_HTML_HOST = config.get('KENYA_PARSER_PDF_TO_HTML_HOST')
# The name of a Twitter account related to this website. This will be used to
# pull in the latest tweets on the homepage and in the share on twitter links.
TWITTER_USERNAME = config.get('TWITTER_USERNAME')
# The widget ID is used for displaying tweets on the homepage.
TWITTER_WIDGET_ID = config.get('TWITTER_WIDGET_ID')
# pagination related settings
PAGINATION_DEFAULT_PAGINATION = 10
PAGINATION_DEFAULT_WINDOW = 2
PAGINATION_DEFAULT_ORPHANS = 2
PAGINATION_INVALID_PAGE_RAISES_404 = True
# haystack config - interface to search engine
HAYSTACK_CONNECTIONS = {
    #'default': {
    #    'ENGINE': 'xapian_backend.XapianEngine',
    #    'PATH': os.path.join( root_dir, "pombola_xapian" ),
    #'PATH': os.path.join(os.path.dirname(__file__), 'xapian_index'),
    #},
    'default': {
        'ENGINE': 'haystack.backends.elasticsearch_backend.ElasticsearchSearchEngine',
        'URL': 'localhost:9200',
        # Index name matches the DB name so multiple deployments can share
        # one elasticsearch server without clashing.
        'INDEX_NAME': config.get('POMBOLA_DB_NAME'),
        'EXCLUDED_INDEXES': [],
    },
}
HAYSTACK_SIGNAL_PROCESSOR = 'haystack.signals.RealtimeSignalProcessor'
# Admin autocomplete
AJAX_LOOKUP_CHANNELS = {
    'person_name' : dict(model='core.person', search_field='legal_name'),
    'organisation_name' : dict(model='core.organisation', search_field='name'),
    'place_name' : dict(model='core.place', search_field='name'),
    'title_name' : dict(model='core.positiontitle', search_field='name'),
}
# misc settings
HTTPLIB2_CACHE_DIR = os.path.join( root_dir, 'httplib2_cache' )
GOOGLE_ANALYTICS_ACCOUNT = config.get('GOOGLE_ANALYTICS_ACCOUNT')
COUNTY_PERFORMANCE_EXPERIMENT_KEY = config.get('COUNTY_PERFORMANCE_EXPERIMENT_KEY')
YOUTH_EMPLOYMENT_BILL_EXPERIMENT_KEY = config.get('YOUTH_EMPLOYMENT_BILL_EXPERIMENT_KEY')
IEBC_API_ID = config.get('IEBC_API_ID')
IEBC_API_SECRET = config.get('IEBC_API_SECRET')
# Markitup settings
# NOTE(review): markdown's safe_mode is deprecated in newer python-markdown
# releases - confirm the pinned version still honours it.
MARKITUP_FILTER = ('markdown.markdown', {'safe_mode': True, 'extensions':['tables']})
MARKITUP_SET = 'markitup/sets/markdown'
# There are some models that are just for testing, so they are not included in
# the South migrations.
SOUTH_TESTS_MIGRATE = False
# Use nose as the test runner
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = ['--with-doctest', '--with-yanc']
# For the disqus comments
DISQUS_SHORTNAME = config.get( 'DISQUS_SHORTNAME', None )
# At some point we should deprecate this. For now it defaults to true so that
# no entry in the config does the right thing.
DISQUS_USE_IDENTIFIERS = config.get( 'DISQUS_USE_IDENTIFIERS', True )
FACEBOOK_APP_ID = config.get('FACEBOOK_APP_ID')
# Polldaddy widget ID - from http://polldaddy.com/
# Use the widget rather than embedding a poll direct as it will allow the poll
# to be changed without having to alter the settings or HTML. If left blank
# then no poll will be shown.
POLLDADDY_WIDGET_ID = config.get( 'POLLDADDY_WIDGET_ID', None )
# RSS feed to the blog related to this site. If present will cause the 'Latest
# News' to appear on the homepage.
BLOG_RSS_FEED = config.get( 'BLOG_RSS_FEED', None )
# sorl-thumbnail: raise thumbnailing errors instead of swallowing them.
THUMBNAIL_DEBUG = True
# ZA Hansard settings
# NOTE(review): this HANSARD_CACHE assignment overrides the earlier one made
# in the "Configure the Hansard app" section above - confirm which path is
# the intended one.
HANSARD_CACHE = os.path.join( root_dir, 'hansard_cache' )
COMMITTEE_CACHE = os.path.join( HANSARD_CACHE, 'committee' )
ANSWER_CACHE = os.path.join( HANSARD_CACHE, 'answers' )
QUESTION_CACHE = os.path.join( HANSARD_CACHE, 'questions' )
ANSWER_JSON_CACHE = os.path.join( HANSARD_CACHE, 'answers_json' )
QUESTION_JSON_CACHE = os.path.join( HANSARD_CACHE, 'questions_json' )
PMG_COMMITTEE_USER = config.get('PMG_COMMITTEE_USER', '')
PMG_COMMITTEE_PASS = config.get('PMG_COMMITTEE_PASS', '')
PMG_API_KEY = config.get('PMG_API_KEY', '')
# Algorithm to use for matching names when scraping hansard
#   NAME_SUBSTRING_MATCH
#     - strips the title from the name and then searches for current politicians
#       with names containing that string (used by Kenya).
#   NAME_SET_INTERSECTION_MATCH
#     - splits the name, including title, into words, and then compares the
#       set of these words with similar sets from current politicians,
#       looking for the largest intersection.
HANSARD_NAME_MATCHING_ALGORITHM = NAME_SET_INTERSECTION_MATCH
# Which popit instance to use
POPIT_API_URL = config.get('POPIT_API_URL')
# Maps URL-name prefixes to (label, link) pairs used to build breadcrumbs.
BREADCRUMB_URL_NAME_MAPPINGS = {
    'info'   : ('Information', '/info/'),
    'organisation' : ('Organisations', '/organisation/all/'),
    'person' : ('Politicians', '/person/all/'),
    'place'  : ('Places', '/place/all/'),
    'search' : ('Search', '/search/')
}
# Info page settings
INFO_POSTS_PER_LIST_PAGE = 10
# Order matters: pombola.admin_additions must precede django.contrib.admin
# so its template overrides take precedence.
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.humanize',
    'django.contrib.gis',
    'pombola.admin_additions',
    'django.contrib.admin',
    'django.contrib.admindocs',
    'south',
    'ajax_select',
    'autocomplete_light',
    'markitup',
    'pipeline',
    'mapit',
    'pombola.images',
    'sorl.thumbnail',
    'haystack',
    'pombola.slug_helpers',
    'pombola.info',
    'pombola.tasks',
    'pombola.core',
    'pombola.feedback',
    'pombola.scorecards',
    'pombola.search',
    'pombola.file_archive',
    'pombola.map',
    'pombola.experiments',
    'pombola.budgets',
    'pagination',
    'django_nose',
)
# Enabled by default; set DEBUG_TOOLBAR: 0 in general.yml to disable.
if config.get('DEBUG_TOOLBAR', True):
    INSTALLED_APPS += ('debug_toolbar',)
def insert_after(sequence, existing_item, item_to_put_after):
    """A helper for inserting an item directly after another in a sequence.

    Returns a new list equal to 'sequence' with 'item_to_put_after' placed
    immediately after the first occurrence of 'existing_item'; this is
    useful for putting items into the expected position in INSTALLED_APPS.
    Note that a list is returned even if 'sequence' is a tuple, but Django
    doesn't mind if INSTALLED_APPS is a list.
    """
    items = list(sequence)
    position = items.index(existing_item) + 1
    return items[:position] + [item_to_put_after] + items[position:]
def make_enabled_features(installed_apps, all_optional_apps):
    """Map each optional app's short name to whether it is enabled.

    The 'pombola.' prefix is stripped from each entry of
    'all_optional_apps'; an app counts as enabled when it appears in
    'installed_apps' under either its short or fully-qualified name.
    """
    short_names = (re.sub(r'^pombola\.', '', app) for app in all_optional_apps)
    return {
        name: ('pombola.' + name in installed_apps) or (name in installed_apps)
        for name in short_names
    }
# Set up the core CSS and JS files:
PIPELINE_CSS = {
    'core': {
        'source_filenames': (
            # .css files from core:
            'css/jquery-ui-1.8.17.custom.css',
        ),
        'output_filename': 'css/core.css',
    },
    'countdown': {
        'source_filenames': (
            'css/jquery.countdown-v1.6.0.css',
            'sass/countdown.scss',
        ),
        'output_filename': 'css/countdown.css',
    },
    'admin': {
        'source_filenames': (
            # .scss files from core:
            'sass/admin.scss',
        ),
        'output_filename': 'css/admin.css',
    },
}
# The packages in DYNAMICALLY_LOADED_PIPELINE_JS will all be loaded
# dynamically, and the only way we can do that without making changes
# to django-pipeline is to render the URLs that django-pipeline
# generates as Javascript array elements. So, keep these separate so
# that we can set a template that does that on each when including
# them in PIPELINE_JS.
# NOTE(review): every package below already sets 'template_name', and the
# for-loop after PIPELINE_JS sets it again - the per-package entries here
# are redundant (though harmless).
DYNAMICALLY_LOADED_PIPELINE_JS = {
    'desktop_only': {
        'source_filenames': (
            'js/libs/jquery-ui-1.8.17.custom.min.js',
            'js/libs/jquery.ui.autocomplete.html.2010-10-25.js',
            'js/libs/jquery.form-v2.94.js',
            'js/desktop-functions.js',
        ),
        'output_filename': 'js/desktop_only.js',
        'template_name': 'pipeline/js-array.html',
    },
    'mobile_only': {
        'source_filenames': (
            'js/mobile-functions.js',
        ),
        'output_filename': 'js/mobile_only.js',
        'template_name': 'pipeline/js-array.html',
    },
    'desktop_and_mobile': {
        'source_filenames': (
            'js/twitter-embed.js',
        ),
        'output_filename': 'js/desktop_and_mobile.js',
        'template_name': 'pipeline/js-array.html',
    },
    'analytics': {
        'source_filenames': (
            'js/analytics.js',
        ),
        'output_filename': 'js/analytics.js',
        'template_name': 'pipeline/js-array.html',
    },
    'load-appearances': {
        'source_filenames': (
            'js/load-appearances.html',
        ),
        'output_filename': 'js/load-appearances.js',
        'template_name': 'pipeline/js-array.html',
    },
    'feeds': {
        'source_filenames': (
            'js/feeds.js',
        ),
        'output_filename': 'js/feeds.js',
        'template_name': 'pipeline/js-array.html',
    },
    'countdown': {
        'source_filenames': (
            'js/libs/jquery.countdown-v1.6.0.js',
        ),
        'output_filename': 'js/countdown.js',
        'template_name': 'pipeline/js-array.html',
    },
    'responsive-carousel': {
        'source_filenames': (
            'js/libs/responsive-carousel.js',
        ),
        'output_filename': 'js/responsive-carousel.js',
        'template_name': 'pipeline/js-array.html',
    },
    'map': {
        'source_filenames': (
            'js/map-drilldown.js',
        ),
        'output_filename': 'js/map.js',
        'template_name': 'pipeline/js-array.html',
    },
}
PIPELINE_JS = {
    'google-map': {
        'source_filenames': (
            'js/map.js',
        ),
        'output_filename': 'js/google-map.js',
    },
    'modernizr_and_loader': {
        'source_filenames': (
            'js/libs/modernizr.js',
            'js/loader.js',
        ),
        'output_filename': 'js/modernizr_and_loader.js',
    },
    # NOTE(review): 'hide-reveal' sets 'template_name' although it is not in
    # DYNAMICALLY_LOADED_PIPELINE_JS - confirm whether it is meant to be
    # loaded dynamically like the packages merged in below.
    'hide-reveal': {
        'source_filenames': (
            'js/hide-reveal.js',
        ),
        'output_filename': 'js/hide-reveal.js',
        'template_name': 'pipeline/js-array.html',
    },
}
# Merge the dynamically-loaded packages into PIPELINE_JS, forcing the
# js-array template on each so their URLs render as array elements.
for package_name, package in DYNAMICALLY_LOADED_PIPELINE_JS.items():
    package['template_name'] = 'pipeline/js-array.html'
    PIPELINE_JS[package_name] = package
# Only for debugging compression (the default is: 'not DEBUG' which is
# fine when not experimenting with compression)
# PIPELINE_ENABLED = True
PIPELINE_COMPILERS = (
    'pipeline_compass.compass.CompassCompiler',
)
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.yui.YUICompressor'
PIPELINE_JS_COMPRESSOR = 'pipeline.compressors.yui.YUICompressor'
PIPELINE_YUI_BINARY = '/usr/bin/env yui-compressor'
PIPELINE_DISABLE_WRAPPER = True
EXCLUDE_FROM_SEARCH = ()
# Settings for bleach, used by sayit to determine what html is allowed
BLEACH_ALLOWED_TAGS = [
    'a', 'abbr', 'b', 'i', 'u', 'span', 'sub', 'sup', 'br',
    'p',
    'ol', 'ul', 'li',
    'table', 'caption', 'tr', 'th', 'td',
]
BLEACH_ALLOWED_ATTRIBUTES = {
    '*': [ 'id', 'title' ], # class, style
    'a': [ 'href' ],
    'li': [ 'value' ],
}
BLEACH_STRIP_TAGS = True
INFO_PAGES_ALLOW_RAW_HTML = False
# Optional SMTP configuration, only applied when EMAIL_SETTINGS is set.
if config.get('EMAIL_SETTINGS', None):
    EMAIL_HOST = config.get('EMAIL_HOST', '')
    EMAIL_HOST_USER = config.get('EMAIL_HOST_USER', '')
    EMAIL_HOST_PASSWORD = config.get('EMAIL_HOST_PASSWORD', '')
    port = config.get('EMAIL_PORT', None)
    if port:
        EMAIL_PORT = port
    EMAIL_USE_TLS = config.get('EMAIL_USE_TLS', False)
|
Keep It Simple, Sweetie is the best way to cook a very nice piece of extra thick pork chops. In this pan-charred pork chop with a chipotle compound butter the mild flavor of the pork is boosted with a char flavor, then complemented with flavors from chipotle pepper compound butter. I char the pork chops on a Calphalon grill pan and then complete the cooking in the oven.
Where it's from: The rib section of the loin, from the shoulder to the middle of the loin (the rib bones attached to these chops are actually baby back ribs).
What it looks like: Large eye of lean loin meat and no tenderloin meat. There is a bone running along one side and sometimes a layer of fat on the outside. Rib chops from the blade end have more fat and connective tissue than chops from the shoulder end.
What it tastes like: The chops are very tender, have a little more fat than loin chops, and have a mild flavor.
How to cook it: Since the meat on these chops is lean, quick cooking like grilling, broiling, or sear-roasting are the best methods. Brining first will help keep these chops moist and tender.
There are four types of cuts for rib chops. For additional information, check out "A Complete Guide To Pork Chops Meat Basics".
Wash the pork chops and pat them dry. Allow the pork chops to reach room temperature, about 20 minutes.
Generously sprinkle salt on one side of the pork chops. (This step is very important! The salt helps to bring out the flavor of the pork, as well as aid in tenderizing and keeping it moist.) Sprinkle on the crushed pepper. Lightly cover with olive oil. Gently press the ingredients into the pork chops.
Turn the pork chops over and repeat the above steps.
Set your grill pan on your gas or electric burner and set the temperature to high. Get the pan very hot. I always look for a little smoke to emit from the pan.
Place each pork chop in the grill pan. Do not move the pork chops after you first place them in the pan. Cook for 3-4 minutes on the first side. Three minutes if you want a light char. Turn the chops over and sear for 3 minutes. Transfer the pan to the oven for 8 minutes, only 8 minutes!!
Take the pork chops out of the oven and transfer to a warm platter or plate. Allow to rest for at least 5-6 minutes. Note that the pork chops will continue to cook during this resting period. If you want to check for doneness, gently make a small cut along the bone and "peek." It's important not to cut into the meat, which will allow any juices to escape and render a dry pork chop.
Note: If you purchase a 1" thick pork chop, sear to get a char as noted above. But reduce the cooking time in the oven by about 1 minute. Better to have an undercooked pork chop that can be returned to the oven than an overcooked pork chop that is dry!
To Serve: Top the pork chops with chipotle pepper compound butter. Garnish with sliced spring onions.
The pan-charred pork chop flavor is wonderfully enhanced with the chipotle pepper compound butter. It's easy to make with just a few fresh ingredients, such as Italian flat-leaf parsley, garlic and the crushed smoky hot chipotle pepper. I like to make it a day or two ahead. Click on the link to get the complete recipe.
Hey! If you like this recipe, let me know. Please.
Just click on the like button below. Or better yet, share your thoughts in the comment box below.
|
import random
import math
from itertools import product
from itertools import chain
from thread_sum import ThreadSum
# Modulus used for blinding (v_j = d + alpha_j * q). This constant appears
# to be the Ed25519/Curve25519 group order l = 2^252 + delta - confirm.
q = 2**252 + 27742317777372353535851937790883648493
# Shared module-level state populated by WindowAttack and read by the
# ThreadSum worker threads:
#   r     - random values r_0..r_N (r_0 is the reference)
#   v     - blinded values v_j (possibly with bit flips applied)
#   alpha - blinding factors alpha_j = |r_j - r_0|
r = []
v = []
alpha = []
def int_to_bin(number):
    """Return a non-negative integer as a list of 0/1 ints, MSB first."""
    return list(map(int, bin(number)[2:]))
def bin_to_int(bit_list):
    """Fold an MSB-first list of 0/1 ints back into an integer (0 for [])."""
    value = 0
    for position, bit in enumerate(reversed(bit_list)):
        value += bit * (2 ** position)
    return value
def groupsof(n, xs):
    """Split xs into consecutive chunks of length n (the last may be shorter).

    Returns an iterable of chunks (a lazily chained sequence for more than
    one chunk). Uses <= in the base case so that a length that is an exact
    multiple of n no longer produces a spurious trailing empty chunk, which
    the previous < comparison did.
    """
    if len(xs) <= n:
        return [xs]
    return chain([xs[0:n]], groupsof(n, xs[n:]))
class WindowAttack(object):
    """Windowed recovery of a secret d from blinded values v_j = d + alpha_j*q.

    Python 2 code (print statements, xrange, the removed <> operator).
    Candidate windows are scored across N blinded traces (optionally with
    random bit flips to simulate read noise) using ThreadSum workers, and
    d is reconstructed window_size bits at a time from the low end.
    """
    def generate_v_values(self, d, N):
        # Noise-free blinded values v_j = d + alpha_j * q.
        for i in xrange(0, N):
            value = d + (alpha[i]*q)
            v.append(value)
    def generate_alpha_js(self, N):
        # alpha_j = |r_j - r_0|; math.fabs returns a float, hence the int().
        for i in xrange(1, N+1):
            al = r[i] - r[0]
            alpha.append(int(math.fabs(al)))
    def generate_r_js(self, n, N):
        # N+1 random n-bit values; r[0] serves as the reference for alpha_j.
        for i in xrange(0, N+1):
            a = random.getrandbits(n)
            r.append(int(math.fabs(a)))
    def bit_flip_random(self, bit_list, randomized_bits):
        """Return a copy of bit_list with randomized_bits distinct random
        positions flipped (simulated noisy side-channel read)."""
        bit_list_t = bit_list[:]
        pos_list = []
        if len(bit_list) < randomized_bits:
            raise Exception("Randomized bigger then d+(a*r)")
        print "Lenght: ", len(bit_list)
        for i in xrange(0, randomized_bits):
            # Re-draw until an unused position is found, so flips never
            # cancel each other out.
            pos_bit_to_flip = random.randint(0, len(bit_list)-1)
            while(pos_bit_to_flip in pos_list):
                pos_bit_to_flip = random.randint(0, len(bit_list)-1)
            pos_list.append(pos_bit_to_flip)
            if bit_list_t[pos_bit_to_flip] == 1:
                bit_list_t[pos_bit_to_flip] = 0
            else:
                bit_list_t[pos_bit_to_flip] = 1
        return bit_list_t
    def generate_v_values_with_bit_flip(self, d, N, randomized_bits):
        # Same as generate_v_values but each v_j is perturbed by random
        # bit flips before being stored.
        for i in xrange(0, N):
            value = d + (alpha[i]*q)
            bit_list = int_to_bin(value)
            #print len(bit_list)
            bit_list_flipped = self.bit_flip_random(bit_list, randomized_bits)
            value_flipped = bin_to_int(bit_list_flipped)
            v.append(value_flipped)
    def sum_all_ds(self, d_candidates, interval, mod_value, N):
        """Score all candidate bit-patterns in parallel and return the best
        score together with the {score: candidate} map."""
        pairs = {}
        number_of_threads = 4
        # Python 2 integer division splits the candidates into 4 groups.
        ds = list(groupsof(len(d_candidates)/number_of_threads, d_candidates))
        #ds = zip(*[iter(d_candidates)]*number_of_threads)
        threads = []
        #print "DS: ", len(ds)
        for i in xrange(0, number_of_threads):
            threads.append(ThreadSum(i, ds[i], v, alpha, N, mod_value, interval))
        for t in threads:
            t.start()
        for t in threads:
            t.join()
        for t in threads:
            key, d = t.return_result()
            try:
                # On a score collision keep the candidate with fewer 1-bits;
                # the KeyError path (first time a key is seen) just stores it.
                if pairs[key] <> None:
                    val = pairs[key]
                    if val.count(1) > d.count(1):
                        pairs[key] = d
            except Exception as e:
                pairs[key] = d
        #print pairs
        #print key
        #print pairs.keys()
        return min(pairs.keys()) , pairs
    def test_d(self, d, to_test):
        """Check a candidate against the original d. Here it is a direct
        comparison; in a real attack it would be e.g. comparing a ciphertext
        produced with the original key against one produced with the
        candidate."""
        return (d==to_test)
    def wide_widow_attack(self, d, window_size = 10, n = 512, N = 200, randomized_bits = 30):
        """Run the attack: build N noisy blinded traces for d, then recover
        d window_size bits at a time, low bits first."""
        self.generate_r_js(n, N)
        self.generate_alpha_js(N)
        self.generate_v_values_with_bit_flip(d, N, randomized_bits)
        print "d = ", int_to_bin(d), " len: ", len(int_to_bin(d))
        print "Starting...."
        w_prime = 0
        w = window_size
        d_prime = 0
        variations = []
        # All 2^window_size bit patterns tried in front of the known bits.
        for i in product([0,1], repeat=window_size):
            variations.append(list(i))
        while(w < (n + window_size + window_size)):
            print "w: ", w
            print "w_prime: ", w_prime
            mod_value = 2**w
            d_prime = d_prime % mod_value
            d_prime_bin = int_to_bin(d_prime)
            to_iterate = []
            # Prepend every window pattern to the bits recovered so far.
            for variation in variations:
                to_iterate.append(variation+d_prime_bin)
            sum_d , d_candidate = self.sum_all_ds(to_iterate, w, mod_value, N)
            d_prime = bin_to_int(d_candidate[sum_d])
            print "sum: ", sum_d, " d_candidate = ", int_to_bin(d_prime)
            w_prime = w
            w = w + window_size
            # Early exit: inflate w past the loop bound once d is found.
            if self.test_d(d, d_prime):
                w = w+n
        if (d == d_prime):
            print "FOUND KEY."
        else:
            print "SORRY"
        print "Finished."
|
Community Hospice is fortunate to have talented and knowledgeable physicians on staff. Each of our physicians brings their own unique skill set to our organization and the patients we serve. Our Physicians are all residents of our service area, which allows them greater understanding of the social and cultural environment. Their passion for, and dedication to, hospice care allows Community Hospice to provide up to date, culturally relevant medical care to all of our patients and families.
|
# vi: ts=4 expandtab
#
# Copyright (C) 2012 Canonical Ltd.
# Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
# Copyright (C) 2012 Yahoo! Inc.
#
# Author: Scott Moser <scott.moser@canonical.com>
# Author: Juerg Haefliger <juerg.haefliger@hp.com>
# Author: Joshua Harlow <harlowja@yahoo-inc.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from cloudinit import handlers
from cloudinit import log as logging
from cloudinit import util
from cloudinit.settings import (PER_ALWAYS)
# Module-level logger and the marker that identifies a boothook user-data part.
LOG = logging.getLogger(__name__)
BOOTHOOK_PREFIX = "#cloud-boothook"
class BootHookPartHandler(handlers.Handler):
def __init__(self, paths, datasource, **_kwargs):
handlers.Handler.__init__(self, PER_ALWAYS)
self.boothook_dir = paths.get_ipath("boothooks")
self.instance_id = None
if datasource:
self.instance_id = datasource.get_instance_id()
def list_types(self):
return [
handlers.type_from_starts_with(BOOTHOOK_PREFIX),
]
def _write_part(self, payload, filename):
filename = util.clean_filename(filename)
filepath = os.path.join(self.boothook_dir, filename)
contents = util.strip_prefix_suffix(util.dos2unix(payload),
prefix=BOOTHOOK_PREFIX)
util.write_file(filepath, contents.lstrip(), 0700)
return filepath
def handle_part(self, _data, ctype, filename, # pylint: disable=W0221
payload, frequency): # pylint: disable=W0613
if ctype in handlers.CONTENT_SIGNALS:
return
filepath = self._write_part(payload, filename)
try:
env = os.environ.copy()
if self.instance_id is not None:
env['INSTANCE_ID'] = str(self.instance_id)
util.subp([filepath], env=env)
except util.ProcessExecutionError:
util.logexc(LOG, "Boothooks script %s execution error", filepath)
except Exception:
util.logexc(LOG, "Boothooks unknown error when running %s",
filepath)
|
Viagra, with no prescription, overnight delivery is a top-rated service on the Internet. Many men, very often, may need the medicament urgently; so, they use the services of dubious online pharmacies. We will try to find out whether it is possible to get Viagra overnight and without prescription, delivered on the web, or not.
Viagra is a medicament that is released only by prescription. Therefore, to buy it (online or offline) a man needs to get a prescription from a healthcare professional. However, many men try to buy this medication without consulting their doctors. Also, many sites sell Viagra without the required prescription. Unfortunately, this practice isn't safe.
1) The medicament can cause serious side effects.
2) Patients who have severe health problems (such as heart disease or diabetes) may miss these ailments because they have not visited a doctor.
3) Patients do not always read the instructions and are not aware of the fact that the medicament can dangerously react with other medical ingredients (e.g., nitrates). It is the doctor who warns patients about possible contraindications when writing out a prescription.
The company withdrew the application; but, it noticed that it was disagreeing with the decision of the European Medicines Agency. The problem was because a lot of people were still trying to walk around the rules, putting themselves at risk buying medicines from unknown sources. They are missing out on the opportunity to obtain the necessary information about the medicament from a qualified medical expert.
Large retailers such as Walgreens, Wal-Mart, Costco usually deliver goods within 4 to 10 days. However, some services offer the possibility of faster delivery for an additional fee, or a membership fee. Wal-Mart is considering the introduction of a quick delivery with the help of drones. Also, the US lawmakers have already begun to develop regulations on the use of drones for such purposes.
If you live in Europe (EU countries) or the United States, you cannot buy OTC Viagra. This drug is sold without a prescription in the post-Soviet countries, India, China and some other countries. Concerning overnight delivery, this feature is only possible if you live near the warehouse of the supplier. Sometimes overnight distribution is carried out for an additional cost. However, in the future, it may become more accessible thanks to new technologies such as the use of drones.
|
#!/usr/bin/env python
"""Tag a commit in the bup repository.
Creating a tag on a commit can be used for avoiding automatic cleanup from
removing this commit due to old age.
"""
import sys
import os
from bup import git, options
from bup.helpers import *
handle_ctrl_c()
optspec = """
bup tag
bup tag <tag name> <commit>
bup tag -d <tag name>
--
d,delete= Delete a tag
"""
o = options.Options(optspec)
(opt, flags, extra) = o.parse(sys.argv[1:])
git.check_repo_or_die()
if opt.delete:
tag_file = git.repo('refs/tags/%s' % opt.delete)
debug1("tag file: %s\n" % tag_file)
if not os.path.exists(tag_file):
log("bup: error: tag '%s' not found.\n" % opt.delete)
sys.exit(1)
try:
os.unlink(tag_file)
except OSError, e:
log("bup: error: unable to delete tag '%s': %s" % (opt.delete, e))
sys.exit(1)
sys.exit(0)
tags = [t for sublist in git.tags().values() for t in sublist]
if not extra:
for t in tags:
print t
sys.exit(0)
elif len(extra) < 2:
o.fatal('no commit ref or hash given.')
(tag_name, commit) = extra[:2]
if not tag_name:
o.fatal("tag name must not be empty.")
debug1("args: tag name = %s; commit = %s\n" % (tag_name, commit))
if tag_name in tags:
log("bup: error: tag '%s' already exists\n" % tag_name)
sys.exit(1)
if tag_name.startswith('.'):
o.fatal("'%s' is not a valid tag name." % tag_name)
try:
hash = git.rev_parse(commit)
except git.GitError, e:
log("bup: error: %s" % e)
sys.exit(2)
if not hash:
log("bup: error: commit %s not found.\n" % commit)
sys.exit(2)
pL = git.PackIdxList(git.repo('objects/pack'))
if not pL.exists(hash):
log("bup: error: commit %s not found.\n" % commit)
sys.exit(2)
tag_file = git.repo('refs/tags/%s' % tag_name)
try:
tag = file(tag_file, 'w')
except OSError, e:
log("bup: error: could not create tag '%s': %s" % (tag_name, e))
sys.exit(3)
tag.write(hash.encode('hex'))
tag.close()
|
5ban Graphics is a group of artists who have illustrated many cards for the Pokémon Trading Card Game from Black and White onward. The group has designed many Trainer Cards, as well as many Full Art cards and Pokémon cards for the Black and White, XY, and Sun & Moon TCG Series.
Little is known about the group. In 2012, a brief interview about the creation of card artwork was posted on the Creatures, Inc. PTCG blog in which they state they like to make the Pokémon look 3D along with exemplifying the best qualities of the Pokémon when possible so they stand out on their own.
For a list of cards illustrated by 5ban Graphics, go here.
|
#!/usr/bin/env python
# Import all necessary modules here so that if it fails, it fails early.
try:
import netCDF4 as NC
except:
import netCDF3 as NC
import subprocess
import numpy as np
import os
# NetCDF variable names expected by PISM for the surface mass balance and
# the ice surface temperature.
smb_name = "climatic_mass_balance"
temp_name = "ice_surface_temp"
def run(commands):
"""Run a list of commands (or one command given as a string)."""
if isinstance(commands, (list, tuple)):
for cmd in commands:
print "Running '%s'..." % cmd
subprocess.call(cmd.split(' '))
else:
run([commands])
def preprocess_ice_velocity():
    """
    Download and preprocess the ~95Mb Antarctic ice velocity dataset from NASA MEASURES project
    http://nsidc.org/data/nsidc-0484.html

    Returns the name of the "cutout" NetCDF file covering the Ross area.
    """
    url = "ftp://anonymous@sidads.colorado.edu/pub/DATASETS/nsidc0484_MEASURES_antarc_vel_V01/"
    input_filename = "Antarctica_ice_velocity.nc"
    output_filename = os.path.splitext(input_filename)[0] + "_cutout.nc"
    # Download, decompress, and rename dimensions so NCO tools can address
    # them as plain x and y.
    commands = ["wget -nc %s%s.gz" % (url, input_filename), # NSIDC supports compression on demand!
                "gunzip %s.gz" % input_filename,
                "ncrename -d nx,x -d ny,y -O %s %s" % (input_filename, input_filename)
                ]
    if not os.path.exists(input_filename):
        run(commands)
    nc = NC.Dataset(input_filename, 'a')
    # Create x and y coordinate variables and set projection parameters; cut
    # out the Ross area.
    # Metadata provided with the dataset describes the *full* grid, so it is a
    # lot easier to modify this file instead of adding grid information to the
    # "cutout" file.
    if 'x' not in nc.variables and 'y' not in nc.variables:
        nx = nc.nx
        ny = nc.ny
        x_min = float(nc.xmin.strip().split(' ')[0])
        y_max = float(nc.ymax.strip().split(' ')[0])
        # NOTE(review): assumes a symmetric grid (x_max == y_max and
        # y_min == x_min) -- confirm against the dataset metadata.
        x_max = y_max
        y_min = x_min
        x = np.linspace(x_min, x_max, nx)
        # y runs from y_max down to y_min (row 0 is the northernmost).
        y = np.linspace(y_max, y_min, ny)
        nc.projection = "+proj=stere +ellps=WGS84 +datum=WGS84 +lon_0=0 +lat_0=-90 +lat_ts=-71 +units=m"
        try:
            x_var = nc.createVariable('x', 'f8', ('x',))
            y_var = nc.createVariable('y', 'f8', ('y',))
        except:
            # Variables already exist (e.g. partial earlier run); reuse them.
            x_var = nc.variables['x']
            y_var = nc.variables['y']
        x_var[:] = x
        y_var[:] = y
        x_var.units = "meters"
        x_var.standard_name = "projection_x_coordinate"
        y_var.units = "meters"
        y_var.standard_name = "projection_y_coordinate"
    nc.close()
    # Cut out the Ross area (index ranges in the full grid).
    if not os.path.exists(output_filename):
        cmd = "ncks -d x,2200,3700 -d y,3500,4700 -O %s %s" % (input_filename, output_filename)
        run(cmd)
    nc = NC.Dataset(output_filename, 'a')
    # fix units of 'vx' and 'vy'
    nc.variables['vx'].units = "m / year"
    nc.variables['vy'].units = "m / year"
    # Compute and save the velocity magnitude
    if 'magnitude' not in nc.variables:
        vx = nc.variables['vx'][:]
        vy = nc.variables['vy'][:]
        v_magnitude = np.zeros_like(vx)
        v_magnitude = np.sqrt(vx**2 + vy**2)
        magnitude = nc.createVariable('v_magnitude', 'f8', ('y', 'x'))
        magnitude.units = "m / year"
        magnitude[:] = v_magnitude
    nc.close()
    return output_filename
def preprocess_albmap():
    """
    Download and preprocess the ~16Mb ALBMAP dataset from http://doi.pangaea.de/10.1594/PANGAEA.734145

    Returns the name of the trimmed "cutout" NetCDF file.
    """
    url = "http://store.pangaea.de/Publications/LeBrocq_et_al_2010/ALBMAPv1.nc.zip"
    input_filename = "ALBMAPv1.nc"
    output_filename = os.path.splitext(input_filename)[0] + "_cutout.nc"
    commands = ["wget -nc %s" % url,                            # download
                "unzip -n %s.zip" % input_filename,             # unpack
                "ncks -O -d x1,439,649 -d y1,250,460 %s %s" % (input_filename, output_filename), # cut out
                "ncks -O -v usrf,lsrf,topg,temp,acca %s %s" % (output_filename, output_filename), # trim
                "ncrename -O -d x1,x -d y1,y -v x1,x -v y1,y %s" % output_filename, # fix metadata
                "ncrename -O -v temp,%s -v acca,%s %s" % (temp_name, smb_name, output_filename)]
    run(commands)
    nc = NC.Dataset(output_filename, 'a')
    # fix acab: set metadata and replace the -9999 fill value with zero
    acab = nc.variables[smb_name]
    acab.units = "m / year"
    acab.standard_name = "land_ice_surface_specific_mass_balance"
    SMB = acab[:]
    SMB[SMB == -9999] = 0
    acab[:] = SMB
    # fix artm and topg
    nc.variables[temp_name].units = "Celsius"
    nc.variables["topg"].standard_name = "bedrock_altitude"
    # compute ice thickness as upper minus lower surface elevation
    if 'thk' not in nc.variables:
        usrf = nc.variables['usrf'][:]
        lsrf = nc.variables['lsrf'][:]
        thk = nc.createVariable('thk', 'f8', ('y', 'x'))
        thk.units = "meters"
        thk.standard_name = "land_ice_thickness"
        thk[:] = usrf - lsrf
    nc.projection = "+proj=stere +ellps=WGS84 +datum=WGS84 +lon_0=0 +lat_0=-90 +lat_ts=-71 +units=m"
    nc.close()
    # Remove usrf and lsrf variables:
    command = "ncks -x -v usrf,lsrf -O %s %s" % (output_filename, output_filename)
    run(command)
    return output_filename
def final_corrections(filename):
    """
    * replaces missing values with zeros
    * computes Dirichlet B.C. locations

    Also lowers the bedrock and caps temperatures outside the B.C. area
    to prepare the file for a prognostic run.
    """
    nc = NC.Dataset(filename, 'a')
    # replace missing values with zeros
    for var in ['u_ssa_bc', 'v_ssa_bc', 'magnitude']:
        tmp = nc.variables[var][:]
        tmp[tmp.mask == True] = 0
        nc.variables[var][:] = tmp
    thk = nc.variables['thk'][:]
    topg = nc.variables['topg'][:]
    # compute the grounded/floating mask:
    mask = np.zeros(thk.shape, dtype='i')
    # densities used by the flotation criterion (kg/m^3)
    rho_ice = 910.0
    rho_seawater = 1028.0
    ice_free = 0
    grounded = 1
    floating = 2
    My, Mx = thk.shape
    for j in xrange(My):
        for i in xrange(Mx):
            # Grounded where the ice bottom sits above the flotation level.
            if topg[j,i] + thk[j,i] > 0 + (1 - rho_ice/rho_seawater) * thk[j,i]:
                mask[j,i] = grounded
            else:
                # Thinner than 1 m counts as ice-free, otherwise floating.
                if thk[j,i] < 1:
                    mask[j,i] = ice_free
                else:
                    mask[j,i] = floating
    # compute the B.C. locations:
    bcflag_var = nc.createVariable('bcflag', 'i', ('y', 'x'))
    bcflag_var[:] = mask == grounded
    # mark floating cells next to grounded ones too:
    # (row/col give the offsets of the 8 neighbors of a cell)
    row = np.array([-1, 0, 1, -1, 1, -1, 0, 1])
    col = np.array([-1, -1, -1, 0, 0, 1, 1, 1])
    for j in xrange(1, My-1):
        for i in xrange(1, Mx-1):
            nearest = mask[j + row, i + col]
            if mask[j,i] == floating and np.any(nearest == grounded):
                bcflag_var[j,i] = 1
                topg[j,i]=-2000
    #modifications for prognostic run
    tempma = nc.variables[temp_name][:]
    for j in xrange(My):
        for i in xrange(Mx):
            if bcflag_var[j,i] == 0:
                topg[j,i]=-2000 # to avoid grounding
                if tempma[j,i] > -20.0:
                    tempma[j,i]=-20.0 # to adjust open ocean temperatures
    nc.variables[temp_name][:] = tempma
    nc.variables['topg'][:] = topg
    nc.close()
if __name__ == "__main__":
    # Fetch and prepare the two input datasets.
    velocity = preprocess_ice_velocity()
    albmap = preprocess_albmap()
    albmap_velocity = os.path.splitext(albmap)[0] + "_velocity.nc" # ice velocity on the ALBMAP grid
    output = "Ross_combined_prog.nc"
    # Add lon/lat metadata for CDO, regrid the velocities onto the ALBMAP
    # grid, then merge everything into a single output file and rename the
    # velocity variables to the names PISM expects for SSA B.C.
    commands = ["nc2cdo.py %s" % velocity,
                "nc2cdo.py %s" % albmap,
                "cdo remapbil,%s %s %s" % (albmap, velocity, albmap_velocity),
                "ncks -x -v mask -O %s %s" % (albmap, output),
                "ncks -v vx,vy,v_magnitude -A %s %s" % (albmap_velocity, output),
                "ncrename -v vx,u_ssa_bc -v vy,v_ssa_bc -v v_magnitude,magnitude -O %s" % output]
    run(commands)
    final_corrections(output)
|
Written by Mike Winters on Mon, 9 Jul 2018 , under Announcements category.
This post also appeared on the Zeebe blog. In recent months, we’ve seen more and more users building solutions with Camunda BPM to address microservices orchestration challenges. The external task client, newly added in version 7.9.0, is one example of how Camunda BPM has evolved to meet user needs in this area. Camunda is running a survey about microservices orchestration so that we can learn more about how you’re approaching this problem and how Camunda can help.
|
from sys import argv, stdout
import networkx as nx
import subprocess as proc
import os
INFTY = 999999
def read_rpp(fname):
    """Parse an RPP instance file and return (edges, costs, required).

    Blank lines and '#' comment lines are ignored.  Line 0 holds the node
    and edge counts, line 2 the 'u-v' edge pairs, line 3 the edge costs,
    and line 4 the 0/1 flags marking required edges.
    """
    with open(fname) as handle:
        lines = []
        for raw in handle:
            stripped = raw.strip()
            if stripped and not stripped.startswith('#'):
                lines.append(stripped)
    n, m = lines[0].split()
    print('N:', n, 'M:', m)
    edges = []
    for pair in lines[2].split():
        u, v = pair.split('-')
        edges.append((int(u), int(v)))
    costs = dict(zip(edges, map(int, lines[3].split())))
    required = []
    for edge, flag in zip(edges, lines[4].split()):
        if flag == '1':
            required.append(edge)
    return edges, costs, required
def rpp_to_atsp(rpp):
    """Convert an RPP problem into an asymmetric TSP problem.

    One ATSP node per required edge; the distance from required edge (u, v)
    to required edge (s, t) is cost(u, v) plus the cheapest path from v to s.
    See section 4.10 in http://logistik.bwl.uni-mainz.de/Dateien/LM-2012-03.pdf.
    Returns (distance matrix, shortest-path dict keyed by (v, s)).
    """
    edges, costs, required = rpp
    g = nx.Graph()
    g.add_edges_from((u, v, {'cost': c}) for (u, v), c in costs.items())
    atsp = []
    spaths = {}
    for u, v in required:
        row = []
        for s, t in required:
            if (u, v) == (s, t):
                # No self-transition: block the diagonal.
                row.append(INFTY)
                continue
            node_path = nx.shortest_path(g, v, s, 'cost')
            # Convert the node path to edges, flipping any edge stored the
            # other way round in the cost dictionary.
            spath = []
            for a, b in zip(node_path[:-1], node_path[1:]):
                spath.append((a, b) if (a, b) in costs else (b, a))
            spaths[v, s] = spath
            print('spaths[' + str(v) + ', ' + str(s) + '] = ' + str(spath))
            row.append(costs[u, v] + sum(costs[e] for e in spath))
        atsp.append(row)
    return atsp, spaths
def save_atsp_as_tsp(atsp, fname):
    """Transform an asymmetric TSP of size n into a symmetric TSP of size 2n
    and save it in TSPLIB (LOWER_DIAG_ROW) format.
    See http://en.wikipedia.org/wiki/Travelling_salesman_problem#Solving_by_conversion_to_symmetric_TSP.
    """
    n = len(atsp)
    header = ('NAME: ' + fname + '\n'
              'TYPE: TSP\n'
              'DIMENSION: ' + str(n * 2) + '\n'
              'EDGE_WEIGHT_TYPE: EXPLICIT\n'
              'EDGE_WEIGHT_FORMAT: LOWER_DIAG_ROW\n'
              'EDGE_WEIGHT_SECTION\n')
    with open(fname, 'wt') as out:
        out.write(header)
        # Upper-left block: INFTY everywhere (no original-to-original links).
        for i in range(n):
            out.write(' '.join([str(INFTY)] * (i + 1)) + '\n')
        # Lower-left block: the ATSP matrix with a zero diagonal, padded by
        # the INFTY lower-right block.
        for i in range(n):
            cells = [str(d) for d in atsp[i]]
            cells[i] = '0'
            out.write(' '.join(cells + [str(INFTY)] * (i + 1)) + '\n')
def solve_atsp(atsp, name, concorde):
    """Solve an ATSP instance by invoking the external CONCORDE solver.

    The instance is doubled into a symmetric TSP, written in TSPLIB format,
    solved, and the resulting tour folded back to ATSP node indices.
    """
    # Concorde cannot solve ATSP, so we need to transform to TSP first.
    tsp_file = name + '.tsp'
    save_atsp_as_tsp(atsp, tsp_file)
    sol_file = name + '.tsp.sol'
    # Remove a stale solution so we never read a previous run's result.
    if os.path.exists(sol_file):
        os.remove(sol_file)
    stdout.flush()
    proc.call([concorde, '-x', '-o', sol_file, tsp_file])
    with open(sol_file) as f:
        # First token is the node count; the remaining tokens are the tour.
        tour = [int(s) for s in f.read().split()[1:]]
    n = len(atsp)
    # In a valid doubled tour each original node i is adjacent to its ghost
    # node i+n; normalize the tour direction so pairs appear as (i, i+n).
    if tour[1] - tour[0] != n:
        tour = (tour[1:] + tour[0:1])[::-1]
    for i, j in zip(tour[::2], tour[1::2]):
        if j - i != n:
            raise Exception('ERROR: Invalid ATSP tour produced by CONCORDE, (i, j) = ' + str((i, j)))
    return tour[::2]
def atsp_sol_to_rpp_sol(rpp, atsp_tour, spaths):
    """Expand an ATSP tour over required edges into a full RPP tour.

    Each required edge is followed by the shortest connecting path to the
    next required edge.  Returns the tour as a list of indices into
    the instance's edge list.
    """
    edges, costs, required = rpp
    rotated = atsp_tour[1:] + atsp_tour[0:1]
    rpp_tour = []
    for i1, i2 in zip(atsp_tour, rotated):
        cur = required[i1]
        nxt = required[i2]
        rpp_tour.append(cur)
        rpp_tour.extend(spaths[cur[1], nxt[0]])
    print('RPP tour:', rpp_tour)
    print('RPP tour cost:', sum(costs[e] for e in rpp_tour))
    indices = []
    for e in rpp_tour:
        indices.append(edges.index(e))
    return indices
def main():
    """Command-line driver: read an RPP instance, convert it to an ATSP,
    solve it with CONCORDE, and print the resulting tour of edge indices.

    argv[1] is the instance file (default 'I01.grp'); argv[2] is the
    concorde binary (default 'concorde').
    """
    fname = argv[1] if len(argv) > 1 else 'I01.grp'
    concorde = argv[2] if len(argv) > 2 else 'concorde'
    print('Reading RPP instance...')
    rpp = read_rpp(fname)
    print('Costs:', rpp[1])
    print('Required:', rpp[2])
    print('Transforming to ATSP...')
    atsp, aux = rpp_to_atsp(rpp)
    print('ATSP:')
    # FIX: plain loop instead of a list comprehension executed only for
    # its side effects.
    for r in atsp:
        print(r)
    print('Solving with CONCORDE...')
    atsp_tour = solve_atsp(atsp, fname, concorde)
    print('ATSP tour:', atsp_tour)
    sol = atsp_sol_to_rpp_sol(rpp, atsp_tour, aux)
    print(sol)
if __name__ == '__main__':
    main()
|
ERA Planning have been engaged by Parks and Wildlife Service to provide planning services associated with the implementation of the Cradle Mountain Visitor Experience Master Plan.
The Dove Lake visitor experience.
ERA is coordinating the preparation of development applications with the appointed architects – Cumulus Studios – for both the Dove Lake Shelter and stage one of the Cradle Gateway Village. This work includes preparation of supporting planning submissions. Approval for Stage One of the Cradle Gateway Village has been received.
The Dove Lake Shelter requires approval through the Reserve Activity Assessment (RAA) process, which ERA is also undertaking on behalf of Parks and Wildlife Service as the proponent. This has involved the preparation of a Development Proposal and Environmental Impact Statement (DPEIS) to inform the Parks assessment process. The DPEIS outlines the proposal, the existing site values, the potential for environmental impact, and mitigation measures and monitoring requirements. The DPEIS is currently on public exhibition.
For stage two of the Cradle Gateway Village, ERA has prepared a planning scheme amendment application in consultation with Kentish Council to replace the existing Specific Area Plan for Cradle Valley with one that aligns with the current Master Plan. The Tasmanian Planning Commission are currently considering the amendment.
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South auto-generated initial migration for the 'datasets' app:
    creates the Dataset and Datum tables."""
    def forwards(self, orm):
        # Adding model 'Dataset'
        db.create_table('datasets_dataset', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('creation_time', self.gf('django.db.models.fields.TimeField')(auto_now_add=True, blank=True)),
            ('owner', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
        ))
        db.send_create_signal('datasets', ['Dataset'])
        # Adding model 'Datum'
        db.create_table('datasets_datum', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('dataset', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['datasets.Dataset'])),
            ('owner', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('creation_time', self.gf('django.db.models.fields.TimeField')(auto_now_add=True, blank=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=256)),
            ('description', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('image', self.gf('django.db.models.fields.files.ImageField')(max_length=256)),
        ))
        db.send_create_signal('datasets', ['Datum'])
    def backwards(self, orm):
        # Deleting model 'Dataset'
        db.delete_table('datasets_dataset')
        # Deleting model 'Datum'
        db.delete_table('datasets_datum')
    # Frozen ORM state used by South while running this migration.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'datasets.dataset': {
            'Meta': {'object_name': 'Dataset'},
            'creation_time': ('django.db.models.fields.TimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'datasets.datum': {
            'Meta': {'object_name': 'Datum'},
            'creation_time': ('django.db.models.fields.TimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'dataset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['datasets.Dataset']"}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '256'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        }
    }
    complete_apps = ['datasets']
|
What is the world turning to?
disrespects a woman will live a miserable life.
their wills into war fronts.
This is just to provide a premise for the Ese Oruru saga.
ehmmm…. Islamisation! Ha! Everyone went into overdrive.
the media for almost two weeks.
turning this country more and more into a nation of enemies.
fostering cooperation but more on that later.
become a producer of babies for sale! I shudder at the thought.
the oyinbo way of thinking in its entirety.
|
import os
from flask.ext.script import Manager, Shell, Command
from flask.ext.migrate import Migrate, MigrateCommand
from server import app, db
from server.models import *
# Wire up Flask-Migrate and the Flask-Script command-line manager.
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
def make_shell_context():
    # Expose app, db and every model (via model_dict from server.models)
    # in the interactive shell.
    return dict(app=app, db=db, **model_dict)
manager.add_command('shell', Shell(make_context=make_shell_context))
@manager.command
def add_data():
    """Populate the database with mock data (a user, three problems, a
    contest, a submission and three posts) for local development."""
    # add a mock user
    u = User(login_name='leasunhy', email='leasunhy@example.com')
    # add three problems
    p1 = Problem()
    p1.title = 'A+B'
    p1.problem_desc = 'Given two numbers, calculate their sum.'
    p1.input_desc = 'A single line containing two integers separated by a space.'
    p1.output_desc = 'A single line containing the sum.'
    p1.sample_input = '1 2'
    p1.sample_output = '3'
    p1.source = 'Classical'
    p2 = Problem(title='A-B')
    p3 = Problem(title='A*B')
    # add a contest (runs for one day starting now)
    import datetime
    c = Contest(title='Newbie Corner')
    c.start_time = datetime.datetime.now()
    c.end_time = c.start_time + datetime.timedelta(1)
    c.owner = u
    c.problems.append(p2)
    # add a submission
    s = Submission()
    s.owner = u
    s.problem = p1
    s.compiler_id = 1
    s.verdict = 'Accepted'
    s.time_usage = 100
    s.memory_usage = 600
    s.code_length = 233
    # add posts (one of each post type)
    po1 = Tutorial(title='Introduction to Dynamic Programming', content='Abandon.')
    po1.create_time = datetime.datetime.now()
    po1.owner = u
    po2 = Notification(title='Air pollution detected.', content='Evacuate. NOW!')
    po2.create_time = datetime.datetime.now()
    po2.owner = u
    po2.importance = 233
    po3 = Solution(title='How to attack A+B?', content='Hand calculate.')
    po3.create_time = datetime.datetime.now()
    po3.owner = u
    po3.problem = p1
    db.session.add_all([u, p1, p2, p3, c, s, po1, po2, po3])
    db.session.commit()
if __name__ == '__main__':
    # Dispatch to the Flask-Script command-line interface.
    manager.run()
|
I’m a WordPress Developer, an entrepreneur, a husband, a father, an avid reader, and a winemaker living in Georgetown, TX.
For fourteen years I have been building custom WordPress websites for businesses and individuals. I specialize in crafting easy-to-use backend experiences so my clients can easily manage their own website. I often work with attorneys, publishers, and nonprofits.
I have 23 free plugins on WordPress.org, which have been downloaded 1,245,733 times.
I’ve written many tutorials and code snippets to help others learn WordPress.
|
"""
Django settings for ecommerce project.
Generated by 'django-admin startproject' using Django 1.9.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'il)j1ja&yz35pjspxh$kf5v)4g(37b-)egng#*x5%(yosvh26f'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
BROKER_URL = 'redis://localhost:6379/0'
REST_FRAMEWORK = {'PAGE_SIZE':5}
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'products',
'tweets',
'rest_framework',
'debug_toolbar',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
]
INTERNAL_IPS = ('127.0.0.1',)
ROOT_URLCONF = 'ecommerce.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'ecommerce.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
|
stands for pretty young thing/thang. lol it's one of those, but i'm more comfortable saying "thing" ... "thang" really isn't a word i can pull off properly.
seriously, hottest and cutest supermodel ever. i'm glad our paths have crossed enough times that we got the chance to become friends. hopefully one day soon i can style her for a photo shoot! besties with design student marilyn tang [matafact.com], these two are an unstoppable force.
starting a project soon with marilyn so stay tuned to see what happens!
|
import tempfile
import json
import os
import shapely.wkt
import shapely.geometry
def first_from_filename(filename):
    """read the first geometry from filename

    Opens the file with ogr, takes feature 0 of layer 0, and converts its
    geometry to a shapely object via WKT.
    """
    import ogr
    dataset = ogr.Open(filename)
    first_layer = dataset.GetLayer(0)
    first_feature = first_layer.GetFeature(0)
    return shapely.wkt.loads(first_feature.geometry().ExportToWkt())
def first_from_bytes(bytes):
    """read the first geometry from bytes

    The data is written to a temporary file so that ogr can open it; the
    first geometry of the first layer is returned as a shapely object.
    """
    import ogr
    with tempfile.NamedTemporaryFile(prefix="pywpsInput",dir=os.curdir) as f:
        # FIX: write through a properly closed handle so the data is
        # flushed to disk before ogr opens the file (the original left an
        # unclosed handle behind).
        with open(f.name, 'w') as out:
            out.write(bytes)
        ds = ogr.Open(f.name)
        layer = ds.GetLayer(0)
        feature = layer.GetFeature(0)
        geometry = feature.geometry()
        wkt = geometry.ExportToWkt()
    return shapely.wkt.loads(wkt)
def decode(file_or_text):
    """combine several decoders to read geo data

    *file_or_text* may be a filename or the raw text itself; geojson, WKT
    and OGR decoders are tried in that order until one succeeds.

    >>> location_wkt = "POINT(54 2)"
    >>> location_json = '{ "type": "LineString", "coordinates": [[51.0, 3.0], [52.0, 3.1]] }'
    >>> location_gml = '''<?xml version="1.0" encoding="utf-8" ?>
    ... <root
    ...     xmlns:gml="http://www.opengis.net/gml"
    ...     >
    ...   <gml:featureMember>
    ...     <gml:geometryProperty>
    ...       <gml:Point >
    ...         <gml:coordinates>54,3.1</gml:coordinates>
    ...       </gml:Point>
    ...     </gml:geometryProperty>
    ...   </gml:featureMember>
    ... </root>
    ... '''
    >>> for location in [location_wkt, location_json, location_gml]:
    ...     decode(location).type
    'Point'
    'LineString'
    'Point'
    """
    # decoders for file or text
    decoders = {
        True: [
            lambda x: shapely.geometry.shape(json.loads(open(x,'r').read())),
            lambda x: shapely.wkt.loads(open(x, 'r').read()),
            first_from_filename
        ],
        False: [
            lambda x: shapely.geometry.shape(json.loads(x)),
            shapely.wkt.loads,
            first_from_bytes
        ]
    }
    for decoder in decoders[os.path.isfile(file_or_text)]:
        try:
            # try all the decoders and stop if it works
            geom = decoder(file_or_text)
            break
        except Exception:
            # FIX: was a bare 'except:', which also swallowed
            # KeyboardInterrupt/SystemExit; keep trying the next decoder.
            pass
    else:
        # we have not found a working decoder
        if os.path.isfile(file_or_text):
            raise ValueError("could not decode %r from %s" % (open(file_or_text).read(), file_or_text))
        else:
            raise ValueError("could not decode %s" % (file_or_text, ))
    return geom
if __name__ == '__main__':
    # Run the embedded doctests when this module is executed directly.
    import doctest
    doctest.testmod()
|
NFP and Charities are losing out in Europe, it seems?
Tonight, Susan Bradley, myself and John Lennon (Not the singer, but a SBS Consultant in Northern Ireland) had a discussion on this subject.
It seems, though we are not certain, that whilst the USA has NFP / Charity pricing on SBS, Europe does not seem to. This is a major problem for resellers as it actually stops SBS getting into these organisations as it is cheaper to put in Windows 2003 and Exchange 2003 NFP licencing.
If this is indeed the case, it must change! We will make it happen…..
When I last wanted to buy SBS I approached pugh.co.uk who are MS partners for volume licensing : they sell everything except SBS and say MS won’t let them sell it to charity and educational users.
So the only way to get it is retail, the cheapest retailer I found was afterhours.co.uk and at least any profits go to a charitable cause.
I spent fortunes on SBS 2000 and then "wobbled" about the upgrade. In the end I stuck with SBS 4.5 until 2003 came out and I did the migration bypassing the 2000 version because I needed features in the 2003 Premium and IIS6.
Admittedly SBS 2003 is cheaper than previous versions, but there are loads of Voluntary and Community Organisations that would jump at the chance to buy it "cheapest" maybe using the extra savings to make sure they also have the best hardware to make it run, or put it to some other useful charitable cause.
In reality the US is no better due to MS cleverly hiding Charity info from the public and partners. Yeah we know it is there but no resources. It is the only facet of the MS system – including all academic paths – that does not appear on MS websites, or in relational comparison charts, or have a central subweb with real info, or have support or…. MSA partners appear to not get credit for serving NFP clients as opposed to points, etc for others. I realize MS is being nice and ‘profitless’ to help NFPs but we who have to serve these low budget operations have no incentive, put in just as much work as serving a full price customer, and MS doesn’t support us. I have 30 churches that would consider a staff upgrade but I’d go bankrupt doing it.
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'untitled.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
    # PyQt4 API v1: QString is exposed and provides fromUtf8
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # PyQt4 API v2 / Python 3: plain str is used, no conversion needed
    def _fromUtf8(s):
        return s
try:
    # older PyQt4: translate() requires an explicit encoding argument
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # newer PyQt4 dropped the encoding parameter
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_Form(object):
    # Generated by pyuic4 from 'untitled.ui'. Do not edit by hand; edits
    # here are lost on regeneration -- change the .ui file instead.
    def setupUi(self, Form):
        """Build the widget hierarchy: a vertical layout holding one button
        on top and a horizontal row of two buttons below."""
        Form.setObjectName(_fromUtf8("Form"))
        Form.resize(178, 80)
        # outer layout filling the form
        self.horizontalLayout_2 = QtGui.QHBoxLayout(Form)
        self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
        self.verticalLayout = QtGui.QVBoxLayout()
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        # top button spanning the full width
        self.pushButton = QtGui.QPushButton(Form)
        self.pushButton.setObjectName(_fromUtf8("pushButton"))
        self.verticalLayout.addWidget(self.pushButton)
        # bottom row with two side-by-side buttons
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        self.pushButton_3 = QtGui.QPushButton(Form)
        self.pushButton_3.setObjectName(_fromUtf8("pushButton_3"))
        self.horizontalLayout.addWidget(self.pushButton_3)
        self.pushButton_2 = QtGui.QPushButton(Form)
        self.pushButton_2.setObjectName(_fromUtf8("pushButton_2"))
        self.horizontalLayout.addWidget(self.pushButton_2)
        self.verticalLayout.addLayout(self.horizontalLayout)
        self.horizontalLayout_2.addLayout(self.verticalLayout)
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)
    def retranslateUi(self, Form):
        """Set all user-visible strings (kept separate so translations can
        be re-applied without rebuilding the UI)."""
        Form.setWindowTitle(_translate("Form", "Form", None))
        self.pushButton.setText(_translate("Form", "PushButton", None))
        self.pushButton_3.setText(_translate("Form", "PushButton", None))
        self.pushButton_2.setText(_translate("Form", "PushButton", None))
|
We can help you develop bespoke valves to meet your exact requirements. Even if you have no drawings or specification to work from, we can help you every step of the way through to final manufacturing and testing of your products.
As part of the Graco group, we have the resources to continually invest in our manufacturing and research & development capabilities. That is why we’re very well-placed to work with you to develop a bespoke valve product from start to finish, that precisely meets the needs of your specification.
First we will work with you to define your requirements. If we don’t have an existing product that meets those requirements, we can work with you to develop a bespoke valve product. Typically, most of our bespoke projects are to address challenges such as material requirements, connection sizes, unusual configurations, and demanding operating conditions such as high pressures and large temperature ranges.
We can help you visualise and specify your requirements through rapid production of 3D digital modelling and laser object scans to help you create a specification that results in a very precise end product.
Thanks to our CNC machinery, we can machine bespoke valves to your specification. Our Mazak machinery was a significant investment for us, and allows us to not only machine products more quickly and efficiently, but also with a greater level of precision.
The work was previously carried out on separate machines, meaning repeated loading, unloading and adjustments at each stage. We can now machine products in one simple, consistent operation, with greater precision. Once implementation and set up has been carried out, we can machine your bespoke valves from a single piece of metal in minutes rather than hours.
We recognise that sectors such as oil and gas are dynamic, demanding ones with changing customer needs. Our continued investment in R&D underlines our commitment to meeting those needs in the years to come.
New product developments – such as our XC range of double block and bleed valves – have come as a direct result of our R&D efforts. We continue to develop new products, test new materials and processes, and proactively put our existing products through qualification testing to ensure that they continue to meet your needs.
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from indico.core.db import db
from indico.util.string import return_ascii
class LegacyCategoryMapping(db.Model):
    """Legacy category ID mapping
    Legacy categories have non-numeric IDs which are not supported by
    any new code. This mapping maps them to proper integer IDs to
    avoid breaking things.
    """
    __tablename__ = 'legacy_id_map'
    __table_args__ = {'schema': 'categories'}
    # the original, non-numeric category identifier (part of the PK)
    legacy_category_id = db.Column(
        db.String,
        primary_key=True,
        index=True
    )
    # the integer ID the legacy ID maps to; explicitly not auto-generated
    category_id = db.Column(
        db.Integer,
        primary_key=True,
        autoincrement=False
    )
    @return_ascii
    def __repr__(self):
        return '<LegacyCategoryMapping({}, {})>'.format(self.legacy_category_id, self.category_id)
|
On this page you can change the text size and colour scheme of the text only version of this website.
Select your preferred options below, then press the 'Change my options' button.
The settings you create on this page will be saved for future visits. If you wish to return to the default settings, please select 'Reset to defaults'.
|
"""
Simple example:
.. UIExample:: 300
import numpy as np
from bokeh.plotting import figure
from flexx import app, ui, event
x = np.linspace(0, 6, 50)
p1 = figure()
p1.line(x, np.sin(x))
p2 = figure()
p2.line(x, np.cos(x))
class Example(ui.Widget):
def init(self):
with ui.BoxPanel():
ui.BokehWidget(plot=p1)
ui.BokehWidget(plot=p2)
"""
import os
from ... import event, app
from ...pyscript.stubs import window, Bokeh
from . import Widget
def _load_bokeh(ext):
    """Return the BokehJS resource with extension *ext* ('js' or 'css') as text.

    Honors ``BOKEH_RESOURCES=relative-dev`` to load the unminified dev build
    from a bokehjs source checkout instead of the installed resources.
    """
    import bokeh.resources
    dev = os.environ.get('BOKEH_RESOURCES', '') == 'relative-dev'
    res = bokeh.resources.bokehjsdir()
    if dev:
        res = os.path.abspath(os.path.join(bokeh.__file__,
                                           '..', '..', 'bokehjs', 'build'))
    modname = 'bokeh' if dev else 'bokeh.min'
    filename = os.path.join(res, ext, modname + '.' + ext)
    # use a context manager so the file handle is closed promptly
    # (the original open(...).read() leaked the handle)
    with open(filename, 'rb') as f:
        return f.read().decode()
def _load_bokeh_js():
    """Asset loader for BokehJS JavaScript (invoked lazily by the asset store)."""
    return _load_bokeh('js')
def _load_bokeh_css():
    """Asset loader for the Bokeh CSS (invoked lazily by the asset store)."""
    return _load_bokeh('css')
# Associate Bokeh asset, but in a "lazy" way, so that we don't attempt to
# import bokeh until the user actually instantiates a BokehWidget.
app.assets.associate_asset(__name__, 'bokeh.js', _load_bokeh_js)
app.assets.associate_asset(__name__, 'bokeh.css', _load_bokeh_css)
class BokehWidget(Widget):
    """ A widget that shows a Bokeh plot object.
    For Bokeh 0.12 and up. The plot's ``sizing_mode`` property is set to
    ``stretch_both`` unless it was set to something other than ``fixed``. Other
    responsive modes are 'scale_width', 'scale_height' and 'scale_both', which
    all keep aspect ratio while being responsive in a certain direction.
    """
    CSS = """
    .flx-BokehWidget > .plotdiv {
        overflow: hidden;
    }
    """
    @event.prop
    def plot(self, plot=None):
        """ The Bokeh plot object to display. In JS, this prop
        provides the corresponding backbone model.
        """
        # Plot replaced PlotObject in newer Bokeh releases; support both
        try:
            from bokeh.models import Plot
        except ImportError:
            from bokeh.models import PlotObject as Plot
        if plot is None:
            return None
        if not isinstance(plot, Plot):
            raise ValueError('%s.plot must be a Bokeh plot object.' % self.id)
        # The sizing_mode is fixed by default, but that's silly in this context
        if plot.sizing_mode == 'fixed':
            plot.sizing_mode = 'stretch_both'
        # notify the JS side so it can embed the plot (see emitter below)
        self._plot_components(plot)
        return plot
    @event.emitter
    def _plot_components(self, plot):
        # Emit the embed snippet (script + div) for the plot; the outer
        # <script> tags are stripped so the JS side can exec the body.
        from bokeh.embed import components
        script, div = components(plot)
        script = '\n'.join(script.strip().split('\n')[1:-1])
        return dict(script=script, div=div, id=plot.ref['id'])
    class JS:
        # Client-side (PyScript) half of the widget; runs in the browser.
        @event.prop
        def plot(self, plot=None):
            # holds the client-side Bokeh plot model once resolved
            return plot
        @event.connect('_plot_components')
        def __set_plot_components(self, *events):
            ev = events[-1]
            # Embed div
            self.node.innerHTML = ev.div
            # "exec" code
            el = window.document.createElement('script')
            el.innerHTML = ev.script
            self.node.appendChild(el)
            #eval(script)
            # Get plot from id in next event-loop iter
            def getplot():
                self.plot = Bokeh.index[ev.id]
                self.plot.model.document.resize()
                #self.real_size._set(self.real_size) ???
            window.setTimeout(getplot, 10)
        @event.connect('size')
        def __resize_plot(self, *events):
            # re-layout the plot whenever the widget is resized
            if self.plot and self.parent:
                if self.plot.resize_width_height:
                    # Bokeh <= 0.11
                    cstyle = window.getComputedStyle(self.parent.node)
                    use_x = cstyle['overflow-x'] not in ('auto', 'scroll')
                    use_y = cstyle['overflow-y'] not in ('auto', 'scroll')
                    self.plot.resize_width_height(use_x, use_y)
                else:
                    self.plot.model.document.resize()
|
A new “One Name” Armorial is in production for Shank and Shanks. The blazons for the Arms are online and I hope to add the various images as time permits.
If there are any errors or omissions, please do let me know.
I made posts about the seals at rec.heraldry, the Heraldry society of Scotland and the International Association of Amateur Heralds. My colleague, John Tunesi of Liongam, identified the Arms as those of DAUBUZ.
I have been contacted by Andrea Guidi di Bagno, the Chief Paintings Conservator for the Museum of Fine Arts, Houston, to see if I can assist in identifying three wax seals. The seals were found on the back of a painting by Thomas Gainsborough.
Seal C is the only one that is obviously heraldic.
A new “One Name” Armorial is in production for Morison and Morrison. The blazons for the Arms are online and I hope to add the various images as time permits.
|
#!/usr/bin/env python3
# coding=utf-8
"""
fritzbox_power_consumption - A munin plugin for Linux to monitor AVM Fritzbox
Copyright (C) 2015 Christian Stade-Schuldt
Author: Christian Stade-Schuldt
Like Munin, this plugin is licensed under the GNU GPL v2 license
http://www.opensource.org/licenses/GPL-2.0
Add the following section to your munin-node's plugin configuration:
[fritzbox_*]
env.fritzbox_ip [ip address of the fritzbox]
env.fritzbox_username [fritzbox username]
env.fritzbox_password [fritzbox password]
This plugin supports the following munin configuration parameters:
#%# family=auto contrib
#%# capabilities=autoconf
"""
import json
import os
import sys
import fritzbox_helper as fh
PAGE = "energy"
DEVICES = ["system", "cpu", "wifi", "dsl", "ab", "usb"]
def get_power_consumption():
    """Fetch the current power-consumption percentages from the Fritzbox
    and print one munin value line per device in DEVICES."""
    host = os.environ["fritzbox_ip"]
    user = os.environ["fritzbox_username"]
    secret = os.environ["fritzbox_password"]
    # authenticate, then fetch the 'energy' page via the XHR endpoint
    sid = fh.get_session_id(host, user, secret)
    payload = fh.get_xhr_content(host, sid, PAGE)
    drains = json.loads(payload)["data"]["drain"]
    for idx, name in enumerate(DEVICES):
        print(f"{name}.value {drains[idx]['actPerc']}")
def print_config():
    """Emit the munin graph configuration for the power-consumption graph.

    Output is data-driven but byte-identical to the original hand-written
    print sequence.
    """
    print("graph_title AVM Fritz!Box Power Consumption")
    print("graph_vlabel %")
    print("graph_category system")
    print("graph_order system cpu wifi dsl ab usb")
    # (field, draw style, description) per device, in graph order.
    # NOTE(review): 'LINE12' for system looks like a typo for LINE1/LINE2,
    # but it is preserved as-is to keep the emitted config unchanged.
    details = [
        ("system", "LINE12", "Fritzbox overall power consumption"),
        ("cpu", "LINE1", "Fritzbox central processor power consumption"),
        ("wifi", "LINE1", "Fritzbox wifi power consumption"),
        ("dsl", "LINE1", "Fritzbox dsl power consumption"),
        ("ab", "LINE1", "Fritzbox analog phone ports power consumption"),
        ("usb", "LINE1", "Fritzbox usb devices power consumption"),
    ]
    for name, draw, info in details:
        print("%s.label %s" % (name, name))
        print("%s.type GAUGE" % name)
        print("%s.graph %s" % (name, draw))
        print("%s.min 0" % name)
        print("%s.max 100" % name)
        print("%s.info %s" % (name, info))
    if os.environ.get("host_name"):
        print("host_name " + os.environ["host_name"])
if __name__ == "__main__":
    # munin calls the plugin with "config", "autoconf", "fetch" or no argument
    if len(sys.argv) == 2 and sys.argv[1] == "config":
        print_config()
    elif len(sys.argv) == 2 and sys.argv[1] == "autoconf":
        print("yes")
    elif len(sys.argv) == 1 or (len(sys.argv) == 2 and sys.argv[1] == "fetch"):
        # Some docs say it'll be called with fetch, some say no arg at all
        try:
            get_power_consumption()
        except Exception:
            # was a bare "except:", which would also have swallowed
            # SystemExit/KeyboardInterrupt; catch only real errors
            sys.exit("Couldn't retrieve fritzbox power consumption")
|
2. Second, click on the download link for the Samsung Clx 3300 Driver for Windows 10 associated with it.
Decide on your OS through the listing of OS outlined above and download Samsung Clx 3300 Driver Windows 10 related to it. We're generally readily available to help you should you encounter any issue though downloading the driver. Please, feel totally free to speak to us.
SAMSUNG CLX 330X 330XW SERIES USER MANUAL Pdf Download.
View and Download Samsung CLX 330x 330xW Series user manual online. CLX 330x 330xW Series All in One Printer pdf manual download. Also for: Clx 330xfn 330xfw series, Clx 330x series, Clx 330xw series, Clx 330xfn series, Clx 330xfw series.
SAMSUNG CLX 3305W USER MANUAL Pdf Download.
View and Download Samsung CLX 3305W user manual online. User Manual Ver.2.01 (Spanish). CLX 3305W All in One Printer pdf manual download. Also for: Clx 330x series, Clx 330xw series, Clx 330xfn series, Clx 330xfw series.
Windows 7 Forums is the largest help and support community, providing friendly help and advice for Microsoft Windows 7 puters such as Dell, HP, Acer, Asus or a custom build.
|
"""
Django template tags for inserting Shrink The Web images into templates.
There is one templatetag:
- stwimage - supports all free and PRO features.
- shrinkthewebimage - the original image insertion templatetag that implements
the STW preview feature. This is DEPRECATED.
"""
from collections import OrderedDict
from six.moves.urllib import parse
from django.conf import settings
from django import template
class STWConfigError(template.TemplateSyntaxError):
    """Raised when a required Shrink The Web option is missing from
    settings.SHRINK_THE_WEB (see FormatSTWImageNode._validate)."""
    pass
class FormatSTWImageNode(template.Node):
    """Template node that renders a Shrink The Web ``<img>`` element for a URL."""
    def __init__(self, url, alt, **kwargs):
        self.url = url
        self.alt = alt
        merged = OrderedDict()
        # start from the site-wide defaults
        merged.update(settings.SHRINK_THE_WEB)
        if 'stwembed' not in kwargs:
            merged['stwembed'] = 1  # default to image
        # per-tag options override the defaults
        merged.update(kwargs)
        self.kwargs = merged
        self._validate()
    @classmethod
    def _resolve(cls, var, context):
        """if var is a string then return it otherwise use it to lookup a value in the current context"""
        quoted = var[0] == var[-1] and var[0] in ('"', "'")
        if quoted:
            return var[1:-1]  # a string literal
        return template.Variable(var).resolve(context)
    def _validate(self):
        # the access key is mandatory for every STW request
        if 'stwaccesskeyid' not in self.kwargs:
            raise STWConfigError("'stwaccesskeyid' must be defined in settings.SHRINK_THE_WEB")
    def render(self, context):
        target = self._resolve(self.url, context)
        alt_text = self._resolve(self.alt, context)
        query = parse.urlencode(self.kwargs)
        if query:
            query += '&'
        return '''<img src="https://images.shrinktheweb.com/xino.php?{0}stwurl={1}" alt="{2}"/>'''.format(query, target, alt_text)
def do_stwimage(parser, token):
    """
    Key value based templatetag supporting all STW features for Free and PRO accounts.
    Usage::
        {% load shrinkthewebtags %}
        {% stwimage url alt key-value-pairs %}
    Where:
    ``url``
        is expected to be a variable instantiated from the context
        or a quoted string to be used explicitly.
    ``key-value-pairs``
        matching STW API values i.e. stwembed=0 stwinside=1
        minimal validation of key value pairs is performed
    Examples::
        Given a template context variable "author" with attributes "url" and
        "description" the following are valid entries in a template file:
        {% load shrinkthewebtags %}
        get image of the follow the full url (not just the top level page), wait
        5 seconds, and return image in large size (this requires license with PRO
        features:
        {% stwimage author.url author.description stwinside=1 stwdelay=5 stwsize=lrg %}
    """
    bits = token.split_contents()
    if len(bits) < 3:
        raise template.TemplateSyntaxError("'{}' tag takes at least 2 arguments".format(bits[0]))
    # process keyword args (everything after the url and alt positionals)
    kwargs = {}
    for bit in bits[3:]:
        key, value = bit.split("=")
        # BUG FIX: was "value is ''", which tests object identity rather than
        # equality and is unreliable for detecting an empty value
        if value == '':
            raise template.TemplateSyntaxError("'{0}' tag keyword: {1} has no argument".format(bits[0], key))
        if key.startswith('stw'):
            kwargs[str(key)] = value
        else:
            raise template.TemplateSyntaxError("'{0}' tag keyword: {1} is not a valid STW keyword".format(bits[0], key))
    return FormatSTWImageNode(url=bits[1], alt=bits[2], **kwargs)
register = template.Library()
register.tag('stwimage', do_stwimage)
|
The Blackburn Youth Zone within the Cathedral Quarter redevelopment zone of the town centre was designed by Eric Wright Group architect Mark Serventi. The project includes a sports hall with climbing wall, recreation area, arts and crafts zone, boxing gym, fitness suite, multi-purpose training rooms, synthetic turf pitch and offices to accommodate advice and support services. Eric Wright Construction were the main contractors.
|
from __future__ import print_function
import flopy
from flopy import utils as fu
import platform
import numpy as np
# pick the newline convention by host OS; platform.platform() contains
# e.g. 'Windows-10' on Windows, so the lowercase test matches 'window'
if 'window' in platform.platform().lower():
    newln = '\n'
else:
    newln = '\r\n'
print ('Starting to read HYDMOD data')
# read the binary HYDMOD observation output for the freyberg model
obs = flopy.utils.HydmodObs('freyberg.hyd.bin')
times = obs.get_times()
read_obsnames = obs.get_obsnames()
with open('freyberg.heads', 'w') as ofp:
    ofp.write('obsname value{0}'.format(newln))
    # calibration heads ('c' suffix) at the second output time,
    # restricted to the HDI001o observation group
    for coutname in read_obsnames:
        if coutname.startswith('HDI001o'):
            cv = obs.get_data(obsname=coutname,totim=times[1])
            ofp.write('{0:20s} {1:15.6E} {2}'.format(coutname+'c', cv[0][1], newln))
    # forecast heads ('f' suffix) for all observations at the third time
    for coutname in read_obsnames:
        cv = obs.get_data(obsname=coutname,totim=times[2])
        ofp.write('{0:20s} {1:15.6E} {2}'.format(coutname+'f', cv[0][-1], newln))
print('Now read River flux from the LIST file')
lst = fu.MfListBudget('freyberg.list')
# net river leakage per stress period = inflow minus outflow
RIV_flux = lst.get_incremental()['RIVER_LEAKAGE_IN']-lst.get_incremental()['RIVER_LEAKAGE_OUT']
with open('freyberg.rivflux', 'w') as ofp:
    ofp.write('obsname value{0}'.format(newln))
    ofp.write('rivflux_cal {1:15.6E}{0}rivflux_fore {2:15.6E}{0}'.format(newln, RIV_flux[0], RIV_flux[1]))
print('Finally read endpoint file to get traveltime')
endpoint_file = 'freyberg.mpenpt'
# NOTE(review): this handle is never closed; harmless in a short script
lines = open(endpoint_file, 'r').readlines()
items = lines[-1].strip().split()
# travel time of the last particle = final time minus release time
travel_time = float(items[4]) - float(items[3])
with open('freyberg.travel', 'w') as ofp:
    # NOTE(review): 'travetime' looks like a typo for 'traveltime', but the
    # label may be consumed downstream (e.g. by PEST), so it is kept as-is
    ofp.write('travetime {0:15.6e}{1}'.format(travel_time, newln))
print('Completed processing model output')
|
Budzowska Fiutowski and Partners law firm would like to kindly inform that we are in the progress of gathering a group of claimants who wish to raise their compensatory claims in connection with the tragic accident of El Faro ship on 2 October 2015. The potential claims have a unique character because of their cross-border nature.
Budzowska Fiutowski and Partners law firm would like to kindly inform that we are in the progress of gathering a group of claimants who wish to raise their compensatory claims in connection with the tragic accident of El Faro ship on 2 October 2015.
The potential claims have a unique character because of their cross-border nature.
Therefore, we would like to kindly provide you with the information that may prove essential for you in deciding about the possible cooperation.
• At 2.00 am on 30 September 2015, El Faro left Jacksonville, Florida, heading for Puerto Rico.
• At this time there was a tropical storm, called “Joaquin” several hundred miles away to the East.
• There were 33 crew members aboard – 28 US Citizens and 5 Polish Nationals.
• 3 hours prior to the ship’s departure the National Oceanic and Atmospheric Administration’s Hurricane Centre forecast that Joaquin was to become a hurricane.
• 6 hours after departure, at 8.00am, whilst the Ship was still in contact with the mainland it was declared that Joaquin was now a Hurricane.
• On 1 October 2015 at 7.00am the Master of the ship had satellite communications with the management offices of the ships owners ashore. They reported flooding on deck which had left the Vessel with a 15 degree list and also reported a loss of propulsion. At this time the winds were registered to be 125 mph - a category 4 Hurricane.
• The ships’ Emergency Position Indicating Radio Beacon was then activated and received by the Coastguard in Portsmouth, Virginia, who contacted the vessel’s owners.
• At 7.20am contact was lost with the vessel.
• On 2 October 2015 the Coastguard began a search.
• On 3 October 2015 the Coastguard declared the ship to be lost at sea.
• It seems that the vessel was not in good condition due to its age. The ship was built in 1975 and so was 40 years old. It has been suggested that the Captain had planned a course which would avoid the storm, however a mechanical problem with the main propulsion system left him in the path of the Hurricane. The reasons for the engine problems are currently unknown.
• However, the vessel was modernized in 1992 and 2006 and Coast Guard records show that it underwent its last safety inspection in March of this year.
• Furthermore the ship was loaded with 391 containers, and so reached a height which made it more exposed to the wind and waves. There were also 294 trailers and automobiles below deck adding to the vessel’s weight.
It is likely that these claims will be brought within the jurisdiction of the Courts of Florida, USA. Florida is the principal location of the Defendant companies, the point of departure for the El Faro on this voyage and it is also the home state of the majority of the U.S. victims. We believe that US jurisdiction extends to the families of the Polish victims.
On the facts as we know them, it appears likely that the Defendants will be found to have been negligent in setting sail in the prevailing conditions and then continuing even when the tropical storm became a hurricane. This will make them liable under Section 12 of the US Death on the High Seas Act (DOHSA) to provide fair compensation for the financial losses including loss of income, loss of support, loss of services and loss of inheritance of the deceased spouse, parent of a child, or other dependent relative.
It may also be possible to bring a claim under the US Jones Act if it can be established the deceased were seamen injured in the course of their employment, which on the face of it appears likely. However, further investigations need to be conducted into the employment status and history of the Polish crew. Under the Jones Act a claim can also be made for damages for pre-death suffering, the award for which is likely to be significant as it would be determined by an US jury if the terms of settlement were not agreed with the Defendant or their insurers.
One basis for a Jones Act claim is the unseaworthiness of the vessel. If this claim were successful, compensation could include awards for pain and suffering, monetary losses, loss of consortium and other non-monetary losses.
It is likely that a claim will be advanced on all 3 bases. The claim has strong grounds for being brought in the US jurisdiction and ultimately would go before an US jury who would be likely to award of significant damages to the families of the deceased far in excess of any claims that might be brought by the families within the Polish legal system.
There is also the possibility of product liability claim relating to the loss of propulsion.
Further, if it is factually determined that the owner of the vessel proceeded into the storm in conscious disregard for the safety of its crew, we will be able to make a claim for punitive damages.
The Company that owns the Ship is Tote Maritime. This is a Jacksonville, Florida, based third party Ship Management Company. The Ship is reported to have been operated by Sea Star Line, which is a subsidiary of Tote.
It is the Defendants insurers that would actually carry out the defence of the claim. You should be wary about direct approaches from the insurers to settle your claim at an early stage and prior to you receiving legal advice and representation. If you were to accept such an offer it is likely to be considered a full and final settlement, even if you were to later discover your claim was worth far more as and when you did receive advice from lawyers experienced in international claims of this type.
Once instructed we would immediately press the Defendants and their insurers to make interim payments to the families to offset any immediate financial losses by way of early recognition of this tragedy.
It is very difficult at the start of a claim such as yours to give an accurate estimate as to how long it might take to bring the case to a satisfactory conclusion. A number of factors can have a bearing on how long a case takes to conclude, the stance adopted by the Defendant and their insurer being one of the most significant.
Clearly, cases in which the Defendant admits liability at an early stage generally take less time to conclude than cases where liability is denied, as the only issue to be determined is the value of the case. Most claims are completed within 18 months to two years. However, many cases can be concluded in a shorter period of time.
US claims for the victims of fatal accident are usually funded on a contingency fee basis where the lawyers will be entitled to a percentage of any damages you recover at the end of the case. The fees are generally set on a sliding scale, where the client gets the benefit of the lower fee initially, but if the case has to proceed to trial, the fee increases due to the significant increase in costs and time. You will not incur any out of pocket costs to proceed with the litigation in the United States.
Generally speaking maritime claims in Florida are subject to a 2-year limitation period. If you do not make your claim within this time limit it may become time barred.
(a) The members of the law firm would meet with you – to take your statement on the background to your loved ones work and to get a full understanding of how your life has been affected. The members of the law firm would also talk you through the funding of your claim and the terms of the contingency fee agreement.
(b) Contact the defendants and their insurers – early contact with the insurer will assist in liability enquiries and will help to determine the likely stance that the insurer will adopt. It will also allow us to obtain an interim payment.
(c) Contact the US authorities – to understand what if any prosecutions are to be brought and to obtain further details of the accident circumstances.
(d) Ensure preservation of documentary records including the service and maintenance history of the El Faro.
(e) Contact experts – if necessary, instruct and retain our maritime experts to consider the seaworthiness of the El Faro and the decision to sail notwithstanding the developing hurricane conditions.
Therefore, we would appreciate if you could treat this information as an immediate assistance of our law firm that we offer to the indirect victims of the said accident. Further assistance would require granting the relevant power of attorney on your part. Accordingly, if you express the willingness to obtain further information connected with the issue at hand, please feel free to contact us.
|
# -*- coding: utf-8 -*-
# Created by apple on 2017/2/5.
import os
from ..log import log
from ..config import Config
from sqlalchemy import func, desc
from sanic import Blueprint
from sanic.request import Request
from sanic.response import text
from ..exceptions import BadRequest
from ..utils import JsonResult, Regex, Date, DB
from sanic.views import HTTPMethodView
from ..db import Session, AppModel, AppVersionModel
apps_blueprint = Blueprint('apps', 'apps')
@apps_blueprint.route('/<app_type:iOS|android|all>/page/<page:int>', ['GET'])
async def get_apps(request: Request, app_type: str, page: int):
    """
    List apps, newest first, paged, joined with each app's latest version.
    - uri[app type (all/iOS/android)-app_type: str, page number (1-based)-page: int], format[time in seconds-t: int]
    :param request:
    :return:
    """
    # only apps created at or before the requested timestamp are returned
    time = Date.time2datetime(request.args.get('t'))
    if not time:
        raise BadRequest('')
    if page <= 0:
        log.debug('page need greater zero')
        raise BadRequest('')
    kw = request.args.get('kw')
    session = Session()
    # join each app with its versions; max(create_at) picks the newest one
    query = session.query(AppModel, AppVersionModel.version_code, AppVersionModel.version_name,
                          func.max(AppVersionModel.create_at).label('_update_at')) \
        .join(AppVersionModel, AppModel.id == AppVersionModel.app_id) \
        .filter(AppModel.create_at <= time)
    if app_type != 'all':  # filter by package type (iOS/android)
        query = query.filter(AppModel.type == app_type)
    if kw:
        # optional keyword search on the app name
        query = query.filter(AppModel.name.like('%{}%'.format(kw)))
    result = query.order_by(desc(AppModel.create_at)) \
        .group_by(AppModel.short_chain_uri_) \
        .offset((page - 1) * Config.apps_limit) \
        .limit(Config.apps_limit) \
        .all()
    datas = []
    for app, version_code, version_name, _ in result:
        # attach the latest version info onto the app model for serialization
        app.version_code = version_code
        app.version_name = version_name
        datas.append(app)
    return JsonResult.ok(datas).response_json()
class AppsView(HTTPMethodView):
    """Per-app endpoints (GET/PUT/DELETE/OPTIONS), mounted at /<app_id:int>."""

    @staticmethod
    async def options(request: Request, app_id: int):
        # CORS preflight answer for the methods this view supports.
        return text('', headers={
            'Access-Control-Allow-Methods': 'GET,PUT,DELETE,OPTIONS',
            # Fixed: the header name previously carried a stray trailing
            # colon ('Access-Control-Max-Age:'), so clients never saw it.
            'Access-Control-Max-Age': '62400',
        })

    @staticmethod
    async def get(request: Request, app_id: int):
        """Fetch one app's detail.
        - uri[app_id: int]
        :param request:
        :param app_id:
        :return: JSON detail of the app
        """
        session = Session()
        query = DB.model_exists(session, AppModel, id=app_id)
        if not query:
            raise BadRequest('not find app id: {}'.format(app_id))
        app = query.one()
        return JsonResult.ok(app).response_json()

    @staticmethod
    async def delete(request: Request, app_id: int):
        """Delete an app, its icon file and every uploaded version package.
        - uri[app_id: int]
        :param request:
        :param app_id:
        :return: empty JSON result on success
        """
        session = Session()
        app_query = DB.model_exists(session, AppModel, id=app_id)
        if not app_query:
            raise BadRequest('not find app id: {}'.format(app_id))
        # Remove the icon file from disk.
        app = app_query.one()
        os.remove(app.icon_)
        # Remove every version row of the app and its package file.
        app_version_query = session.query(AppVersionModel).filter(AppVersionModel.app_id == app_id)
        for model in app_version_query.all():
            os.remove(model.package_)
        app_version_query.delete()
        # Finally remove the app row itself.
        app_query.delete()
        session.commit()
        log.info('did delete app id: {}'.format(app_id))
        return JsonResult.ok().response_json()

    @staticmethod
    async def put(request: Request, app_id: int):
        """Modify app info.
        - uri[app_id: int], json(at least one field)[name: str, short_chain: str, detail: str]
        :param request:
        :param app_id:
        :return: empty JSON result on success
        """
        json = request.json
        if not isinstance(json, dict):
            log.debug('json it not a dict')
            raise BadRequest('')
        name = json['name'].strip() if isinstance(json.get('name'), str) else None
        short_chain = json['short_chain'].strip() if isinstance(json.get('short_chain'), str) else None
        detail = json['detail'].strip() if isinstance(json.get('detail'), str) else None
        if not (name or short_chain) and detail is None:
            log.debug('need name, short chain or detail, less one')
            raise BadRequest('')
        session = Session()
        query = DB.model_exists(session, AppModel, id=app_id)
        if not query:
            raise BadRequest('not find app id: {}'.format(app_id))
        if short_chain:
            # Validate the short-chain format, then check it is unique
            # among all other apps.
            if not Regex.ShortChina.match(short_chain):
                log.debug(
                    'short chain length need 5-15 and combination of letters, Numbers, underline')
                raise BadRequest(
                    'short chain length need greater 5 and letter by the combination of letters, Numbers, underline')
            elif session.query(AppModel).filter(AppModel.short_chain_uri_ == short_chain,
                                                AppModel.id != app_id).count() != 0:
                log.debug('short chain did exists')
                raise BadRequest('short chain did exists')
        app = query.one()
        if name:
            app.name = name
        if short_chain:
            app.short_chain_uri_ = short_chain
        if detail is not None:
            app.detail = detail
        session.commit()
        log.debug('did modify app: {}, {} - {} - {}'.format(app.package_name, name, short_chain, detail))
        return JsonResult.ok().response_json()
# Register the class-based view for the per-app detail/update/delete endpoints.
apps_blueprint.add_route(AppsView.as_view(), '/<app_id:int>')
# NOTE(review): dead code below -- a disabled '/search' endpoint apparently
# superseded by the `kw` query parameter of get_apps(); consider deleting it.
# @apps_blueprint.route('/search', ['GET'])
# async def search(request: Request):
#     time = Date.time2datetime(request.args.get('t'))
#     if not time:
#         raise BadRequest('')
#
#     page = request.args.get('page')
#     if page <= 0:
#         log.debug('page need greater zero')
#         raise BadRequest('')
#
#     kw = request.args.get('kw')
#     if not kw:
#         raise BadRequest('')
#
#     app_type = request.args.get('type')
#
#     session = Session()
#     session.query(AppModel).filter(AppModel.create_at <= time, AppModel.type == app_type) \
#         .offset((page - 1) * Config.apps_limit) \
#         .limit(Config.apps_limit) \
#         .all()
#     session.commit()
|
Did you ever wish you were able to send multiple inputs into a single video or content stream? Did you ever wish to stream a video in good quality during a collaboration session?
To learn how to enable this feature on your codec, please have a look at the how-to post and check the release notes of the CE software release 9.x.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Command-line driver: resolves/normalizes organism names in a GenBank file
# using NCBI taxonomy tables, synonymy tables and per-accession mappings,
# writing taxonomy logs (plain and HTML) next to the output.
from __future__ import print_function
# from __future__ import unicode_literals
if __name__ == '__main__':
    import os
    import argparse
    from krpy import krseqsearch
    from krpy import krio
    from krpy import krbioio
    from krpy import krseq
    from krpy import krbionames
    from krpy import krcl
    # NOTE(review): `type=unicode` below is Python 2 only; under Python 3
    # these argparse calls raise NameError -- confirm the intended interpreter.
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--input_file', type=unicode,
                        help='')
    parser.add_argument('-o', '--output_file', type=unicode,
                        help='')
    parser.add_argument('-l', '--log_dir', type=unicode,
                        help='')
    parser.add_argument('-n', '--ncbi_names_file', type=unicode,
                        help='')
    parser.add_argument('-s', '--synonymy_file', type=unicode,
                        help='')
    parser.add_argument('-u', '--unresolvable_taxonomy_file', type=unicode,
                        help='')
    parser.add_argument('-k', '--keeplist_taxonomy_file', type=unicode,
                        help='')
    parser.add_argument('-t', '--taxa_mappings_file', type=unicode,
                        help='')
    parser.add_argument('-a', '--authority_file', type=unicode,
                        help='')
    parser.add_argument('-c', '--hacks', type=unicode,
                        help='')
    parser.add_argument('-d', '--hacks_data_location', type=unicode,
                        help='')
    # Arguments mirror the parameters of krseqsearch.check_organism_name:
    # record, ncbi_names_table, synonymy_table, auth_file,
    # hacks, hacks_data_location, unresolvable_taxonomy_list,
    # keeplist_taxonomy_list, taxa_mappings_list, log_dir
    args = parser.parse_args()
    # --hacks is a comma-separated list; --hacks_data_location supplies one
    # data path per hack, in the same order.
    hacks = None
    hacks_data_location = None
    if args.hacks:
        hacks = args.hacks.split(',')
        if args.hacks_data_location:
            hacks_data_location = dict()
            for i, hack in enumerate(hacks):
                hacks_data_location[hack] = args.hacks_data_location.split(',')[i]
    # NCBI "names.dmp"-style dump: tab-pipe delimited, no header row.
    ncbi_names_table = None
    if args.ncbi_names_file:
        ncbi_names_table = krio.read_table_file(
            path=args.ncbi_names_file,
            has_headers=False,
            headers=('tax_id', 'name_txt', 'unique_name', 'name_class'),
            delimiter='\t|',
            quotechar=None,
            stripchar='"',
            rettype='dict')
    synonymy_table = None
    if args.synonymy_file:
        synonymy_table = krio.read_table_file(
            path=args.synonymy_file,
            has_headers=True, headers=None, delimiter=',')
    unresolvable_taxonomy_list = None
    if args.unresolvable_taxonomy_file:
        unresolvable_taxonomy_list = krio.read_table_file(
            path=args.unresolvable_taxonomy_file,
            has_headers=True,
            headers=None,
            delimiter=',',
            quotechar=None,
            stripchar='"',
            rettype='dict')
    keeplist_taxonomy_list = None
    if args.keeplist_taxonomy_file:
        keeplist_taxonomy_list = krio.read_table_file(
            path=args.keeplist_taxonomy_file,
            has_headers=False,
            headers=None,
            delimiter=',',
            quotechar=None,
            stripchar='"',
            rettype='set')
    # Per-accession overrides: accession -> taxon name.
    taxa_mappings_list = None
    if args.taxa_mappings_file:
        taxa_mappings_list = krio.read_table_file(
            path=args.taxa_mappings_file,
            has_headers=False,
            headers=('accession', 'taxon'),
            delimiter='\t',
            quotechar=None,
            stripchar='"',
            rettype='dict')
    input_file = None
    output_file = None
    authority_file = None
    log_dir = None
    if args.input_file:
        input_file = args.input_file
    if args.output_file:
        output_file = args.output_file
    if args.authority_file:
        authority_file = args.authority_file
    if args.log_dir:
        log_dir = args.log_dir
    records = krbioio.read_sequence_file(input_file, 'gb', ret_type='list')
    ps = os.path.sep
    # Open the plain-text and HTML taxonomy log files in log_dir.
    tax_log_handle = krseqsearch.__tax_log_open(log_dir, ps)
    tax_log_html_handle = krseqsearch.__tax_log_html_open(log_dir, ps)
    #########
    krcl.hide_cursor()
    for i, record in enumerate(records):
        krcl.print_progress(i, len(records), 50, '')
        name = krseqsearch.check_organism_name(
            record,
            ncbi_names_table,
            synonymy_table,
            authority_file,
            hacks,
            hacks_data_location,
            unresolvable_taxonomy_list,
            keeplist_taxonomy_list,
            taxa_mappings_list,
            tax_log_handle,
            tax_log_html_handle)
        # tn = name[0]
        an = name[1]
        an_flat = krbionames.flatten_organism_name(an, ' ')
        # Keep the original name under 'organism_old', then overwrite the
        # record's organism/source annotations with the resolved name.
        record.annotations['organism_old'] = record.annotations['organism']
        record.annotations['organism'] = an_flat
        record.annotations['source'] = an_flat
        # NOTE(review): this strips the *new* organism name from the
        # description (the annotation was just overwritten above) -- confirm
        # the old name was not the intended target.
        record.description = record.description.replace(record.annotations['organism'], '')
        record.description = record.description.strip()
    krcl.show_cursor()
    #########
    krseqsearch.__tax_log_close(tax_log_handle)
    krseqsearch.__tax_log_html_close(tax_log_html_handle)
    krbioio.write_sequence_file(records, output_file, 'gb')
|
Nothing feels better than a well-crafted to-do list, and the feeling of accomplishment at the end of the day when you check off all of your tasks. A to-do list keeps you on track, helps you move toward your goals, and makes you feel more productive and in control of your life. If you keep your to-do list short and sweet, it is more likely that you’ll actually follow through.
A productivity journal is a great tool to help you stay organized, set and define goals, store important information, and track your progress. Writing things down is like a commitment you make to yourself – it prompts you to choose a specific goal and accomplish it, and it holds you accountable. It also helps you organize and analyze your thoughts in a way that would be difficult – if not impossible – to do if you were just thinking or dreaming about it.
I truly believe in the power of good habits and that “we are what we repeatedly do”. (Aristotle). But sometimes I’m one of the world’s biggest procrastinators. I find myself distracted with everyday household chores and I can easily get lost in watching Youtube videos or some other tempting TV series.
|
# -*- coding: utf-8; tab-width: 4; indent-tabs-mode: nil; -*-
# Copyright (C) 2010 Kevin Mehall <km@kevinmehall.net>
# Copyright (C) 2012 Christopher Eby <kreed@kreed.org>
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""Pandora JSON v5 API
See http://6xq.net/playground/pandora-apidoc/json/ for API documentation.
"""
from .blowfish import Blowfish
# from Crypto.Cipher import Blowfish
from xml.dom import minidom
import re
import json
import logging
import time
import urllib.request, urllib.parse, urllib.error
import codecs
import ssl
import os
from enum import IntEnum
from socket import error as SocketError
from . import data
# Seconds before an API HTTP request is abandoned (see Pandora.json_call).
HTTP_TIMEOUT = 30
USER_AGENT = 'pithos'
# Song rating values sent to / received from the station feedback API.
RATE_BAN = 'ban'
RATE_LOVE = 'love'
RATE_NONE = None
class ApiError(IntEnum):
    """Numeric fault codes returned by the Pandora JSON API.

    Codes 0-15 are transport/parameter faults; 1000+ are account and
    station-level faults. UNKNOWN_ERROR is a local catch-all for codes
    Pandora returns but does not document.
    """
    INTERNAL_ERROR = 0
    MAINTENANCE_MODE = 1
    URL_PARAM_MISSING_METHOD = 2
    URL_PARAM_MISSING_AUTH_TOKEN = 3
    URL_PARAM_MISSING_PARTNER_ID = 4
    URL_PARAM_MISSING_USER_ID = 5
    SECURE_PROTOCOL_REQUIRED = 6
    CERTIFICATE_REQUIRED = 7
    PARAMETER_TYPE_MISMATCH = 8
    PARAMETER_MISSING = 9
    PARAMETER_VALUE_INVALID = 10
    API_VERSION_NOT_SUPPORTED = 11
    COUNTRY_NOT_SUPPORTED = 12
    INSUFFICIENT_CONNECTIVITY = 13
    UNKNOWN_METHOD_NAME = 14
    WRONG_PROTOCOL = 15
    READ_ONLY_MODE = 1000
    INVALID_AUTH_TOKEN = 1001
    INVALID_LOGIN = 1002
    LISTENER_NOT_AUTHORIZED = 1003
    USER_NOT_AUTHORIZED = 1004
    MAX_STATIONS_REACHED = 1005
    STATION_DOES_NOT_EXIST = 1006
    COMPLIMENTARY_PERIOD_ALREADY_IN_USE = 1007
    CALL_NOT_ALLOWED = 1008
    DEVICE_NOT_FOUND = 1009
    PARTNER_NOT_AUTHORIZED = 1010
    INVALID_USERNAME = 1011
    INVALID_PASSWORD = 1012
    USERNAME_ALREADY_EXISTS = 1013
    DEVICE_ALREADY_ASSOCIATED_TO_ACCOUNT = 1014
    UPGRADE_DEVICE_MODEL_INVALID = 1015
    EXPLICIT_PIN_INCORRECT = 1018
    EXPLICIT_PIN_MALFORMED = 1020
    DEVICE_MODEL_INVALID = 1023
    ZIP_CODE_INVALID = 1024
    BIRTH_YEAR_INVALID = 1025
    BIRTH_YEAR_TOO_YOUNG = 1026
    # FIXME: They can't both be 1027?
    # INVALID_COUNTRY_CODE = 1027
    # INVALID_GENDER = 1027
    DEVICE_DISABLED = 1034
    DAILY_TRIAL_LIMIT_REACHED = 1035
    INVALID_SPONSOR = 1036
    USER_ALREADY_USED_TRIAL = 1037
    PLAYLIST_EXCEEDED = 1039
    # Catch all for undocumented error codes
    UNKNOWN_ERROR = 100000

    @property
    def title(self):
        """Human-readable heading, e.g. INVALID_LOGIN -> 'Pandora Error: Invalid Login'."""
        return 'Pandora Error: {}'.format(self.name.replace('_', ' ').title())

    @property
    def sub_message(self):
        """Return a user-facing explanation for well-known codes, else None."""
        # A member-keyed mapping replaces the original magic-number if/elif
        # chain (same codes, same strings -- behavior unchanged).
        messages = {
            ApiError.MAINTENANCE_MODE:
                'Pandora is performing maintenance.\nTry again later.',
            ApiError.COUNTRY_NOT_SUPPORTED:
                ('Pandora is not available in your country.\n'
                 'If you wish to use Pandora you must configure your system or Pithos proxy accordingly.'),
            ApiError.INSUFFICIENT_CONNECTIVITY:
                ('Out of sync. Correct your system\'s clock.\n'
                 'If the problem persists it may indicate a Pandora API change.\nA Pithos update may be required.'),
            ApiError.READ_ONLY_MODE:
                'Pandora is in read-only mode.\nTry again later.',
            ApiError.INVALID_LOGIN:
                'Invalid username or password.',
            ApiError.LISTENER_NOT_AUTHORIZED:
                'A Pandora One account is required to access this feature.\nUncheck "Pandora One" in Settings.',
            ApiError.MAX_STATIONS_REACHED:
                ('You have reached the maximum number of stations.\n'
                 'To add a new station you must first delete an existing station.'),
            ApiError.PARTNER_NOT_AUTHORIZED:
                'Invalid Pandora partner keys.\nA Pithos update may be required.',
            ApiError.DEVICE_MODEL_INVALID:
                'Invalid Pandora device model.\nA Pithos update may be required.',
            ApiError.PLAYLIST_EXCEEDED:
                'You have requested too many playlists.\nTry again later.',
        }
        return messages.get(self)
# Pandora playlists expire after one hour (see Song.is_still_valid).
PLAYLIST_VALIDITY_TIME = 60*60
# Strips every non-alphanumeric character; used to compare song titles
# against the tail of songExplorerUrl (see Song.__init__).
NAME_COMPARE_REGEX = re.compile(r'[^A-Za-z0-9]')
class PandoraError(IOError):
    """Base class for Pandora API failures.

    :param message: short human-readable error title
    :param status: numeric API fault code, if one was returned
    :param submsg: longer explanation suitable for display
    """
    def __init__(self, message, status=None, submsg=None):
        self.message = message
        self.status = status
        self.submsg = submsg
# Raised by json_call on INVALID_AUTH_TOKEN; signals that a re-login is needed.
class PandoraAuthTokenInvalid(PandoraError): pass
# Raised by json_call for HTTP, URL and socket-level failures.
class PandoraNetError(PandoraError): pass
# Raised by json_call on API_VERSION_NOT_SUPPORTED.
class PandoraAPIVersionError(PandoraError): pass
# Raised by json_call when the underlying URLError reason is 'timed out'.
class PandoraTimeout(PandoraNetError): pass
def pad(s, l):
    """Right-pad byte string *s* with NUL bytes up to length *l*.

    A string already at or beyond *l* bytes is returned unchanged.
    """
    return s.ljust(l, b'\0')
class Pandora:
    """Access the Pandora API
    To use the Pandora class, make sure to call :py:meth:`set_audio_quality`
    and :py:meth:`connect` methods.
    Get information from Pandora using:
    - :py:meth:`get_stations` which populates the :py:attr:`stations` attribute
    - :py:meth:`search` to find songs to add to stations or create a new station with
    - :py:meth:`json_call` call into the JSON API directly
    """
    def __init__(self):
        self.opener = self.build_opener()
        self.connected = False
        self.isSubscriber = False
    def pandora_encrypt(self, s):
        # Blowfish-encrypt *s* in 8-byte NUL-padded blocks, hex-encoded,
        # as required by the JSON API request body.
        return b''.join([codecs.encode(self.blowfish_encode.encrypt(pad(s[i:i+8], 8)), 'hex_codec') for i in range(0, len(s), 8)])
    def pandora_decrypt(self, s):
        # Inverse of pandora_encrypt: hex-decode 16-char chunks and
        # Blowfish-decrypt them, stripping the trailing padding.
        return b''.join([self.blowfish_decode.decrypt(pad(codecs.decode(s[i:i+16], 'hex_codec'), 8)) for i in range(0, len(s), 16)]).rstrip(b'\x08')
    def json_call(self, method, args=None, https=False, blowfish=True):
        """Perform one JSON API call and return its 'result' payload.

        :param method: API method name, e.g. 'user.getStationList'
        :param args: JSON body parameters (auth/sync fields are added here)
        :param https: use HTTPS (required by login and playlist calls)
        :param blowfish: encrypt the request body (all calls except partnerLogin)
        :raises PandoraNetError/PandoraTimeout: transport failures
        :raises PandoraAuthTokenInvalid/PandoraAPIVersionError/PandoraError:
            API-level faults, mapped from ApiError
        """
        if not args:
            args = {}
        # Auth/identity parameters travel in the query string...
        url_arg_strings = []
        if self.partnerId:
            url_arg_strings.append('partner_id=%s'%self.partnerId)
        if self.userId:
            url_arg_strings.append('user_id=%s'%self.userId)
        if self.userAuthToken:
            url_arg_strings.append('auth_token=%s'%urllib.parse.quote_plus(self.userAuthToken))
        elif self.partnerAuthToken:
            url_arg_strings.append('auth_token=%s'%urllib.parse.quote_plus(self.partnerAuthToken))
        url_arg_strings.append('method=%s'%method)
        protocol = 'https' if https else 'http'
        url = protocol + self.rpcUrl + '&'.join(url_arg_strings)
        # ...while the tokens and server-synchronized timestamp also go in
        # the JSON body.
        if self.time_offset:
            args['syncTime'] = int(time.time()+self.time_offset)
        if self.userAuthToken:
            args['userAuthToken'] = self.userAuthToken
        elif self.partnerAuthToken:
            args['partnerAuthToken'] = self.partnerAuthToken
        data = json.dumps(args).encode('utf-8')
        logging.debug(url)
        logging.debug(data)
        if blowfish:
            data = self.pandora_encrypt(data)
        try:
            req = urllib.request.Request(url, data, {'User-agent': USER_AGENT, 'Content-type': 'text/plain'})
            with self.opener.open(req, timeout=HTTP_TIMEOUT) as response:
                text = response.read().decode('utf-8')
        except urllib.error.HTTPError as e:
            logging.error("HTTP error: %s", e)
            raise PandoraNetError(str(e))
        except urllib.error.URLError as e:
            logging.error("Network error: %s", e)
            if e.reason.strerror == 'timed out':
                raise PandoraTimeout("Network error", submsg="Timeout")
            else:
                raise PandoraNetError("Network error", submsg=e.reason.strerror)
        except SocketError as e:
            try:
                error_string = os.strerror(e.errno)
            except (TypeError, ValueError):
                error_string = "Unknown Error"
            logging.error("Network Socket Error: %s", error_string)
            raise PandoraNetError("Network Socket Error", submsg=error_string)
        logging.debug(text)
        tree = json.loads(text)
        # Map an API-level fault onto our exception hierarchy.
        if tree['stat'] == 'fail':
            code = tree['code']
            msg = tree['message']
            try:
                error_enum = ApiError(code)
            except ValueError:
                error_enum = ApiError.UNKNOWN_ERROR
            logging.error('fault code: {} {} message: {}'.format(code, error_enum.name, msg))
            if error_enum is ApiError.INVALID_AUTH_TOKEN:
                raise PandoraAuthTokenInvalid(msg)
            elif error_enum is ApiError.API_VERSION_NOT_SUPPORTED:
                raise PandoraAPIVersionError(msg)
            elif error_enum is ApiError.UNKNOWN_ERROR:
                submsg = 'Undocumented Error Code: {}\n{}'.format(code, msg)
                raise PandoraError(error_enum.title, code, submsg)
            else:
                submsg = error_enum.sub_message or 'Error Code: {}\n{}'.format(code, msg)
                raise PandoraError(error_enum.title, code, submsg)
        if 'result' in tree:
            return tree['result']
    def set_audio_quality(self, fmt):
        """Set the desired audio quality
        Used by the :py:attr:`Song.audioUrl` property.
        :param fmt: An audio quality format from :py:data:`pithos.pandora.data.valid_audio_formats`
        """
        self.audio_quality = fmt
    @staticmethod
    def build_opener(*handlers):
        """Creates a new opener
        Wrapper around urllib.request.build_opener() that adds
        a custom ssl.SSLContext for use with internal-tuner.pandora.com
        """
        ctx = ssl.create_default_context()
        ctx.load_verify_locations(cadata=data.internal_cert)
        https = urllib.request.HTTPSHandler(context=ctx)
        return urllib.request.build_opener(https, *handlers)
    def set_url_opener(self, opener):
        # Allows callers to inject an opener (e.g. one with a proxy handler).
        self.opener = opener
    def connect(self, client, user, password):
        """Connect to the Pandora API and log the user in
        :param client: The client ID from :py:data:`pithos.pandora.data.client_keys`
        :param user: The user's login email
        :param password: The user's login password
        """
        # Reset all session state before the two-step login.
        self.connected = False
        self.partnerId = self.userId = self.partnerAuthToken = None
        self.userAuthToken = self.time_offset = None
        self.rpcUrl = client['rpcUrl']
        self.blowfish_encode = Blowfish(client['encryptKey'].encode('utf-8'))
        self.blowfish_decode = Blowfish(client['decryptKey'].encode('utf-8'))
        # Step 1: partner (device) login -- unencrypted body over HTTPS.
        partner = self.json_call('auth.partnerLogin', {
            'deviceModel': client['deviceModel'],
            'username': client['username'], # partner username
            'password': client['password'], # partner password
            'version': client['version']
        },https=True, blowfish=False)
        self.partnerId = partner['partnerId']
        self.partnerAuthToken = partner['partnerAuthToken']
        # syncTime is encrypted; the first 4 decrypted bytes are garbage,
        # the next 10 are the server's unix timestamp.
        pandora_time = int(self.pandora_decrypt(partner['syncTime'].encode('utf-8'))[4:14])
        self.time_offset = pandora_time - time.time()
        logging.info("Time offset is %s", self.time_offset)
        # Step 2: user login.
        auth_args = {'username': user, 'password': password, 'loginType': 'user', 'returnIsSubscriber': True}
        user = self.json_call('auth.userLogin', auth_args, https=True)
        self.userId = user['userId']
        self.userAuthToken = user['userAuthToken']
        self.connected = True
        self.isSubscriber = user['isSubscriber']
    @property
    def explicit_content_filter_state(self):
        """The User must already be authenticated before this is called.
        returns the state of Explicit Content Filter and if the Explicit Content Filter is PIN protected
        """
        get_filter_state = self.json_call('user.getSettings', https=True)
        filter_state = get_filter_state['isExplicitContentFilterEnabled']
        pin_protected = get_filter_state['isExplicitContentFilterPINProtected']
        logging.info('Explicit Content Filter state: %s' %filter_state)
        logging.info('PIN protected: %s' %pin_protected)
        return filter_state, pin_protected
    def set_explicit_content_filter(self, state):
        """The User must already be authenticated before this is called.
        Does not take effect until the next playlist.
        Valid desired states are True to enable and False to disable the Explicit Content Filter.
        """
        self.json_call('user.setExplicitContentFilter', {'isExplicitContentFilterEnabled': state})
        logging.info('Explicit Content Filter set to: %s' %(state))
    def get_stations(self, *ignore):
        """Fetch the station list, populating :py:attr:`stations`."""
        stations = self.json_call('user.getStationList')['stations']
        # quickMixStationIds is (re)populated by the QuickMix Station's
        # constructor while the list below is built.
        self.quickMixStationIds = None
        self.stations = [Station(self, i) for i in stations]
        if self.quickMixStationIds:
            for i in self.stations:
                if i.id in self.quickMixStationIds:
                    i.useQuickMix = True
        return self.stations
    def save_quick_mix(self):
        # Push the locally-toggled useQuickMix flags back to the server.
        stationIds = []
        for i in self.stations:
            if i.useQuickMix:
                stationIds.append(i.id)
        self.json_call('user.setQuickMix', {'quickMixStationIds': stationIds})
    def search(self, query):
        """Search for artists/songs/genre stations; best matches first."""
        results = self.json_call(
            'music.search',
            {'includeGenreStations': True, 'includeNearMatches': True, 'searchText': query},
        )
        # Artist/song hits below a score of 80 are dropped as noise.
        l = [SearchResult('artist', i) for i in results['artists'] if i['score'] >= 80]
        l += [SearchResult('song', i) for i in results['songs'] if i['score'] >= 80]
        l += [SearchResult('genre', i) for i in results['genreStations']]
        l.sort(key=lambda i: i.score, reverse=True)
        return l
    def add_station_by_music_id(self, musicid):
        d = self.json_call('station.createStation', {'musicToken': musicid})
        station = Station(self, d)
        if not self.get_station_by_id(station.id):
            self.stations.append(station)
        return station
    def add_station_by_track_token(self, trackToken, musicType):
        d = self.json_call('station.createStation', {'trackToken': trackToken, 'musicType': musicType})
        station = Station(self, d)
        if not self.get_station_by_id(station.id):
            self.stations.append(station)
        return station
    def delete_station(self, station):
        if self.get_station_by_id(station.id):
            logging.info("pandora: Deleting Station")
            self.json_call('station.deleteStation', {'stationToken': station.idToken})
            self.stations.remove(station)
    def get_station_by_id(self, id):
        # Returns None when no station matches.
        for i in self.stations:
            if i.id == id:
                return i
    def add_feedback(self, trackToken, rating):
        """Rate a track; returns the server-side feedbackId for later deletion."""
        logging.info("pandora: addFeedback")
        rating_bool = True if rating == RATE_LOVE else False
        feedback = self.json_call('station.addFeedback', {'trackToken': trackToken, 'isPositive': rating_bool})
        return feedback['feedbackId']
    def delete_feedback(self, stationToken, feedbackId):
        self.json_call('station.deleteFeedback', {'feedbackId': feedbackId, 'stationToken': stationToken})
class Station:
    """One Pandora station; wraps the station.* JSON API calls.

    Built from a 'station' dict returned by user.getStationList or
    station.createStation.
    """
    def __init__(self, pandora, d):
        self.pandora = pandora
        self.id = d['stationId']
        self.idToken = d['stationToken']
        self.isCreator = not d['isShared']
        self.isQuickMix = d['isQuickMix']
        self.isThumbprint = d.get('isThumbprint', False)
        self.name = d['stationName']
        self.useQuickMix = False
        # The QuickMix station's payload carries the ids of all stations
        # currently included in the mix; publish them on the Pandora object
        # (read back by Pandora.get_stations).
        if self.isQuickMix:
            self.pandora.quickMixStationIds = d.get('quickMixStationIds', [])
    def transformIfShared(self):
        # Shared stations cannot be modified (renamed, rated on); convert
        # this one into a station owned by the user first.
        if not self.isCreator:
            logging.info("pandora: transforming station")
            self.pandora.json_call('station.transformSharedStation', {'stationToken': self.idToken})
            self.isCreator = True
    def get_playlist(self):
        """Request a fresh batch of songs for this station."""
        logging.info("pandora: Get Playlist")
        # Set the playlist time to the time we requested a playlist.
        # It is better that a playlist be considered invalid a fraction
        # of a sec early than be considered valid any longer than it actually is.
        playlist_time = time.time()
        playlist = self.pandora.json_call('station.getPlaylist', {
            'stationToken': self.idToken,
            'includeTrackLength': True,
            'additionalAudioUrl': 'HTTP_32_AACPLUS,HTTP_128_MP3',
        }, https=True)['items']
        # Items without 'songName' are ads/other non-song entries; skip them.
        return [Song(self.pandora, i, playlist_time) for i in playlist if 'songName' in i]
    @property
    def info_url(self):
        return 'http://www.pandora.com/stations/'+self.idToken
    def rename(self, new_name):
        if new_name != self.name:
            self.transformIfShared()
            logging.info("pandora: Renaming station")
            self.pandora.json_call('station.renameStation', {'stationToken': self.idToken, 'stationName': new_name})
            self.name = new_name
    def delete(self):
        self.pandora.delete_station(self)
    def __repr__(self):
        return '<{}.{} {} "{}">'.format(
            __name__,
            __class__.__name__,
            self.id,
            self.name,
        )
class Song:
    """One playable track from a station playlist.

    Built from a playlist 'item' dict (station.getPlaylist). Playback
    state (position, duration, start_time, finished) is filled in later
    by the player.
    """
    def __init__(self, pandora, d, playlist_time):
        self.pandora = pandora
        self.playlist_time = playlist_time
        self.is_ad = None  # None = we haven't checked, otherwise True/False
        self.tired = False
        self.message = ''
        self.duration = None
        self.position = None
        # Fixed: self.bitrate was previously initialized twice; once is enough.
        self.bitrate = None
        self.start_time = None
        self.finished = False
        self.feedbackId = None
        self.artUrl = None
        self.album = d['albumName']
        self.artist = d['artistName']
        self.trackToken = d['trackToken']
        self.rating = RATE_LOVE if d['songRating'] == 1 else RATE_NONE # banned songs won't play, so we don't care about them
        self.stationId = d['stationId']
        self.songName = d['songName']
        self.songDetailURL = d['songDetailUrl']
        self.songExplorerUrl = d['songExplorerUrl']
        self.artRadio = d['albumArtUrl']
        self.trackLength = d['trackLength']
        self.trackGain = float(d.get('trackGain', '0.0'))
        self.audioUrlMap = d['audioUrlMap']
        # Optionally we requested more URLs
        if len(d.get('additionalAudioUrl', [])) == 2:
            if int(self.audioUrlMap['highQuality']['bitrate']) < 128:
                # We can use the higher quality mp3 stream for non-one users
                self.audioUrlMap['mediumQuality'] = self.audioUrlMap['highQuality']
                self.audioUrlMap['highQuality'] = {
                    'encoding': 'mp3',
                    'bitrate': '128',
                    'audioUrl': d['additionalAudioUrl'][1],
                }
            else:
                # And we can offer a lower bandwidth option for one users
                self.audioUrlMap['lowQuality'] = {
                    'encoding': 'aacplus',
                    'bitrate': '32',
                    'audioUrl': d['additionalAudioUrl'][0],
                }
        # the actual name of the track, minus any special characters (except dashes) is stored
        # as the last part of the songExplorerUrl, before the args.
        explorer_name = self.songExplorerUrl.split('?')[0].split('/')[-1]
        clean_expl_name = NAME_COMPARE_REGEX.sub('', explorer_name).lower()
        clean_name = NAME_COMPARE_REGEX.sub('', self.songName).lower()
        if clean_name == clean_expl_name:
            self.title = self.songName
        else:
            try:
                with urllib.request.urlopen(self.songExplorerUrl) as x, minidom.parseString(x.read()) as dom:
                    attr_value = dom.getElementsByTagName('songExplorer')[0].attributes['songTitle'].value
                    # Pandora stores their titles for film scores and the like as 'Score name: song name'
                    self.title = attr_value.replace('{0}: '.format(self.songName), '', 1)
            # Fixed: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit. Any fetch/parse failure simply
            # falls back to the playlist-provided name.
            except Exception:
                self.title = self.songName
    @property
    def audioUrl(self):
        """URL for the configured audio quality, falling back to any available one."""
        quality = self.pandora.audio_quality
        try:
            q = self.audioUrlMap[quality]
            self.bitrate = q['bitrate']
            logging.info("Using audio quality %s: %s %s", quality, q['bitrate'], q['encoding'])
            return q['audioUrl']
        except KeyError:
            logging.warning("Unable to use audio format %s. Using %s",
                            quality, list(self.audioUrlMap.keys())[0])
            self.bitrate = list(self.audioUrlMap.values())[0]['bitrate']
            return list(self.audioUrlMap.values())[0]['audioUrl']
    @property
    def station(self):
        return self.pandora.get_station_by_id(self.stationId)
    def get_duration_sec(self):
        # self.duration is in nanoseconds when set by the player; otherwise
        # fall back to the API-provided trackLength (seconds).
        if self.duration is not None:
            return self.duration // 1000000000
        else:
            return self.trackLength
    def get_position_sec(self):
        # self.position is in nanoseconds when set by the player.
        if self.position is not None:
            return self.position // 1000000000
        else:
            return 0
    def rate(self, rating):
        """Set the song's rating (RATE_LOVE/RATE_BAN/RATE_NONE), syncing with the server."""
        if self.rating != rating:
            self.station.transformIfShared()
            if rating == RATE_NONE:
                if not self.feedbackId:
                    # We need a feedbackId, get one by re-rating the song. We
                    # could also get one by calling station.getStation, but
                    # that requires transferring a lot of data (all feedback,
                    # seeds, etc for the station).
                    opposite = RATE_BAN if self.rating == RATE_LOVE else RATE_LOVE
                    self.feedbackId = self.pandora.add_feedback(self.trackToken, opposite)
                self.pandora.delete_feedback(self.station.idToken, self.feedbackId)
            else:
                self.feedbackId = self.pandora.add_feedback(self.trackToken, rating)
            self.rating = rating
    def set_tired(self):
        # "I'm tired of this song" -- shelves it for a month, server-side.
        if not self.tired:
            self.pandora.json_call('user.sleepSong', {'trackToken': self.trackToken})
            self.tired = True
    def bookmark(self):
        self.pandora.json_call('bookmark.addSongBookmark', {'trackToken': self.trackToken})
    def bookmark_artist(self):
        self.pandora.json_call('bookmark.addArtistBookmark', {'trackToken': self.trackToken})
    @property
    def rating_str(self):
        return self.rating
    def is_still_valid(self):
        # Playlists are valid for 1 hour. A song is considered valid if there is enough time
        # to play the remaining duration of the song before the playlist expires.
        return ((time.time() + (self.get_duration_sec() - self.get_position_sec())) - self.playlist_time) < PLAYLIST_VALIDITY_TIME
    def __repr__(self):
        return '<{}.{} {} "{}" by "{}" from "{}">'.format(
            __name__,
            __class__.__name__,
            self.trackToken,
            self.title,
            self.artist,
            self.album,
        )
class SearchResult:
    """A single music.search hit: an artist, a song, or a genre station."""

    # Which extra attributes each result type carries, and the payload key
    # each one is read from.
    _TYPE_FIELDS = {
        'song': (('title', 'songName'), ('artist', 'artistName')),
        'artist': (('name', 'artistName'),),
        'genre': (('stationName', 'stationName'),),
    }

    def __init__(self, resultType, d):
        self.resultType = resultType
        self.score = d['score']
        self.musicId = d['musicToken']
        for attr, key in self._TYPE_FIELDS.get(resultType, ()):
            setattr(self, attr, d[key])
|
Still struggling with injuries your 8th Grade Lady Braves lost two in the Brown County tournament. The first to St. James 29-15, the second to Edgewood 28-20.
Hoping to put together four quarters of intense team play, your 8th Grade Lady Braves hosted Franklin Community. After fighting back to a 22-20 lead at the beginning of the fourth quarter, the Lady Braves fell 32-22.
|
import asyncio
import pytest
from ai.backend.manager.models import verify_dotfile_name, verify_vfolder_name
from ai.backend.gateway.utils import (
call_non_bursty,
)
@pytest.mark.asyncio
async def test_call_non_bursty():
    """Exercise call_non_bursty(): argument type checking, plain execution,
    and throttling of rapid repeated invocations under the same key."""
    key = 'x'
    execution_count = 0
    async def execute():
        nonlocal execution_count
        await asyncio.sleep(0)
        execution_count += 1
    # ensure reset
    await asyncio.sleep(0.11)
    # check run as coroutine
    execution_count = 0
    with pytest.raises(TypeError):
        await call_non_bursty(key, execute())
    # check run as coroutinefunction
    execution_count = 0
    await call_non_bursty(key, execute)
    assert execution_count == 1
    await asyncio.sleep(0.11)
    # check burstiness control
    # NOTE(review): the expected counts below encode the implementation's
    # burst window/threshold; timing-sensitive by design.
    execution_count = 0
    for _ in range(129):
        await call_non_bursty(key, execute)
    assert execution_count == 3
    await asyncio.sleep(0.01)
    await call_non_bursty(key, execute)
    assert execution_count == 3
    await asyncio.sleep(0.11)
    await call_non_bursty(key, execute)
    assert execution_count == 4
    for _ in range(64):
        await call_non_bursty(key, execute)
    assert execution_count == 5
def test_vfolder_name_validator():
    """Reserved dot-prefixed names are rejected; ordinary names pass."""
    for rejected in ('.bashrc', '.terminfo'):
        assert not verify_vfolder_name(rejected)
    for accepted in ('bashrc', '.config'):
        assert verify_vfolder_name(accepted)
def test_dotfile_name_validator():
    """Security-sensitive dotfiles are rejected; harmless user dotfiles pass."""
    for rejected in ('.terminfo', '.config', '.ssh/authorized_keys'):
        assert not verify_dotfile_name(rejected)
    for accepted in ('.bashrc', '.ssh/id_rsa'):
        assert verify_dotfile_name(accepted)
|
In 1789, at the conclusion of the American Revolution and the successful ratification of the U.S. Constitution, George Washington issued a Thanksgiving proclamation to express gratitude for the conclusion of the country’s war of independence.
In 1827, Sarah Josepha Hale – author of “Mary Had A Little Lamb” - began a campaign to establish Thanksgiving as a national holiday. For 36 years she appealed to governors, senators, presidents and other politicians to nationalize this day of thanks.
I find it interesting that two of our presidents issued this day of thanks at the end of one war and at the height of another. It’s easy to get caught up in world affairs, especially with the recent terrorist attacks in Egypt, France, Mali and Lebanon. Washington and Lincoln faced crucial challenges in setting a new course for this country and collapsing into despondency and depression was not an option.
I suffer for the “widows and orphans” escaping their country to find a better, safer life, often dying in the process. However, I lose my effectiveness if I give in to anger, despair and vitriolic political opinions. Instead, I’m going to use gratitude as an antidote when I feel overwhelmed by these troubling times.
Gratitude improves physical health: A 2012 study published in Personality and Individual Differences showed that grateful people experience fewer aches and pains and are more likely to take care of their health, exercise more often, and are more likely to get regular check-ups.
Gratitude enhances empathy and reduces aggression: The University of Kentucky, in 2012, showed that participants who ranked higher on gratitude scales were less likely to retaliate against others, even when given negative feedback. They experienced more sensitivity and empathy toward other people and a decreased desire to seek revenge.
Gratitude increases mental strength: A 2006 study published in Behavior Research and Therapy found that Vietnam War Veterans with higher levels of gratitude experienced lower rates of PTSD. The Journal of Personality and Social Psychology found that gratitude was a major contributor to resilience following the terrorist attacks on September 11. Recognizing all you have to be thankful for – even during the worst times of your life – fosters resilience.
I wasn’t born with the gratitude gene, but mostly I feel grateful for all that is in my life. Then there are times when I feel the weight of the world and all those suffering. At these times, I practice gratitude with resolve. In other words, I fake it ‘til I make it.
|
# Copyright (C) 2006, 2013 Red Hat, Inc.
# Copyright (C) 2006 Daniel P. Berrange <berrange@redhat.com>
#
# This work is licensed under the GNU GPLv2 or later.
# See the COPYING file in the top-level directory.
import glob
import os
import urllib.parse
from gi.repository import Gtk
from virtinst import log
from .lib import uiutil
from .baseclass import vmmGObjectUI
from .connmanager import vmmConnectionManager
# Row IDs for the hypervisor combo box; HV_CUSTOM lets the user type an
# arbitrary libvirt URI instead of having one generated.
(HV_QEMU,
 HV_XEN,
 HV_LXC,
 HV_QEMU_SESSION,
 HV_BHYVE,
 HV_VZ,
 HV_CUSTOM) = range(7)
def _default_uri():  # pragma: no cover
    """Guess the most appropriate local libvirt URI for this host.

    Preference order: Xen (if the host looks like a running Xen dom0),
    then system QEMU/KVM (if any QEMU binary is installed), then
    libvirt-LXC. Returns None when nothing usable is detected.
    """
    if os.path.exists('/var/lib/xen'):
        if any(os.path.exists(p) for p in
               ('/dev/xen/evtchn', "/proc/xen")):
            return 'xen:///'

    qemu_candidates = [
        "/usr/bin/qemu",
        "/usr/bin/qemu-kvm",
        "/usr/bin/kvm",
        "/usr/libexec/qemu-kvm",
    ]
    if (any(os.path.exists(p) for p in qemu_candidates) or
            glob.glob("/usr/bin/qemu-system-*")):
        return "qemu:///system"

    lxc_helpers = ("/usr/lib/libvirt/libvirt_lxc",
                   "/usr/lib64/libvirt/libvirt_lxc")
    if any(os.path.exists(p) for p in lxc_helpers):
        return "lxc:///"

    return None
class vmmCreateConn(vmmGObjectUI):
    """Singleton "Add Connection" dialog.

    Lets the user pick a hypervisor and an optional remote host/username,
    previews the resulting libvirt URI, and opens the connection through
    vmmConnectionManager.
    """

    @classmethod
    def get_instance(cls, parentobj):
        """Return the shared dialog instance, creating it on first use."""
        try:
            if not cls._instance:
                cls._instance = vmmCreateConn()
            return cls._instance
        except Exception as e:  # pragma: no cover
            parentobj.err.show_err(
                _("Error launching connect dialog: %s") % str(e))

    def __init__(self):
        vmmGObjectUI.__init__(self, "createconn.ui", "vmm-open-connection")
        self._cleanup_on_app_close()

        self.builder.connect_signals({
            "on_hypervisor_changed": self.hypervisor_changed,
            "on_connect_remote_toggled": self.connect_remote_toggled,
            "on_username_entry_changed": self.username_changed,
            "on_hostname_changed": self.hostname_changed,
            "on_cancel_clicked": self.cancel,
            "on_connect_clicked": self.open_conn,
            "on_vmm_open_connection_delete_event": self.cancel,
        })

        self.set_initial_state()
        self.reset_state()

    @staticmethod
    def default_uri():
        """Best-guess URI for a local hypervisor (see _default_uri)."""
        return _default_uri()

    def cancel(self, ignore1=None, ignore2=None):
        """Signal handler for Cancel and delete-event: hide the dialog."""
        log.debug("Cancelling open connection")
        self.close()
        # Returning 1 stops further handling of the delete-event.
        return 1

    def close(self, ignore1=None, ignore2=None):
        """Hide the dialog window (it is reused, not destroyed)."""
        log.debug("Closing open connection")
        self.topwin.hide()

    def show(self, parent):
        """Present the dialog, resetting its state unless already visible."""
        log.debug("Showing open connection")
        if self.is_visible():
            self.topwin.present()
            return
        self.reset_state()
        self.topwin.set_transient_for(parent)
        self.topwin.present()

    def _cleanup(self):
        # Nothing beyond the base-class cleanup is required.
        pass

    def set_initial_state(self):
        """One-time widget setup: populate the hypervisor combo box."""
        self.widget("connect").grab_default()

        combo = self.widget("hypervisor")
        # [connection ID, label]
        model = Gtk.ListStore(int, str)

        def _add_hv_row(rowid, config_name, label):
            # Only list hypervisors enabled via config.default_hvs;
            # an empty/unset list means "show everything".
            if (not self.config.default_hvs or
                    not config_name or
                    config_name in self.config.default_hvs):
                model.append([rowid, label])

        _add_hv_row(HV_QEMU, "qemu", "QEMU/KVM")
        _add_hv_row(HV_QEMU_SESSION, "qemu", "QEMU/KVM " + _("user session"))
        _add_hv_row(HV_XEN, "xen", "Xen")
        _add_hv_row(HV_LXC, "lxc", "Libvirt-LXC")
        _add_hv_row(HV_BHYVE, "bhyve", "Bhyve")
        _add_hv_row(HV_VZ, "vz", "Virtuozzo")
        # rowid -1 is rendered as a separator row (see sepfunc below).
        _add_hv_row(-1, None, "")
        _add_hv_row(HV_CUSTOM, None, _("Custom URI..."))
        combo.set_model(model)
        uiutil.init_combo_text_column(combo, 1)

        def sepfunc(model, it):
            return model[it][0] == -1
        combo.set_row_separator_func(sepfunc)

    def reset_state(self):
        """Reset every widget to its default each time the dialog opens."""
        self.set_default_hypervisor()
        self.widget("autoconnect").set_sensitive(True)
        self.widget("autoconnect").set_active(True)
        self.widget("hostname").set_text("")
        self.widget("connect-remote").set_active(False)
        self.widget("username-entry").set_text("")
        self.widget("uri-entry").set_text("")

        self.connect_remote_toggled(self.widget("connect-remote"))
        self.populate_uri()

    def is_remote(self):
        # Whether user is requesting a remote connection
        return self.widget("connect-remote").get_active()

    def set_default_hypervisor(self):
        """Preselect the hypervisor matching the detected local URI."""
        default = self.default_uri()
        if not default or default.startswith("qemu"):
            uiutil.set_list_selection(self.widget("hypervisor"), HV_QEMU)
        elif default.startswith("xen"):  # pragma: no cover
            uiutil.set_list_selection(self.widget("hypervisor"), HV_XEN)

    def hostname_changed(self, src_ignore):
        """Signal handler: refresh the URI preview."""
        self.populate_uri()

    def hypervisor_changed(self, src):
        """Signal handler: show/hide widgets relevant to the chosen HV."""
        ignore = src
        hv = uiutil.get_list_selection(self.widget("hypervisor"))
        is_session = hv == HV_QEMU_SESSION
        is_custom = hv == HV_CUSTOM
        # Remote options make no sense for session URIs or a custom URI.
        show_remote = not is_session and not is_custom
        uiutil.set_grid_row_visible(
            self.widget("session-warning-box"), is_session)
        uiutil.set_grid_row_visible(
            self.widget("connect-remote"), show_remote)
        uiutil.set_grid_row_visible(
            self.widget("username-entry"), show_remote)
        uiutil.set_grid_row_visible(
            self.widget("hostname"), show_remote)
        if not show_remote:
            self.widget("connect-remote").set_active(False)

        uiutil.set_grid_row_visible(self.widget("uri-label"), not is_custom)
        uiutil.set_grid_row_visible(self.widget("uri-entry"), is_custom)
        if is_custom:
            # Seed the custom entry with the last generated URI.
            label = self.widget("uri-label").get_text()
            self.widget("uri-entry").set_text(label)
            self.widget("uri-entry").grab_focus()
        self.populate_uri()

    def username_changed(self, src_ignore):
        """Signal handler: refresh the URI preview."""
        self.populate_uri()

    def connect_remote_toggled(self, src_ignore):
        """Signal handler: adjust widget sensitivity for remote mode."""
        is_remote = self.is_remote()
        self.widget("hostname").set_sensitive(is_remote)
        # Remote connections default to not autoconnecting.
        self.widget("autoconnect").set_active(not is_remote)
        self.widget("username-entry").set_sensitive(is_remote)
        if is_remote and not self.widget("username-entry").get_text():
            self.widget("username-entry").set_text("root")

        self.populate_uri()

    def populate_uri(self):
        """Regenerate the URI preview label from the current widget state."""
        uri = self.generate_uri()
        self.widget("uri-label").set_text(uri)

    def generate_uri(self):
        """Build a libvirt URI string from the current widget selections."""
        hv = uiutil.get_list_selection(self.widget("hypervisor"))
        host = self.widget("hostname").get_text().strip()
        user = self.widget("username-entry").get_text()
        is_remote = self.is_remote()

        hvstr = ""
        if hv == HV_XEN:
            hvstr = "xen"
        elif hv == HV_QEMU or hv == HV_QEMU_SESSION:
            hvstr = "qemu"
        elif hv == HV_BHYVE:
            hvstr = "bhyve"
        elif hv == HV_VZ:
            hvstr = "vz"
        else:
            hvstr = "lxc"

        addrstr = ""
        if user:
            addrstr += urllib.parse.quote(user) + "@"

        # More than one ':' implies a bare IPv6 address; bracket it.
        if host.count(":") > 1:
            host = "[%s]" % host
        addrstr += host

        if is_remote:
            hoststr = "+ssh://" + addrstr + "/"
        else:
            hoststr = ":///"

        uri = hvstr + hoststr
        if hv in (HV_QEMU, HV_BHYVE, HV_VZ):
            uri += "system"
        elif hv == HV_QEMU_SESSION:
            uri += "session"

        return uri

    def validate(self):
        """Return True when current input can form a usable URI."""
        is_remote = self.is_remote()
        host = self.widget("hostname").get_text()

        if is_remote and not host:
            msg = _("A hostname is required for remote connections.")
            return self.err.val_err(msg)

        return True

    def _conn_open_completed(self, conn, ConnectError):
        """Callback run when the connection attempt finishes.

        ConnectError is None on success, else a (msg, details, title)
        tuple describing the failure.
        """
        if not ConnectError:
            self.close()
            self.reset_finish_cursor()
            return

        msg, details, title = ConnectError
        msg += "\n\n"
        msg += _("Would you still like to remember this connection?")

        remember = self.err.show_err(msg, details, title,
                buttons=Gtk.ButtonsType.YES_NO,
                dialog_type=Gtk.MessageType.QUESTION, modal=True)
        self.reset_finish_cursor()

        if remember:
            self.close()
        else:
            # User declined: forget the failed connection entirely.
            vmmConnectionManager.get_instance().remove_conn(conn.get_uri())

    def open_conn(self, ignore):
        """Signal handler for the Connect button: kick off the connection."""
        if not self.validate():
            return

        auto = False
        if self.widget("autoconnect").get_sensitive():
            auto = bool(self.widget("autoconnect").get_active())
        if self.widget("uri-label").is_visible():
            uri = self.generate_uri()
        else:
            uri = self.widget("uri-entry").get_text()

        log.debug("Generate URI=%s, auto=%s", uri, auto)

        conn = vmmConnectionManager.get_instance().add_conn(uri)
        conn.set_autoconnect(auto)
        if conn.is_active():
            # Already connected: treat as an immediate success.
            self._conn_open_completed(conn, None)
            return

        conn.connect_once("open-completed", self._conn_open_completed)
        self.set_finish_cursor()
        conn.open()
|
TEHRAN, Iran (AP) — Britain’s top diplomat has raised the case of a British-Iranian woman who has been detained in Iran for more than two years during his visit to the Islamic Republic.
The semi-official ISNA news agency on Tuesday quoted the Iranian Foreign Ministry as saying that Foreign Secretary Jeremy Hunt raised the case of Nazanin Zaghari-Ratcliffe with Iranian officials, without elaborating.
Hunt met Iran’s foreign minister and a top security official on Monday.
Zaghari-Ratcliffe, who worked for the charity arm of Thomson Reuters, was arrested in April 2016 on charges of plotting against the government. Her family denies the allegations.
|
#!/usr/bin/env python
"""Portable email sender. Acts as replacement for mail, Mail, mailx,
email (cygwin). Message body is taken from stdin.
"""
from __future__ import print_function
import email.mime.text
import getpass
import logging
import optparse
import os
import smtplib
import socket
import sys
def main():
    """Entry point: read the message body from stdin and send it."""
    options = _parse_args()
    _setup_logging(options.verbose)
    message_body = sys.stdin.read()

    send_email(
        options.recipients,
        message_body,
        subject=options.subject,
        headers=options.header,
        sender=options.sender,
        smtp_host=options.smtp_host,
    )
def send_email(recipients, body, subject=None, headers=None, sender=None, smtp_host=None):
    """Send email!

    :arg recipients: list of recipients. If only one, may be a string.
    :arg body: The email message body.
    :arg subject: Optional subject. Defaults to ''.
    :arg headers: Optional dict of headers to add.
    :arg sender: Optional sender address. Defaults to <user>@<fqdn>
    :arg smtp_host: Optional SMTP host. Defaults to 'localhost'.
    """
    # `basestring` only exists on Python 2; fall back to `str` on Python 3
    # so a single-recipient string is still recognized on both versions.
    try:
        string_types = basestring
    except NameError:
        string_types = str
    if isinstance(recipients, string_types):
        recipients = [recipients]
    sender = sender or _default_sender()
    subject = subject or ''
    smtp_host = smtp_host or _default_smtp_host()

    msg = email.mime.text.MIMEText(body)
    msg['Subject'] = subject
    msg['From'] = sender
    msg['To'] = ','.join(recipients)
    if headers:
        # dict.iteritems() was removed in Python 3; items() works on both.
        for key, value in headers.items():
            msg[key] = value

    logging.debug('Opening connection to: {0}'.format(smtp_host))
    smtp = smtplib.SMTP(smtp_host)
    try:
        logging.info('Sending email to: {0} from: {1} subject: {2}'.format(
            ','.join(recipients), sender, subject))
        logging.debug('Email headers: {0}'.format(headers))
        logging.debug('Email body length: {0}'.format(len(body)))
        smtp.sendmail(sender, recipients, msg.as_string())
    finally:
        # Always close the SMTP session, even when sendmail raises.
        smtp.quit()
def _parse_headers(option, opt, value, parser, *args, **kwargs):
    """optparse callback that accumulates NAME=VALUE header pairs.

    Values containing commas (e.g. the user specified
    --header=Reply-To=X@y.com,Precedence=bulk) are broken apart and each
    name/value pair is merged into the dict stored on the option's dest.
    """
    # Start from whatever pairs earlier occurrences of this option stored.
    headers = getattr(parser.values, option.dest, None) or {}

    for pair in value.split(','):
        name, val = pair.split('=')
        headers[name] = val

    # Publish the merged dict back onto the option value.
    setattr(parser.values, option.dest, headers)
def _default_sender():
    """Return default sender address, which is <user>@<hostname>."""
    user = getpass.getuser()
    host = socket.getfqdn()
    return '%s@%s' % (user, host)
def _default_smtp_host():
    """Return default smtp host, which is localhost unless
    CHPL_UTIL_SMTP_HOST is set in environment.
    """
    try:
        return os.environ['CHPL_UTIL_SMTP_HOST']
    except KeyError:
        return 'localhost'
def _parse_args():
    """Parse and return command line arguments."""

    class _NoWrapFormatter(optparse.IndentedHelpFormatter):
        """Help formatter that does not wrap the description text."""

        def _format_text(self, text):
            return text

    arg_parser = optparse.OptionParser(
        usage='usage: %prog [options] recipient_email [...]',
        description=__doc__,
        formatter=_NoWrapFormatter())
    arg_parser.add_option(
        '-v', '--verbose',
        action='store_true',
        help='Verbose output.')

    mail_opts = optparse.OptionGroup(arg_parser, 'Mail Options')
    mail_opts.add_option(
        '-s', '--subject',
        default=None,
        help='Email subject.')
    mail_opts.add_option(
        '-H', '--header',
        action='callback', type='string',
        callback=_parse_headers,
        help=('Email header(s) of form NAME=VALUE. '
              'Specify more than one with comma delimited list.'))
    mail_opts.add_option(
        '-S', '--sender',
        default=_default_sender(),
        help='Sender email address. (default: %default)')
    mail_opts.add_option(
        '--smtp-host',
        default=_default_smtp_host(),
        help='SMTP host to use when sending email. (default: %default)')
    arg_parser.add_option_group(mail_opts)

    options, positionals = arg_parser.parse_args()
    # Every positional argument is treated as a recipient address.
    options.recipients = positionals
    return options
def _setup_logging(verbose=False):
    """Initialize logging and set level based on verbose.

    :type verbose: bool
    :arg verbose: When True, set log level to DEBUG.
    """
    if verbose:
        level = logging.DEBUG
    else:
        level = logging.WARN
    logging.basicConfig(
        format='%(asctime)s [%(levelname)s] %(message)s',
        level=level)
    logging.debug('Verbose output enabled.')
# Script entry point: parse arguments and send the message read from stdin.
if __name__ == '__main__':
    main()
|
Description "50 [age] ; Carpenter ; North Carolina [place of birth]."
Source Includes data from the 1880 Census Population Schedules, Precinct 2, ED 188, Taylor County, Texas, Series: T9, Roll: 1328, p. 279 (stamped), p. 18 (handwritten), entry 22.
Artisan or Artist Hamrick, J. M.
|
# Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
## @package add_bias
# Module caffe2.python.layers.add_bias
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import schema
from caffe2.python.layers.layers import ModelLayer
import math
class AddBias(ModelLayer):
    """Layer that adds a learned per-feature bias vector to its input."""

    def __init__(self, model, input_record, bias_init=None,
                 bias_optim=None, name='add_bias'):
        super(AddBias, self).__init__(model, name, input_record)
        assert isinstance(input_record, schema.Scalar), "Incorrect input type"
        assert len(input_record.field_type().shape) > 0, (
            "AddBias expects limited dimensions of the input tensor")

        input_dims = input_record.field_type().shape[0]
        assert input_dims > 0, (
            "AddBias expects input dimensions > 0, got {}".format(input_dims))

        # Default: uniform init in [-1/sqrt(d), 1/sqrt(d)], unless the
        # caller supplied an explicit initializer.
        bound = math.sqrt(1.0 / input_dims)
        if not bias_init:
            bias_init = ('UniformFill', {'min': -bound, 'max': bound})

        self.b = self.create_param(
            param_name='b',
            shape=[input_dims, ],
            initializer=bias_init,
            optimizer=bias_optim,
        )

        self.output_schema = schema.Scalar(
            (input_record.field_type().base, (input_dims, )),
            self.get_next_blob_reference('output'),
        )

    def add_ops(self, net):
        # Broadcasting Add: bias vector is added to every row of the input.
        net.Add(self.input_record.field_blobs() + [self.b],
                self.output_schema.field_blobs(), broadcast=1)
|
The Metropolis of Elyria established the Different Power and Vitality Conservation Mortgage Fund to advertise different power and power conservation companies and jobs with City limits. It’s the nightmare of each business proprietor to inadvertently lead his business to bankruptcy, to a path with no return. Enterprise Enlargement is essential for your enterprise progress, and try to be open and prepared to undergo the process of acquiring enterprise financing for your branches.
If it’s a business expansion, there’s an alternative that avoids making things more complicated. The mortgage quantity shall be raised by way of participation from commercial banks with MTDC assistance or financial institution of your alternative. This strategy has labored wonders – the outdated retailer caters to a wider, mass market; the latter to a more discerning clientele.
Household pleasure, job relevance for relations, and constructing a enterprise dynasty for not simply your family, but for other families in your group as nicely, are an ideal reason for development. It’s possible you’ll really feel your strategy is about maintaining your core clients completely happy to achieve longevity in your target market.
This is a nice choice if you already know that your enlargement requires new tools, because the loan quantity will cowl simply that cost. Relying in your space of experience, there may come a time when you’ll need to hire managers to run sure elements of your corporation so you may focus on new product development or buyer acquisition.
The important parameters which are of influence to Churn may be detected and modelled into an algorithm with a view to find the optimum set-point or targets on these parameters. For example, you might sometimes have to borrow money or take out a business or industrial loan, should you plan to broaden your online business but you would not have the mandatory capital for that. Most business failures as a consequence of sudden progress are triggered by cash-stream issues.
We welcome you to take a while to explore and be taught about the Enterprise Enlargement course and great sources supplied in our curriculum. It is among the most popular unbiased small enterprise publications on the web. Listed below are some frequent expansion issues you might encounter and tricks to avoid them.
It typically happens that business house owners who are just getting started must kick start their business, they’ve to present it a lift before they actually get to enjoy the uninterrupted cash movement – and before that occurs, it’s not uncommon for them to experience some gaps on the best way, which is perfectly normal as long as they’ve the tools and data required for dealing with them in an efficient and professional manner.
Added to potential cash-flow problems are a number of other managerial problems that can occur just because the enterprise is working now at better velocity, with more individuals (many not yet absolutely trained), and a careworn administration much less prone to find time to examine financial management techniques which, of their flip, may be over-taxed.
Getting a enterprise up and running is challenging sufficient for small enterprise owners. By no means let your meals or customer support high quality undergo throughout business expansion. With correct planning and attention to detail, growth might be the best factor that ever occurred to your business. This fashion you’ll open up new territory and develop your enterprise in a brand new route.
Even if your small business has bought minimally outdoors of WI, your business continues to be qualified to apply. Food Truck enterprise expansion will be thrilling, and distributors may be tempted to hit the road operating quicker than they need to. Stolze, William J. Begin Up Financing: An Entrepreneur’s Information to Financing a New or Growing Enterprise.
If what you are promoting is doing properly, then the chances are that there will probably be some money surplus. Of the numerous forms of financing choices, some are higher than others for business enlargement. For instance, at one point we seemed into growth and warehousing in Europe, but the capital outlay made it prohibitive.
By expanding, a enterprise can achieve an financial system of scale to contribute more money for analysis and growth, sponsorships, employee coaching and training, investment in new technology, and creating extra productivity out of staff. The situation of the financial system does not should dictate your corporation progress and enlargement plans.
|
#!/usr/bin/env python
# pygeo - a distribution of tools for managing geophysical data
# Copyright (C) 2011, 2012 Brendan Smithyman
# This file is part of pygeo.
# pygeo is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
# pygeo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with pygeo. If not, see <http://www.gnu.org/licenses/>.
# ----------------------------------------------------------------------
import numpy as np
import sys
from optparse import OptionParser
from pygeo.segyread import SEGYFile
from pygeo.coord import reduceToLocal
# ------------------------------------------------------------------------
# Settings

AUTHORSHIP = 'Brendan Smithyman'
VERSION = '%prog v1.1\n'
DESCRIPTION = 'Exports a series of FAST pick datafiles based on SEG-Y headers.'
USAGE = '%prog [options] segy_file'

# Layout of one FAST pick line: x, y, z, time, error, flag.
format_string = '%10.3f%10.3f%10.3f%10.3f%10.3f%3d\n'

# ------------------------------------------------------------------------
# Build the command-line interface.
parser = OptionParser(usage=USAGE,
                      version=VERSION,
                      description=DESCRIPTION)

parser.add_option('-b', '--basis', action='store', dest='basis',
                  help='point to use as zero coordinate [%default]')
parser.add_option('-a', '--angle', action='store', dest='angle',
                  help='angle in degrees for coordinate rotation [%default]')
parser.add_option('-k', '--key', action='store', dest='key',
                  help='trace header id for first-arrival picks [%default]')
parser.add_option('-u', '--unit', action='store', dest='unit',
                  help='spatial unit [%default]')
parser.add_option('-z', '--zdir', action='store', dest='zdir',
                  help='coord. system z-scaling [%default]')
parser.add_option('-t', '--tfac', action='store', dest='tfac',
                  help='temporal unit [%default]')
parser.add_option('-s', '--shotout', action='store', dest='shotout',
                  help='filename for shot geometry information (for f.in) [%default]')
parser.add_option('-e', '--error', action='store', dest='error',
                  help='uniform data error [%default]')
parser.add_option('-o', '--omit', action='store_true', dest='omit',
                  help='omit shots without picks and renumber accordingly')
parser.add_option('-r', '--resamp', action='store', dest='resamp',
                  help='use a subset of shots, every nth [%default]')

parser.set_defaults(basis='0.,0.,0.',
                    angle='0.',
                    key='delrt',
                    unit='1e3',
                    zdir='-1',
                    tfac='1e3',
                    shotout='shotout.dat',
                    error='30.',
                    omit=False,
                    resamp=1)

(options, args) = parser.parse_args()

if (len(args) < 1):
    # parser.error() prints the message and exits with status 2 itself,
    # so the previous trailing exit(1) was unreachable and is removed.
    parser.error('Please specify a SEG-Y file!')
# Get input filename
infile = args[0]

# Convert rotation angle to radians.
# np.float / np.int were deprecated aliases for the builtins and have been
# removed from modern NumPy; the builtins behave identically here.
angle = float(options.angle)*np.pi/180.

# Convert basis to array
basis = np.array([float(item) for item in options.basis.strip().split(',')])

pickkey = options.key
unit = float(options.unit)
zantithesis = float(options.zdir)
tfac = float(options.tfac)
shotout = options.shotout
error = float(options.error)
omit = options.omit
resamp = int(options.resamp)
# Open SEG-Y file and get first trace header
sys.stdout.write('Reading "%s"...\n'%(infile,))
sys.stdout.flush()

sf = SEGYFile(infile, endian='Big')
trh0 = sf.trhead[0]

sys.stdout.write('Calculating scale factors...\n')
sys.stdout.flush()

# Determine coordinate and elevation scale factors from first trace header
# (assume same for all traces).  Per SEG-Y convention, a negative scalar
# means "divide by abs(scalar)".
if (trh0['scalco'] < 0):
    scalco = 1./abs(trh0['scalco'])
else:
    scalco = trh0['scalco']
scalco = scalco / unit

if (trh0['scalel'] < 0):
    scalel = 1./abs(trh0['scalel'])
else:
    scalel = trh0['scalel']
scalel = scalel / unit

# Use SEGYFile internal to calculate shot-gather boundaries
sys.stdout.write('Calculating ensemble boundaries...\n')
sys.stdout.flush()
sf._calcEnsembles()

# Find the number of ensembles, and order them by occurrence in the SEG-Y
# file.  dict.keys()/values() return views on Python 3, so materialize
# them as lists before handing them to NumPy.
ngathers = len(sf.ensembles)
ordering = np.argsort(list(sf.ensembles.values()))
shotnums = np.array(list(sf.ensembles.keys()))[ordering]

sys.stdout.write('Writing output files...\n')
sys.stdout.flush()

# Create some empty lists to hold upcoming values
shotlocs = [[],[],[]]
shotactive = []
shotnumber = 0

# Create bound thresholds (which will be updated)
bounds = [1e10,-1e10,1e10,-1e10,1e10,-1e10]
# Loop over each shot gather (resamp > 1 takes every nth shot only).
# NOTE: xrange is Python 2 only; range works on both versions.
for i in range(0, ngathers, resamp):
    outlines = []

    # Get the trace header for the first trace in this shot gather
    trhl0 = sf.trhead[sf.ensembles[shotnums[i]]]

    sx = trhl0['sx'] * scalco
    sy = trhl0['sy'] * scalco
    sz = trhl0['selev'] * scalel * zantithesis

    (nsx, nsy, nsz) = reduceToLocal(np.array([sx,sy,sz],ndmin=2), angle, basis)[0]

    # Append information about this shot to the running tally of all shot
    # locations; this is used to construct f.in
    shotlocs[0].append(nsx)
    shotlocs[1].append(nsy)
    shotlocs[2].append(nsz)

    # Source line: flag -1 marks the shot itself in the FAST pick file.
    outlines.append(format_string % (nsx, nsy, nsz, 0., 0., -1))

    # Trace range [tr0, tr1) belonging to this gather.
    tr0 = sf.ensembles[shotnums[i]]
    if (i == ngathers - 1):
        tr1 = sf.ntr - 1
    else:
        tr1 = sf.ensembles[shotnums[i+1]]

    shotactive.append(0)

    for j in range(tr0, tr1):
        trhl = sf.trhead[j]

        rx = trhl['gx'] * scalco
        ry = trhl['gy'] * scalco
        rz = trhl['gelev'] * scalel * zantithesis

        (nrx, nry, nrz) = reduceToLocal(np.array([rx,ry,rz],ndmin=2), angle, basis)[0]

        # Grow the survey bounding box to include this receiver.
        if (nrx < bounds[0]):
            bounds[0] = nrx
        if (nrx > bounds[1]):
            bounds[1] = nrx
        if (nry < bounds[2]):
            bounds[2] = nry
        if (nry > bounds[3]):
            bounds[3] = nry
        if (nrz < bounds[4]):
            bounds[4] = nrz
        if (nrz > bounds[5]):
            bounds[5] = nrz

        stime = trhl[pickkey]

        # Only traces with a valid pick time (neither 0 nor the 16-bit
        # sentinel 65535) contribute a data line; flag 1 marks a pick.
        if ((stime != 0) and (stime != 65535)):
            outlines.append(format_string % (nrx, nry, nrz, stime/tfac, error/tfac, 1))
            shotactive[-1] += 1

    if (omit):
        if (shotactive[-1] != 0):
            shotnumber += 1
        else:
            # No picks for this shot: drop it and renumber later shots.
            shotactive.pop()
            shotlocs[0].pop()
            shotlocs[1].pop()
            shotlocs[2].pop()
            continue
    else:
        shotnumber += 1

    # Create a FAST output file for this gather (using 4-digit filenames)
    outfile = 'fd%04d.ascii'%(shotnumber,)
    sys.stdout.write('%s <-- SHOTID %d\n'%(outfile, shotnums[i]))
    sys.stdout.flush()
    with open(outfile, 'w') as fp:
        fp.writelines(outlines)
# 1-based indices of shots that contributed at least one pick.
# (range replaces the Python-2-only xrange.)
itrace = []
for i in range(shotnumber):
    if (shotactive[i] > 0):
        itrace.append(i + 1)

# Write the shot-geometry namelist fragment consumed by FAST's f.in.
with open(shotout, 'w') as fp:
    fp.write(' isource=')
    fp.write(', '.join(['%d'%(item != 0) for item in shotactive]))
    fp.write(',\n xsource=')
    fp.write(', '.join(['%8.3f'%item for item in shotlocs[0]]))
    fp.write(',\n ysource=')
    fp.write(', '.join(['%8.3f'%item for item in shotlocs[1]]))
    fp.write(',\n zsource=')
    fp.write(', '.join(['%8.3f'%item for item in shotlocs[2]]))
    fp.write(',\n itrace=')
    fp.write(', '.join(['%d'%item for item in itrace]))

sys.stdout.write('\nBounds:\n\t%f < x < %f\n\t%f < y < %f\n\t%f < z < %f\n' % tuple(bounds))
sys.stdout.flush()
|
they have a much better support for the second monitor.
> problems with Houdini and Maya.
>> fine. At least on Windows.
>> but has anyone tested the 7300GT with Houdini 8?
|
#
# Gramps - a GTK+/GNOME based genealogy program - Family Sheet plugin
#
# Copyright (C) 2008,2009,2010 Reinhard Mueller
# Copyright (C) 2010 Jakim Friant
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
"""Reports/Text Reports/Relations"""
from __future__ import unicode_literals
#------------------------------------------------------------------------
#
# Standard Python modules
#
#------------------------------------------------------------------------
import string
#------------------------------------------------------------------------
#
# GRAMPS modules
#
#------------------------------------------------------------------------
from gramps.gen.display.name import displayer
from gramps.gen.lib import Date, Event, EventType, FamilyRelType, Name
from gramps.gen.lib import StyledText, StyledTextTag, StyledTextTagType
from gramps.gen.plug import docgen
from gramps.gen.plug.menu import BooleanOption, EnumeratedListOption, PersonOption
from gramps.gen.plug.report import Report
from gramps.gen.plug.report import utils
from gramps.gen.plug.report import MenuReportOptions
import gramps.gen.datehandler
from gramps.gen.relationship import get_relationship_calculator
from gramps.gen.const import GRAMPS_LOCALE as glocale
try:
_trans = glocale.get_addon_translator(__file__)
except ValueError:
_trans = glocale.translation
_ = _trans.gettext
#------------------------------------------------------------------------
#
# Constants
#
#------------------------------------------------------------------------
# Template events used when a person or family has no recorded
# birth/marriage event.
empty_birth = Event()
empty_birth.set_type(EventType.BIRTH)

empty_marriage = Event()
empty_marriage.set_type(EventType.MARRIAGE)
#------------------------------------------------------------------------
#
# Relations report
#
#------------------------------------------------------------------------
class Relations(Report):
"""
Relations is a page that contains all available info about the relationship
between two people.
"""
def __init__(self, database, options, user):
"""
Initialize the report.
@param database: the GRAMPS database instance
@param options: instance of the Options class for this report
@param user: a gramps.gen.user.User() instance
"""
Report.__init__(self, database, options, user)
menu = options.menu
self.person_id = menu.get_option_by_name('pid').get_value()
self.person2_id = menu.get_option_by_name('pid2').get_value()
self.recurse = menu.get_option_by_name('recurse').get_value()
self.callname = menu.get_option_by_name('callname').get_value()
self.placeholder = menu.get_option_by_name('placeholder').get_value()
self.incl_sources = menu.get_option_by_name('incl_sources').get_value()
self.incl_notes = menu.get_option_by_name('incl_notes').get_value()
def write_report(self):
"""
Build the actual report.
"""
person1 = self.database.get_person_from_gramps_id(self.person_id)
person2 = self.database.get_person_from_gramps_id(self.person2_id)
self.__process_relationship(person1, person2)
#(rank, ahnentafel, person_key) = self.__calc_person_key(person)
#self.__process_person(person, rank, ahnentafel, person_key)
    def __process_relationship(self, person1, person2):
        """Write a table describing how person1 and person2 are related.

        Dumps both people, the relationship of person2 to person1, and
        the common ancestors found by the relationship calculator.
        """
        # --- Now let the party begin! ---
        self.doc.start_paragraph('FSR-Key')
        self.doc.write_text('starting')
        self.doc.end_paragraph()

        self.doc.start_table(None, 'FSR-Table')

        # Main person
        self.doc.start_row()
        self.doc.start_cell('FSR-HeadCell', 3)
        self.doc.start_paragraph('FSR-Name')
        self.doc.write_text("First Person\n")
        self.doc.end_paragraph()
        self.__dump_person(person1, False, None)

        self.doc.start_paragraph('FSR-Name')
        self.doc.write_text("\nSecond Person\n")
        self.doc.end_paragraph()
        self.__dump_person(person2, False, None)

        # Relationship of person2 relative to person1 (e.g. "cousin").
        self.doc.start_paragraph('FSR-Name')
        relationship = get_relationship_calculator()
        relate = "\nSecond person is the first person's " + relationship.get_one_relationship(self.database, person1, person2)
        self.doc.write_text(relate)
        self.doc.end_paragraph()

        self.doc.start_paragraph('FSR-Name')
        self.doc.write_text("\nCommon Ancestor\n")
        self.doc.write_text("The common ancestors for Person 1 and Person 2 are ")
        #firstAncestor = self.database.get_person_from_handle();
        # all_dist=True returns every common-ancestor path, not just the
        # shortest one; only_birth=False also follows non-birth relations.
        info, msg = relationship.get_relationship_distance_new(
            self.database, person1, person2, all_dist=True, only_birth=False)
        self.doc.write_text(self.__process_ancestor_string(info))
        self.doc.end_paragraph()

        #relationship = get_relationship_calculator()
        #self.doc.start_paragraph('FSR-Name')

        # NOTE(review): leftover debug output to stdout — consider removing.
        print('info:',info)
        print('message:', msg)
        self.doc.end_cell()
        self.doc.end_row()
        self.doc.end_table()
def __process_ancestor_string(self, info):
if type(info).__name__=='tuple':
return None
elif type(info).__name__=='list':
len(info)
ancestorlist=[]
for relation in info:
rank = relation[0]
person_handle = relation[1]
if rank == -1:
return None
ancestor = self.database.get_person_from_handle(person_handle)
name = ancestor.get_primary_name().get_regular_name()
ancestorlist.append(name)
if len(ancestorlist)>0:
return ' and '.join(ancestorlist)
else:
return None
def __process_person(self, person, rank, ahnentafel, person_key):
    """
    Recursively build the Family Sheet for this person and all children
    with spouses.

    @param person: Person object for the key person of the Family Sheet.
    @param rank: Numerical distance between the central person in the
        database and the person in the parameter (the number of births
        needed to connect them).
    @param ahnentafel: "Ahnentafel" number of the common ancestor of the
        central person in the database and the person in the parameter,
        seen from the side of the central person in the database.
    @param person_key: Family Sheet key to be printed on the top right of
        the corner.
    """
    # List of (person, rank, ahnentafel, person_key) tuples for persons to
    # process recursively after this one.
    more_sheets = []
    # Numbering of spouses (integer, but printed in roman numbers).
    spouse_index = 0
    # Numbering of children (integer, but printed as lowercase letters).
    child_index = 0
    # Source references to print as footnotes (reset for every sheet).
    self.__citation_index = 0
    self.__citations = []
    # Notes to print as footnotes (reset for every sheet).
    self.__note_index = 0
    self.__notes = []
    # --- Now let the party begin! ---
    # Sheet key shown in the top right corner.
    self.doc.start_paragraph('FSR-Key')
    self.doc.write_text(person_key)
    self.doc.end_paragraph()
    self.doc.start_table(None, 'FSR-Table')
    # Main person
    self.doc.start_row()
    self.doc.start_cell('FSR-HeadCell', 3)
    self.__dump_person(person, False, None)
    self.doc.end_cell()
    self.doc.end_row()
    # Spouses
    for family_handle in person.get_family_handle_list():
        family = self.database.get_family_from_handle(family_handle)
        spouse_index += 1
        spouse_handle = utils.find_spouse(person, family)
        spouse = self.database.get_person_from_handle(spouse_handle)
        # Determine relationship between the center person and the spouse.
        # If the spouse has a closer blood relationship than the current
        # person, we refer to the Family Sheet of the spouse instead of
        # printing the child list, because all children are more closely
        # related to the center person via the spouse than via the current
        # person. The same happens if the relationship is on the same
        # level, but the relationship via the spouse goes via a common
        # ancestor with a lower Ahnentafel numbering (i.e. a relationship
        # stronger father-sided). In these cases, refer_spouse will be set
        # to True.
        (spouse_rank, spouse_at, spouse_key) = \
                self.__calc_person_key(spouse)
        if self.recurse != RelationsOptions.RECURSE_ALL:
            refer_spouse = (spouse_rank != -1 and \
                    (spouse_rank < rank or
                     (spouse_rank == rank and spouse_at < ahnentafel)))
        else:
            refer_spouse = False
        self.doc.start_row()
        # Roman numeral label for this spouse in the number column.
        self.doc.start_cell('FSR-NumberCell', 1)
        self.doc.start_paragraph('FSR-Number')
        self.doc.write_text(utils.roman(spouse_index))
        self.doc.end_paragraph()
        self.doc.end_cell()
        self.doc.start_cell('FSR-DataCell', 2)
        self.__dump_family(family, spouse)
        if refer_spouse:
            # Cross reference ("→ key") to the spouse's own sheet.
            self.doc.start_paragraph('FSR-Normal')
            self.doc.write_text(_("\u2192 %s") % spouse_key)
            self.doc.end_paragraph()
        self.doc.end_cell()
        self.doc.end_row()
        if refer_spouse:
            # Spouse with closer relationship than current person? Don't
            # print children on this Family Sheet (but count them for the
            # numbering).
            child_index += len(family.get_child_ref_list())
            continue
        # Children
        for child_ref in family.get_child_ref_list():
            child = self.database.get_person_from_handle(child_ref.ref)
            child_letter = string.ascii_lowercase[child_index]
            self.doc.start_row()
            self.doc.start_cell('FSR-EmptyCell', 1)
            self.doc.end_cell()
            self.doc.start_cell('FSR-NumberCell', 1)
            self.doc.start_paragraph('FSR-Number')
            self.doc.write_text(child_letter)
            self.doc.end_paragraph()
            self.doc.end_cell()
            self.doc.start_cell('FSR-DataCell', 1)
            has_spouses = (child.get_family_handle_list() != [])
            self.__dump_person(child, has_spouses, child_ref)
            if has_spouses:
                # We have to recalculate the key for this person, it could
                # be closer related if it is a direct ancestor of the
                # central person or one of its spouses.
                (child_rank, child_at, child_key) = \
                        self.__calc_person_key(child)
                self.doc.start_paragraph('FSR-Normal')
                self.doc.write_text(_("\u2192 %s") % child_key)
                self.doc.end_paragraph()
                # We recursively print this child *only* if its
                # relationship with the central person is closest via the
                # current person. This way, we avoid that a person is
                # printed recursively from more than one of its ancestors.
                if child_key == person_key + child_letter or \
                        self.recurse == RelationsOptions.RECURSE_ALL:
                    more_sheets.append(
                            (child, child_rank, child_at, child_key))
            self.doc.end_cell()
            self.doc.end_row()
            child_index += 1
    # Closing border row of the sheet table.
    self.doc.start_row()
    self.doc.start_cell('FSR-FootCell', 3)
    self.doc.end_cell()
    self.doc.end_row()
    self.doc.end_table()
    self.__dump_sources()
    self.__dump_notes()
    # Now print the sheets for the children.
    if self.recurse != RelationsOptions.RECURSE_NONE:
        for (child, child_rank, child_at, child_key) in more_sheets:
            self.doc.page_break()
            self.__process_person(child, child_rank, child_at, child_key)
def __dump_family(self, family, spouse):
    """
    Output all data of a family the key person is a parent in, and all
    data of the corresponding spouse.
    """
    self.__dump_attributes(family)
    # A married couple must show at least a marriage event. If none is
    # recorded, print a placeholder marriage event instead.
    if spouse and family.get_relationship() == FamilyRelType.MARRIED:
        has_marriage = any(
                self.database.get_event_from_handle(event_ref.ref).get_type()
                        == EventType.MARRIAGE
                for event_ref in family.get_event_ref_list())
        if not has_marriage:
            self.__dump_event(empty_marriage, None)
    for event_ref in family.get_event_ref_list():
        self.__dump_event_ref(event_ref)
    if spouse:
        self.__dump_person(spouse, False, family)
    else:
        # No spouse known: still print the family's sources and notes.
        self.doc.start_paragraph('FSR-Normal')
        self.__write_sources(family)
        self.__write_notes(family)
        self.doc.end_paragraph()
def __dump_person(self, person, short, ref):
    """
    Output all data of a person.

    @param person: Person object to output.
    @param short: If True, print only name and birth event.
    @param ref: Reference through which this person is linked into the
        Family Sheet. Can be a family object (for the spouses) or a
        child_ref object (for the children). Source references and notes
        for this reference object will also be output.
    """
    primary_name = person.get_primary_name()
    styled_name = _Name_get_styled(primary_name, self.callname,
                                   self.placeholder)
    self.doc.start_paragraph('FSR-Name')
    mark = utils.get_person_mark(self.database, person)
    self.doc.write_text("", mark)
    self.doc.write_markup(str(styled_name), styled_name.get_tags())
    for obj in (primary_name, person):
        self.__write_sources(obj)
        self.__write_notes(obj)
    if ref:
        self.__write_sources(ref)
        self.__write_notes(ref)
    self.doc.end_paragraph()
    if short:
        # Abbreviated entry: only the birth event, if any.
        birth_ref = person.get_birth_ref()
        if birth_ref:
            self.__dump_event_ref(birth_ref)
        return
    # Full entry: alternate names, attributes, events and addresses.
    for alt_name in person.get_alternate_names():
        self.__dump_line(str(alt_name.get_type()),
                         _Name_get_styled(alt_name, self.callname,
                                          self.placeholder),
                         alt_name)
    self.__dump_attributes(person)
    # Each person should have a birth event. If no birth event is
    # there, print the placeholders for it nevertheless.
    if not person.get_birth_ref():
        self.__dump_event(empty_birth, None)
    for event_ref in person.get_primary_event_ref_list():
        self.__dump_event_ref(event_ref)
    for addr in person.get_address_list():
        location = utils.get_address_str(addr)
        date = gramps.gen.datehandler.get_date(addr)
        self.doc.start_paragraph('FSR-Normal')
        if date:
            self.doc.write_text(_("Address (%(date)s): %(location)s") % {
                'date': date,
                'location': location})
        else:
            self.doc.write_text(_("Address: %(location)s") % {
                'location': location})
        self.__write_sources(addr)
        self.__write_notes(addr)
        self.doc.end_paragraph()
def __dump_event_ref(self, event_ref):
    """Resolve an event reference and output the event's data."""
    self.__dump_event(
            self.database.get_event_from_handle(event_ref.ref), event_ref)
def __dump_event(self, event, ref):
    """
    Output all data for an event.

    @param event: Event object.
    @param ref: Reference through which this event is linked to the
        currently processed object. Source references and notes for this
        reference object will also be output.
    """
    date_text = _Event_get_date_text(event, self.placeholder)
    place_text = _Event_get_place_text(event, self.database,
                                       self.placeholder)
    self.doc.start_paragraph('FSR-Normal')
    self.doc.write_text("%s:" % event.get_type())
    # Description, date and place follow the type, each preceded by a
    # single space when present.
    for fragment in (event.get_description(), date_text, place_text):
        if fragment:
            self.doc.write_text(" ")
            self.doc.write_text(fragment)
    place_handle = event.get_place_handle()
    if place_handle:
        place = self.database.get_place_from_handle(place_handle)
        self.__write_sources(place)
        self.__write_notes(place)
    self.__write_sources(event)
    self.__write_notes(event)
    if ref:
        self.__write_notes(ref)
    for attr in event.get_attribute_list():
        self.doc.write_text(_("; %(type)s: %(value)s") % {
            'type' : attr.get_type(),
            'value': attr.get_value()})
        self.__write_sources(attr)
        self.__write_notes(attr)
    self.doc.end_paragraph()
def __dump_attributes(self, obj):
    """Output every attribute of the given object as a name/value line."""
    for attribute in obj.get_attribute_list():
        self.__dump_line(str(attribute.get_type()),
                         attribute.get_value(), obj)
def __dump_line(self, name, text, obj):
    """
    Output a name/text pair (like an attribute) with its related source
    references and notes.
    """
    self.doc.start_paragraph('FSR-Normal')
    self.doc.write_text("%s: " % name)
    # Styled text keeps its markup; anything else is written verbatim.
    if not isinstance(text, StyledText):
        self.doc.write_text(text)
    else:
        self.doc.write_markup(str(text), text.get_tags())
    self.__write_sources(obj)
    self.__write_notes(obj)
    self.doc.end_paragraph()
def __write_sources(self, obj):
    """
    Output source reference numbers for the given object (numbers like
    [1] in superscript) and collect the source references to be printed
    at the end of the report.
    """
    if not self.incl_sources:
        return
    for citation_handle in obj.get_citation_list():
        try:
            # Citation seen before: reuse its 1-based footnote number.
            index = self.__citations.index(citation_handle) + 1
        except ValueError:
            # New citation: register it and assign the next number.
            self.__citations.append(citation_handle)
            self.__citation_index += 1
            index = self.__citation_index
        self.doc.start_superscript()
        self.doc.write_text(" [%s]" % index)
        self.doc.end_superscript()
def __write_notes(self, obj):
    """
    Output note reference numbers for the given object (numbers like (1)
    in superscript) and collect the note handles to be printed at the
    end of the report.
    """
    if not self.incl_notes:
        return
    for note_handle in obj.get_note_list():
        try:
            # Note seen before: reuse its 1-based footnote number.
            index = self.__notes.index(note_handle) + 1
        except ValueError:
            # New note: register it and assign the next number.
            self.__notes.append(note_handle)
            self.__note_index += 1
            index = self.__note_index
        self.doc.start_superscript()
        self.doc.write_text(" (%s)" % index)
        self.doc.end_superscript()
def __dump_sources(self):
    """Print the collected source references as numbered footnotes."""
    if not self.__citations:
        return
    self.doc.start_paragraph('FSR-Footnote')
    self.doc.write_text("\n")
    self.doc.write_text(_("Source references:"))
    self.doc.end_paragraph()
    for index, citation_handle in enumerate(self.__citations, start=1):
        citation = self.database.get_citation_from_handle(citation_handle)
        source = self.database.get_source_from_handle(
                citation.get_reference_handle())
        self.doc.start_paragraph('FSR-Footnote')
        self.doc.write_text("[%s]: " % index)
        abbreviation = source.get_abbreviation()
        if abbreviation:
            self.doc.write_text(abbreviation)
        else:
            # No abbreviation: fall back to "author: title".
            author = source.get_author()
            if author:
                self.doc.write_text(_("%s: ") % author)
            self.doc.write_text(source.get_title())
        self.__write_notes(source)
        page = citation.get_page()
        if page:
            self.doc.write_text(_(", page %s") % page)
        self.__write_notes(citation)
        self.doc.end_paragraph()
def __dump_notes(self):
    """Print the collected notes as numbered footnotes."""
    if not self.__notes:
        return
    self.doc.start_paragraph('FSR-Footnote')
    self.doc.write_text("\n")
    self.doc.write_text(_("Notes:"))
    self.doc.end_paragraph()
    for index, note_handle in enumerate(self.__notes, start=1):
        note = self.database.get_note_from_handle(note_handle)
        self.doc.start_paragraph('FSR-Footnote')
        self.doc.write_text("(%s): " % index)
        self.doc.write_text(note.get())
        self.doc.end_paragraph()
def __calc_person_key(self, person):
    """
    The person key is a unique identifier that is built from the
    relationship to the default person. It consists of the "Ahnentafel"
    number of the common ancestor of the person with the default person,
    and then a letter representing the child number for each generation
    from the common ancestor to the person.

    If more than one common ancestor exists, the common ancestor with the
    lowest "Ahnentafel" number has precedence.

    For example, the second child of the third child of the father of the
    mother of the central person gets the person key "6cb".

    @param person: Person object to compute the key for.
    @return: a (rank, ahnentafel, key) tuple; (-1, 0, "") when no
        relationship to the default person can be established.
    """
    relationship = get_relationship_calculator()
    default_person = self.database.get_default_person()
    # No home person set.
    if default_person is None:
        return (-1, 0, "")
    # First try direct relationship.
    spousestring = ""
    info, msg = relationship.get_relationship_distance_new(
            self.database, default_person, person, all_dist=True)
    info = relationship.collapse_relations(info)[0]
    (rank, ancestor_handle, default_rel, default_fam, person_rel,
     person_fam) = info
    # Then try relationship to any spouse.
    if rank == -1:
        index = 0
        for family_handle in default_person.get_family_handle_list():
            index += 1
            family = self.database.get_family_from_handle(family_handle)
            spouse_handle = utils.find_spouse(default_person, family)
            spouse = self.database.get_person_from_handle(spouse_handle)
            info, msg = relationship.get_relationship_distance_new(
                    self.database, spouse, person, all_dist=True)
            info = relationship.collapse_relations(info)[0]
            (rank, ancestor_handle, default_rel, default_fam, person_rel,
             person_fam) = info
            if rank != -1:
                # Related via this spouse: prefix the key with the
                # spouse's roman number.
                spousestring = utils.roman(index)
                break
    # If no relationship found at all, exit here.
    if rank == -1:
        return (rank, 0, "")
    # Calculate Ahnentafel number of common ancestor:
    # each generation doubles, mothers add one (binary path encoding).
    ahnentafel = 1
    for rel in default_rel:
        ahnentafel *= 2
        if rel in (relationship.REL_MOTHER,
                   relationship.REL_MOTHER_NOTBIRTH):
            ahnentafel += 1
    # Find out child letters, walking from the person up to the common
    # ancestor, one generation per iteration of person_rel.
    child = person
    childletters = ""
    for rel in person_rel:
        family_handle = child.get_main_parents_family_handle()
        family = self.database.get_family_from_handle(family_handle)
        if rel in (relationship.REL_MOTHER,
                   relationship.REL_MOTHER_NOTBIRTH):
            parent_handle = family.get_mother_handle()
        else:
            parent_handle = family.get_father_handle()
        parent = self.database.get_person_from_handle(parent_handle)
        # Count *all* children from this parent
        childletter = "?"
        index = 0
        for family_handle in parent.get_family_handle_list():
            family = self.database.get_family_from_handle(family_handle)
            for child_ref in family.get_child_ref_list():
                if child_ref.ref == child.get_handle():
                    # Found: index is the child's overall number.
                    childletter = string.ascii_lowercase[index]
                    break
                index += 1
            else:
                # Child not in this family: keep searching.
                continue
            break
        childletters = childletter + childletters
        child = parent
    return (rank, ahnentafel,
            "%s%s%s" % (spousestring, ahnentafel, childletters))
#------------------------------------------------------------------------
#
# Reusable functions (could be methods of gramps.gen.lib.*)
#
#------------------------------------------------------------------------
# How _Name_get_styled() handles a person's call name: ignore it,
# substitute it for the first name, or underline it within / add it to
# the first name.
_Name_CALLNAME_DONTUSE = 0
_Name_CALLNAME_REPLACE = 1
_Name_CALLNAME_UNDERLINE_ADD = 2
def _Name_get_styled(name, callname, placeholder=False):
    """
    Return a StyledText object with the name formatted according to the
    parameters:

    @param callname: whether the callname should be used instead of the
        first name (CALLNAME_REPLACE), underlined within the first name
        (CALLNAME_UNDERLINE_ADD) or not used at all (CALLNAME_DONTUSE).
    @param placeholder: whether a series of underscores should be
        inserted as a placeholder if first name or surname are missing.
    """
    # Work on a copy so the real Name object stays untouched.
    work = Name(source=name)
    # Insert placeholders for missing name parts.
    if placeholder:
        if not work.first_name:
            work.first_name = "____________"
        if not work.get_surname():
            work.get_primary_surname().set_surname("____________")
    if work.call and callname == _Name_CALLNAME_REPLACE:
        # Replace first name with call name.
        work.first_name = work.call
    elif (work.call and callname == _Name_CALLNAME_UNDERLINE_ADD
            and work.call not in work.first_name):
        # Add call name to first name.
        work.first_name = "\"%(call)s\" (%(first)s)" % {
            'call': work.call,
            'first': work.first_name}
    text = displayer.display_name(work)
    tags = []
    # Underline the call name, but only if it was a part of the
    # *original* first name ("name" on purpose, not "work").
    if (work.call and callname == _Name_CALLNAME_UNDERLINE_ADD
            and work.call in name.first_name):
        callpos = text.find(work.call)
        tags = [StyledTextTag(StyledTextTagType.UNDERLINE, True,
                              [(callpos, callpos + len(work.call))])]
    return StyledText(text, tags)
def _Date_get_text(date, placeholder=False):
    """
    Return a textual representation of the date to be used in textual
    context, like "on 1 January 1980" or "in January 1980" or "after
    January 1980".

    @param placeholder: whether a series of underscores should be
        inserted as a placeholder if the date is missing or incomplete.
    """
    text = gramps.gen.datehandler.displayer.display(date) # @UndefinedVariable
    # Only plain dates (no modifier, no quality) get a preposition.
    is_plain = (date.get_modifier() == Date.MOD_NONE
                and date.get_quality() == Date.QUAL_NONE)
    if is_plain:
        if date.get_day_valid():
            text = _("on %(ymd_date)s") % {'ymd_date': text}
        elif date.get_month_valid():
            text = _("in %(ym_date)s") % {'ym_date': text}
        elif date.get_year_valid():
            text = _("in %(y_date)s") % {'y_date': text}
    if placeholder:
        if date.is_empty():
            text = _("on %(placeholder)s") % { 'placeholder': "__________"}
        elif not date.is_regular():
            text = _("on %(placeholder)s (%(partial)s)") % {
                'placeholder': "__________",
                'partial': text}
    return text
# Output placeholders for missing dates and places only for the
# following event types (the life-cycle events every sheet should show).
_Event_needs_date_place = [
    EventType.BIRTH,
    EventType.DEATH,
    EventType.MARRIAGE,
    EventType.DIVORCE]
def _Event_get_date_text(event, placeholder=False):
    """
    Return a textual representation of the event's date to be used in
    textual context, like "on 1 January 1980" or "in January 1980" or
    "after January 1980".

    @param placeholder: whether a series of underscores should be
        inserted as a placeholder if the date is missing or incomplete.
    """
    # Placeholders only apply to the event types listed above.
    use_placeholder = placeholder and event.get_type() in _Event_needs_date_place
    return _Date_get_text(event.get_date_object(), use_placeholder)
def _Event_get_place_text(event, database, placeholder=False):
    """
    Return a textual representation of the event's place to be used in
    textual context. This is basically "in " + the place title.

    @param placeholder: whether a series of underscores should be
        inserted as a placeholder if the place is missing.
    """
    place_handle = event.get_place_handle()
    if place_handle:
        place = database.get_place_from_handle(place_handle)
        return _("in %(place)s") % {'place': place.get_title()}
    if placeholder and event.get_type() in _Event_needs_date_place:
        return _("in %(place)s") % {'place': "__________"}
    return ""
#------------------------------------------------------------------------
#
# MenuReportOptions
#
#------------------------------------------------------------------------
class RelationsOptions(MenuReportOptions):
    """
    Defines options and provides handling interface.
    """

    # Recursion modes: which additional Family Sheets get printed.
    RECURSE_NONE = 0
    RECURSE_SIDE = 1
    RECURSE_ALL = 2

    def __init__(self, name, dbase):
        MenuReportOptions.__init__(self, name, dbase)

    def add_menu_options(self, menu):
        """Add all report options to the options menu."""
        ##########################
        category_name = _("Report Options")
        ##########################
        pid = PersonOption(_("First Relative"))
        pid2 = PersonOption(_("Second Relative"))
        pid.set_help(_("The first person for the relationship calculation."))
        pid2.set_help(_("The second person for the relationship calculation."))
        menu.add_option(category_name, "pid", pid)
        menu.add_option(category_name, "pid2", pid2)

        recurse = EnumeratedListOption(_("Print sheets for"), self.RECURSE_NONE)
        recurse.set_items([
            (self.RECURSE_NONE, _("Center person only")),
            (self.RECURSE_SIDE, _("Center person and descendants in side branches")),
            (self.RECURSE_ALL, _("Center person and all descendants"))])
        menu.add_option(category_name, "recurse", recurse)

        callname = EnumeratedListOption(_("Use call name"), _Name_CALLNAME_DONTUSE)
        callname.set_items([
            (_Name_CALLNAME_DONTUSE, _("Don't use call name")),
            (_Name_CALLNAME_REPLACE, _("Replace first name with call name")),
            (_Name_CALLNAME_UNDERLINE_ADD, _("Underline call name in first name / add call name to first name"))])
        menu.add_option(category_name, "callname", callname)

        placeholder = BooleanOption( _("Print placeholders for missing information"), True)
        menu.add_option(category_name, "placeholder", placeholder)

        incl_sources = BooleanOption( _("Include sources"), True)
        menu.add_option(category_name, "incl_sources", incl_sources)

        incl_notes = BooleanOption( _("Include notes"), True)
        menu.add_option(category_name, "incl_notes", incl_notes)

    @staticmethod
    def _add_para_style(default_style, style_name, size, bold, description,
                        alignment=None):
        """Register one sans-serif paragraph style.

        Helper that replaces the five near-identical copy-and-paste
        blocks make_default_style() used to consist of.
        """
        font = docgen.FontStyle()
        font.set_type_face(docgen.FONT_SANS_SERIF)
        font.set_size(size)
        font.set_bold(bold)
        para = docgen.ParagraphStyle()
        para.set_font(font)
        if alignment is not None:
            para.set_alignment(alignment)
        para.set_description(description)
        default_style.add_paragraph_style(style_name, para)

    @staticmethod
    def _add_cell_style(default_style, style_name, top=False, right=False,
                        left=False):
        """Register one table cell style with 0.1 padding and the
        requested borders."""
        cell = docgen.TableCellStyle()
        cell.set_padding(0.1)
        if top:
            cell.set_top_border(1)
        if left:
            cell.set_left_border(1)
        if right:
            cell.set_right_border(1)
        default_style.add_cell_style(style_name, cell)

    def make_default_style(self, default_style):
        """Make default output style for the Family Sheet Report."""
        # Paragraph styles
        self._add_para_style(default_style, 'FSR-Normal', 10, 0,
            _('The basic style used for the text display'))
        self._add_para_style(default_style, 'FSR-Key', 10, 0,
            _('The style used for the page key on the top'),
            alignment=docgen.PARA_ALIGN_RIGHT)
        self._add_para_style(default_style, 'FSR-Name', 12, 1,
            _("The style used for names"))
        self._add_para_style(default_style, 'FSR-Number', 12, 1,
            _("The style used for numbers"),
            alignment=docgen.PARA_ALIGN_CENTER)
        self._add_para_style(default_style, 'FSR-Footnote', 8, 0,
            _('The style used for footnotes (notes and source references)'))
        # Table styles
        self._add_cell_style(default_style, 'FSR-HeadCell',
                             top=True, left=True, right=True)
        self._add_cell_style(default_style, 'FSR-EmptyCell', left=True)
        self._add_cell_style(default_style, 'FSR-NumberCell',
                             top=True, left=True)
        self._add_cell_style(default_style, 'FSR-DataCell',
                             top=True, right=True, left=True)
        self._add_cell_style(default_style, 'FSR-FootCell', top=True)
        table = docgen.TableStyle()
        table.set_width(100)
        table.set_columns(3)
        table.set_column_width(0, 7)
        table.set_column_width(1, 7)
        table.set_column_width(2, 86)
        default_style.add_table_style('FSR-Table', table)
|
Being a parent is crazy tough, and doing it on your own is even tougher. Balancing your kid’s activities, school, house chores and everything else we do can be so tiring. Whether you homeschool like I do, send your kids to public or private school, at some point more than likely, the idea of playing an instrument will pop up.
It’s an idea that most parents dread. The noise, the cost, the impatience…all the little joys. Yes, learning to play a musical instrument is wonderful and has so many benefits for a child. So how do you weigh the pros and cons? Let’s go over some essential advice for what to do in this situation, shall we?
If you haven’t reached the point yet where your child has asked for music lessons, then know it’s probably coming. Just about every parent reaches that point. Maybe they heard a piece of music that they particularly like and want to recreate, perhaps their friends are starting up a band, maybe a lesson at school piqued their interest. Whatever their reason, you should encourage them wholeheartedly, even though you may not be exactly thrilled. But here are a few sage pieces of advice for you to consider along the way.
When choosing an instrument with your little one, you will want to bear in mind various factors. Cost is a major one – while all instruments are equally worthwhile musically, some will cost a whole lot more than others to purchase and learn to play. A piano, for example, is a large investment piece and something that you will ideally have to purchase if your kid is going to be able to get enough practice time at home. Piano lessons also come hand in hand with a notoriously high price tag.
A beginner’s guitar, on the other hand, will come at a much lower price and your child could watch free online tutorials to pick up chords and notes. Next, you need to consider volume. Some instruments can be played at a lower volume effectively and others create a racket. Remember that you’re potentially going to be listening to this instrument being played in your home for years to come if your children take to it, so make sure it’s something that you can bear.
No matter what instrument your little one ends up picking, there are various ways to save money down the line. This is huge for us single moms who need to save money however and wherever we can.
Always look for a free alternative before forking out for expensive materials or resources. You can find plenty of music for your little one to learn through sites like easyukulelesongs.com and as we have briefly mentioned before, there are plenty of brilliant free tutorials on video streaming sites such as Youtube.
Make a schedule for your little one’s practice sessions. There’s a pretty good chance that your neighbors really won’t appreciate your little one’s attempt to become the newest member of a heavy metal band if they start practicing at nine o’clock every evening. You also need to set boundaries regarding when your kids can and cannot play their instruments.
After all, you don’t want them hopping out of bed to try out an elaborate drumroll in the middle of the night when you’re fast asleep. So, have a designated room for practicing music and a cut off time that they have to put the instruments down. This will keep everyone happy. You and your neighbors won’t be disturbed, and your child will be able to expand their knowledge and skill comfortably and at a good pace.
Learning an instrument is likely to be a wholly positive experience for your little one, so you should fully endorse their aspirations. But the advice above can help you to save a whole lot of money and maintain a little peace of mind. Enjoy the creative and musical fun your child can experience and who knows, maybe your kid will go pro with their new found talent!
Give this post a share if you liked these tips and make sure you subscribe to my blog for more parenting tips, ideas, and support!
|
import httplib
import time
class DataDogStat:
    """
    Posts an on/off status value as a gauge metric to the DataDog API.
    """

    def __init__(self, config):
        # Config must provide DATADOG_API_KEY, DATADOG_ON_VALUE,
        # DATADOG_OFF_VALUE, DATATOG_METRIC_NAME (sic — the typo is in
        # the config attribute name) and DATADOG_HOST_NAME.
        self._config = config

    def post_status(self, status):
        """
        Validate the API key, then post *status* as a gauge data point
        stamped with the current time.

        @param status: truthy selects DATADOG_ON_VALUE, falsy selects
            DATADOG_OFF_VALUE.
        @return: True on success.
        @raise Exception: when DataDog replies with an unexpected HTTP
            status.
        """
        # Validate the API key first so a bad key fails fast.
        # Fix: close connections even on error (they previously leaked).
        conn = httplib.HTTPSConnection("app.datadoghq.com", timeout=60)
        try:
            conn.request("GET", "/api/v1/validate?api_key=" +
                         self._config.DATADOG_API_KEY)
            r1 = conn.getresponse()
            if r1.status != 200:
                raise Exception("Not 200 status in DataDog API login: " +
                                str(r1.status))
        finally:
            conn.close()

        current_timestamp = int(time.time())
        if status:
            datadog_metric_value = self._config.DATADOG_ON_VALUE
        else:
            datadog_metric_value = self._config.DATADOG_OFF_VALUE
        headers = {"Content-type": "application/json"}
        # NOTE(review): the JSON body is assembled by hand, so the
        # config values are assumed to be pre-escaped strings (the
        # metric value is emitted unquoted, i.e. as a JSON number).
        post_data = '{ "series" : [{"metric":"' + self._config.DATATOG_METRIC_NAME + \
            '", "points":[[' + str(current_timestamp) + \
            ', ' + datadog_metric_value + ']], "type":"gauge", "host":"' + \
            self._config.DATADOG_HOST_NAME + '", "tags\":[""]}]}'
        conn = httplib.HTTPSConnection("app.datadoghq.com", timeout=60)
        try:
            conn.request("POST",
                         "/api/v1/series?api_key=" + self._config.DATADOG_API_KEY,
                         post_data,
                         headers)
            r1 = conn.getresponse()
            if r1.status != 202:
                raise Exception("Not 202 status in Datadog metric post: " +
                                str(r1.status))
        finally:
            conn.close()
        return True
|
What the Browns did to the Cincinnati Bengals in front of their home fans and a national television audience Thursday night almost defies description.
In what was easily the club’s best all-around performance in who knows how long, the Browns thoroughly dismantled a pretty good Bengals team, 24-3, and dislodged them from first place in the AFC North.
The victory moved the Browns into a first-place tie with Pittsburgh for the division lead. The tie will be broken Sunday when the Steelers visit the New York Jets.
It was a peerless performance by the Browns that bordered on near perfection from the opening kickoff to the final gun as the Browns blasted a couple of streaks to smithereens.
It was their first road victory in the division since knocking off Cincinnati, 20-13, on Sept. 28, 2008. That’s 17 games ago. The crushing loss also shattered the Bengals’ unbeaten streak at home at 14 games.
The Browns, winners of three straight and five of their last six games, won this one in a manner that can best be described as dominating. The running game made a triumphant return. Brian Hoyer was his usual efficient self. And the defense was, well, special.
Four turnovers (three interceptions and a fumble recovery) blunted everything the Bengals tried. The Cleveland defense took opportunity to a whole new level.
The Bengals were never in the game after quarterback Andy Dalton threw the first of his three interceptions right into the hands of Cleveland linebacker Craig Robertson on the game’s fifth play.
Five running plays later, Ben Tate scored the first of the club’s three rushing touchdowns. Isaiah Crowell scored his fifth of the season early in the second quarter, culminating an eight-play, 59-yard drive.
And Terrance West, running hard and smart all evening, made it three for the infantry with a 1-yard plunge late in the third quarter. Considering how well the defense was playing, it served merely as frosting on the cake.
It seemed the harder Dalton and his offense tried, the worse it got. Nothing went right. But it did for the Browns. That’s because they made their own breaks.
Even when Jim Leonhard fumbled away a Kevin Huber punt on the Bengals’ second series of the game, their only mistake of the evening, the Browns’ transition defense ramped up and stopped the Cincinnati offense again. And when Buster Skrine was flagged for pass interference on a fourth-and-10 at the Cleveland 32 on the same series, the transition defense found another gear.
It took the Bengals 13 plays, a fumble recovery on a punt and a pass interference to put up the only points they would score. Two gifts and all they did with them was score three points.
That right then and there should have told Browns Nation this one was going to be different and extremely enjoyable.
But Browns fans are so conditioned to expect something negative to spoil the fun, it is understandable if they had trouble figuring out just why the Bengals really never put up a fight and waited for a turnaround that never came.
That’s because the Browns arrived ready to play. They arrived with attitude and a nastiness that lasted the entire game. They basically bullied the Bengals all evening. Not once did the home team retaliate. It was an unfair fight.
On offense, the line fired out all evening, grinding relentlessly. The ground game, which averaged just 52 yards in three dismal performances against Jacksonville, Oakland and Tampa Bay, erupted for 170 yards as West (94 yards), Tate and Crowell pounded away.
It ran smoothly, almost effortlessly, as just about everything seemed to work. They really did not stop themselves with any critical errors, controlling the ball for nearly 36 minutes.
Hoyer was a workmanlike 15-of-23 for 198 yards and came up with big plays when he needed them. They added up to a season-best 7-of-16 on third down. His play fakes were borderline exquisite due to the effectiveness of the run game.
The defense, which welcomed back big Phil Taylor to the middle of the line, appears to have a much firmer grasp lately of the Mike Pettine scheme. It limited the Bengals to just 165 yards in 14 series. Their deepest penetration was the Cleveland 19 following Skrine’s PI in the first quarter. He atoned later with a pair of picks.
Taylor had only three tackles, but was a disruptive force up the middle, teaming with Paul Kruger and Desmond Bryant (two sacks) to put enough pressure on Dalton to make him throw before he wanted to, missing badly most of the game.
The longest Cincinnati drive of the evening lasted eight plays, covered 46 yards and ended with Skrine’s first interception. The next Cincinnati possession produced Skrine’s second theft four plays later.
Dalton was just 10-of-33 for an embarrassing 86 yards and three picks. The Bengals, who had only 11 first downs, converted just three third downs in 17 attempts.
A microcosm of how just about everything went in the Browns’ favor was what took place during the second series of the second half.
They had a third down and a foot at the Cincinnati 31 after the Bengals successfully challenged a spot by the officials that originally gave them a first down. They dropped back five more yards when right guard John Greco was flagged for a false start.
Typical Cleveland mistake at the wrong time, moaned most Browns fans. And when Hoyer failed to connect with Taylor Gabriel on the next play, the moaning grew louder. But wait, a flag. What now?
Referee Jeff Triplette signaled holding before slowly indicating Cincinnati’s George Iloka was the culprit. A 6-4 safety mugging a 5-7 receiver.
On the very next play, Crowell appeared to fumble the ball after a one-yard pickup and the Bengals recovered. A review (all turnovers are reviewed) reversed the call. Another bullet dodged. That’s two.
A 28-yard strike to tight end Gary Barnidge, a Tate two-yard run and a failed Hoyer sneak from the 1 brought up a third and goal. West dived over the line, thrust out the ball and it appeared to cross the plane of the goal.
The ball popped out and rolled to the back of the end zone and another flag flew. Browns Nation held its breath. After a moment, Triplette finally indicated the Bengals were offside, but the Browns declined the penalty because West had, indeed, scored.
Whenever the Browns needed a turnover, they got one. Like when free safety Tashaun Gipson (who later dropped an interception right in his chest) popped the ball free from rookie Bengals running back Jeremy Hill early in the second quarter and it floated right into the hands of a waiting Joe Haden.
Haden also had a spectacular game, as did the entire secondary, which contested passes hard. Haden shut down A. J. Green with just three catches for 23 yards.
Whenever they needed a big play on offense to sustain a drive, they got one from Hoyer, whose mid-range accuracy was solid.
It was a performance that had to make NFL Nation sit up and take notice. It also makes one wonder just where this team was the last three weeks. How did these guys lose to the Jacksonville Jaguars and look very ordinary against Oakland and Tampa Bay?
It was an entirely different team that actually looked like a contender. It appeared to love the national TV stage. In fact, it seemed to relish the spotlight. No stage fright there.
Working on very little rest because of the short turnaround definitely brought out the best in this team at least for one night. They now have 10 days to get ready for Houston back home.
After what we saw in Cincinnati, it is not unreasonable to expect a similar performance against the Texans. We now know the Browns are capable of it.
Did you hear anything about Phil Taylor's injury? He tried to get off the field but it appeared his leg gave out before he could make it.
They're checking him out today. The OBR should be able to provide the info you seek.
So you're a fan of his, too? Ain't the Internet wonderful?
He gives the word "annoying" a whole new meaning.
|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import warnings
import paddle.fluid as fluid
from paddle.fluid import core
from paddle.fluid.framework import Program
from paddle.fluid.compiler import CompiledProgram
from paddle.fluid.executor import Executor
from paddle.fluid.parallel_executor import ParallelExecutor
from paddle.fluid.framework import Variable, Parameter
from .runtime_base import RuntimeBase
from ..base.private_helper_function import wait_server_ready
__all__ = []
class ParameterServerRuntime(RuntimeBase):
    """Fleet runtime that drives parameter-server (PS) distributed training.

    Responsibilities:
      * derive the sync/async/geo training strategy from the user config,
      * start/stop the trainer-side communicator,
      * run the server loop,
      * save/load dense, sparse and large-scale (distributed) parameters.
    """

    def __init__(self):
        super(ParameterServerRuntime, self).__init__()
        # Created lazily in _init_worker(); exchanges parameter updates
        # between trainers and parameter servers.
        self._communicator = None

    def _set_basic_info(self, context):
        """Cache the fleet context and precompute the runtime strategies."""
        self.context = context
        self.role_maker = context["role_maker"]
        self.origin_main_program = context["origin_main_program"]
        self.origin_startup_program = context["origin_startup_program"]
        self.async_strategy = self._get_distributed_strategy()
        self.compiled_strategy = self.build_compiled_startegy()

    def _get_distributed_strategy(self):
        """Map (a_sync, k_steps) onto a concrete strategy object.

        a_sync=False, k_steps == 0 -> sync
        a_sync=True,  k_steps == 0 -> async
        a_sync=True,  k_steps  > 0 -> geo(k_steps)

        Raises:
            ValueError: for any other (a_sync, k_steps) combination.
        """
        strategy = None
        from paddle.fluid.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import StrategyFactory
        dist_strategy = self.context["valid_strategy"]
        k_steps = dist_strategy.a_sync_configs["k_steps"]
        if not dist_strategy.a_sync and k_steps == 0:
            strategy = StrategyFactory.create_sync_strategy()
        if dist_strategy.a_sync and k_steps == 0:
            strategy = StrategyFactory.create_async_strategy()
        if dist_strategy.a_sync and k_steps > 0:
            strategy = StrategyFactory.create_geo_strategy(k_steps)
        if not strategy:
            # Reached e.g. when a_sync is False but k_steps != 0.
            # (Original message read "must be invalid value" — a typo.)
            raise ValueError("k_steps must be a valid value, please check")
        return strategy

    def build_compiled_startegy(self):
        """Build the compile-time strategy wrapper.

        NOTE: the misspelled method name ("startegy") is kept because it is
        part of the public interface used elsewhere.
        """
        from paddle.fluid.incubate.fleet.parameter_server.ir.public import CompileTimeStrategy
        compiled_config = CompileTimeStrategy(
            self.origin_main_program, self.origin_main_program,
            self.async_strategy, self.role_maker)
        return compiled_config

    def _load_sparse_params(self,
                            executor,
                            dirname,
                            varnames,
                            main_program=None):
        """Load SelectedRows (sparse) variables named in `varnames` from `dirname`.

        Builds a small program of `sparse_tensor_load` ops (one per matching
        variable found in the default main program) and runs it on `executor`.

        Raises:
            ValueError: if a variable's file does not exist under `dirname`.
        """
        # BUGFIX: the original asserted the *builtin* `vars`, which is always
        # truthy; the intent is clearly to require `varnames`.
        assert varnames is not None
        load_prog = Program()
        load_block = load_prog.global_block()

        def _in_varnames(var):
            return var.name in varnames

        load_vars = list(
            filter(_in_varnames, fluid.default_main_program().list_vars()))
        if main_program is None:
            # NOTE(review): `main_program` is currently unused after this
            # default is applied — loading always scans the default main
            # program above. Kept for interface compatibility.
            main_program = self.origin_main_program
        from paddle.fluid.incubate.fleet.parameter_server.ir.public import _get_varname_parts
        for each_var in load_vars:
            assert isinstance(each_var, Variable)
            origin_varname, _, _ = _get_varname_parts(each_var.name)
            new_var = fluid.io._clone_var_in_block_(load_block, each_var)
            var_path = os.path.join(dirname, origin_varname)
            if not os.path.exists(var_path):
                raise ValueError("SelectedRows var {} can not find at {}".
                                 format(new_var.name, var_path))
            if os.path.isfile(var_path):
                # Each server loads only its own shard of the sparse table.
                load_block.append_op(
                    type='sparse_tensor_load',
                    inputs={},
                    outputs={'Out': [new_var]},
                    attrs={
                        'file_path': os.path.join(dirname, origin_varname),
                        'node_index': self.role_maker._server_index(),
                        'node_num': self.role_maker._server_num(),
                        'shape': each_var.shape
                    })
        executor.run(load_prog)

    def _load_distributed_params(self, dirname, varnames):
        """Load large-scale (distributed) parameters into the LargeScaleKV store."""
        from paddle.fluid.communicator import LargeScaleKV
        from paddle.fluid.incubate.fleet.parameter_server.ir.public import _get_varname_parts
        scale_kv = LargeScaleKV()
        for varname in varnames:
            origin_varname, _, _ = _get_varname_parts(varname)
            sparse_dir = os.path.join(dirname, origin_varname, varname)
            scale_kv.load(varname, sparse_dir)

    @staticmethod
    def __exclude_vars(exclude_var_names=None):
        """Return a predicate selecting persistable vars NOT in `exclude_var_names`.

        Gradient vars, the learning-rate var, and feed/fetch/reader vars are
        always excluded regardless of the list.
        """
        # Avoid a mutable default argument; semantics are unchanged since the
        # list is only read.
        if exclude_var_names is None:
            exclude_var_names = []

        def is_valid(var):
            if var.name in exclude_var_names:
                return False
            from paddle.fluid.incubate.fleet.parameter_server.ir.public import _get_varname_parts
            origin_varname, _, _ = _get_varname_parts(var.name)
            if origin_varname.endswith("@GRAD"):
                return False
            if origin_varname == "learning_rate_0":
                return False
            if var.desc.type() == core.VarDesc.VarType.FEED_MINIBATCH or \
                    var.desc.type() == core.VarDesc.VarType.FETCH_LIST or \
                    var.desc.type() == core.VarDesc.VarType.READER:
                return False
            return var.persistable

        return is_valid

    def _init_worker(self):
        """Start the trainer-side communicator (sync/async/geo aware).

        Optionally blocks until all parameter servers (and, in ps-heter mode,
        all heter workers) are reachable before starting.
        """

        def sync_strategy_envs():
            # Extra kwargs the communicator needs in sync mode.
            kwargs = {}
            kwargs[
                "pserver_endpoints"] = self.role_maker._get_pserver_endpoints()
            kwargs["trainer_id"] = self.role_maker._worker_index()
            return kwargs

        def geo_strategy_envs():
            from paddle.fluid.incubate.fleet.parameter_server.ir.public import get_sparse_tablenames

            def get_sparse_attrs():
                # initializer op type -> attributes that must be replayed on
                # the server side to re-create the initializer
                opt_init_map = {}
                opt_init_map["gaussian_random"] = ["seed", "mean", "std"]
                opt_init_map["fill_constant"] = ["value"]
                opt_init_map["uniform_random"] = ["seed", "min", "max"]
                opt_init_map[
                    "truncated_gaussian_random"] = ["seed", "mean", "std"]
                dist_varnames = get_sparse_tablenames(self.origin_main_program,
                                                      True)
                sparse_varnames = get_sparse_tablenames(
                    self.origin_main_program, False)
                if len(dist_varnames) != 0:
                    raise ValueError(
                        "GeoStrategy can not support large scale embeding now, please use fluid.layers.embedding"
                    )
                # Encode each sparse table as
                # "name:dim0,dim1[,...][:init_op&attr1&attr2...]",
                # joined by '#'. The format is consumed by the communicator.
                init_attrs = []
                for value_name in sparse_varnames:
                    value_var = self.origin_main_program.global_block().vars[
                        value_name]
                    value_attr = [
                        value_name,
                        ",".join([str(dim) for dim in value_var.shape])
                    ]
                    for op in self.origin_startup_program.global_block().ops:
                        if op.type in opt_init_map.keys(
                        ) and value_name == op.output("Out")[0]:
                            init_attr = [op.type]
                            for attr in opt_init_map[op.type]:
                                init_attr.append(str(op.attr(attr)))
                            value_attr.append("&".join(init_attr))
                            init_attrs.append(":".join(value_attr))
                            break
                return "#".join(init_attrs)

            kwargs = {}
            kwargs["trainers"] = self.role_maker._worker_num()
            kwargs["sparse_attrs"] = get_sparse_attrs()
            return kwargs

        from paddle.fluid.incubate.fleet.parameter_server.ir.public import _get_lr_ops, _has_global_step
        from paddle.fluid.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import \
            SyncStrategy, GeoStrategy

        trainer_config = self.async_strategy.get_trainer_runtime_config()
        # Surface the effective trainer runtime config for debugging.
        print(trainer_config)

        dist_strategy = self.context["valid_strategy"]
        launch_barrier = dist_strategy.a_sync_configs["launch_barrier"]
        if launch_barrier:
            # for trainer wait server ready
            wait_server_ready(self.role_maker._get_pserver_endpoints())

            # for ps-heter mode, wait heter worker ready
            if self.role_maker._is_heter_parameter_server_mode and self.role_maker._is_worker(
            ):
                wait_server_ready(self.role_maker._get_heter_worker_endpoints())

        lrs = _has_global_step(_get_lr_ops(self.origin_main_program))

        if lrs:
            kwargs = {"need_global_step": "1"}
        else:
            kwargs = {"need_global_step": "0"}

        if isinstance(self.async_strategy, GeoStrategy):
            geo_kwargs = geo_strategy_envs()
            kwargs.update(geo_kwargs)
        if isinstance(self.async_strategy, SyncStrategy):
            sync_kwargs = sync_strategy_envs()
            kwargs.update(sync_kwargs)

        kwargs = kwargs if kwargs else None

        send_ctx = self.compiled_strategy.get_communicator_send_context()

        # recv_type: 1 = dense, 4 = geo-mode delta (see CompileTimeStrategy).
        if self.compiled_strategy.is_geo_mode():
            recv_ctx = self.compiled_strategy.get_communicator_recv_context(
                recv_type=4)
        else:
            recv_ctx = self.compiled_strategy.get_communicator_recv_context(
                recv_type=1)

        from paddle.fluid.communicator import Communicator
        self._communicator = Communicator(
            trainer_config.mode, kwargs,
            trainer_config.get_communicator_flags())
        self._communicator.init_with_ctx(send_ctx, recv_ctx)

        if not self._communicator.is_running():
            self._communicator.start()
        else:
            warnings.warn("communicator has been initialized, skip")

    def _get_executor(self):
        """Return an Executor on the right device.

        CPU by default; on a heter worker, GPU or XPU according to the
        `heter_worker_device_guard` strategy setting.
        """
        executor = fluid.Executor(fluid.CPUPlace())
        if self.role_maker._is_heter_parameter_server_mode:
            heter_worker_device_guard = self.context[
                "valid_strategy"].a_sync_configs[
                    "heter_worker_device_guard"].upper()
            if heter_worker_device_guard not in ["GPU", "XPU", "CPU"]:
                raise ValueError("Heter Worker Not Support Device {}".format(
                    heter_worker_device_guard))
            if self.role_maker._is_heter_worker():
                if heter_worker_device_guard == "GPU":
                    executor = Executor(
                        fluid.CUDAPlace(
                            int(os.getenv("FLAGS_selected_gpus", "0"))))
                elif heter_worker_device_guard == "XPU":
                    executor = Executor(
                        fluid.XPUPlace(
                            int(os.getenv("FLAGS_selected_xpus", "0"))))
        return executor

    def _init_server(self, *args, **kwargs):
        """Run the startup program on this server and, when a model directory
        is given, restore dense/sparse/distributed parameters from it.

        Accepts at most one positional argument: `dirname`.
        """
        if len(args) > 1:
            raise ValueError("init server can only accept 1 args: `dirname`")
        elif len(args) == 1:
            model_dirname = args[0]
        else:
            model_dirname = None

        executor = self._get_executor()
        if self.role_maker._is_heter_worker() and self.context[
                "valid_strategy"].a_sync_configs["launch_barrier"]:
            # for heter trainer wait server ready
            wait_server_ready(self.role_maker._get_pserver_endpoints())
        executor.run(fluid.default_startup_program())

        if self.role_maker._is_heter_worker():
            # Heter workers behave as trainers from here on.
            self._init_worker()
            return

        # Collect sparse / distributed parameter names (plus the optimizer
        # state that shadows them) so they can be loaded specially below and
        # excluded from the plain dense load.
        sparse_varnames = self.compiled_strategy.get_sparse_varname_on_ps(False)
        sparse_related_optimize_varnames = []
        for var_name in sparse_varnames:
            sparse_related_optimize_varnames += self.compiled_strategy.get_optimize_varname_on_ps(
                var_name)
        sparse_related_optimize_varnames = list(
            set(sparse_related_optimize_varnames))
        distributed_varnames = self.compiled_strategy.get_sparse_varname_on_ps(
            True)
        distributed_related_optimize_varnames = []
        for var_name in distributed_varnames:
            distributed_related_optimize_varnames += self.compiled_strategy.get_optimize_varname_on_ps(
                var_name)
        distributed_related_optimize_varnames = list(
            set(distributed_related_optimize_varnames))

        remaining_vars = list(
            filter(
                ParameterServerRuntime.__exclude_vars(
                    sparse_varnames + distributed_varnames +
                    sparse_related_optimize_varnames +
                    distributed_related_optimize_varnames),
                fluid.default_main_program().list_vars()))

        if not model_dirname:
            return

        if not os.path.isdir(model_dirname):
            # BUGFIX: the original passed the path as a second ValueError
            # argument instead of interpolating it into the message.
            raise ValueError(
                "There is no directory named '%s'" % model_dirname)

        # load dense
        fluid.io.load_vars(
            executor,
            main_program=fluid.default_main_program(),
            dirname=model_dirname,
            vars=remaining_vars)

        # load sparse
        self._load_sparse_params(
            executor=executor,
            dirname=model_dirname,
            varnames=sparse_varnames + sparse_related_optimize_varnames)

        # load large scale
        self._load_distributed_params(
            dirname=model_dirname,
            varnames=distributed_varnames +
            distributed_related_optimize_varnames)

    def _run_server(self):
        """Block running the server main program on this node's executor."""
        executor = self._get_executor()
        executor.run(fluid.default_main_program())

    def _stop_worker(self):
        """Stop the communicator and release this worker's executor."""
        self._communicator.stop()
        executor = self._get_executor()
        executor.close()

    def _get_optimizer_status(self, op, param_name):
        """Return the optimizer-state variable names for `param_name`.

        Returns:
            (reshaped_names, origin_names): state vars shaped like the
            (possibly sliced) parameter, and state vars that keep their
            original (scalar-like) shape, e.g. Adam's beta power accumulators.

        Raises:
            ValueError: if `op` is not a supported optimizer type.
        """
        supported_opts = [
            "sgd", "adam", "adagrad", "adamax", "momentum", "lars_momentum",
            "rmsprop", "decayed_adagrad", "ftrl"
        ]

        reshaped_val_map = {}
        reshaped_val_map["sgd"] = []
        reshaped_val_map["adam"] = ["moment1_0", "moment2_0"]
        reshaped_val_map["adagrad"] = ["moment_0"]
        reshaped_val_map["adamax"] = ["moment_0", "inf_norm_0"]
        reshaped_val_map["momentum"] = ["velocity_0"]
        reshaped_val_map["lars_momentum"] = ["velocity_0"]
        reshaped_val_map[
            "rmsprop"] = ["momentum_0", "mean_square_0", "mean_grad_0"]
        reshaped_val_map["decayed_adagrad"] = ["moment_0"]
        reshaped_val_map["ftrl"] = ["squared_0", "linear_0"]

        orishaped_val_map = {}
        orishaped_val_map["adam"] = ["beta1_pow_acc_0", "beta2_pow_acc_0"]
        orishaped_val_map["adamax"] = ["beta1_pow_acc_0"]

        if op not in supported_opts:
            raise ValueError(
                "fleet can not support optimizer: {}, only this can be supported: {}".
                format(op, supported_opts))

        reshaped_names = [
            param_name + "_" + val for val in reshaped_val_map[op]
        ]

        if op not in orishaped_val_map:
            origin_names = []
        else:
            origin_names = [
                param_name + "_" + val for val in orishaped_val_map[op]
            ]
        return reshaped_names, origin_names

    def _get_optimizer_op(self, param_name):
        """Return the optimizer op updating `param_name`, or None if absent."""
        from paddle.fluid.incubate.fleet.parameter_server.ir.public import _get_optimize_ops
        opts = _get_optimize_ops(self.origin_main_program)
        for op in opts:
            if "Param" in op.input_names and \
                    "LearningRate" in op.input_names and op.input("Param")[0] == param_name:
                return op

    def _save_dense_params(self, executor, dirname, context, main_program):
        """Pull dense parameters (and their optimizer state) from the servers
        and save each one under `dirname` via `recv_save` ops.

        Returns:
            list of the origin variable names that were saved locally.
        """
        self._communicator.recv()

        prog = Program()
        block = prog.global_block()
        local_vars = []

        for name, var_ctx in context.items():
            if len(var_ctx.origin_varnames()) != 1:
                raise ValueError("Dense can not support split now.")

            varname = var_ctx.origin_varnames()[0]
            local_vars.append(varname)
            optimizer = self._get_optimizer_op(varname)
            reshaped_varnames, origin_varnames = self._get_optimizer_status(
                optimizer.type, varname)

            for var_name in [varname] + reshaped_varnames + origin_varnames:
                var = self.origin_main_program.global_block().vars[var_name]
                block.append_op(
                    type='recv_save',
                    attrs={
                        "trainer_id": self.role_maker._worker_index(),
                        "shape": var.shape,
                        "slice_shapes":
                        [",".join([str(i) for i in var.shape])],
                        "slice_varnames": [var.name],
                        "remote_varnames": [var.name],
                        "is_sparse": False,
                        "endpoints": var_ctx.split_endpoints(),
                        "file_path": os.path.join(dirname, var.name)
                    })

        executor.run(prog)
        return local_vars

    def _save_sparse_params(self, executor, dirname, context, main_program):
        """Pull sparse parameters (and optimizer state) from the servers and
        save them under `dirname` via `recv_save` ops.

        Returns:
            the saved variable names (keys of `context`).
        """
        prog = Program()
        block = prog.global_block()
        local_vars = []

        for name, var_ctx in context.items():
            if len(var_ctx.origin_varnames()) != 1:
                # BUGFIX: message previously said "Dense" (copy-paste from
                # _save_dense_params).
                raise ValueError("Sparse can not support split now.")

            varname = var_ctx.origin_varnames()[0]
            local_vars.append(varname)
            optimizer = self._get_optimizer_op(varname)
            reshaped_varnames, origin_varnames = self._get_optimizer_status(
                optimizer.type, varname)

            var = self.origin_main_program.global_block().vars[varname]
            # One slice shape per server section: "<section_rows><tail dims>".
            # NOTE(review): there is no "," inserted between the section size
            # and the tail dims here — confirm against recv_save's expected
            # slice_shapes format before changing.
            slice_shapes = []
            dims1 = ",".join([str(i) for i in var.shape[1:]])

            for section in var_ctx.sections():
                slice_shapes.append(str(section) + dims1)

            block.append_op(
                type='recv_save',
                attrs={
                    "trainer_id": self.role_maker._worker_index(),
                    "shape": var.shape,
                    "slice_shapes": slice_shapes,
                    "slice_varnames": var_ctx.split_varnames(),
                    "remote_varnames": var_ctx.split_varnames(),
                    "is_sparse": True,
                    "endpoints": var_ctx.split_endpoints(),
                    "pserver_num":
                    len(self.role_maker._get_pserver_endpoints()),
                    "file_path": os.path.join(dirname, var.name)
                })

            for reshaped_varname in reshaped_varnames:
                var = self.origin_main_program.global_block().vars[
                    reshaped_varname]

                slice_varnames = []
                remote_varnames = []
                for i in range(len(var_ctx.split_varnames())):
                    slice_varnames.append("{}.block{}".format(reshaped_varname,
                                                              i))
                    remote_varnames.append(reshaped_varname)

                block.append_op(
                    type='recv_save',
                    attrs={
                        "trainer_id": self.role_maker._worker_index(),
                        "shape": var.shape,
                        "slice_shapes": slice_shapes,
                        "slice_varnames": slice_varnames,
                        "remote_varnames": remote_varnames,
                        "is_sparse": True,
                        "endpoints": var_ctx.split_endpoints(),
                        "pserver_num":
                        len(self.role_maker._get_pserver_endpoints()),
                        "file_path": os.path.join(dirname, var.name)
                    })

            for origin_varname in origin_varnames:
                var = self.origin_main_program.global_block().vars[
                    origin_varname]

                block.append_op(
                    type='recv_save',
                    attrs={
                        "trainer_id": self.role_maker._worker_index(),
                        "shape": var.shape,
                        "slice_shapes":
                        [",".join([str(i) for i in var.shape])],
                        "slice_varnames": [origin_varname],
                        "remote_varnames": [origin_varname],
                        "is_sparse": False,
                        # Origin-shaped state lives unsplit on one server.
                        "endpoints": var_ctx.split_endpoints()[:1],
                        "file_path": os.path.join(dirname, var.name)
                    })
        executor.run(prog)
        return context.keys()

    def _save_distributed_params(self, executor, dirname, context, mode):
        """Ask each server to checkpoint its large-scale (distributed)
        parameter shards into `dirname` via `checkpoint_notify` ops.

        Returns:
            the notified variable names (keys of `context`).
        """
        prog = Program()
        block = prog.global_block()

        for name, var_ctx in context.items():
            block.append_op(
                type='checkpoint_notify',
                attrs={
                    "varname": name,
                    "mode": mode,
                    "slice_varnames": var_ctx.split_varnames(),
                    "remote_varnames": var_ctx.split_varnames(),
                    "endpoints": var_ctx.split_endpoints(),
                    "dirname": dirname
                })

        executor.run(prog)
        return context.keys()

    def _save_distributed_persistables(self, executor, dirname, main_program,
                                       mode):
        """Save every persistable variable: dense/sparse/distributed ones are
        fetched from the servers; everything else is saved locally."""
        # recv_type: 1 = dense, 2 = sparse, 3 = distributed.
        dense_ctx = self.compiled_strategy.get_communicator_recv_context(
            recv_type=1, use_origin_program=True)

        sparse_ctx = self.compiled_strategy.get_communicator_recv_context(
            recv_type=2, use_origin_program=True)

        distributed_ctx = self.compiled_strategy.get_communicator_recv_context(
            recv_type=3, use_origin_program=True)

        recv_dense_varnames = self._save_dense_params(executor, dirname,
                                                      dense_ctx, main_program)

        recv_sparse_varnames = self._save_sparse_params(
            executor, dirname, sparse_ctx, main_program)

        recv_distributed_varnames = self._save_distributed_params(
            executor, dirname, distributed_ctx, mode)

        saved_varnames = recv_dense_varnames + list(
            recv_sparse_varnames) + list(recv_distributed_varnames)

        remaining_vars = list(
            filter(
                ParameterServerRuntime.__exclude_vars(saved_varnames),
                main_program.list_vars()))

        fluid.io.save_vars(
            executor,
            main_program=main_program,
            dirname=dirname,
            vars=remaining_vars)

    def _ps_inference_save_persistables(self,
                                        executor,
                                        dirname,
                                        main_program=None,
                                        mode=0,
                                        **kwargs):
        """
        This function filters out all variables with `persistable==True` from the
        give `main_program` and then saves these variables to the folder `dirname`
        or file `filename`.

        The `dirname` is used to specify the folder where persistable variables
        are going to be saved. If you would like to save variables in separate
        files, set `filename` None; if you would like to save all variables in a
        single file, use `filename` to specify the file name.

        Raises:
            TypeError: if `executor` is not a plain Executor, or
                `main_program` is a CompiledProgram.
        """
        if isinstance(executor, ParallelExecutor):
            raise TypeError(
                "in fleet.save_persistables() function, executor must be as Executor type, ParallelExecutor is not allowed"
            )

        if not isinstance(executor, Executor):
            raise TypeError(
                "in fleet.save_persistables() function, executor must be as Executor type"
            )

        if main_program is None:
            main_program = self.compiled_strategy.get_origin_ps_main_program()

        if isinstance(main_program, CompiledProgram):
            raise TypeError(
                "in fleet.save_persistables() function, main_program must be as Program type, CompiledProgram is not allowed"
            )

        self._save_distributed_persistables(executor, dirname, main_program,
                                            mode)

    def _ps_inference_save_inference_model(self,
                                           executor,
                                           dirname,
                                           feeded_var_names,
                                           target_vars,
                                           main_program=None,
                                           export_for_deployment=True):
        """
        Prune the given `main_program` to build a new program especially for inference,
        and then save it and all related parameters to given `dirname` by the `executor`.

        Raises:
            TypeError: if `executor` is not a plain Executor, or the supplied
                `main_program` is a CompiledProgram.
        """
        if isinstance(executor, ParallelExecutor):
            raise TypeError(
                "in fleet.save_inference_model() function, executor must be as Executor type, ParallelExecutor is not allowed"
            )

        if not isinstance(executor, Executor):
            raise TypeError(
                "in fleet.save_inference_model() function, executor must be as Executor type"
            )

        if main_program is not None:
            if isinstance(main_program, CompiledProgram):
                raise TypeError(
                    "in fleet.save_inference_model() function, main_program must be as Program type, CompiledProgram is not allowed"
                )
            fluid.io.save_inference_model(dirname, feeded_var_names,
                                          target_vars, executor, main_program,
                                          None, None, export_for_deployment)
        else:
            fluid.io.save_inference_model(dirname, feeded_var_names,
                                          target_vars, executor,
                                          self.origin_main_program, None, None,
                                          export_for_deployment, True)

            # Re-read the exported program so the distributed parameter info
            # can be attached before persisting the parameters themselves.
            model_basename = "__model__"
            model_filename = os.path.join(dirname, model_basename)
            with open(model_filename, "rb") as f:
                program_desc_str = f.read()

            program = Program.parse_from_string(program_desc_str)
            program._copy_dist_param_info_from(fluid.default_main_program())
            self._ps_inference_save_persistables(
                executor, dirname, program, mode=0)

    def _save_inference_model(self, *args, **kwargs):
        """Public entry point: delegate to the PS inference-model saver."""
        self._ps_inference_save_inference_model(*args, **kwargs)

    def _save_persistables(self, *args, **kwargs):
        """Public entry point: delegate to the PS persistables saver."""
        self._ps_inference_save_persistables(*args, **kwargs)
|
Sophie Anderson made her debut for the Scottish Women’s Rugby Team against Spain in Madrid.
A north-east woman has swapped delivering papers for delivering tries.
Sophie Anderson is part of the Scotland Women’s Rugby team, and played in Madrid at the weekend.
It was the 20-year-old’s first cap for the squad, despite only trying the game for the first time two years ago.
REPORT | Four-try Scots edged by Spain Women in Madrid thriller (29-24).
The former Mintlaw papergirl – who even won an award for her dedication to delivering P&Js to more than 40 people for four years – described it as an “unreal experience”.
Her fans at home – including her former customers – are being kept up-to-date with her achievements, as her brother has now taken over the paper round.
Miss Anderson said: “For four years I was a papergirl in Mintlaw and got on so well with everyone.
“I had over 40 papers for the area and even now everyone chats to me.
As well as being dedicated to the deliveries, the former Mintlaw Academy pupil played football with the successful Buchan Girls and Ladies FC.
It was thanks to encouragement from a teammate that she discovered her love of rugby.
“One teammate kept saying I would be able to play rugby as I had the speed and strength and she believed I would play for Scotland,” she said.
“Eventually about two years ago I messaged her, tried it out with the Garioch Ladies team and I fell in love with it.
“We had been playing in Edinburgh and someone came up to me after I had a really good game and scored a try – they asked for my contact details.
“I was asked to start training with the West of Scotland women which meant a lot of travel to start off.
Sophie Anderson in action for Scotland against Spain in Madrid.
While studying midwifery at the University of the West of Scotland, she joined the Hillhead Jordanhill RFC Club where her talent was noticed and she was asked to become part of the national training program.
And despite breaking her foot at the start of April, Miss Anderson – who plays in the second row, using her speed and strength to gain ground – was delighted to make the trip to Spain.
Miss Anderson said: “It was an unreal experience and I loved getting to play in Madrid.
“It’s such an exciting time for the women’s team with a lot of firsts.
The match was a tight contest, with Spain narrowly winning the game 29-24.
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RGenomicfeatures(RPackage):
    """Conveniently import and query gene models.
    A set of tools and methods for making and manipulating transcript
    centric annotations. With these tools the user can easily download the
    genomic locations of the transcripts, exons and cds of a given organism,
    from either the UCSC Genome Browser or a BioMart database (more sources
    will be supported in the future). This information is then stored in a
    local database that keeps track of the relationship between transcripts,
    exons, cds and genes. Flexible methods are provided for extracting the
    desired features in a convenient format."""
    homepage = "https://bioconductor.org/packages/GenomicFeatures"
    # Bioconductor packages are fetched from their Git mirror by commit,
    # not by release tarball.
    git = "https://git.bioconductor.org/packages/GenomicFeatures.git"
    # Each version pins the commit of the corresponding Bioconductor release.
    version('1.36.4', commit='28082ec465c91ccaec6881ff348b380edac1b555')
    version('1.34.8', commit='c798b3bb111f4de30632303540074ec1875c1387')
    version('1.32.3', commit='80807d88048858846de3750cecb9431a0e5e69e1')
    version('1.30.3', commit='496bbf81beebd7c934b8d3dcea001e3e4a7d7dee')
    version('1.28.5', commit='ba92381ae93cb1392dad5e6acfab8f6c1d744834')
    # Baseline dependencies (minimum versions valid for all package versions).
    depends_on('r-biocgenerics@0.1.0:', type=('build', 'run'))
    depends_on('r-s4vectors@0.9.47:', type=('build', 'run'))
    depends_on('r-iranges@2.9.19:', type=('build', 'run'))
    depends_on('r-genomeinfodb@1.11.4:', type=('build', 'run'))
    depends_on('r-genomicranges@1.27.6:', type=('build', 'run'))
    depends_on('r-annotationdbi@1.33.15:', type=('build', 'run'))
    depends_on('r-dbi', type=('build', 'run'))
    depends_on('r-rsqlite@2.0:', type=('build', 'run'))
    depends_on('r-rcurl', type=('build', 'run'))
    depends_on('r-xvector', type=('build', 'run'))
    depends_on('r-biostrings@2.23.3:', type=('build', 'run'))
    depends_on('r-rtracklayer@1.29.24:', type=('build', 'run'))
    depends_on('r-biomart@2.17.1:', type=('build', 'run'))
    depends_on('r-biobase@2.15.1:', type=('build', 'run'))
    # Version-specific bumps: newer GenomicFeatures releases require newer
    # Bioconductor dependencies (the `when=` clause scopes each constraint).
    depends_on('r-iranges@2.11.16:', when='@1.30.3:', type=('build', 'run'))
    depends_on('r-genomeinfodb@1.13.1:', when='@1.30.3:', type=('build', 'run'))
    depends_on('r-genomicranges@1.29.14:', when='@1.30.3:', type=('build', 'run'))
    # r-rmysql is needed only by the 1.30.3 release (note: exact pin, no ':').
    depends_on('r-rmysql', when='@1.30.3', type=('build', 'run'))
    depends_on('r-s4vectors@0.17.29:', when='@1.32.3:', type=('build', 'run'))
    depends_on('r-iranges@2.13.23:', when='@1.32.3:', type=('build', 'run'))
    depends_on('r-genomeinfodb@1.15.4:', when='@1.32.3:', type=('build', 'run'))
    depends_on('r-genomicranges@1.31.17:', when='@1.32.3:', type=('build', 'run'))
    depends_on('r-annotationdbi@1.41.4:', when='@1.32.3:', type=('build', 'run'))
    depends_on('r-xvector@0.19.7:', when='@1.32.3:', type=('build', 'run'))
    depends_on('r-biostrings@2.47.6:', when='@1.32.3:', type=('build', 'run'))
    depends_on('r-rtracklayer@1.39.7:', when='@1.32.3:', type=('build', 'run'))
|
Ignite finished up two weeks ago. Now that the jet lag has fully worn off, and many of us have returned to more ‘comfortable’ climates, we thought we’d take a look at the week’s antics through the eyes of our well-traveled Quadro-bulls.
Never knowingly underdressed, our bulls took sartorial inspiration from across the Sunshine State for their trip. Kitted up like ‘gators, superheroes, beach-lovers, and a certain famous mouse, the bulls were ready to hit the road.
Give us a twirl ‘Gator!
True fact: Did you know, South Florida is the only place in the world where both Alligators and Crocodiles reside (in the wild).
Now while Ignite is many things: busy, exciting, HUGE, and certainly a tad overwhelming, not many people come back from the event having suffered quite as much as our poor bulls this year. Where our previous ‘bull-based’ blog saw our little friends living like high rollers in Vegas, we can’t say they fared as well in Orlando.
First off, our bulls didn’t have the most glamorous entrance into Florida. They turned up in herds, stuffed in plastic wrappers, having been forced to make the journey up close and personal in a cardboard box with 1000+ of their fellow travelers. And you thought your economy flight out there was bad?
Quietly concerned that you don’t know the difference? Here’s a handy guide, which will also tell you what a ‘steer’ and a ‘heifer’ is – you’re welcome.
Microsoft Ignite attracts 26,000 highly skilled and intelligent technical experts from across the world, in various industries. I feel like that point needs (very) literal underlining. I’m also willing to bet that NONE of said attendees have the job title, or a sideline career as a ‘Zoologist’, ‘Vet’, ‘Animal Trainer’, however, some of you could consider stepping AWAY from the screen and getting out into nature, or at the very least tuning into some National Geographic once in a while. It might save you some embarrassment when you’re next on the farm.
Once they were set free from their plastic confines, a particularly unlucky set of 40 bulls were chosen for the ‘Tombulla’.
These poor creatures were loaded into our perspex tombola, and essentially rotated non-stop for four days. Visitors to the stand could pick a numbered bull out of the box, and if it corresponded with the code on the lockbox then they got to pick a prize from the tech box.
Happy hour is a time to take the intensity down a notch. It’s a time to have a relaxed chat, drink in hand, and watch the show wind down for another day.
Nope. Not just yet – we had one last game in store for our poor bulls. A quick game of BASKETBULL, with Tony Sterling our SVP of Customer Success taking the brunt of the team’s poor throwing skills.
How do you kick back and celebrate after a week of being rolled around in a plastic prison? A celebration at a theme park, of course!
Unlike the human components of the Quadrotech team (who were very excited), our bulls were less than impressed with Microsoft’s Thursday night celebration (shutting down both Universal parks for Ignite attendees). It was the final straw in their high-octane, thrill-ride of a trip.
Feeling queasy and bruised, our bulls high-tailed it out of Florida at the first opportunity. Until next time, Ignite.
Bulls-eye view: Want to see what went down at Inspire?
Microsoft Ignite 2018: That’s A Wrap!
Our final daily update from MS Ignite 2018. Take a look at our video infographic review.
|
#!/usr/bin/env python3
#
# Copyright (c) 2014, Scott Silver Labs, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import os
from rstem import led_matrix, button
import random
import time
# notify of progress ("P<n>" lines appear to be progress markers parsed by the
# launching menu -- TODO confirm against the menu program)
print("P50")
sys.stdout.flush()
# initialize led matrix
#led_matrix.init_grid(2,2)
# Four 8x8 matrices arranged in a 2x2 layout, addressed by their offsets.
led_matrix.init_matrices([(0,8),(8,8),(8,0),(0,0)])
# set up buttons: GPIO pin numbers used by the rstem button module
A = 4
B = 17
UP = 25
DOWN = 24
LEFT = 23
RIGHT = 18
START = 27
SELECT = 22
# setup exit and restart button
exit_button = button.Button(START)    # START exits the program
restart_button = button.Button(A)     # A reseeds a fresh random grid
# notify of progress
print("P60")
sys.stdout.flush()
# initialize variables (assigned for real inside the game loop below)
num_rows, num_cols, curr_gen, next_gen = (None, None, None, None)
def get_num_neighbors(curr_gen, x, y):
    """Return the number of alive (0xF) neighbors of the cell at (x, y).

    The 8 surrounding cells are examined; cells outside the grid count as
    dead.

    :param curr_gen: the grid, a list of rows, each a list of cell values
        (0x0 dead, 0xF alive)
    :param x: column index of the cell
    :param y: row index of the cell
    :returns: count of alive neighbors, 0..8
    """
    # Derive the bounds from the grid itself instead of querying the LED
    # matrix hardware: the grid is always built with the matrix dimensions,
    # so behavior is unchanged, but the function now works for any grid and
    # no longer depends on the display being initialized.
    height = len(curr_gen)
    width = len(curr_gen[0]) if height else 0
    count = 0
    for j in range(y-1, y+2):
        for i in range(x-1, x+2):
            if i == x and j == y:
                continue  # don't count the cell itself
            if 0 <= i < width and 0 <= j < height:
                if curr_gen[j][i] == 0xF:
                    count += 1
    return count
def next_generation():
    """Advance the module-level grid one step using Conway's Game of Life
    rules: http://en.wikipedia.org/wiki/Conway's_Game_of_Life

    Writes the new generation into ``next_gen`` and then swaps the two
    module-level buffers, so ``curr_gen`` always holds the latest state.
    """
    global next_gen
    global curr_gen
    for row in range(0, num_rows):
        for col in range(0, num_cols):
            cell = curr_gen[row][col]
            neighbors = get_num_neighbors(curr_gen, col, row)
            if cell == 0xF and (neighbors < 2 or neighbors > 3):
                # Alive cell dies: under-population or over-population.
                next_gen[row][col] = 0
            elif cell == 0 and neighbors == 3:
                # Dead cell with exactly three neighbors is born.
                next_gen[row][col] = 0xF
            else:
                # State carries over unchanged.
                next_gen[row][col] = cell
    # Swap the buffers instead of copying: cheap double-buffering.
    curr_gen, next_gen = next_gen, curr_gen
def random_grid(width, height):
    """Build a ``height`` x ``width`` grid of randomly dead/alive cells.

    Each cell is alive (0xF) with probability 1/4 and dead (0x0) otherwise,
    drawn row by row so the RNG consumption order matches a nested loop.

    :returns: list of ``height`` rows, each a list of ``width`` cell values
    """
    return [[0xF if random.randint(0, 3) == 0 else 0x0
             for _ in range(width)]
            for _ in range(height)]
def draw_grid():
    """Render the current generation onto the LED matrix, one pixel per
    cell (cell value doubles as the pixel brightness)."""
    for row_index in range(num_rows):
        row = curr_gen[row_index]
        for col_index in range(num_cols):
            led_matrix.point(col_index, row_index, row[col_index])
# whole game loop: each iteration seeds a fresh random grid, then the inner
# loop animates it until the player restarts (A) or quits (START)
while True:
    # variables: grid dimensions come from the initialized display
    num_rows = led_matrix.height()
    num_cols = led_matrix.width()
    # notify of progress
    print("P80")
    sys.stdout.flush()
    curr_gen = random_grid(num_cols, num_rows)
    # notify of progress
    print("P90")
    sys.stdout.flush()
    # second buffer written by next_generation(), then swapped with curr_gen
    next_gen = [[0 for i in range(num_cols)] for j in range(num_rows)]
    # TODO allow sprite input instead of random grid?
    # notify menu we are ready for the led matrix
    print("READY")
    sys.stdout.flush()
    # single game loop
    while True:
        if exit_button.is_pressed():
            # clean up stuff and exit the program
            button.cleanup()
            led_matrix.cleanup()
            sys.exit(0)
        elif restart_button.is_pressed():
            break  # break out of this inner loop (lets us restart generations)
        else:
            led_matrix.erase()   # clear the display
            draw_grid()          # draw the current generation
            led_matrix.show()    # show on display
            next_generation()    # update generation to next generation
|
This volume was published to be used as the textbook for the Short Course on Fe-Ti Oxides: Their Petrologic and Magnetic Significance, held May 24-27, 1991, organized by B.R. Frost, D.H. Lindsley, and S.K. Banerjee and jointly sponsored by the Mineralogical Society of America and the American Geophysical Union.
It has been fourteen and a half years since the last MSA Short Course on Oxide Minerals and the appearance of Volume 3 of Reviews in Mineralogy. Much progress has been made in the interim. This is particularly evident in the coverage of the thermodynamic properties of oxide minerals: there was nothing in Volume 3, while in contrast, Volume 25 has three chapters (6, 7, and 8) presenting various aspects of the thermodynamics of oxide minerals; and other chapters (9, 11, 12) build extensively on thermodynamic models. The coverage of magnetic properties has also been considerably expanded (Chapters 4, 8, and 14). Finally, the interaction of oxides and silicates is emphasized in Chapters 9, 11, 12, 13, and 14. One of the prime benefits of Reviews in Mineralogy has been that any scientist can afford to have it at his or her fingertips. Because Volume 3 is out of print and will not be readily available to newcomers to our science, as much as possible we have tried to make Volume 25 a replacement for, rather than a supplement to, the earlier volume. Chapters on crystal chemistry, phase equilibria, and oxide minerals in both igneous and metamorphic rocks have been rewritten or extensively revised. The well received photographs of oxide textures in Volume 3 have been collected and expanded into a "Mini-Atlas" in Volume 25. Topics that receive less attention than in the earlier volume are oxides in lunar rocks and meteorites, and the manganese minerals. We hope that the new volume will turn out to be as useful as the previous one was.
|
from ..utils import asciireplace, limit_text
from ..exceptions.messages import RowObjectMsg as msg
from functools import lru_cache
from tabulate import tabulate
from string import ascii_lowercase, digits
from types import FunctionType
import keyword
accepted_chars = (ascii_lowercase + "_" + digits)
class Row(dict):
    """This Class represents a row in a spreadsheet

    This object is a highly specialized dict, meant to allow
    extremely quick and easy access/manipulation to row data
    at an acceptable memory cost.

    Cell data is stored directly in the dict (keyed by the original column
    name); condensed, identifier-safe column names are resolved through the
    internal ``__psvcolumnstracker__`` mapping so cells can also be read and
    written as attributes (``row.some_column``).
    """

    # __slots__ suppresses the per-instance __dict__; the only non-cell state
    # is the output flag and the two attribute whitelists.
    __slots__ = ["__delwhitelist__", "__output__", "__sawhitelist__"]

    def __init__(self, data, columns_map, *args, **kwargs):
        # These are used to whitelist which attribute names may be set or
        # deleted through the normal attribute protocol (see __setattr__ and
        # __delattr__). super().__setattr__ must be used here because this
        # class overrides __setattr__ to write into cells instead.
        super(Row, self).__setattr__("__delwhitelist__",
            RowDefaults.__delwhitelist__)
        super(Row, self).__setattr__("__sawhitelist__",
            RowDefaults.__sawhitelist__)
        super(Row, self).__init__(data)
        # Maps condensed (identifier-safe) column names -> real column names.
        self[RowDefaults.__psvcolumns__] = columns_map
        self._set_outputrow(True)
        # Subclass hook; Row.construct itself is a no-op.
        self.construct(*args, **kwargs)

    def __call__(self, column, setvalue=None, delete=False):
        """Alias for the .getcolumn()/.setcolumn()/.delcolumn() family of methods"""
        if delete:
            self.delcolumn(column, False)
        elif setvalue is None:
            return self.getcolumn(column, False)
        else:
            self.setcolumn(column, setvalue, False)

    def __eq__(self, other):
        # Rows compare equal when their sorted (column, value) content
        # matches; the internal column tracker is excluded by __hashvalue__.
        if isinstance(other, self.__class__):
            return self.__hashvalue__() == other.__hashvalue__()
        return False

    def __hashvalue__(self):
        """raw data that can be hashed if all contents are hashable
        or can be used for comparison
        """
        return (tuple((column, self[column])
                for column in filter(lambda x: x != "__psvcolumnstracker__", sorted(self.keys()))))

    def __repr__(self):
        # len(...) - 1 hides the internal __psvcolumnstracker__ entry.
        return "<{rowname}:{columnamount} object at {hexloc}>".format(
            rowname=self.__class__.__name__,
            columnamount=len(self.keys())-1,
            hexloc=hex(id(self)).upper().replace("X", "x")
        )

    def __str__(self):
        # Same format as __repr__.
        return "<{rowname}:{columnamount} object at {hexloc}>".format(
            rowname=self.__class__.__name__,
            columnamount=len(self.keys())-1,
            hexloc=hex(id(self)).upper().replace("X", "x")
        )

    def __pos__(self):
        # +row: flag the row for output.
        self._set_outputrow(True)
        return self

    def __neg__(self):
        # -row: exclude the row from output.
        self._set_outputrow(False)
        return self

    def __invert__(self):
        # ~row: toggle the output flag.
        self._set_outputrow(not (self.outputrow))
        return self

    def __getattribute__(self, attr):
        # Hot path for attribute access: if attr is a known condensed column
        # name, return the cell's value; otherwise fall back to normal
        # attribute lookup on the class.
        if not self["__psvcolumnstracker__"].get(attr, False):
            return super(dict, self).__getattribute__(attr)
        else:
            return self[self["__psvcolumnstracker__"][attr]]

    def __getattr__(self, attr):
        """Handles all exception handling when __getattribute__ fails"""
        s = cleanup_name(attr)
        # Distinguish "no such attribute" from "you used the long column name
        # where only the condensed form is accepted".
        if s in self["__psvcolumnstracker__"].keys():
            raise AttributeError((
                "{}{}"
                .format(
                    '\'{}\' has no attribute \'{}\''.format(
                        type(self), attr),
                    ". However, '{s}' is an existing condensed ".format(s=s) +
                    "column name. Only the condensed version is supported."
                    .format(s=s)
                )))
        else:
            raise AttributeError(msg.attribute_missing.format(
                type(self), attr))

    def __setattr__(self, attr, v):
        """Allows setting of rows and attributes by using =
        statement

        Note: Setting class Attributes is not optimized, this dict has specialized around
        dynamic attribute (from row data) access. Regular Attribute Setting may be much slower.
        """
        s = cleanup_name(attr)
        # EAFP: assume attr is a condensed column name; fall back on KeyError.
        try:
            self[self["__psvcolumnstracker__"][attr]] = v
        except KeyError:
            if attr in self.__sawhitelist__:
                super(Row, self).__setattr__(attr, v)
            else:
                keys = self["__psvcolumnstracker__"].keys()
                if s in keys:
                    raise AttributeError((
                        "{}{}"
                        .format(
                            '\'{}\' has no attribute \'{}\''.format(
                                type(self), attr),
                            ". However, '{s}' is an existing condensed ".format(s=s) +
                            "column name. Only the condensed version is supported."
                            .format(s=s)
                        )))
                else:
                    # A somewhat hacky implementation of Dict's restriction of editing it's
                    # Attributes.
                    if attr in dir(self):
                        raise AttributeError(
                            msg.attribute_readonly.format(classname=self.__class__, attr=attr))
                    else:
                        raise AttributeError(msg.attribute_missing.format(
                            type(self), attr))

    def __delattr__(self, attr):
        """Allows deletion of rows and attributes (Makes a row an empty string) by using
        del statement"""
        s = cleanup_name(attr)
        # "Deleting" a cell means blanking it, not removing the column.
        try:
            self[self["__psvcolumnstracker__"][attr]] = ""
        except KeyError:
            if attr in self.__delwhitelist__:
                super(Row, self).__delattr__(attr)
            else:
                keys = self["__psvcolumnstracker__"].keys()
                if s in keys:
                    raise AttributeError((
                        "{}{}"
                        .format(
                            '\'{}\' has no attribute \'{}\''.format(
                                type(self), attr),
                            ". However, '{s}' is an existing condensed ".format(s=s) +
                            "column name. Only the condensed version is supported."
                            .format(s=s)
                        )))
                else:
                    if attr in dir(self):
                        raise AttributeError(
                            msg.attribute_readonly.format(classname=self.__class__, attr=attr))
                    else:
                        raise AttributeError(msg.attribute_missing.format(
                            type(self), attr))

    def add_valid_attribute(self, attr, deletable=False):
        """Used by classes that inherit to add attributes to the whitelists

        Note: Row should only be inherited if no other option is available.
        These attributes being accessed will be notably slower due to the implementation.
        Memory Usage may also be much higher, as the whitelists will no longer be a
        static variable.
        """
        if self.__class__ is Row:
            raise TypeError(msg.inherited_rows)
        # Copy-on-write: replace the shared class-level whitelist with a
        # per-instance copy that includes the new attribute.
        super(Row, self).__setattr__(
            "__sawhitelist__", set(self.__sawhitelist__ | set((attr,))))
        if deletable:
            super(Row, self).__setattr__(
                "__delwhitelist__", set(self.__delwhitelist__ | set((attr,))))

    def construct(self, *args, **kwargs):
        """This method can be used by inherited objects of :class:`Row` as if it was __init__

        Note: Row should only be inherited if no other option is available. It cause
        memory bloat issues and can be notably slower.
        """
        pass

    @property
    def outputrow(self):
        """Returns a boolean of the current output flag for this row"""
        return self.__output__

    @outputrow.setter
    def outputrow(self, v):
        if not isinstance(v, bool):
            raise TypeError(msg.outputrowmsg.format(bool, type(v)))
        self.__output__ = v

    def _set_outputrow(self, v):
        """Fast Internal way to set output flags

        Doesn't check for bad input, meant for internal use only
        Much faster than the setter
        """
        super(Row, self).__setattr__("__output__", v)

    def getcolumn(self, column, accept_small_names=True):
        """Get a cell by the orginal column name

        :param column: The column name. Can only be long form if accept_small_names == False
        :type column: :class:`str`
        :returns: String of the data, or an int/float if a number/decimal.
        :rtype: :class:`str`, :class:`int`, or :class:`float`
        """
        if column in self.keys():
            return (self[column])
        elif accept_small_names:
            if self["__psvcolumnstracker__"].get(column):
                return getattr(self, column)
        if not accept_small_names:
            raise ValueError("'{}'".format(column))
        else:
            raise ValueError("'{}'. Make sure the shorterned columns name have no collisions".format(column))

    def setcolumn(self, column, value, accept_small_names=True):
        """Set a cell by the orginal column name

        :param column: The column name. Can be both long and short form.
        :param value: The data to be set to the specified column
        :type column: :class:`str`
        """
        if column in self.keys():
            self[column] = value
            return
        elif accept_small_names:
            if self["__psvcolumnstracker__"].get(column):
                self.__setattr__(column, value)
                return
        if not accept_small_names:
            raise ValueError("'{}'".format(column))
        else:
            raise ValueError("'{}'. Make sure the shorterned columns name have no collisions".format(column))

    def delcolumn(self, column, accept_small_names=True):
        """Delete a cell by the orginal column name

        Deleting only blanks the cell (sets it to ""); the column remains.

        :param column: The column name. Can be both long and short form.
        :type column: :class:`str`
        """
        if column in self.keys():
            self[column] = ""
            return
        elif accept_small_names:
            if self["__psvcolumnstracker__"].get(column):
                self.__delattr__(column)
                return
        if not accept_small_names:
            raise ValueError("'{}'".format(column))
        else:
            raise ValueError("'{}'. Make sure the shorterned columns name have no collisions".format(column))

    def _addcolumns(self, columnname, columndata=""):
        """Adds a column for this row only doesn't add to column tracker

        Warning: Internal Method, API/Behavior may change without notice"""
        self[columnname] = columndata

    def _addcolumns_func(self, columnname, columnfunc):
        # Like _addcolumns, but the cell value is computed from the row.
        # Warning: Internal Method, API/Behavior may change without notice
        self[columnname] = columnfunc(self)

    def _delcolumns(self, columnname, columndata=""):
        """Removes a column for this row only
        doesn't remove from the column tracker

        Warning: Internal Method, API/Behavior may change without notice"""
        del self[columnname]

    def _rename_columns(self, old_columnname, new_columnname):
        # Moves the cell value to the new key and removes the old one.
        # Warning: Internal Method, API/Behavior may change without notice
        self[new_columnname] = self[old_columnname]
        del self[old_columnname]

    def longcolumn(self, columns=None):
        """
        :params columns: A collection of columns, if supplied the method
            will return only the specified columns.
        :type columns: :class:`tuple`, :class:`list`
        :returns: Generates a :class:`dict` that uses orginal names of
            the column.
        :rtype: :class:`dict`
        """
        newdict = {}
        for k in columns or self.keys():
            if k == "__psvcolumnstracker__":
                continue
            newdict.update({
                k: self[k]})
        return newdict

    def update_values(self, *arg, **kwargs):
        """Safe way to use a .update() like method on rows, checks header columns

        Accepts positional dicts and/or keyword arguments; raises ValueError
        if any key is not already a column of this row (prevents silently
        creating untracked columns).
        """
        keys = set(self.keys())
        if arg:
            for x in arg:
                xkeys = set(x.keys())
                if xkeys.issubset(keys):
                    self.update(x)
                else:
                    raise ValueError(
                        "'{}' contains columns not in this row currently"
                        .format(x)
                    )
        if kwargs:
            kwkeys = set(kwargs.keys())
            if kwkeys.issubset(keys):
                self.update(kwargs)
            else:
                raise ValueError(
                    "'{}' contains columns not in this row currently"
                    .format(kwargs)
                )

    def tabulate(self, format="grid", only_ascii=True, columns=None, text_limit=None):
        """Integrates tabulate library with psv

        :param format: A valid format for :class:`tabulate` library.
        :param only_ascii: If :data:`True`, only return ascii characters.
        :param columns: Collection of column names that will be included in the
            tabulating.
        :param text_limit: The number of characters to include per cell.
        :type format: :class:`str`
        """
        data = self.longcolumn()
        sortedcolumns = sorted(data) if not columns else columns
        result = tabulate(
            [sortedcolumns] +
            [[limit_text(data[c], text_limit) for c in sortedcolumns]],
            headers="firstrow",
            tablefmt=format)
        if only_ascii:
            return asciireplace(result)
        else:
            return result
class RowDefaults(object):
    """Contains Static Variables the Row uses
    to prevent rampant memory waste.

    Row instances share these class-level whitelists until
    add_valid_attribute() copies them per instance.
    """
    # Attribute names that may be deleted / set through the normal
    # attribute protocol (see Row.__delattr__ / Row.__setattr__).
    __delwhitelist__ = set()
    __sawhitelist__ = {"__output__", "outputrow"}
    # This is inlined in most of the library due to speed constraints.
    __psvcolumns__ = '__psvcolumnstracker__'
# For backwards compatibility, will be removed in the future
# Referring to Row as BaseRow is considered deprecated
BaseRow = Row
# This block was in utils,
# but it relied on a circular reference that re-imported
# a variable every time this core function was called.
# While less clean, this produces a decent speedup.
banned_columns = {RowDefaults.__psvcolumns__,}
# Condensed names that would shadow Row attributes/keywords get a "psv_"
# prefix in cleanup_name(). (The duplicate __psvcolumns__ entry below is
# harmless inside a set.)
non_accepted_key_names = set(tuple(dir(
    Row)) + ("row_obj", RowDefaults.__psvcolumns__,
             RowDefaults.__psvcolumns__) + tuple(keyword.kwlist))
# Identifiers cannot start with a digit.
bad_first_char = set(digits)
@lru_cache(1024)
def cleanup_name(s):
    """Condense a column name into a valid, lowercase Python identifier.

    Characters outside [a-z0-9_] are stripped; names that would shadow Row
    attributes/keywords, or that start with a digit, get a "psv_" prefix.
    Raises ValueError when nothing survives the filtering.
    """
    condensed = "".join(ch for ch in s.lower() if ch in accepted_chars)
    if not condensed:
        raise ValueError(msg.non_valid.format(s))
    if condensed in non_accepted_key_names or condensed[0] in bad_first_char:
        condensed = "psv_" + condensed
    return condensed
|
The Boston College Graduate Employees Union (BCGEU-UAW) held a rally calling on BC to respect its vote to establish collective bargaining rights with the University and to drop its appeal to the National Labor Relations Board to invalidate the election. As of Wednesday morning, 262 graduate students had signed a petition, which the union plans to deliver to BC at some point in the future.
Tensions ran high Tuesday morning on O’Neill Plaza, as about 40 union members implored the University to respect their vote.
“You don’t have the right to protest here, but you have the right to do it outside the gates just like we’ve done with other unions,” said Richard DeCapua, the associate dean for student conduct, at the rally.
He warned that while BC would not stop the union from rallying Tuesday, there could potentially be consequences because of the gathering, which was unsanctioned because the union is not affiliated with BC, he said.
The union argues that BC should stop its appeal to the NLRB and assist them in the process of creating collective bargaining rights at BC. Gabriele pointed to the fact that doing so is ethically and morally justifiable, since the University would exemplify its Jesuit mission and “men and women for others” ideology.
Members of the union believe that establishing these rights for graduate students provides them with better health benefits, an effective family leave policy, a more strict sexual harassment code of conduct, and a variety of other benefits. Doing so will allow them to focus on their studies while also balancing their personal lives and having more stable personal finances.
The rally particularly highlighted the fact that graduate students have a double identity at BC as both students and workers. Prasannan Parthasarathi, the director of graduate studies in the history department, argued that they are students who also work extremely hard as workers contributing to the mission of the University. He further stated that the BC administration must recognize graduate students’ work as researchers, research assistants, teaching assistants, and teaching fellows.
Correction: Due to a miscommunication by the union, a previous version of this article and its headline said they had delivered a petition to the University. They have in fact not yet delivered the petition.
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2015 @lmorillas. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Based on
https://github.com/google/google-api-python-client/blob/master/samples/service_account/tasks.py
by jcgregorio@google.com
"""
__author__ = 'morillas@google.com (Luis Miguel Morillas)'
import httplib2
import pprint
import sys
import datetime
from operator import itemgetter
from itertools import groupby
from googleapiclient.discovery import build
from oauth2client.client import SignedJwtAssertionCredentials, AccessTokenRefreshError
from geopy import geocoders
# Geocoder backends tried in cascade order by geolocate(); Nominatim is used
# only for reverse lookups (loc_to_country).
google = geocoders.GoogleV3(timeout=5)
yandex = geocoders.Yandex(timeout=5)
nom = geocoders.Nominatim(timeout=5)
import shelve
# Credentials for Service Account
EMAIL_CLIENT = '696801545616-44i6o78jdoa7me4lr416n1d5rniidmns@developer.gserviceaccount.com'
FILE_KEY = 'pycal.p12'  # PKCS#12 key downloaded from the Google API Console
def connect_calendar():
    """Build an authorized Google Calendar v3 service client.

    Reads the service-account private key and signs a JWT assertion with
    read/write Calendar scopes.

    :returns: an authorized ``googleapiclient`` service object for the
        Calendar v3 API.
    """
    # Load the key in PKCS 12 format that you downloaded from the Google API
    # Console when you created your Service account.
    # Fix: use a context manager so the file handle is closed even if
    # read() raises (the original open/read/close leaked on error).
    with open(FILE_KEY, 'rb') as f:
        key = f.read()
    credentials = SignedJwtAssertionCredentials(EMAIL_CLIENT,
        key,
        scope=['https://www.googleapis.com/auth/calendar',
               'https://www.googleapis.com/auth/calendar.readonly'])
    http = httplib2.Http()
    http = credentials.authorize(http)
    service = build(serviceName='calendar', version='v3', http=http)
    return service
def get_month(date_str):
    '''
    returns start month str (locale month name, %B) from an ISO
    date or datetime string; only the leading YYYY-MM-DD part is parsed
    '''
    day = datetime.datetime.strptime(date_str[:10], '%Y-%m-%d')
    return day.strftime("%B")
def calendar_events(service, cal_id, singleEvents="False"):
    """Fetch all present/future events of a calendar, following pagination.

    :param service: authorized Calendar v3 service object.
    :param cal_id: calendar identifier.
    :param singleEvents: stringly-typed flag passed through to the API; when
        not "False", recurring events are expanded into instances and the
        query is capped at the end of the current year.
    :returns: list of event resources (possibly empty on auth failure).
    """
    # Today: only events present and future.
    timeMin = datetime.datetime.now().strftime('%Y-%m-%dT00:00:00.000Z')
    if singleEvents != "False":
        # Expanded instances would otherwise repeat forever; stop at year end.
        timeMax = '{}-12-31T23:00:00.000Z'.format(datetime.datetime.now().year)
    else:
        timeMax = None
    #timeMin = datetime.datetime.now().isoformat()
    events = []
    try:
        page_token = None
        while True:
            event_list = service.events().list(singleEvents=singleEvents, orderBy='startTime', calendarId=cal_id,
                pageToken=page_token, timeMin=timeMin, timeMax=timeMax).execute()
            # extend directly; the intermediate list copy was pointless
            events.extend(event_list['items'])
            page_token = event_list.get('nextPageToken')
            if not page_token:
                break
    except AccessTokenRefreshError:
        # Fix: the two adjacent literals used to concatenate without a space
        # ("re-runthe application").
        print ('The credentials have been revoked or expired, please re-run '
               'the application to re-authorize.')
    return events
def geolocate(address):
    """Resolve a free-text address via a cascade of geocoders, memoized in
    the module-level ``geocache`` shelf.

    Returns (latitude, longitude, raw_response) on a fresh successful
    lookup, or None when every geocoder fails.
    NOTE(review): on a cache hit only (latitude, longitude) is returned;
    callers appear to use only the first two elements, but confirm before
    relying on the third.
    """
    global geocache
    #address = address.encode('utf-8') # for storing in shelve
    loc = None
    if address not in geocache.keys():
        print ('Searching ', address)
        # Cascade: Google, then Yandex, then Google again with the first
        # comma-separated component dropped (e.g. strip a venue name).
        try:
            loc = google.geocode(address)
        except:
            pass
        if not loc:
            try:
                loc = yandex.geocode(address)
            except:
                pass
        if not loc:
            try:
                loc = google.geocode(','.join(address.split(',')[1:]))
            except:
                pass
        if loc:
            loc = loc.latitude, loc.longitude, loc.raw
            geocache[address] = loc  # only successful lookups are cached
    else:
        loc = geocache.get(address)[:2]
    return loc
def loc_to_country(latlon):
    """Reverse-geocode a "lat,lon" string to a country name via Nominatim,
    memoized in the module-level ``geocache`` shelf.

    Returns '' when the reverse lookup raises.
    NOTE(review): when nom.reverse() returns a falsy result without raising,
    the function implicitly returns None and caches nothing -- confirm
    callers tolerate None as well as ''.
    """
    global geocache
    if latlon not in geocache.keys():
        print ('Searching country of ', latlon)
        try:
            loc = nom.reverse(latlon)
            if loc:
                country = loc.raw.get('address').get('country')
                geocache[latlon] = country
                return country
        except:
            return ''
    else:
        return geocache.get(latlon)
def event_to_item(event, cal):
    """Flatten a Google Calendar event resource into a plain dict ("item")
    for the JSON/Exhibit frontend.

    :param event: event resource dict from the Calendar API ('summary',
        'start', 'end', 'htmlLink', 'location', ...).
    :param cal: label identifying the source calendar ('Larger'/'Smaller').
    :returns: item dict; adds 'latlon'/'country' when the event location
        can be geocoded.
    """
    if event.get('summary'):
        print (event.get('summary').encode('utf-8'), ' --> ' )
    else:
        print('No summary ? ', event)
    item = {}
    item['description'] = event.get('description')
    item['id'] = event.get('id')
    # All-day events carry 'date'; timed events carry 'dateTime'.
    item['start'] = event.get('start').get('date')
    if not item['start']:
        item['start'] = event.get('start').get('dateTime')
    item['end'] = event.get('end').get('date')
    if not item['end']:
        item['end'] = event.get('end').get('dateTime')
    item['label'] = event.get('summary')
    item['url'] = event.get('htmlLink')
    item['cal'] = cal
    item['month'] = get_month(item.get('start'))
    address = event.get('location')
    if address:
        location = geolocate(address)
        if location:
            lat = location[0]
            lon = location[1]
            item['latlon'] = "{},{}".format(lat, lon)
            print (item['latlon'])
            country = loc_to_country(item['latlon'])
            item['country'] = country
    return item
def create_index(data="", schema = ""):
    """Render docs/index.html from the index.templ template, embedding the
    event data and Exhibit schema as JSON plus a generation date stamp.

    NOTE(review): ``json`` here is the ``simplejson`` module imported in the
    __main__ block; JSONEncoderForHTML (HTML-safe escaping) is
    simplejson-specific -- confirm before swapping in stdlib json.
    """
    import pytz
    #data = json.dumps(data)
    data = json.JSONEncoderForHTML().encode(data)
    schema = json.dumps(schema)
    now = datetime.datetime.now(pytz.utc)
    format = "%Y-%m-%d" # "%Y-%m-%d %H:%M %Z"
    template = open('index.templ').read()
    open('docs/index.html', 'w').write(template.format(datetime=now.strftime(format),
        data=data, schema=schema ))
def select_first_event(eventlist):
    '''select only the first event when repeated (recurring) events exist

    Events are grouped by recurringEventId (falling back to summary for
    one-off events) and only the earliest instance of each group, by start
    dateTime, is kept. Sorts *eventlist* in place as a side effect.
    '''
    def recurrence_key(ev):
        return ev.get('recurringEventId', ev.get('summary'))

    def start_time(ev):
        return ev.get('start').get('dateTime')

    #recurring = itemgetter('recurringEventId') # keyerror ?
    eventlist.sort(key=recurrence_key)
    firsts = []
    for _, instances in groupby(eventlist, key=recurrence_key):
        try:
            ordered = sorted(instances, key=start_time)
            firsts.append(ordered[0])  # only keep the earliest instance
        except:
            # e.g. all-day events with no 'dateTime' make the sort blow up
            print ('recur error -> ', [x for x in instances])
    return firsts
if __name__ == '__main__':
    import datetime
    import simplejson as json
    # Persistent geocoding cache shared by geolocate()/loc_to_country().
    geocache = shelve.open('geocache.dat')
    # Cals IDs from https://wiki.python.org/moin/PythonEventsCalendar
    cal_id_python_events = 'j7gov1cmnqr9tvg14k621j7t5c@group.calendar.google.com'
    cal_id_user_group = '3haig2m9msslkpf2tn1h56nn9g@group.calendar.google.com'
    items = []
    service = connect_calendar()
    # Larger conferences: recurring events fetched as-is.
    events = calendar_events(service, cal_id_python_events)
    for event in events:
        items.append(event_to_item(event, 'Larger'))
    # User groups: expand recurrences, then keep only the first instance of
    # each recurring series.
    events = calendar_events(service, cal_id_user_group, singleEvents="True")
    events = select_first_event(events)
    for event in events:
        items.append(event_to_item(event, 'Smaller'))
    geocache.sync()
    geocache.close()
    # Exhibit schema describing the item properties/types embedded in the page.
    schema = {"properties": {
        "url": {
            "valueType": "url"
        },
        "start": {
            "valueType": "date"
        },
        "end": {
            "valueType": "date"
        },
        "month": {
            "valueType": "date"
        },
    },
        "types": {
            "Item": {
                "pluralLabel": "events",
                "label": "event"
            }
    }}
    data = {'items': items}
    #data.update(metadata)
    #json.dump(data, open('docs/events_python.json', 'w'))
    create_index(data, schema)
|
- Insert the batteries so the poles (+ and –) match the markings in the remote control.
- Remove if the remote control is not going to be used for a long period of time. Store in a cool, dark place.
- Replace if the unit does not respond to the remote control even when held close to the front panel.
- Do not use rechargeable type batteries.
- Mishandling of batteries in the remote control can cause electrolyte leakage, which may cause a fire. If electrolyte leaks from the batteries, consult your dealer. Wash thoroughly with water if electrolyte comes in contact with any part of your body.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Proper likefile control of a sprite handler
#
from likefile import LikeFile, schedulerThread
import time, Axon, os, random, pygame, math
from Sprites.BasicSprite import BasicSprite
from Sprites.SpriteScheduler import SpriteScheduler
from Kamaelia.UI.Pygame.EventHandler import EventHandler
from Simplegame import cat_location, screensize, border, background, screen_surface, randomFromRangeExcludingZero
import Axon
# Start the Axon scheduler in a background thread; slowmo slows component
# execution down so the animation is visible.
bg = schedulerThread(slowmo=0.01).start()
# NOTE(review): 'global' at module level is a no-op; kept to document that
# spritescheduler is assigned later at module scope and read by CatSprite.
global spritescheduler
class MyGamesEvents(EventHandler):
    """Keyboard handler for the demo: pressing "Q" aborts the program."""
    def __init__(self, cat_args, trace=1, ):
        # NOTE(review): the trace argument is accepted but deliberately
        # forced to 0 (tracing off); preserved from the original code.
        self.trace = 0
        self.cat_args = cat_args
    def keydown(self, unicode, key, mod, where):
        if key == 113: # "Q"
            # Fix: `raise "QUIT"` raised a *string*, which is itself a
            # TypeError on Python >= 2.6/3.x. Raise a real exception instead;
            # any propagating exception tears down the event loop, so the
            # observable effect (the program aborts) is unchanged.
            raise Exception("QUIT")
class CatSprite(BasicSprite):
    """A BasicSprite that registers itself with the module-level sprite
    scheduler and then idles forever; all movement is driven externally
    through its inboxes (translation/rotator/imaging)."""
    def main(self):
        spritescheduler.allsprites.add(self)
        while True:
            self.pause()  # sleep until a message arrives
            yield 1
def make_cat(cat_location, screensize, border):
    """Pick a random image from ./pictures, load it with pygame (white as
    the transparent colourkey) and wrap it in a CatSprite.

    cat_location/screensize/border are accepted for signature compatibility
    but unused here.
    """
    # Get the cat again! Skip repo bookkeeping entries.
    candidates = [name for name in os.listdir("pictures")
                  if name not in ("README", "CVS", ".svn")]
    # randint (not random.choice) keeps the original RNG consumption.
    image_location = candidates[random.randint(0, len(candidates) - 1)]
    surface = pygame.image.load("pictures/" + image_location).convert()
    surface.set_colorkey((255, 255, 255), pygame.RLEACCEL)
    return CatSprite(image=surface)
# Arguments shared by every cat sprite we create.
cat_args = (cat_location, screensize, border)
spritescheduler = SpriteScheduler(cat_args, [], background, screen_surface, MyGamesEvents).activate()
#newcat = make_cat(*cat_args)
class SolarSystem(Axon.ThreadedComponent.threadedcomponent):
    """Toy two-body gravity demo driven through likefile.

    A 'sun' sprite is pinned at the screen centre while a 'planet' sprite is
    integrated around it with naive Euler steps.

    ugh, I should be using numpy but it works, that's the important thing.
    This is merely a test of likefile. Really, kamaelia components should be
    written for a physics simulation like this.
    """
    def __init__(self, *cat_args):
        super(SolarSystem, self).__init__()
        self.the_sun = LikeFile(make_cat(*cat_args), extrainboxes = ("translation", "imaging"))
        self.the_sun.activate()
        self.planet = LikeFile(make_cat(*cat_args), extrainboxes = ("translation", "rotator", "imaging"))
        self.planet.activate()
        self.sun_position = tuple([x/2 for x in screensize])
        self.planet_position = (screensize[0]/4.0, screensize[1]/2)
        self.planet_velocity = (0.0, 10)

    def acceleration(self, pos_planet, pos_sun):
        """Return the gravitational acceleration on the planet, directed
        from the planet toward the sun.

        Fix: the original computed direction = planet - sun (a *repulsive*
        force) and compensated by calling the method with its arguments
        swapped; the sign and the call site are now consistent, producing
        numerically identical results.
        """
        g = 200 # fudge factor
        # F = ma, but F is proportional to distance ** -2
        # neatly removing the need to calculate a square root for the distance
        direction = (pos_sun[0] - pos_planet[0], pos_sun[1] - pos_planet[1])
        magnitude = direction[0] ** 2 + direction[1] ** 2
        return tuple([g * x/magnitude for x in direction])

    def apply_acceleration_to_velocity(self, velocity, accn):
        """Euler step: v' = v + a (the timestep is folded into g)."""
        return (velocity[0] + accn[0], velocity[1] + accn[1])

    def apply_velocity_to_position(self, position, velocity):
        """Euler step: p' = p + v."""
        return (position[0] + velocity[0], position[1] + velocity[1])

    def main(self):
        self.the_sun.put(self.sun_position, "translation")
        while True:
            time.sleep(0.01)
            self.planet.put(self.planet_position, "translation")
            # Arguments now in (planet, sun) order matching the signature.
            accn = self.acceleration(self.planet_position, self.sun_position)
            self.planet_velocity = self.apply_acceleration_to_velocity(self.planet_velocity, accn)
            self.planet_position = self.apply_velocity_to_position(self.planet_position, self.planet_velocity)
# Kick off the simulation; the main thread then sleeps forever so the
# background scheduler thread keeps running the components.
SolarSystem(*cat_args).activate()
while 1:
    time.sleep(100)
|
Ways To Cope In January and A Free Now TV Voucher!
If you’re anything like me, you’ll love free stuff – especially in January, when your bank balance is still crying after the Excesses of Christmas!
You’ve massively overspent despite promising yourself (as you do every year) that you absolutely will not succumb this time – until you realise that four of your friends have had kids that year and you now need to buy presents for their gorgeous new additions too.
There are all kinds of freebies out there to be had that can help to take the pressure off until pay day (if you were paid early in December, you’ll need to take full advantage) to make sure you survive until the end of the month.
These are such a great help at this time of year, so it’s a good idea to save them to use now, when you’re really feeling the pinch and counting the pennies.
They make those trips to the super market much less depressing and mean that you don’t have to live on rice and beans for the best part of a month.
There are many, many vouchers out there to help you save money – this Now TV Voucher Code is a good one for January – if you just can’t afford to go out at the moment, use this to get free movies for cosy nights in until pay day.
|
import os
import pickle
import numpy as np
import chainer
from multiprocessing import Pool
from functools import partial
from chainer import Variable
from chainer.backends import cuda
from qanta.datasets.quiz_bowl import QuizBowlDataset
from qanta.guesser.abstract import AbstractGuesser
from qanta.util.constants import BUZZER_DEV_FOLD, BUZZER_TRAIN_FOLD
# constants
N_GUESSES = 10

os.makedirs("output/buzzer", exist_ok=True)
dataset_dir = "output/buzzer/{}_data.pkl"


def vector_converter_0(guesses_sequence):
    """vector converter / feature extractor with only prob

    Args:
        guesses_sequence: a sequence (length of question) of list of guesses
            (n_guesses), each entry is (guess, prob)
    Returns:
        a sequence of 22-dimensional float32 feature vectors, one per
        position in the question
    """
    prev_probs = [0.0] * N_GUESSES
    prev_lookup = dict()
    vectors = []
    for step in range(len(guesses_sequence)):
        guesses = guesses_sequence[step]
        probs = []
        prob_deltas = []
        novelty = []
        for guess, prob in guesses:
            probs.append(prob)
            if step > 0 and guess in prev_lookup:
                # Guess seen last step: record the probability change.
                prob_deltas.append(prob - prev_lookup[guess])
                novelty.append(0)
            else:
                # Brand-new guess: the delta is the full probability.
                prob_deltas.append(prob)
                novelty.append(1)
        # Pad every per-step list out to N_GUESSES entries.
        pad = N_GUESSES - len(guesses)
        if pad > 0:
            probs.extend([0] * pad)
            prob_deltas.extend([0] * pad)
            novelty.extend([0] * pad)
        features = (
            probs[:3]
            + novelty[:3]
            + prob_deltas[:3]
            + [probs[0] - probs[1], probs[1] - probs[2]]
            + [probs[0] - prev_probs[0], probs[1] - prev_probs[1]]
            + [sum(novelty[:5])]
            + [np.average(probs), np.average(prev_probs)]
            + [np.average(probs[:6]), np.average(prev_probs[:5])]
            + [np.var(probs), np.var(prev_probs)]
            + [np.var(probs[:5]), np.var(prev_probs[:5])]
        )
        vectors.append(np.array(features, dtype=np.float32))
        prev_probs = probs
        prev_lookup = dict(guesses)
    return vectors
def vector_converter_1(guesses_sequence, n_guesses=None):
    """Vector converter / feature extractor using both logits and probs.

    Same scheme as ``vector_converter_0`` but every probability feature has
    a logit counterpart, doubling the feature set.

    Args:
        guesses_sequence: a sequence (length of question) of lists of guesses
            (each entry is a (guess, logit, prob) triple, at most
            ``n_guesses`` long)
        n_guesses: number of guess slots per position; shorter guess lists
            are zero-padded to this length. Defaults to the module-level
            N_GUESSES, preserving the original behavior.

    Returns:
        a list of np.float32 feature vectors, one per position
    """
    if n_guesses is None:
        n_guesses = N_GUESSES
    length = len(guesses_sequence)
    # Values seen at the previous position (zeros before the first).
    prev_logit_vec = [0.0] * n_guesses
    prev_prob_vec = [0.0] * n_guesses
    prev_dict = {}
    vecs = []
    for i in range(length):
        logit_vec = []
        prob_vec = []
        logit_diff_vec = []
        prob_diff_vec = []
        isnew_vec = []
        guesses = guesses_sequence[i]
        for guess, logit, prob in guesses:
            logit_vec.append(logit)
            prob_vec.append(prob)
            if i > 0 and guess in prev_dict:
                # Guess carried over: record how much both scores moved.
                prev_logit, prev_prob = prev_dict[guess]
                logit_diff_vec.append(logit - prev_logit)
                prob_diff_vec.append(prob - prev_prob)
                isnew_vec.append(0)
            else:
                # First appearance: the full score acts as the delta.
                logit_diff_vec.append(logit)
                prob_diff_vec.append(prob)
                isnew_vec.append(1)
        # Zero-pad so every position yields vectors of identical length
        # (a no-op when the guess list is already full).
        for _ in range(n_guesses - len(logit_vec)):
            logit_vec.append(0)
            prob_vec.append(0)
            logit_diff_vec.append(0)
            prob_diff_vec.append(0)
            isnew_vec.append(0)
        # NOTE(review): the [:6] / [:5] slice asymmetry below looks like a
        # typo, but it is preserved to keep the trained feature layout.
        features = (
            logit_vec[:3]
            + prob_vec[:3]
            + isnew_vec[:3]
            + logit_diff_vec[:3]
            + prob_diff_vec[:3]
            + [logit_vec[0] - logit_vec[1], logit_vec[1] - logit_vec[2]]
            + [prob_vec[0] - prob_vec[1], prob_vec[1] - prob_vec[2]]
            + [logit_vec[0] - prev_logit_vec[0], logit_vec[1] - prev_logit_vec[1]]
            + [prob_vec[0] - prev_prob_vec[0], prob_vec[1] - prev_prob_vec[1]]
            + [sum(isnew_vec[:5])]
            + [np.average(logit_vec), np.average(prev_logit_vec)]
            + [np.average(prob_vec), np.average(prev_prob_vec)]
            + [np.average(logit_vec[:6]), np.average(prev_logit_vec[:5])]
            + [np.average(prob_vec[:6]), np.average(prev_prob_vec[:5])]
            + [np.var(logit_vec), np.var(prev_logit_vec)]
            + [np.var(prob_vec), np.var(prev_prob_vec)]
            + [np.var(logit_vec[:5]), np.var(prev_logit_vec[:5])]
            + [np.var(prob_vec[:5]), np.var(prev_prob_vec[:5])]
        )
        vecs.append(np.array(features, dtype=np.float32))
        prev_logit_vec = logit_vec
        prev_prob_vec = prob_vec
        prev_dict = {g: (lg, pr) for g, lg, pr in guesses}
    return vecs
def process_question(questions, vector_converter, item):
    """Multiprocessing worker that turns the guesser output for a single
    question into the (qid, vectors, labels, char_indices) format used by
    the buzzer.
    """
    _, q_rows = item
    # Take the qanta_id from the rows themselves rather than trusting the
    # groupby key.
    qid = q_rows.qanta_id.tolist()[0]
    answer = questions[qid].page
    grouped = q_rows.groupby("char_index")
    char_indices = sorted(grouped.groups.keys())
    guesses_sequence = []
    labels = []
    for char_index in char_indices:
        rows = grouped.get_group(char_index).sort_values("score", ascending=False)
        guesses_sequence.append(list(zip(rows.guess, rows.score))[:N_GUESSES])
        # Label is 1 iff the top-scoring guess matches the gold page.
        labels.append(int(rows.guess.tolist()[0] == answer))
    vectors = vector_converter(guesses_sequence)
    return qid, vectors, labels, char_indices
def read_data(
    fold,
    output_type="char",
    guesser_module="qanta.guesser.rnn",
    guesser_class="RnnGuesser",
    guesser_config_num=0,
    vector_converter=vector_converter_0,
):
    """Load (or build and cache) the buzzer dataset for one fold.

    Reads the pickled guesser output for ``fold``, converts each question's
    guesses into feature vectors with ``vector_converter`` (in parallel),
    and caches the result under ``dataset_dir``.

    Args:
        fold: dataset fold name (e.g. BUZZER_TRAIN_FOLD)
        output_type: granularity of the guesser output file ("char", ...)
        guesser_module / guesser_class / guesser_config_num: identify which
            guesser's output to read
        vector_converter: feature extractor applied to each question

    Returns:
        a list of (qid, vectors, labels, char_indices) tuples
    """
    cache_path = dataset_dir.format(fold)
    if os.path.isfile(cache_path):
        with open(cache_path, "rb") as f:
            return pickle.load(f)
    g_dir = AbstractGuesser.output_path(
        guesser_module, guesser_class, guesser_config_num, ""
    )
    g_path = AbstractGuesser.guess_path(g_dir, fold, output_type)
    with open(g_path, "rb") as f:
        df = pickle.load(f)
    df_groups = df.groupby("qanta_id")
    questions = QuizBowlDataset(buzzer_train=True).questions_by_fold()
    questions = {q.qanta_id: q for q in questions[fold]}
    # Use the pool as a context manager so the worker processes are
    # terminated deterministically (the original leaked the pool).
    with Pool(8) as pool:
        worker = partial(process_question, questions, vector_converter)
        dataset = pool.map(worker, df_groups)
    with open(cache_path, "wb") as f:
        pickle.dump(dataset, f)
    return dataset
def convert_seq(batch, device=None):
    """Collate a batch of (qid, vectors, labels, positions) examples into
    the {"xs": ..., "ys": ...} dict consumed by the buzzer model,
    optionally moving the arrays to ``device``.
    """

    def to_device_batch(seqs):
        if device is None:
            return seqs
        if device < 0:
            # Negative device id: move each sequence individually.
            return [chainer.dataset.to_device(device, x) for x in seqs]
        # Otherwise concatenate so the transfer happens as a single copy,
        # then split back into per-example arrays on the device.
        xp = cuda.cupy.get_array_module(*seqs)
        concat = xp.concatenate(seqs, axis=0)
        sections = np.cumsum([len(x) for x in seqs[:-1]], dtype=np.int32)
        concat_dev = chainer.dataset.to_device(device, concat)
        return cuda.cupy.split(concat_dev, sections)

    # qids and positions are part of each example but unused here.
    _, vectors, labels, _ = map(list, zip(*batch))
    xs = [Variable(v) for v in to_device_batch(vectors)]
    ys = to_device_batch(labels)
    return {"xs": xs, "ys": ys}
if __name__ == "__main__":
    # Build (or load the cached) buzzer training dataset and dump it.
    dataset = read_data(BUZZER_TRAIN_FOLD)
    print(dataset)
|
The second is a 24-year-old Gambian national, who had also been arrested for common crimes, and is accused of having served in “international terrorist groups.” In a notebook found in his possession the man had reproduced diagrams of explosive devices with instructions for their assembly and use.
In the same notebook, the man declared his affinity to a paramilitary formation active in central-western Africa, expressing the strong desire to “kill white tourists” and “Christians” in Gambia by striking hotels and churches. The man wrote that he entrusted the success of his future attacks to Allah.
Finally, authorities also expelled a 28-year-old Tunisian citizen who had been arrested in Modena for vandalism and the sale of drugs. During his trial, the man repeatedly praised the work of the Islamic State and, refusing to return to his detention cell, had attacked prison police.
The announcement of these deportations followed closely on the expulsion of an imam charged with “inciting to Islamic terrorism.” The 26-year-old Egyptian imam, Ahmed Elbadry Elbasiouny Aboualy, had been living in Italy on a work permit since 2005 and was expelled from the country earlier this week.
According to investigators, Aboualy espoused a radical Islamist orientation that was confirmed by surveillance of his electronic communications as well as his dealings in the mosque.
Aboualy had also been identified as one of those responsible for an attack against the magistrate Daniela Santanché in 2009 during a demonstration against the full Islamic veil.
The Interior Ministry declared that Friday’s deportations brought the total number of expulsions carried out since January 2015 to 339, of which 102 have happened in 2018.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.