signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def get_token(self):
    """Fetch a client token from Cerberus, completing the MFA step when
    the initial authentication response requests it.

    :return: the Cerberus client token string
    """
    response = self.get_auth()
    if response['status'] == 'mfa_req':
        # MFA challenge required; exchange the challenge for a real token.
        response = self.get_mfa(response)
    return response['data']['client_token']['client_token']
def __boost(self, grad, hess):
    """Boost Booster for one iteration with customized gradient statistics.

    Note
    ----
    For multi-class task, the score is grouped by class_id first, then by
    row_id. If you want to get the i-th row score in the j-th class, the
    access way is score[j * num_data + i] and you should group grad and
    hess in this way as well.

    Parameters
    ----------
    grad : 1-D numpy array or 1-D list
        The first order derivative (gradient).
    hess : 1-D numpy array or 1-D list
        The second order derivative (Hessian).

    Returns
    -------
    is_finished : bool
        Whether the boost was successfully finished.
    """
    # Normalise both inputs to 1-D numpy arrays so they can be handed to the
    # C API as raw float pointers below.
    grad = list_to_1d_numpy(grad, name='gradient')
    hess = list_to_1d_numpy(hess, name='hessian')
    # The C side reads the buffers directly, so they must be C-contiguous.
    assert grad.flags.c_contiguous
    assert hess.flags.c_contiguous
    if len(grad) != len(hess):
        raise ValueError("Lengths of gradient({}) and hessian({}) don't match".format(len(grad), len(hess)))
    is_finished = ctypes.c_int(0)
    _safe_call(_LIB.LGBM_BoosterUpdateOneIterCustom(
        self.handle,
        grad.ctypes.data_as(ctypes.POINTER(ctypes.c_float)),
        hess.ctypes.data_as(ctypes.POINTER(ctypes.c_float)),
        ctypes.byref(is_finished)))
    # Any predictions cached for the current iteration are now stale.
    self.__is_predicted_cur_iter = [False for _ in range_(self.__num_dataset)]
    return is_finished.value == 1
def union(self, sr, geometries):
    """Run the geometry service ``union`` operation, which constructs the
    set-theoretic union of the geometries in the input array. All inputs
    must be of the same type.

    Inputs:
        geometries - array of geometries to be unioned (structured as JSON
            geometry objects returned by the ArcGIS REST API).
        sr - spatial reference of the input geometries.
    """
    params = {
        "f": "json",
        "sr": sr,
        "geometries": self.__geometryListToGeomTemplate(geometries=geometries),
    }
    return self._get(url=self._url + "/union",
                     param_dict=params,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
def fit(self, Xs, ys=None, Xt=None, yt=None):
    """Build a coupling matrix from source and target sets of samples
    (Xs, ys) and (Xt, yt).

    Parameters
    ----------
    Xs : array-like, shape (n_source_samples, n_features)
        The training input samples.
    ys : array-like, shape (n_source_samples,)
        The class labels.
    Xt : array-like, shape (n_target_samples, n_features)
        The training input samples.
    yt : array-like, shape (n_target_samples,)
        The class labels. If some target samples are unlabeled, fill the
        yt's elements with -1. Warning: due to this convention -1 cannot
        be used as a class label.

    Returns
    -------
    self : object
        Returns self.
    """
    # Proceed only when the mandatory inputs are present.
    if check_params(Xs=Xs, Xt=Xt, ys=ys):
        # Base-class fit computes mu_s, mu_t and the cost matrix.
        super(SinkhornL1l2Transport, self).fit(Xs, ys, Xt, yt)
        solver_output = sinkhorn_l1l2_gl(
            a=self.mu_s, labels_a=ys, b=self.mu_t, M=self.cost_,
            reg=self.reg_e, eta=self.reg_cl, numItermax=self.max_iter,
            numInnerItermax=self.max_inner_iter, stopInnerThr=self.tol,
            verbose=self.verbose, log=self.log)
        # The solver returns (coupling, log) when logging is enabled,
        # otherwise just the coupling matrix.
        if self.log:
            self.coupling_, self.log_ = solver_output
        else:
            self.coupling_ = solver_output
            self.log_ = dict()
    return self
def project_create_notif(self, tenant_id, tenant_name):
    """Tenant-create notification: provision an edge router for the new
    tenant, unless the firewall subsystem is not initialised."""
    if not self.fw_init:
        return
    router_name = '_'.join([fw_constants.TENANT_EDGE_RTR, tenant_name])
    self.os_helper.create_router(router_name, tenant_id, [])
def convert(self, money, to_currency, date=None):
    """Convert ``money`` to ``to_currency`` using the exchange rate on
    ``date``. If ``date`` is omitted, today's date is used.

    :return: a new ``Money`` (a shallow copy when no conversion is needed)
    """
    # Same currency: nothing to convert, hand back a copy.
    if str(money.currency) == str(to_currency):
        return copy.copy(money)
    when = date or datetime.date.today()
    exchange_rate = self.rate(money.currency, to_currency, when)
    return Money(amount=money.amount * exchange_rate, currency=to_currency)
def enable(self, ids):
    """Enable pool members via the running script.

    :param ids: List of ids
    :return: None on success
    :raise PoolMemberDoesNotExistException:
    :raise InvalidIdPoolMemberException:
    :raise ScriptEnablePoolException:
    :raise NetworkAPIException:
    """
    payload = {"ids": ids}
    return self.post("api/pools/enable/", payload)
def NewFile(self, filename, encoding, options):
    """Parse an XML file from the filesystem or the network. The parsing
    flags ``options`` are a combination of xmlParserOption. This reuses
    the existing reader (xmlTextReader)."""
    return libxml2mod.xmlReaderNewFile(self._o, filename, encoding, options)
def demo_login(self, auth=None, url=None):
    """Authenticate with a "Share Your Class" URL using a demo user.

    You may provide either the entire ``url`` or simply the ``auth``
    parameter.

    :param url: Example - "https://piazza.com/demo_login?nid=hbj11a1gcvl1s6&auth=06c111b"
    :param auth: Example - "06c111b"
    """
    # Keep the RPC client on the instance before logging in, mirroring how
    # the rest of this class accesses it.
    self._rpc_api = PiazzaRPC()
    self._rpc_api.demo_login(auth=auth, url=url)
def abbreviate(s, maxlength=25):
    """Color-aware abbreviator.

    Counts only visible characters (ANSI escape sequences such as
    ``\\033[31m`` are skipped) and, when the string is too long, truncates
    it and appends a color reset followed by ``...``.
    """
    assert maxlength >= 4
    in_escape = False
    visible = 0
    truncated = None
    for pos, ch in enumerate(s):
        if ch == '\033':
            in_escape = True
        elif in_escape:
            # 'm' terminates an ANSI SGR escape sequence.
            if ch == 'm':
                in_escape = False
        else:
            visible += 1
            if visible == maxlength - 1:
                # Remember the truncation point in case we overflow later.
                truncated = s[:pos] + '\033[0m...'
            elif visible > maxlength:
                break
    return s if visible <= maxlength else truncated
def _extract_hunt_results(self, output_file_path):
    """Open a hunt output archive and extract files.

    Args:
      output_file_path: The path where the hunt archive is downloaded to.

    Returns:
      list: tuples containing:
          str: The name of the client from where the files were downloaded.
          str: The directory where the files were downloaded to.
    """
    # Extract items from archive by host for processing
    collection_paths = []
    client_ids = set()
    client_id_to_fqdn = {}
    hunt_dir = None
    try:
        with zipfile.ZipFile(output_file_path) as archive:
            items = archive.infolist()
            for f in items:
                # First path component of the first entry names the hunt directory.
                if not hunt_dir:
                    hunt_dir = f.filename.split('/')[0]
                # If we're dealing with client_info.yaml, use it to build a client
                # ID to FQDN correspondence table & skip extraction.
                if f.filename.split('/')[-1] == 'client_info.yaml':
                    client_id, fqdn = self._get_client_fqdn(archive.read(f))
                    client_id_to_fqdn[client_id] = fqdn
                    continue
                client_id = f.filename.split('/')[1]
                # GRR client IDs look like "C.<hex>"; anything else is skipped.
                if client_id.startswith('C.'):
                    if client_id not in client_ids:
                        client_directory = os.path.join(self.output_path, hunt_dir, client_id)
                        collection_paths.append((client_id, client_directory))
                        client_ids.add(client_id)
                    try:
                        archive.extract(f, self.output_path)
                    except KeyError as exception:
                        print('Extraction error: {0:s}'.format(exception))
                        return []
    except OSError as exception:
        msg = 'Error manipulating file {0:s}: {1!s}'.format(output_file_path, exception)
        self.state.add_error(msg, critical=True)
        return []
    except zipfile.BadZipfile as exception:
        msg = 'Bad zipfile {0:s}: {1!s}'.format(output_file_path, exception)
        self.state.add_error(msg, critical=True)
        return []
    # Best-effort cleanup of the downloaded archive; failure is only reported.
    try:
        os.remove(output_file_path)
    except OSError as exception:
        print('Output path {0:s} could not be removed: {1:s}'.format(output_file_path, exception))
    # Translate GRR client IDs to FQDNs with the information retrieved
    # earlier; fall back to the raw client ID when no FQDN is known.
    fqdn_collection_paths = []
    for client_id, path in collection_paths:
        fqdn = client_id_to_fqdn.get(client_id, client_id)
        fqdn_collection_paths.append((fqdn, path))
    if not fqdn_collection_paths:
        self.state.add_error('Nothing was extracted from the hunt archive', critical=True)
        return []
    return fqdn_collection_paths
def get_info(self):
    """Return surface reconstruction status together with the primary and
    secondary adsorption-site labels."""
    reconstructed = self.is_reconstructed()
    primary, secondary = self.get_site()
    return reconstructed, primary, secondary
def ingest_containers(self, containers=None):
    """Transform the parsed YAML mapping of containers into a list of
    definition dicts, each carrying its own ``name`` key."""
    source = containers or self.stream or {}
    normalized = []
    for container_name, definition in source.items():
        # Copy so the caller's definition dict is not mutated.
        entry = definition.copy()
        entry['name'] = container_name
        normalized.append(entry)
    return normalized
def usermacro_updateglobal(globalmacroid, value, **kwargs):
    '''Update existing global usermacro.

    :param globalmacroid: id of the host usermacro
    :param value: new value of the host usermacro
    :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring)
    :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring)
    :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring)

    return: ID of the updated global usermacro.

    CLI Example:

    .. code-block:: bash

        salt '*' zabbix.usermacro_updateglobal 1 'public'
    '''
    conn_args = _login(**kwargs)
    ret = {}
    try:
        if not conn_args:
            # No connection data available; fall through to the KeyError
            # handler and return the empty result.
            raise KeyError
        params = _params_extend(
            {'globalmacroid': globalmacroid, 'value': value},
            _ignore_name=True, **kwargs)
        ret = _query('usermacro.updateglobal', params,
                     conn_args['url'], conn_args['auth'])
        return ret['result']['globalmacroids'][0]
    except KeyError:
        return ret
def factorgraph_viz(d):
    """Map the dictionary into factorgraph-viz format.
    See https://github.com/mbforbes/factorgraph-viz

    :param d: The dictionary
    :return: The formatted dictionary
    """
    out = defaultdict(list)
    # Random-variable nodes first, then one 'fac' node per factor.
    for node in d['nodes']:
        out['nodes'].append({'id': node['id'], 'type': 'rv'})
    for factor in d['factors']:
        out['nodes'].append({'id': factor['id'], 'type': 'fac'})
        # Edges run source -> factor -> sink.
        for src in factor['sources']:
            out['links'].append({'source': src, 'target': factor['id']})
        if factor['sink']:
            out['links'].append({'source': factor['id'], 'target': factor['sink']})
    return dict(out)
def _wrap(obj, wrapper=None, methods_to_add=(), name=None, skip=(), wrap_return_values=False, wrap_filenames=(), filename=None, wrapped_name_func=None, wrapped=None):
    """Wrap module, class, function or another variable recursively.

    :param Any obj: Object to wrap recursively
    :param Optional[Callable] wrapper: Wrapper to wrap functions and methods in (accepts function as argument)
    :param Collection[Callable] methods_to_add: Container of functions, which accept class as argument, and return tuple of method name and method to add to all classes
    :param Optional[str] name: Name of module to wrap to (if `obj` is module)
    :param Collection[Union[str, type, Any]] skip: Items to skip wrapping (if an item of a collection is the str, wrap will check the obj name, if an item of a collection is the type, wrap will check the obj type, else wrap will check an item itself)
    :param bool wrap_return_values: If true, wrap return values of callables (only types supported by the wrap function are supported)
    :param Collection[str] wrap_filenames: Files to wrap
    :param Optional[str] filename: Source file of `obj`
    :param Optional[Callable[Any, str]] wrapped_name_func: Function that accepts `obj` as argument and returns the name of wrapped `obj` that will be written into wrapped `obj`
    :param Any wrapped: Object to wrap to
    :return: Wrapped `obj`
    """
    # noinspection PyUnresolvedReferences
    class ModuleProxy(types.ModuleType, Proxy):
        # noinspection PyShadowingNames
        def __init__(self, name, doc=None):
            super().__init__(name=name, doc=doc)

    try:
        # Subclassing from obj to pass isinstance(some_object, obj) checks. If defining the class fails,
        # it means that `obj` was not a class, that means ClassProxy wouldn't be used, we can create a dummy class.
        class ClassProxy(obj, Proxy):
            @staticmethod
            def __new__(cls, *args, **kwargs):
                # noinspection PyUnresolvedReferences
                original_obj_object = cls._original_obj(*args, **kwargs)
                # noinspection PyArgumentList
                result = _wrap(obj=original_obj_object, wrapper=wrapper, methods_to_add=methods_to_add, name=name, skip=skip, wrap_return_values=wrap_return_values, wrap_filenames=wrap_filenames, filename=filename, wrapped_name_func=wrapped_name_func)
                return result
    except TypeError:
        class ClassProxy(Proxy):
            pass

    class ObjectProxy(Proxy):
        pass

    # noinspection PyShadowingNames
    def get_name(*names):
        # Return the first usable name: an object's __name__ or a plain str.
        name = None
        for obj in names:
            try:
                name = obj.__name__
            except AttributeError:
                if isinstance(obj, str):
                    name = obj
            if name is not None:
                return name
        return name

    # noinspection PyShadowingNames
    def make_key(obj, wrapper, methods_to_add, name, skip, wrap_return_values, wrap_filenames, filename, wrapped_name_func):
        # Cache key for _wrapped_objs; falls back to id() for unhashable objects.
        try:
            obj_key = 'hash', hash(obj)
        except TypeError:
            obj_key = 'id', id(obj)
        return obj_key + (wrapper, methods_to_add, name, skip, wrap_return_values, wrap_filenames, filename, wrapped_name_func)

    # noinspection PyShadowingNames
    def wrap_(obj, name, members, wrapped=None):
        def get_obj_type():
            if inspect.ismodule(object=obj):
                result = ObjectType.MODULE
            elif inspect.isclass(object=obj):
                result = ObjectType.CLASS
            elif (inspect.isbuiltin(object=obj) or
                  inspect.isfunction(object=obj) or
                  inspect.ismethod(object=obj) or
                  inspect.ismethoddescriptor(object=obj) or
                  isinstance(obj, MethodWrapper)):
                result = ObjectType.FUNCTION_OR_METHOD
            elif inspect.iscoroutine(object=obj):
                result = ObjectType.COROUTINE
            else:
                result = ObjectType.OBJECT
            return result

        def create_proxy(proxy_type):
            return {ProxyType.MODULE: ModuleProxy(name=name),
                    ProxyType.CLASS: ClassProxy,
                    ProxyType.OBJECT: ObjectProxy(), }[proxy_type]

        def add_methods():
            for method_to_add in methods_to_add:
                method_name, method = method_to_add(wrapped)
                if method is not None:
                    setattr(wrapped, method_name, method)

        def set_original_obj():
            # Store the original object on the proxy under the name given by
            # wrapped_name_func; classes need type.__setattr__.
            with suppress(AttributeError):
                what = type if obj_type == ObjectType.CLASS else object
                what.__setattr__(wrapped, wrapped_name_func(obj), obj)

        def need_to_wrap():
            return is_magic_name(name=attr_name) and attr_name not in ['__class__', '__new__']

        obj_type = get_obj_type()
        if wrapped is None:
            if obj_type in [ObjectType.MODULE, ObjectType.CLASS]:
                wrapped = create_proxy(proxy_type=ProxyType.MODULE if inspect.ismodule(obj) else ProxyType.CLASS)
            elif obj_type == ObjectType.FUNCTION_OR_METHOD:
                wrapped = function_or_method_wrapper()
            elif obj_type == ObjectType.COROUTINE:
                wrapped = coroutine_wrapper()
            else:
                wrapped = create_proxy(proxy_type=ProxyType.OBJECT)
        # Register before recursing so self-referential structures terminate.
        key = make_key(obj=obj, wrapper=wrapper, methods_to_add=methods_to_add, name=name, skip=skip, wrap_return_values=wrap_return_values, wrap_filenames=wrap_filenames, filename=filename, wrapped_name_func=wrapped_name_func)
        _wrapped_objs[key] = wrapped
        set_original_obj()
        if obj_type in [ObjectType.FUNCTION_OR_METHOD, ObjectType.COROUTINE]:
            return wrapped
        add_methods()
        if obj_type == ObjectType.CLASS:
            for attr_name, attr_value in members:
                if need_to_wrap():
                    raises_exception = (isinstance(attr_value, tuple) and
                                        len(attr_value) > 0 and
                                        attr_value[0] == RAISES_EXCEPTION)
                    if raises_exception and not obj_type == ObjectType.MODULE:
                        def raise_exception(self):
                            _ = self
                            raise attr_value[1]
                        attr_value = property(raise_exception)
                    with suppress(AttributeError, TypeError):
                        # noinspection PyArgumentList
                        attr_value_new = _wrap(obj=attr_value, wrapper=wrapper, methods_to_add=methods_to_add, name=get_name(attr_value, attr_name), skip=skip, wrap_return_values=wrap_return_values, wrap_filenames=wrap_filenames, filename=get_obj_file(obj=attr_value) or filename, wrapped_name_func=wrapped_name_func)
                        with suppress(Exception):
                            type.__setattr__(wrapped, attr_name, attr_value_new)
        if obj_type != ObjectType.CLASS:
            wrapped_class_name = get_name(obj.__class__)
            # noinspection PyArgumentList
            wrapped_class = _wrap(obj=obj.__class__, wrapper=wrapper, methods_to_add=methods_to_add, name=wrapped_class_name, skip=skip, wrap_return_values=wrap_return_values, wrap_filenames=wrap_filenames, filename=get_obj_file(obj=obj.__class__) or filename, wrapped_name_func=wrapped_name_func, wrapped=wrapped.__class__)
            object.__setattr__(wrapped, '__class__', wrapped_class)
        return wrapped

    def wrap_return_values_(result):
        if wrap_return_values:
            # noinspection PyArgumentList
            result = _wrap(obj=result, wrapper=wrapper, methods_to_add=methods_to_add, name=get_name(result, 'result'), skip=skip, wrap_return_values=wrap_return_values, wrap_filenames=wrap_filenames, filename=filename, wrapped_name_func=wrapped_name_func)
        return result

    # noinspection PyShadowingNames
    def is_magic_name(name):
        return name.startswith('__') and name.endswith('__')

    # noinspection PyShadowingNames
    def is_magic(obj):
        return is_magic_name(name=obj.__name__)

    # noinspection PyShadowingNames
    def is_coroutine_function(obj, wrapper):
        return inspect.iscoroutinefunction(object=wrapper(obj)) and not is_magic(obj=obj)

    # noinspection PyShadowingNames
    def wrap_call_and_wrap_return_values(obj, wrapper):
        if is_coroutine_function(obj=obj, wrapper=wrapper):
            # noinspection PyShadowingNames
            @wraps(obj)
            async def wrapper(*args, **kwargs):
                return wrap_return_values_(result=await obj(*args, **kwargs))
        else:
            # noinspection PyShadowingNames
            @wraps(obj)
            def wrapper(*args, **kwargs):
                return wrap_return_values_(result=obj(*args, **kwargs))
        return wrapper

    def function_or_method_wrapper():
        # noinspection PyShadowingNames
        @wraps(obj)
        def wrapped_obj(*args, **kwargs):
            return wrapper(obj)(*args, **kwargs)

        @wraps(obj)
        def obj_with_original_obj_as_self(*args, **kwargs):
            # When called through a proxy, substitute the proxied original
            # object for `self` before delegating.
            if len(args) > 0 and isinstance(args[0], Proxy):
                # noinspection PyProtectedMember
                args = (object.__getattribute__(args[0], '_original_obj'),) + args[1:]
            return obj(*args, **kwargs)

        if wrapper is None:
            result = obj
        elif is_magic(obj=obj):
            if obj.__name__ == '__getattribute__':
                @wraps(obj)
                def result(*args, **kwargs):
                    # If we are trying to access magic attribute, call obj with args[0]._original_obj as self,
                    # else call original __getattribute__ and wrap the result before returning it.
                    # noinspection PyShadowingNames
                    name = args[1]
                    attr_value = obj_with_original_obj_as_self(*args, **kwargs)
                    if is_magic_name(name=name):
                        return attr_value
                    else:
                        # noinspection PyShadowingNames,PyArgumentList
                        return _wrap(obj=attr_value, wrapper=wrapper, methods_to_add=methods_to_add, name=name, skip=skip, wrap_return_values=wrap_return_values, wrap_filenames=wrap_filenames, filename=filename, wrapped_name_func=wrapped_name_func)
            else:
                result = obj_with_original_obj_as_self
        elif obj.__name__ == '__getattr__':
            # NOTE(review): '__getattr__' is itself a dunder name, so the
            # is_magic branch above appears to take precedence — confirm
            # whether this branch is reachable.
            @wraps(obj)
            def result(*args, **kwargs):
                return wrapper(obj(*args, **kwargs))
        else:
            result = wrapped_obj
        if wrap_return_values:
            result = wrap_call_and_wrap_return_values(obj=result, wrapper=wrapper)
        return result

    def coroutine_wrapper():
        @wraps(obj)
        async def result(*args, **kwargs):
            return await wrapper(obj)(*args, **kwargs)
        if wrap_return_values:
            result = wrap_call_and_wrap_return_values(obj=result, wrapper=wrapper)
        return result

    def is_in_skip():
        # An item in `skip` matches by name (str), by type, or by identity/equality.
        result = False
        for s in skip:
            if isinstance(s, str):
                if name == s:
                    result = True
            elif isinstance(s, type):
                if isinstance(obj, s):
                    result = True
            else:
                if obj == s:
                    result = True
        return result

    # noinspection PyShadowingNames
    def get_obj_file(obj):
        # noinspection PyShadowingNames
        def _get_obj_file(obj):
            try:
                result = (obj.__file__
                          if hasattr(obj, '__file__') else
                          sys.modules[obj.__module__].__file__
                          if hasattr(obj, '__module__') else
                          None)
            except (AttributeError, KeyError):
                result = None
            return result

        result = _get_obj_file(obj=obj)
        if result is None:
            result = _get_obj_file(obj=type(obj))
        return result

    def get_obj_library_files():
        # For a package, include every .py file below its directory.
        obj_file = get_obj_file(obj=obj)
        if obj_file is not None:
            obj_file = Path(obj_file)
            if obj_file.name == '__init__.py':
                result = obj_file.parent.glob('**/*.py')
            else:
                result = [obj_file]
            result = [str(obj_file) for obj_file in result]
        else:
            result = []
        result = frozenset(result)
        return result

    # Freeze collection arguments so they are hashable for the cache key.
    methods_to_add = frozenset(methods_to_add)
    skip = frozenset(skip)
    wrap_filenames = frozenset(wrap_filenames)
    if wrapped_name_func is None:
        # noinspection PyShadowingNames
        def wrapped_name_func(obj):
            _ = obj
            return '_original_obj'
    name = get_name(name, obj)
    if name is None:
        raise ValueError("name was not passed and obj.__name__ not found")
    key = make_key(obj=obj, wrapper=wrapper, methods_to_add=methods_to_add, name=name, skip=skip, wrap_return_values=wrap_return_values, wrap_filenames=wrap_filenames, filename=filename, wrapped_name_func=wrapped_name_func)
    wrap_filenames = wrap_filenames or get_obj_library_files()
    filename = get_obj_file(obj=obj) or filename
    # noinspection PyUnusedLocal
    members = []
    with suppress(ModuleNotFoundError):
        members = getmembers(object=obj)
    try:
        already_wrapped = key in _wrapped_objs
    except TypeError:
        already_wrapped = False
    if filename not in wrap_filenames or is_in_skip():
        wrapped = obj
    elif already_wrapped:
        wrapped = _wrapped_objs[key]
    elif members:
        wrapped = wrap_(obj=obj, name=name, members=members, wrapped=wrapped)
    else:
        wrapped = obj
    _wrapped_objs[key] = wrapped
    return wrapped
def merge_csv(filenames: List[str],
              outfile: TextIO = sys.stdout,
              input_dialect: str = 'excel',
              output_dialect: str = 'excel',
              debug: bool = False,
              headers: bool = True) -> None:
    """Amalgamate multiple CSV/TSV/similar files into one.

    Args:
        filenames: list of filenames to process
        outfile: file-like object to write output to
        input_dialect: dialect of input files, as passed to ``csv.reader``
        output_dialect: dialect to write, as passed to ``csv.writer``
        debug: be verbose?
        headers: do the files have header lines?

    Raises:
        ValueError: if ``headers`` is True and a file's header row does not
            match the header row of the first file.
    """
    writer = csv.writer(outfile, dialect=output_dialect)
    written_header = False
    header_items: List[str] = []
    for filename in filenames:
        log.info("Processing file " + repr(filename))
        with open(filename, 'r') as f:
            reader = csv.reader(f, dialect=input_dialect)
            if headers:
                if not written_header:
                    # First file: its header becomes the reference header.
                    header_items = next(reader)
                    if debug:
                        log.debug("Header row: {!r}", header_items)
                    writer.writerow(header_items)
                    written_header = True
                else:
                    new_headers = next(reader)
                    if new_headers != header_items:
                        # BUG FIX: the message previously contained no
                        # {filename} placeholder, so the offending file was
                        # never named even though the argument was supplied.
                        raise ValueError(
                            "Header line in file {filename} doesn't match - "
                            "it was {new} but previous was {old}".format(
                                filename=repr(filename),
                                new=repr(new_headers),
                                old=repr(header_items),
                            ))
                    if debug:
                        log.debug("Header row matches previous")
            else:
                if debug:
                    log.debug("No headers in use")
            for row in reader:
                if debug:
                    log.debug("Data row: {!r}", row)
                writer.writerow(row)
def _bright_star_match(self, matchedObjects, catalogueName, magnitudeLimitFilter, lowerMagnitudeLimit):
    """*perform a bright star match on the crossmatch results if required by the catalogue search*

    **Key Arguments:**
        - ``matchedObjects`` -- the list of matched sources from the catalogue crossmatch
        - ``catalogueName`` -- the name of the catalogue the crossmatch results from
        - ``magnitudeLimitFilter`` -- the name of the column containing the magnitude to filter on
        - ``lowerMagnitudeLimit`` -- the lower magnitude limit to match bright stars against

    **Return:**
        - ``brightStarMatches`` -- the trimmed matched sources (bright star associations only)
    """
    self.log.debug('starting the ``_bright_star_match`` method')
    import decimal
    decimal.getcontext().prec = 10
    # Keep only matches that are both bright enough and close enough; the
    # allowed separation shrinks as the star gets fainter.
    brightStarMatches = []
    for row in matchedObjects:
        mag = decimal.Decimal(row[magnitudeLimitFilter])
        if mag and mag < lowerMagnitudeLimit:
            sep = decimal.Decimal(row["separationArcsec"])
            sepLimit = decimal.Decimal(
                decimal.Decimal(10) ** (-decimal.Decimal(0.2) * mag + decimal.Decimal(3.7)))
            if sep < sepLimit and sep < 20.:
                brightStarMatches.append(row)
    self.log.debug('completed the ``_bright_star_match`` method')
    return brightStarMatches
def include_config(include, orig_path, verbose, exit_on_config_errors=False):
    '''Parses extra configuration file(s) specified in an include list in the
    main config file.

    include -- a glob/path (or list of them) naming the files to include
    orig_path -- path of the including configuration file; include paths are
        resolved relative to its directory
    verbose -- warn when an include glob matches no files
    exit_on_config_errors -- exit the process on a parse error instead of
        skipping the broken file
    '''
    # Protect against empty option
    if not include:
        return {}
    if orig_path is None:
        # When the passed path is None, we just want the configuration
        # defaults, not actually loading the whole configuration.
        return {}
    if isinstance(include, six.string_types):
        include = [include]
    configuration = {}
    for path in include:
        # Allow for includes like ~/foo
        path = os.path.expanduser(path)
        if not os.path.isabs(path):
            path = os.path.join(os.path.dirname(orig_path), path)
        # Catch situation where user typos path in configuration; also warns
        # for empty include directory (which might be by design)
        glob_matches = glob.glob(path)
        if not glob_matches:
            if verbose:
                log.warning('Warning parsing configuration file: "include" path/glob '
                            "'%s' matches no files", path)
        for fn_ in sorted(glob_matches):
            log.debug('Including configuration from \'%s\'', fn_)
            try:
                opts = _read_conf_file(fn_)
            except salt.exceptions.SaltConfigurationError as error:
                log.error(error)
                if exit_on_config_errors:
                    sys.exit(salt.defaults.exitcodes.EX_GENERIC)
                else:
                    # Initialize default config if we wish to skip config errors
                    opts = {}
            # Merge schedules instead of letting a nested include clobber one.
            schedule = opts.get('schedule', {})
            if schedule and 'schedule' in configuration:
                configuration['schedule'].update(schedule)
            include = opts.get('include', [])
            if include:
                # BUG FIX: the recursive call previously dropped
                # exit_on_config_errors, so errors in nested includes were
                # always skipped regardless of the caller's setting.
                opts.update(include_config(include, fn_, verbose,
                                           exit_on_config_errors))
            salt.utils.dictupdate.update(configuration, opts, True, True)
    return configuration
def tag(self, alt='', use_size=None, **attrs):
    """Return a standard XHTML ``<img ... />`` tag for this field.

    :param alt: The ``alt=""`` text for the tag. Defaults to ``''``.
    :param use_size: Whether to get the size of the thumbnail image for use
        in the tag attributes. If ``None`` (default), the size will only be
        used if it won't result in a remote file retrieval.

    All other keyword parameters are added as (properly escaped) extra
    attributes to the `img` tag.
    """
    if use_size is None:
        # Only look up dimensions when they are already cached or the
        # storage backend can resolve a local path (no remote retrieval).
        if getattr(self, '_dimensions_cache', None):
            use_size = True
        else:
            try:
                self.storage.path(self.name)
            except NotImplementedError:
                use_size = False
            else:
                use_size = True
    attrs['alt'] = alt
    attrs['src'] = self.url
    if use_size:
        attrs.update(dict(width=self.width, height=self.height))
    rendered = ' '.join('%s="%s"' % (key, escape(value))
                        for key, value in sorted(attrs.items()))
    return mark_safe('<img %s />' % rendered)
def isready(self):
    """Synchronize the python engine object with the back-end engine:
    send 'isready' and block until 'readyok' is read back."""
    self.put('isready')
    while True:
        line = self.stdout.readline().strip()
        if line == 'readyok':
            return line
def times(p, mint, maxt=None):
    '''Repeat a parser between `mint` and `maxt` times. DO AS MUCH MATCH AS IT CAN.
    Return a list of values.'''
    # A single count means "exactly mint times".
    maxt = maxt if maxt else mint

    @Parser
    def times_parser(text, index):
        cnt, values, res = 0, Value.success(index, []), None
        while cnt < maxt:
            res = p(text, index)
            if res.status:
                # Accumulate the parsed value and advance past the match.
                values = values.aggregate(Value.success(res.index, [res.value]))
                index, cnt = res.index, cnt + 1
            else:
                if cnt >= mint:
                    break
                else:
                    # failed, throw exception.
                    return res
            if cnt >= maxt:
                # finish.
                break
            # If we don't have any remaining text to start next loop, we need break.
            # We cannot put the `index < len(text)` in where because some parser can
            # success even when we have no any text. We also need to detect if the
            # parser consume no text.
            # See: #28
            if index >= len(text):
                if cnt >= mint:
                    # we already have decent result to return
                    break
                else:
                    r = p(text, index)
                    if index != r.index:
                        # report error when the parser cannot success with no text
                        return Value.failure(index, "already meets the end, no enough text")
        return values
    return times_parser
def _grid_sample(x: TensorImage, coords: FlowField, mode: str = 'bilinear', padding_mode: str = 'reflection', remove_out: bool = True) -> TensorImage:
    "Resample pixels in `coords` from `x` by `mode`, with `padding_mode` in ('reflection', 'border', 'zeros')."
    # NOTE(review): `remove_out` is accepted but never referenced in this body
    # — confirm whether it is intentionally unused here.
    # optimize layout for grid_sample (round-trip permute forces a contiguous
    # memory layout in the channels-last ordering grid_sample expects)
    coords = coords.flow.permute(0, 3, 1, 2).contiguous().permute(0, 2, 3, 1)
    if mode == 'bilinear':
        # hack to get smoother downwards resampling
        mn, mx = coords.min(), coords.max()
        # max amount we're affine zooming by (> 1 means zooming in)
        z = 1 / (mx - mn).item() * 2
        # amount we're resizing by, with 100% extra margin
        d = min(x.shape[1] / coords.shape[1], x.shape[2] / coords.shape[2]) / 2
        # If we're resizing up by > 200%, and we're zooming less than that, interpolate first
        if d > 1 and d > z:
            x = F.interpolate(x[None], scale_factor=1 / d, mode='area')[0]
    return F.grid_sample(x[None], coords, mode=mode, padding_mode=padding_mode)[0]
def _cast_expected_to_returned_type(expected, returned):
    '''Determine the type of variable returned.
    Cast the expected to the type of variable returned.'''
    target_type = type(returned)
    result = expected
    # bool("False") would be True (non-empty string), so special-case it.
    if expected == "False" and target_type == bool:
        expected = False
    try:
        result = target_type(expected)
    except ValueError:
        log.info("Unable to cast expected into type of returned")
        log.info("returned = %s", returned)
        log.info("type of returned = %s", type(returned))
        log.info("expected = %s", expected)
        log.info("type of expected = %s", type(expected))
    return result
def new(namespace, name, protected=False, attributes=None, api_url=fapi.PROD_API_ROOT):
    """Create a new FireCloud workspace.

    Args:
        namespace (str): Project namespace the workspace belongs to.
        name (str): Workspace name.
        protected (bool): Whether to create a protected workspace.
        attributes (dict): Initial workspace attributes; defaults to empty.
        api_url (str): Base FireCloud API endpoint.

    Returns:
        Workspace: A new FireCloud workspace.

    Raises:
        FireCloudServerError: API call failed.
    """
    # Avoid a shared mutable default argument (was `attributes=dict()`).
    if attributes is None:
        attributes = {}
    r = fapi.create_workspace(namespace, name, protected, attributes, api_url)
    fapi._check_response_code(r, 201)
    return Workspace(namespace, name, api_url)
def capsule(height=1.0, radius=1.0, count=(32, 32)):
    """Create a mesh of a capsule, or a cylinder with hemispheric ends.

    Parameters
    ----------
    height : float
        Center to center distance of two spheres
    radius : float
        Radius of the cylinder and hemispheres
    count : (2,) int
        Number of sections on latitude and longitude

    Returns
    -------
    capsule : trimesh.Trimesh
        Capsule geometry with:
          - cylinder axis along Z
          - one hemisphere centered at the origin
          - other hemisphere centered along the Z axis at height
    """
    height = float(height)
    radius = float(radius)
    # np.int was removed in NumPy 1.24; the builtin int dtype is equivalent.
    # (Also: the default is now an immutable tuple instead of a list.)
    count = np.array(count, dtype=int)
    # force an even number of sections so the equator band splits cleanly
    count += np.mod(count, 2)
    # create a theta where there is a double band around the equator
    # so that we can offset the top and bottom of a sphere to
    # get a nicely meshed capsule
    theta = np.linspace(0, np.pi, count[0])
    center = np.clip(np.arctan(tol.merge / radius), tol.merge, np.inf)
    offset = np.array([-center, center]) + (np.pi / 2)
    theta = np.insert(theta, int(len(theta) / 2), offset)
    capsule = uv_sphere(radius=radius, count=count, theta=theta)
    # shift the upper hemisphere up by the cylinder height
    top = capsule.vertices[:, 2] > tol.zero
    capsule.vertices[top] += [0, 0, height]
    return capsule
def bids_to_pwl(self, bids):
    """Updates the piece-wise linear total cost function using the given
    bid blocks.

    Based on off2case.m from MATPOWER by Ray Zimmerman, developed at PSERC
    Cornell. See U{http://www.pserc.cornell.edu/matpower/} for more info.

    :param bids: iterable of bid blocks; only those whose ``vLoad`` is
        this dispatchable load are applied.
    """
    # Only valid on a dispatchable load.
    assert self.is_load
    # Apply only those bids associated with this dispatchable load.
    vl_bids = [bid for bid in bids if bid.vLoad == self]
    # Filter out zero quantity bids (rounded to 4 decimal places).
    gt_zero = [bid for bid in vl_bids if round(bid.quantity, 4) > 0.0]
    # Ignore withheld offers.
    valid_bids = [bid for bid in gt_zero if not bid.withheld]
    # Split active and reactive power bids.
    p_bids = [v for v in valid_bids if not v.reactive]
    q_bids = [v for v in valid_bids if v.reactive]
    if p_bids:
        self.p_cost = self._offbids_to_points(p_bids, True)
        self.pcost_model = PW_LINEAR
        self.online = True
    else:
        # No usable active power bids: flat zero-cost curve, go offline.
        self.p_cost = [(0.0, 0.0), (self.p_max, 0.0)]
        self.pcost_model = PW_LINEAR
        logger.info("No valid active power bids for dispatchable load "
                    "[%s], shutting down." % self.name)
        self.online = False
    if q_bids:
        self.q_cost = self._offbids_to_points(q_bids, True)
        self.qcost_model = PW_LINEAR
        self.online = True
    else:
        # Flat zero-cost reactive curve; note the load is NOT shut down
        # for missing reactive bids (see the commented-out lines).
        self.q_cost = [(self.q_min, 0.0), (0.0, 0.0), (self.q_max, 0.0)]
        self.qcost_model = PW_LINEAR
        # logger.info("No valid bids for dispatchable load, shutting down.")
        # self.online = False
    # Re-adjust generator limits to match the new cost curves.
    self._adjust_limits()
def get_document_frequency(self, term):
    """Return the number of documents the specified term appears in.

    Raises:
        IndexError: if ``term`` is not in the index.
    """
    try:
        postings = self._terms[term]
    except KeyError:
        raise IndexError(TERM_DOES_NOT_EXIST)
    return len(postings)
def _writeImage(dataArray=None, inputHeader=None):
    """Wrap the combination-step result in a single-extension HDU list.

    The header of the first 'outsingle' file in the association parlist
    is used as the header of the new image.

    Parameters
    ----------
    dataArray : arr
        Array of data to be written to a fits.PrimaryHDU object
    inputHeader : obj
        fits.header.Header object to use as basis for the PrimaryHDU header
    """
    primary = fits.PrimaryHDU(data=dataArray, header=inputHeader)
    hdu_list = fits.HDUList()
    hdu_list.append(primary)
    return hdu_list
def triangle_address(fx, pt):
    '''triangle_address(FX, P) yields an address coordinate (t, r) for the point P in the triangle
    defined by the (3 x d)-sized coordinate matrix FX, in which each row of the matrix is the
    d-dimensional vector representing the respective triangle vertex for triangle [A, B, C]. The
    resulting coordinates (t, r) (0 <= t <= 1, 0 <= r <= 1) address the point P such that, if t gives
    the fraction of the angle from vector AB to vector AC that is made by the angle between vectors
    AB and AP, and r gives the fraction ||AP||/||AR|| where R is the point of intersection between
    lines AP and BC. If P is a (d x n)-sized matrix of points, then a (2 x n) matrix of addresses
    is returned.
    '''
    fx = np.asarray(fx)
    pt = np.asarray(pt)
    # The triangle vectors...
    ab = fx[1] - fx[0]
    ac = fx[2] - fx[0]
    bc = fx[2] - fx[1]
    # vector(s) from vertex A to the query point(s)
    ap = np.asarray([pt_i - a_i for (pt_i, a_i) in zip(pt, fx[0])])
    # get the unnormalized distance...
    r = np.sqrt((ap ** 2).sum(0))
    # unit is 1 exactly where r == 0 (P coincides with A); adding ab*unit
    # below avoids handing a degenerate zero vector to vector_angle
    unit = 1 - r.astype(bool)
    # now we can find the angle: t0 is the full angle at A, t the AB->AP angle
    t0 = vector_angle(ab, ac)
    t = vector_angle(ap + [ab_i * unit for ab_i in ab], ab)
    sint = np.sin(t)
    sindt = np.sin(t0 - t)
    # finding r0 is trickier -- we use this fancy formula based on the law of sines
    q0 = np.sqrt((bc ** 2).sum(0))
    # B -> C distance
    beta = vector_angle(-ab, bc)
    # Angle at B
    sinGamma = np.sin(math.pi - beta - t0)
    sinBeta = np.sin(beta)
    r0 = q0 * sinBeta * sinGamma / (sinBeta * sindt + sinGamma * sint)
    return np.asarray([t / t0, r / r0])
def hexcolor(color):
    "returns hex color given a tuple, wx.Color, or X11 named color"
    # first, if this is a hex color already, return!
    # Python 3: needs rewrite for str/unicode change
    if isinstance(color, six.string_types):
        if color[0] == '#' and len(color) == 7:
            return color.lower()
    # now, get color to an rgb tuple
    rgb = (0, 0, 0)
    if isinstance(color, tuple):
        rgb = color
    elif isinstance(color, list):
        rgb = tuple(color)
    elif isinstance(color, six.string_types):
        # normalize the X11 name: strip spaces, unify 'gray' -> 'grey'
        c = color.lower()
        if c.find(' ') > -1:
            c = c.replace(' ', '')
        if c.find('gray') > -1:
            c = c.replace('gray', 'grey')
        if c in x11_colors:
            rgb = x11_colors[c]
    else:
        # assume a wx.Colour-like object; anything else keeps black.
        # BUGFIX: was a bare `except:` which also swallowed SystemExit
        # and KeyboardInterrupt -- only missing accessors are expected.
        try:
            rgb = color.Red(), color.Green(), color.Blue()
        except AttributeError:
            pass
    # convert rgb to hex color
    col = '#%02x%02x%02x' % (rgb)
    return col.lower()
def raw_corpus_rougel(hypotheses: Iterable[str], references: Iterable[str]) -> float:
    """Thin wrapper around the ROUGE-L implementation.

    :param hypotheses: Hypotheses stream.
    :param references: Reference stream.
    :return: ROUGE-L score as float between 0 and 1.
    """
    score = rouge.rouge_l(hypotheses, references)
    return score
def facter_info():
    """Return facts gathered from ``facter`` as ``{'facter': data}``.

    Runs ``facter --yaml`` and deserializes its stdout. Returns None when
    the binary is missing (FileNotFoundError is suppressed) or when the
    process exits non-zero.
    """
    with suppress(FileNotFoundError):  # facter may not be installed
        # subprocess.run replaces the manual Popen/communicate dance
        proc = subprocess.run(['facter', '--yaml'],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
        if not proc.returncode:
            data = serializer.load(proc.stdout)
            return {'facter': data}
def from_settings(cls, settings):
    """Read Mongodb source configuration from the provided settings.

    ``settings`` must contain non-empty 'mongodb' (a connection URI whose
    path component names the database) and 'collection' keys; all other
    keys are passed through as options.

    Raises:
        Exception: if settings are missing/empty or the URI has no db name.
    """
    # idiomatic membership tests (was `not 'x' in settings`)
    if ('mongodb' not in settings or 'collection' not in settings
            or settings['mongodb'] == '' or settings['collection'] == ''):
        raise Exception("Erroneous mongodb settings, "
                        "needs a collection and mongodb setting", settings)
    cx_uri = urlparse.urlsplit(settings["mongodb"])
    db_name = cx_uri.path
    # strip any query string that stayed attached to the path
    if '?' in db_name:
        db_name, query = db_name.split('?', 1)
    # drop the leading '/'
    db_name = db_name[1:]
    if db_name == "":
        raise Exception("Erroneous mongodb settings, "
                        "missing db_name", settings)
    # rebuild the URI with a bare '/' path for the connection itself
    cx_uri = urlparse.urlunsplit(
        (cx_uri.scheme, cx_uri.netloc, "/", cx_uri.query, cx_uri.fragment))
    options = copy.deepcopy(settings)
    del options['mongodb']
    del options['collection']
    return Mongodb(cls.connection_for_uri(cx_uri), db_name,
                   settings['collection'], options)
def diff(self, other=Index, paths=None, create_patch=False, **kwargs):
    """Creates diffs between two items being trees, trees and index or an
    index and the working tree. It will detect renames automatically.

    :param other:
        Is the item to compare us with.
        If None, we will be compared to the working tree.
        If Treeish, it will be compared against the respective tree.
        If Index (type), it will be compared against the index.
        If git.NULL_TREE, it will compare against the empty tree.
        It defaults to Index to assure the method will not by-default fail
        on bare repositories.
    :param paths:
        is a list of paths or a single path to limit the diff to.
        It will only include at least one of the given path or paths.
    :param create_patch:
        If True, the returned Diff contains a detailed patch that if applied
        makes the self to other. Patches are somewhat costly as blobs have to be read
        and diffed.
    :param kwargs:
        Additional arguments passed to git-diff, such as
        R=True to swap both sides of the diff.
    :return: git.DiffIndex
    :note:
        On a bare repository, 'other' needs to be provided as Index or as
        Tree/Commit, or a git command error will occur
    """
    args = []
    args.append("--abbrev=40")  # we need full shas
    args.append("--full-index")  # get full index paths, not only filenames
    args.append("-M")  # check for renames, in both formats
    if create_patch:
        args.append("-p")
    else:
        args.append("--raw")
    # in any way, assure we don't see colored output,
    # fixes https://github.com/gitpython-developers/GitPython/issues/172
    args.append('--no-color')
    if paths is not None and not isinstance(paths, (tuple, list)):
        paths = [paths]
    diff_cmd = self.repo.git.diff
    if other is self.Index:
        # diff index against HEAD
        args.insert(0, '--cached')
    elif other is NULL_TREE:
        args.insert(0, '-r')  # recursive diff-tree
        args.insert(0, '--root')
        diff_cmd = self.repo.git.diff_tree
    elif other is not None:
        args.insert(0, '-r')  # recursive diff-tree
        args.insert(0, other)
        diff_cmd = self.repo.git.diff_tree
    args.insert(0, self)
    # paths is list here or None
    if paths:
        args.append("--")
        args.extend(paths)
    # END paths handling
    kwargs['as_process'] = True
    proc = diff_cmd(*self._process_diff_args(args), **kwargs)
    # choose the parser matching the requested output format
    diff_method = (Diff._index_from_patch_format
                   if create_patch
                   else Diff._index_from_raw_format)
    index = diff_method(self.repo, proc)
    proc.wait()
    return index
def complex_median(complex_list):
    """Get the median value of a list of complex numbers.

    Parameters
    ----------
    complex_list : list
        List of complex numbers to calculate the median.

    Returns
    -------
    complex
        The median of the real parts plus 1j times the median of the
        imaginary parts.
    """
    reals = [c.real for c in complex_list]
    imags = [c.imag for c in complex_list]
    return numpy.median(reals) + 1.j * numpy.median(imags)
def main():
    """Simulates HHL with matrix input, and outputs Pauli observables of the
    resulting qubit state |x>.

    Expected observables are calculated from the expected solution |x>.
    """
    # Eigendecomposition:
    #   (4.537, [-0.971555, -0.0578339+0.229643j])
    #   (0.349, [-0.236813, 0.237270-0.942137j])
    # |b> = (0.64510-0.47848j, 0.35490-0.47848j)
    # |x> = (-0.0662724-0.214548j, 0.784392-0.578192j)
    A = np.array([[4.30213466 - 6.01593490e-08j, 0.23531802 + 9.34386156e-01j],
                  [0.23531882 - 9.34388383e-01j, 0.58386534 + 6.01593489e-08j]])
    t = 0.358166 * math.pi
    register_size = 4
    # gates preparing the input state |b>
    input_prep_gates = [cirq.Rx(1.276359), cirq.Rz(1.276359)]
    # expected <X>, <Y>, <Z> of the solution state |x>
    expected = (0.144130, 0.413217, -0.899154)
    # Set C to be the smallest eigenvalue that can be represented by the
    # circuit.
    C = 2 * math.pi / (2 ** register_size * t)
    # Simulate circuit
    print("Expected observable outputs:")
    print("X =", expected[0])
    print("Y =", expected[1])
    print("Z =", expected[2])
    print("Actual: ")
    simulate(hhl_circuit(A, C, t, register_size, *input_prep_gates))
def compamp_to_ac ( compamp , window = np . hanning ) : # convert single or multi - subband compamps into autocorrelation waterfall
'''Adapted from Gerry Harp at SETI .''' | header = compamp . header ( )
cdata = compamp . complex_data ( )
# Apply Windowing and Padding
cdata = np . multiply ( cdata , window ( cdata . shape [ 2 ] ) )
# window for smoothing sharp time series start / end in freq . dom .
cdata_normal = cdata - cdata . mean ( axis = 2 ) [ : , : , np . newaxis ]
# zero mean , does influence a minority of lines in some plots
cdata = np . zeros ( ( cdata . shape [ 0 ] , cdata . shape [ 1 ] , 2 * cdata . shape [ 2 ] ) , complex )
cdata [ : , : , cdata . shape [ 2 ] / 2 : cdata . shape [ 2 ] + cdata . shape [ 2 ] / 2 ] = cdata_normal
# zero - pad to 2N
# Perform Autocorrelation
cdata = np . fft . fftshift ( np . fft . fft ( cdata ) , 2 )
# FFT all blocks separately and arrange correctly
cdata = cdata . real ** 2 + cdata . imag ** 2
# FFT ( AC ( x ) ) = FFT ( x ) FFT * ( x ) = abs ( x ) ^ 2
cdata = np . fft . ifftshift ( np . fft . ifft ( cdata ) , 2 )
# AC ( x ) = iFFT ( abs ( x ) ^ 2 ) and arrange correctly
cdata = np . abs ( cdata )
# magnitude of AC
# normalize each row to sqrt of AC triangle
cdata = np . divide ( cdata , np . sqrt ( np . sum ( cdata , axis = 2 ) ) [ : , : , np . newaxis ] )
return cdata |
def match(subject: Expression, pattern: Pattern) -> Iterator[Substitution]:
    r"""Tries to match the given *pattern* to the given *subject*.
    Yields each match in form of a substitution.

    Parameters:
        subject:
            A subject to match.
        pattern:
            The pattern to match.

    Yields:
        All possible match substitutions.

    Raises:
        ValueError:
            If the subject is not constant.
    """
    if not is_constant(subject):
        raise ValueError("The subject for matching must be constant.")
    # Constraints with no variables are checked once per candidate
    # substitution here; variable constraints are threaded into _match.
    global_constraints = [c for c in pattern.constraints if not c.variables]
    local_constraints = set(c for c in pattern.constraints if c.variables)
    for subst in _match([subject], pattern.expression, Substitution(), local_constraints):
        for constraint in global_constraints:
            if not constraint(subst):
                break
        else:
            # every global constraint accepted this substitution
            yield subst
def _api_args(self):
    """Glances API RESTful implementation.

    Return the JSON representation of the Glances command line arguments.
    HTTP/200 if OK, HTTP/404 if others error.
    """
    response.content_type = 'application/json; charset=utf-8'
    try:
        # vars() converts the argparse Namespace into a plain dict
        # Source: https://docs.python.org/%s/library/functions.html#vars
        args_dict = vars(self.args)
        args_json = json.dumps(args_dict)
    except Exception as err:
        abort(404, "Cannot get args (%s)" % str(err))
    return args_json
def _setup(self):
    """Generates _reverse_map from _map (via ValueMap._setup) and records
    the map's size on the class.

    NOTE(review): mixes class and instance attribute access -- the guard
    reads ``cls._map`` but the size is computed from ``self._map``;
    presumably both resolve to the same mapping. Confirm before changing.
    """
    ValueMap._setup(self)
    cls = self.__class__
    if cls._map is not None:
        # size = highest mapped key + 1
        cls._size = max(self._map.keys()) + 1
def ketbra(i, j, Ne):
    """This function returns the outer product :math:`|i><j|` where
    :math:`|i>` and :math:`|j>` are elements of the canonical basis of an
    Ne-dimensional Hilbert space (in matrix form).

    >>> ketbra(2, 3, 3)
    Matrix([
    [0, 0, 0],
    [0, 0, 1],
    [0, 0, 0]])
    """
    ket_i = ket(i, Ne)
    bra_j = bra(j, Ne)
    return ket_i * bra_j
def import_pyfiles(path):
    """Import all *.py files in the specified directory.

    Each imported module is recorded in IMPORTED_BUILD_SOURCES; returns
    the number of files imported.
    """
    count = 0
    pattern = os.path.join(path, '*.py')
    for filename in glob.glob(pattern):
        module = import_file(filename)
        IMPORTED_BUILD_SOURCES.append(module)
        count += 1
    return count
def isVisible(self):
    """Returns whether or not this layer is visible.

    If the inheritVisibility value is set to True, the check walks up the
    parent hierarchy, so the layer is visible only if every ancestor is.

    :return: <bool>
    """
    if not self._visible:
        return self._visible
    if self._inheritVisibility and self._parent:
        return self._parent.isVisible()
    return self._visible
def __RetrieveContent(host, port, adapter, version, path, keyFile, certFile,
                      thumbprint, sslContext,
                      connectionPoolTimeout=CONNECTION_POOL_IDLE_TIMEOUT_SEC):
    """Retrieve service instance for connection.

    @param host: Which host to connect to.
    @type host: string
    @param port: Port
    @type port: int
    @param adapter: Adapter (only "SOAP" is supported)
    @type adapter: string
    @param version: Version
    @type version: string
    @param path: Path
    @type path: string
    @param keyFile: ssl key file path
    @type keyFile: string
    @param certFile: ssl cert file path
    @type certFile: string
    @param connectionPoolTimeout: Timeout in secs for idle connections to
        close, specify negative numbers for never closing the connections
    @type connectionPoolTimeout: int
    """
    # XXX remove the adapter and service arguments once dependent code is fixed
    if adapter != "SOAP":
        raise ValueError(adapter)
    # Create the SOAP stub adapter
    stub = SoapStubAdapter(host, port, version=version, path=path,
                           certKeyFile=keyFile, certFile=certFile,
                           thumbprint=thumbprint, sslContext=sslContext,
                           connectionPoolTimeout=connectionPoolTimeout)
    # Get Service instance
    si = vim.ServiceInstance("ServiceInstance", stub)
    content = None
    try:
        content = si.RetrieveContent()
    except vmodl.MethodFault:
        raise
    except Exception as e:
        # NOTE(hartsock): preserve the traceback for diagnostics
        # pulling and preserving the traceback makes diagnosing connection
        # failures easier since the fault will also include where inside the
        # library the fault occurred. Without the traceback we have no idea
        # why the connection failed beyond the message string.
        # BUGFIX: the original unpacked into names `type` and `traceback`,
        # shadowing the builtin and the stdlib module name.
        exc_type, exc_value, exc_traceback = sys.exc_info()
        if exc_traceback:
            fault = vim.fault.HostConnectFault(msg=str(e))
            reraise(vim.fault.HostConnectFault, fault, exc_traceback)
        else:
            raise vim.fault.HostConnectFault(msg=str(e))
    return content, si, stub
def xor(*variables):
    '''XOR definition for multiple variables.

    Returns True exactly when an odd number of the arguments are truthy.
    '''
    return sum(bool(value) for value in variables) % 2 == 1
def indent(indent_str=None):
    """An example indentation ruleset.

    Returns a zero-argument factory that builds a fresh Indentator and a
    dict of layout handlers keyed by layout-rule token, all bound to that
    one instance.
    """
    def indentation_rule():
        inst = Indentator(indent_str)
        return {'layout_handlers': {
            Indent: inst.layout_handler_indent,
            Dedent: inst.layout_handler_dedent,
            Newline: inst.layout_handler_newline,
            OptionalNewline: inst.layout_handler_newline_optional,
            OpenBlock: layout_handler_openbrace,
            CloseBlock: layout_handler_closebrace,
            EndStatement: layout_handler_semicolon,
        }}
    return indentation_rule
def create(input_width, input_height, input_channels=1, output_dim=512):
    """Vel factory function building a NatureCnn model factory."""
    def instantiate(**_):
        return NatureCnn(
            input_width=input_width,
            input_height=input_height,
            input_channels=input_channels,
            output_dim=output_dim,
        )
    return ModelFactory.generic(instantiate)
def set(self, instance, value, **kw):
    """Set Analyses to an AR.

    :param instance: Analysis Request
    :param value: Single AS UID or a list of dictionaries containing AS UIDs
    :param kw: Additional keyword parameters passed to the field
    """
    if not isinstance(value, (list, tuple)):
        value = [value]
    uids = []
    for item in value:
        uid = None
        if isinstance(item, dict):
            uid = item.get("uid")
        # BUGFIX: the original tested ``api.is_uid(value)`` -- the whole
        # list -- instead of the current item.
        if api.is_uid(item):
            uid = item
        if uid is None:
            # logger.warn is deprecated in favor of warning
            logger.warning("Could not extract UID of value")
            continue
        uids.append(uid)
    analyses = [api.get_object_by_uid(uid) for uid in uids]
    self._set(instance, analyses, **kw)
def set(self, name, value, ex=None, px=None, nx=False, xx=False):
    """Set the value at key ``name`` to ``value``.

    ``ex`` sets an expire flag on key ``name`` for ``ex`` seconds.
    ``px`` sets an expire flag on key ``name`` for ``px`` milliseconds.
    ``nx`` if set to True, set the value only if it does not already exist.
    ``xx`` if set to True, set the value only if it already exists.

    :return: Future()
    """
    with self.pipe as pipe:
        encoded = self.valueparse.encode(value)
        key = self.redis_key(name)
        return pipe.set(key, encoded, ex=ex, px=px, nx=nx, xx=xx)
def _load_cmap_list(self):
    """Searches the colormaps directory for all files, populates the list."""
    # store the current name so the selection can be restored afterwards
    name = self.get_name()
    # clear the list; signals are blocked so clearing/refilling does not
    # fire selection handlers mid-update
    self._combobox_cmaps.blockSignals(True)
    self._combobox_cmaps.clear()
    # list the existing contents
    paths = _settings.ListDir('colormaps')
    # loop over the paths and add the names (minus extension) to the list
    for path in paths:
        self._combobox_cmaps.addItem(_os.path.splitext(path)[0])
    # try to select the current name (findText returns -1 when absent)
    self._combobox_cmaps.setCurrentIndex(self._combobox_cmaps.findText(name))
    self._combobox_cmaps.blockSignals(False)
def extract_images_generic(pike, root, log, options):
    """Extract any >= 2bpp image we think we can improve.

    Returns a (jpegs, pngs) pair of xref lists.
    """
    jpegs = []
    pngs = []
    for _, xref, ext in extract_images(pike, root, log, options,
                                       extract_image_generic):
        log.debug('xref = %s ext = %s', xref, ext)
        if ext == '.jpg':
            jpegs.append(xref)
        elif ext == '.png':
            pngs.append(xref)
    log.debug("Optimizable images: JPEGs: %s PNGs: %s", len(jpegs), len(pngs))
    return jpegs, pngs
def index_to_slices(index):
    """take a numpy array of integers (index) and return a nested list of
    slices such that the slices describe the start, stop points for each
    integer in the index.

    e.g.
    >>> index = np.asarray([0,0,0,1,1,1,2,2,2])
    returns
    >>> [[slice(0,3,None)],[slice(3,6,None)],[slice(6,9,None)]]

    or, a more complicated example
    >>> index = np.asarray([0,0,1,1,0,2,2,2,1,1])
    returns
    >>> [[slice(0,2,None),slice(4,5,None)],[slice(2,4,None),slice(8,10,None)],[slice(5,8,None)]]
    """
    if len(index) == 0:
        return []
    # construct the return structure.
    # BUGFIX: np.int was removed in NumPy 1.24; the builtin int is equivalent.
    ind = np.asarray(index, dtype=int)
    ret = [[] for _ in range(ind.max() + 1)]
    # append a sentinel value that differs from the last entry so the
    # final run also produces a switchpoint
    ind_ = np.hstack((ind, ind[0] + ind[-1] + 1))
    switchpoints = np.nonzero(ind_ - np.roll(ind_, +1))[0]
    # a plain loop instead of a side-effect list comprehension
    for ind_i, bounds in zip(ind[switchpoints[:-1]],
                             zip(switchpoints, switchpoints[1:])):
        ret[ind_i].append(slice(*bounds))
    return ret
def keyColor(self, key):
    """Returns a color for the inputed key (used in pie charts).

    :param key: <str>
    :return: <QColor>
    """
    normalized = nativestring(key)
    # self.color() is called unconditionally, matching the eager
    # evaluation of the original setdefault argument
    fallback = self.color()
    if normalized not in self._keyColors:
        self._keyColors[normalized] = fallback
    return self._keyColors[normalized]
def get_func_kwargs(func, recursive=True):
    """Return a dict of ``func``'s keyword arguments and their defaults.

    Example:
        func = ibeis.run_experiment

    SeeAlso:
        argparse_funckw
        recursive_parse_kwargs
        parse_kwarg_keys
        parse_func_kwarg_keys
        get_func_kwargs

    NOTE(review): the ``recursive`` flag is accepted but never read in
    this body -- confirm whether recursive parsing should be conditional.
    """
    import utool as ut
    argspec = ut.get_func_argspec(func)
    if argspec.defaults is None:
        header_kw = {}
    else:
        # defaults align with the tail of args; zip the reversed sequences
        header_kw = dict(zip(argspec.args[::-1], argspec.defaults[::-1]))
    if argspec.keywords is not None:
        # func accepts **kwargs: merge in keys recovered from its source
        header_kw.update(dict(ut.recursive_parse_kwargs(func)))
    return header_kw
def retrieve_tags_from_component(user, c_id):
    """Retrieve all tags attached to a component."""
    join_table = models.JOIN_COMPONENTS_TAGS
    query = (sql.select([models.TAGS])
             .select_from(join_table.join(models.TAGS))
             .where(join_table.c.component_id == c_id))
    rows = flask.g.db_conn.execute(query)
    payload = {'tags': rows, '_meta': {'count': rows.rowcount}}
    return flask.jsonify(payload)
def filter(cls, parent=None, **filters):
    """Gets all resources of the given type and parent (if provided) which
    match the given filters. This will trigger an api GET request.

    :param parent ResourceBase: the parent of the resource - used for
        nesting the request url, optional
    :param **filters: any number of keyword arguments to filter by,
        e.g. name='example name'
    :returns: a list of matching resources
    """
    matched = cls._process_filter_request(parent, **filters)
    return cls._load_resources(matched)
def FindByName(cls, name):
    """Find a specific installed auth provider by name.

    Returns the first matching entry, or None if nothing matches.
    """
    reg = ComponentRegistry()
    entries = reg.load_extensions('iotile.auth_provider', name_filter=name)
    return next((entry for _, entry in entries), None)
def local_ip():
    """Get the local network IP of this machine."""
    try:
        ip = socket.gethostbyname(socket.gethostname())
    except IOError:
        ip = socket.gethostbyname('localhost')
    # a loopback answer means the hostname lookup was useless: probe the
    # interfaces, then fall back to the socket trick
    if ip.startswith('127.'):
        ip = get_local_ip_by_interfaces()
        if ip is None:
            ip = get_local_ip_by_socket()
    return ip
def write(self, args):  # pylint: disable=no-self-use
    """Write the progress update described by ``args``.

    ``args`` may carry 'message' (str) and 'percent' (a 0..1 float).
    """
    ShellProgressView.done = False
    message = args.get('message', '')
    percent = args.get('percent', None)
    # BUGFIX: truthiness testing (`if percent:`) silently skipped a
    # legitimate 0% update; only None means "no percentage supplied".
    if percent is not None:
        ShellProgressView.progress_bar = _format_value(message, percent)
        if int(percent) == 1:
            # finished: clear the bar
            ShellProgressView.progress_bar = None
    ShellProgressView.progress = message
def unbind(self, func, etype):
    '''Remove @func from the execution list for events with `.type` of
    @etype or meta-events with `.utype` of @etype.

    Raises:
        ValueError: if @func is not in said list.
    '''
    # list.remove does the index()+del dance in one step and raises the
    # same ValueError when func is absent.
    self.event_funcs[etype].remove(func)
def current_iid(self):
    """Currently active item's iid, or None when nothing valid is active.

    :rtype: str
    """
    active = self.current
    if active is None:
        return None
    if active not in self._canvas_markers:
        return None
    return self._canvas_markers[active]
def density_2d(self, x, y, rho0, Ra, Rs, center_x=0, center_y=0):
    """Projected (2D) density of the profile.

    :param x: x-coordinate(s)
    :param y: y-coordinate(s)
    :param rho0: density normalization
    :param Ra: first scale radius
    :param Rs: second scale radius
    :param center_x: profile center, x
    :param center_y: profile center, y
    :return: projected density at (x, y)
    """
    Ra, Rs = self._sort_ra_rs(Ra, Rs)
    dx = x - center_x
    dy = y - center_y
    r = np.sqrt(dx ** 2 + dy ** 2)
    sigma0 = self.rho2sigma(rho0, Ra, Rs)
    prefactor = sigma0 * Ra * Rs / (Rs - Ra)
    sigma = prefactor * (1 / np.sqrt(Ra ** 2 + r ** 2)
                         - 1 / np.sqrt(Rs ** 2 + r ** 2))
    return sigma
def _configure_from_mapping ( self , item , whitelist_keys = False , whitelist = None ) :
"""Configure from a mapping , or dict , like object .
Args :
item ( dict ) :
A dict - like object that we can pluck values from .
Keyword Args :
whitelist _ keys ( bool ) :
Should we whitelist the keys before adding them to the
configuration ? If no whitelist is provided , we use the
pre - existing config keys as a whitelist .
whitelist ( list [ str ] ) :
An explicit list of keys that should be allowed . If provided
and ` ` whitelist _ keys ` ` is true , we will use that as our
whitelist instead of pre - existing app config keys .
Returns :
fleaker . App :
Returns itself .""" | if whitelist is None :
whitelist = self . config . keys ( )
if whitelist_keys :
item = { k : v for k , v in item . items ( ) if k in whitelist }
self . config . from_mapping ( item )
return self |
def proj_l1(x, radius=1, out=None):
    r"""Projection onto l1-ball.

    Projection onto::

        ``{ x \in X | ||x||_1 \leq r }``

    with ``r`` being the radius.

    Parameters
    ----------
    x : `LinearSpace` element
        Element to project.
    radius : positive float, optional
        Radius ``r`` of the ball.
    out : space element, optional
        Output element; a new one is allocated from ``x.space`` if None.

    Returns
    -------
    out : space element
        The projected element.

    Notes
    -----
    The projection onto an l1-ball can be computed by projection onto a
    simplex, see [D+2008] for details.

    References
    ----------
    [D+2008] Duchi, J., Shalev-Shwartz, S., Singer, Y., and Chandra, T.
    *Efficient Projections onto the L1-ball for Learning in High
    dimensions*. ICML 2008, pp. 272-279.
    http://doi.org/10.1145/1390156.1390191

    See Also
    --------
    proximal_linfty : proximal for l-infinity norm
    proj_simplex : projection onto simplex
    """
    if out is None:
        out = x.space.element()
    # split x into magnitudes and signs
    u = x.ufuncs.absolute()
    v = x.ufuncs.sign()
    # project magnitudes onto the simplex, then restore the signs
    proj_simplex(u, radius, out)
    out *= v
    return out
def build_message(self):
    '''Build the payload dict for this message.

    :raises InvalidPayloadError: when text is missing, or an attachment
        lacks both a title and a text.
    '''
    if self._text is None:
        raise InvalidPayloadError('text is required')
    payload = {'text': self._text, 'markdown': self._markdown}
    # only include a channel when it differs from the default
    if self._channel != Incoming.DEFAULT_CHANNEL:
        payload['channel'] = self._channel
    if self._attachments:
        validated = []
        for attachment in self._attachments:
            if 'title' not in attachment and 'text' not in attachment:
                raise InvalidPayloadError('title or text is required')
            validated.append(attachment)
        payload['attachments'] = validated
    return payload
def purge_module(self, module_name):
    """A module has been removed, e.g. a module that had an error.

    Find any containers referencing it and remove the module from them.
    """
    containers = self.config["py3_config"][".module_groups"]
    affected = set(containers.get(module_name, []))
    for container in affected:
        try:
            self.modules[container].module_class.items.remove(module_name)
        except ValueError:
            # already gone from this container
            pass
def write_bytecode(self, f):
    """Dump the bucket's bytecode into the file or file-like object.

    Raises:
        TypeError: if the bucket holds no code.
    """
    if self.code is None:
        raise TypeError('can\'t write empty bucket')
    f.write(bc_magic)
    pickle.dump(self.checksum, f, 2)
    # BUGFIX: the original branched on ``isinstance(f, file)``; the
    # ``file`` builtin does not exist on Python 3, so that line raised
    # NameError.  marshal.dumps + write produces the same bytes for real
    # files and file-likes alike.
    f.write(marshal.dumps(self.code))
def get_request_id(self, renew=False):
    """:Brief: Return the shared request ID, generating a fresh one first
    when none exists yet or when ``renew`` is True.
    """
    if renew or not AppRequest.__request_id:
        self.set_request_id(uuid.uuid1())
    return AppRequest.__request_id
def dbmax20years(self, value=None):
    """Corresponds to IDD Field `dbmax20years`:
    20-year return period values for maximum extreme dry-bulb temperature.

    Args:
        value (float): value for IDD Field `dbmax20years` (Unit: C).
            If `value` is None it is stored unchecked as a missing value.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    if value is None:
        self._dbmax20years = None
        return
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `dbmax20years`'.format(value))
    self._dbmax20years = value
def get_asset_repository_assignment_session(self):
    """Gets the session for assigning asset to repository mappings.

    return: (osid.repository.AssetRepositoryAssignmentSession) - an
        ``AssetRepositoryAsignmentSession``
    raise: OperationFailed - unable to complete request
    raise: Unimplemented -
        ``supports_asset_repository_assignment()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_asset_repository_assignment()`` is ``true``.*
    """
    # guard: this session is optional and may be unsupported by the provider
    if not self.supports_asset_repository_assignment():
        raise errors.Unimplemented()
    # pylint: disable=no-member
    return sessions.AssetRepositoryAssignmentSession(runtime=self._runtime)
def image_task(self):
    """Returns a json-schema document that represents a task entity."""
    uri = "/{0}/task".format(self.uri_base)
    resp, resp_body = self.api.method_get(uri)
    return resp_body
def get(self, targetId):
    """Yields the analysed wav data.

    :param targetId: id of the target to analyse.
    :return: a (payload, status) pair in flask-restful style.
    """
    result = self._targetController.analyse(targetId)
    if result:
        if len(result) == 2:
            # NOTE(review): a 2-element result is treated as an
            # (error, status) pair only when the status is 404; any other
            # 2-element result is returned as analysed data -- confirm
            # that protocol against _targetController.analyse.
            if result[1] == 404:
                return result
            else:
                return {'name': targetId, 'data': self._jsonify(result)}, 200
        else:
            # non-2-element truthy result: treated as "not found"
            return None, 404
    else:
        # empty/None result: internal failure
        return None, 500
def BuildDefaultValue(self, value_cls):
    """Renders default value of a given class.

    Args:
      value_cls: Default value of this class will be rendered. This class has
        to be (or to be a subclass of) a self.value_class (i.e. a class that
        this renderer is capable of rendering).

    Returns:
      An initialized default value.

    Raises:
      DefaultValueError: if something goes wrong.
    """
    try:
        return value_cls()
    except Exception as e:  # pylint: disable=broad-except
        logging.exception(e)
        message = "Can't create default for value %s: %s" % (value_cls.__name__, e)
        raise DefaultValueError(message)
def reindex_rethrottle(self, task_id=None, params=None):
    """Change the value of ``requests_per_second`` of a running ``reindex`` task.

    `<https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-reindex.html>`_

    :arg task_id: The task id to rethrottle
    :arg requests_per_second: The throttle to set on this request in
        floating sub-requests per second. -1 means set no throttle.
    """
    path = _make_path("_reindex", task_id, "_rethrottle")
    return self.transport.perform_request("POST", path, params=params)
def output_ip(gandi, ip, datacenters, vms, ifaces, output_keys, justify=11):
    """Helper to output an ip information.

    :param gandi: api/cli helper handed through to the output primitives
    :param ip: dict describing the ip address
    :param datacenters: iterable of datacenter dicts
    :param vms: mapping of vm id -> vm dict
    :param ifaces: mapping of iface id -> iface dict
    :param output_keys: keys to display
    :param justify: label column width
    """
    output_generic(gandi, ip, output_keys, justify)
    if 'type' in output_keys:
        iface = ifaces.get(ip['iface_id'])
        type_ = 'private' if iface.get('vlan') else 'public'
        output_line(gandi, 'type', type_, justify)
        if type_ == 'private':
            output_line(gandi, 'vlan', iface['vlan']['name'], justify)
    if 'vm' in output_keys:
        iface = ifaces.get(ip['iface_id'])
        vm_id = iface.get('vm_id')
        if vm_id:
            vm_name = vms.get(vm_id, {}).get('hostname')
            if vm_name:
                output_line(gandi, 'vm', vm_name, justify)
    if 'dc' in output_keys:
        # BUG FIX: dc_name was only bound inside the loop, so when no
        # datacenter matched, the output_line below raised
        # UnboundLocalError; default it to an empty string first.
        dc_name = ''
        for dc in datacenters:
            if dc['id'] == ip.get('datacenter_id',
                                  ip.get('datacenter', {}).get('id')):
                dc_name = dc.get('dc_code', dc.get('iso', ''))
                break
        output_line(gandi, 'datacenter', dc_name, justify)
def get_match_history(self, account_id=None, **kwargs):
    """Returns a dictionary containing a list of the most recent Dota matches.

    :param account_id: (int, optional)
    :param hero_id: (int, optional)
    :param game_mode: (int, optional) see ``ref/modes.json``
    :param skill: (int, optional) see ``ref/skill.json``
    :param min_players: (int, optional) only return matches with minimum
        amount of players
    :param league_id: (int, optional) for ids use ``get_league_listing()``
    :param start_at_match_id: (int, optional) start at matches equal to or
        older than this match id
    :param matches_requested: (int, optional) defaults to ``100``
    :param tournament_games_only: (str, optional) limit results to
        tournament matches only
    :return: dictionary of matches, see :doc:`responses </responses>`
    """
    # An account_id passed explicitly in kwargs wins over the positional one.
    kwargs.setdefault('account_id', account_id)
    url = self.__build_url(urls.GET_MATCH_HISTORY, **kwargs)
    req = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if self.__check_http_err(req.status_code):
        return None
    return response.build(req, url, self.raw_mode)
def close(self):
    """Close the connection this context wraps."""
    self.logger = None
    # Drop the exception-class attributes mirrored onto this wrapper.
    for exc_name in _EXCEPTIONS:
        setattr(self, exc_name, None)
    # Always forget the connection, even if closing it raises.
    try:
        self.mdr.close()
    finally:
        self.mdr = None
def contribute_to_class(model_class, name='slots', descriptor=None):
    """Function that adds a description to a model Class.

    :param model_class: The model class the descriptor is to be added to.
    :param name: The attribute name the descriptor will be assigned to.
    :param descriptor: The descriptor instance to be used. If none is
        specified it will default to
        ``icekit.plugins.descriptors.PlaceholderDescriptor``.
    :return: True
    """
    rel = descriptor or PlaceholderDescriptor()
    rel.contribute_to_class(model_class, name)
    setattr(model_class, name, rel)
    return True
def project_surface(surface, angle=DEFAULT_ANGLE):
    """Project the surface at the given viewing angle.

    Args:
        surface (surface): the 2-D surface to project
        angle (float): the angle, in degrees, at which to project the surface

    Returns:
        surface: A projected surface.
    """
    theta = np.radians(angle)
    z_coef = np.sin(theta)
    y_coef = np.cos(theta)
    n_rows, n_cols = surface.shape
    # Linear ramp down the rows, replicated across the columns.
    ramp = np.linspace(0., 1., n_rows)
    slope = np.tile(ramp, [n_cols, 1]).T
    return slope * y_coef + surface * z_coef
def resample_boundaries(polygon, resolution, clip=None):
    """Return a version of a polygon with boundaries resampled
    to a specified resolution.

    Parameters
    ----------
    polygon : shapely.geometry.Polygon object
    resolution : float, desired distance between points on boundary
    clip : (2,) int, upper and lower bounds to clip
        number of samples to (to avoid exploding counts)

    Returns
    -------
    kwargs : dict, keyword args for a Polygon(**kwargs)
    """
    if clip is None:
        clip = [8, 200]

    def resample(boundary):
        # Sample count proportional to boundary length, clamped so a tiny
        # or huge perimeter cannot produce a degenerate or exploding count.
        count = int(np.clip(boundary.length / resolution, *clip))
        return resample_path(boundary.coords, count=count)

    # Resample the exterior shell and every interior ring.
    shell = resample(polygon.exterior)
    holes = deque(resample(interior) for interior in polygon.interiors)
    return {'shell': shell, 'holes': np.array(holes)}
async def receive_updates(self, request: Request):
    """Handle updates from Telegram."""
    raw = await request.read()
    try:
        content = ujson.loads(raw)
    except ValueError:
        # Telegram sent something that is not JSON.
        return json_response(
            {'error': True, 'message': 'Cannot decode body', },
            status=400,
        )
    logger.debug('Received from Telegram: %s', content)
    # Fan the update out to all registered listeners.
    message = TelegramMessage(content, self)
    responder = TelegramResponder(content, self)
    await self._notify(message, responder)
    return json_response({'error': False, })
def author(self):
    """Return the author of this page.

    Falls back to the site-wide ``default_author`` when the page's own
    config does not define an ``author`` key.  (The previous docstring
    wrongly described this as returning the theme path.)
    """
    if 'author' in self._config:
        return self._config['author']
    # Only consult the site default when the page does not override it,
    # so a missing `default_author` cannot break pages that set their own.
    return self.site.site_config['default_author']
def _reciprocal_condition_number(lu_mat, one_norm):
    r"""Compute reciprocal condition number of a matrix.

    Args:
        lu_mat (numpy.ndarray): A 2D array of a matrix :math:`A` that has been
            LU-factored, with the non-diagonal part of :math:`L` stored in the
            strictly lower triangle and :math:`U` stored in the upper triangle.
        one_norm (float): The 1-norm of the original matrix :math:`A`.

    Returns:
        float: The reciprocal condition number of :math:`A`.

    Raises:
        OSError: If SciPy is not installed.
        RuntimeError: If the reciprocal 1-norm condition number could not
            be computed.
    """
    if _scipy_lapack is None:
        raise OSError("This function requires SciPy for calling into LAPACK.")
    # pylint: disable=no-member
    rcond, info = _scipy_lapack.dgecon(lu_mat, one_norm)
    # pylint: enable=no-member
    # LAPACK signals success with info == 0.
    if info == 0:
        return rcond
    raise RuntimeError(
        "The reciprocal 1-norm condition number could not be computed.")
def instagram_config(self, id, secret, scope=None, **_):
    """Get config dictionary for instagram oauth."""
    if not scope:
        scope = 'basic'
    return {
        # request_token_url=None,
        'access_token_url': '/oauth/access_token/',
        'authorize_url': '/oauth/authorize/',
        'base_url': 'https://api.instagram.com/',
        'consumer_key': id,
        'consumer_secret': secret,
        'request_token_params': {'scope': scope},
    }
def BuscarCertConSaldoDisponible(self, cuit_depositante=None,
                                 cod_grano=2, campania=1314,
                                 coe=None,
                                 fecha_emision_des=None,
                                 fecha_emision_has=None,
                                 ):
    """Return the deposit certificates in which a producer has an
    available balance to settle / withdraw / transfer
    (Liquidar/Retirar/Transferir)."""
    # Call the web service with the session credentials; when no depositor
    # CUIT is given, default to our own (self.Cuit).
    ret = self.client.cgBuscarCertConSaldoDisponible(
        auth={
            'token': self.Token, 'sign': self.Sign,
            'cuit': self.Cuit, },
        cuitDepositante=cuit_depositante or self.Cuit,
        codGrano=cod_grano, campania=campania,
        coe=coe,
        fechaEmisionDes=fecha_emision_des,
        fechaEmisionHas=fecha_emision_has,
    )['oReturn']
    # Record/raise any service-level error codes returned in the response.
    self.__analizar_errores(ret)
    array = ret.get('certificado', [])
    # Reset error state and rebuild the output list of certificates.
    self.Excepcion = self.Traceback = ""
    self.params_out['certificados'] = []
    for cert in array:
        # Translate the WS camelCase fields into snake_case output keys.
        self.params_out['certificados'].append(dict(
            coe=cert['coe'],
            tipo_certificado=cert['tipoCertificado'],
            campania=cert['campania'],
            cuit_depositante=cert['cuitDepositante'],
            cuit_depositario=cert['cuitDepositario'],
            nro_planta=cert['nroPlanta'],
            kilos_disponibles=cert['kilosDisponibles'],
            cod_grano=cert['codGrano'],
        ))
    return True
def is_valid(data):
    """Checks if the input data is a Swagger document.

    :param dict data: Data to be validated
    :return: True, if data is a Swagger
    """
    if not isinstance(data, dict) or not data:
        return False
    return bool(data.get("swagger")) and isinstance(data.get('paths'), dict)
def _meanprecision(D, tol=1e-7, maxiter=None):
    '''Mean and precision alternating method for MLE of Dirichlet
    distribution.

    Parameters:
        D: (N, K) array of observations.
        tol: convergence tolerance on the change in log-likelihood.
        maxiter: maximum number of iterations (defaults to MAXINT).

    Returns:
        Estimated concentration parameter vector ``a1``.

    Raises:
        Exception: if the method fails to converge within ``maxiter``.
    '''
    N, K = D.shape
    logp = log(D).mean(axis=0)
    a0 = _init_a(D)
    s0 = a0.sum()
    if s0 < 0:
        a0 = a0 / s0
        s0 = 1
    elif s0 == 0:
        # BUG FIX: this branch referenced the undefined name `a`
        # (NameError at runtime); use a0 for the uniform fallback.
        a0 = ones(a0.shape) / len(a0)
        s0 = 1
    m0 = a0 / s0
    # Start updating
    if maxiter is None:
        maxiter = MAXINT
    for i in xrange(maxiter):
        # Alternate: fit the precision s, then the mean m.
        a1 = _fit_s(D, a0, logp, tol=tol)
        s1 = sum(a1)
        a1 = _fit_m(D, a1, logp, tol=tol)
        m = a1 / s1
        # if norm(a1-a0) < tol:
        if abs(loglikelihood(D, a1) - loglikelihood(D, a0)) < tol:  # much faster
            return a1
        a0 = a1
    raise Exception('Failed to converge after {} iterations, values are {}.'
                    .format(maxiter, a1))
def load(self):
    """Load the data file, do some basic type conversions."""
    df = pd.read_csv(self.input_file, encoding='utf8')
    # The wikipedia id is the last path component of the artist URL.
    df['wiki_id'] = df['artist'].str.split('/').str[-1]
    # Some years of birth are given as timestamps with prefix 't';
    # convert those to ISO-formatted datetime strings.
    is_ts = df['dob'].str.startswith('t')
    df.loc[is_ts, 'dob'] = df.loc[is_ts, 'dob'].str[1:].apply(
        lambda raw: str(datetime.datetime.fromtimestamp(float(raw))))
    # First four characters of the (now normalized) dob are the year.
    df['year_of_birth'] = df['dob'].str[:4].astype(int)
    return df
def make_blocks(ec_infos, codewords):
    """Returns the data and error blocks.

    :param ec_infos: Iterable of ECC information
    :param codewords: Iterable of (integer) code words.
    """
    data_blocks = []
    error_blocks = []
    pos = 0
    for info in ec_infos:
        # Each ECC info describes `num_blocks` blocks of `num_data` words.
        for _ in range(info.num_blocks):
            chunk = codewords[pos:pos + info.num_data]
            data_blocks.append(chunk)
            error_blocks.append(make_error_block(info, chunk))
            pos += info.num_data
    return data_blocks, error_blocks
def lgauss(x, mu, sigma=1.0, logpdf=False):
    """Log10 normal distribution...

    x     : Parameter of interest for scanning the pdf
    mu    : Peak of the lognormal distribution (mean of the underlying
            normal distribution is log10(mu))
    sigma : Standard deviation of the underlying normal distribution
    """
    x = np.array(x, ndmin=1)
    lmu = np.log10(mu)
    s2 = sigma * sigma
    lx = np.zeros(x.shape)
    v = np.zeros(x.shape)
    # Only take log10 of strictly positive entries; non-positive entries
    # keep lx == 0 and are masked with -inf below.
    lx[x > 0] = np.log10(x[x > 0])
    # Gaussian density evaluated in log10-space at lx.
    v = 1. / np.sqrt(2 * s2 * np.pi) * np.exp(-(lx - lmu) ** 2 / (2 * s2))
    if not logpdf:
        # Jacobian factor d(log10 x)/dx = 1/(x ln 10) converts the density
        # in log10-space into a density in x.
        v /= (x * np.log(10.))
    # NOTE(review): masking non-positive x with -inf reads like a log-scale
    # sentinel even on the non-log branch -- confirm this line's intended
    # placement relative to the `if` above (original indentation was lost).
    v[x <= 0] = -np.inf
    return v
def lhlo(self):
    """Send LMTP LHLO greeting, and process the server response.

    A regular LMTP greeting is sent, and if accepted by the server, the
    capabilities it returns are parsed.

    DLMTP authentication starts here by announcing the dlmtp_ident in
    the LHLO as our hostname. When the ident is accepted and DLMTP
    mode is enabled (dspam.conf: ServerMode = dspam|auto), the
    DSPAMPROCESSMODE capability is announced by the server.
    When this capability is detected, the <DspamClient>.dlmtp flag
    will be enabled.
    """
    # Announce the DLMTP ident as our hostname when configured; otherwise
    # fall back to this machine's fully-qualified domain name.
    if self.dlmtp_ident is not None:
        host = self.dlmtp_ident
    else:
        host = socket.getfqdn()
    self._send('LHLO ' + host + '\r\n')
    # Read the (possibly multi-line) 250 response; every line must carry
    # the 250 status code.
    finished = False
    while not finished:
        resp = self._read()
        if not resp.startswith('250'):
            raise DspamClientError('Unexpected server response at LHLO: ' + resp)
        # Capability name sits right after the "250-"/"250 " prefix;
        # chars 4..19 hold exactly "DSPAMPROCESSMODE" when advertised.
        if resp[4:20] == 'DSPAMPROCESSMODE':
            self.dlmtp = True
            logger.debug('Detected DLMTP extension in LHLO response')
        if resp[3] == ' ':  # difference between "250-8BITMIME" and "250 SIZE"
            # A space after the status code marks the final response line.
            finished = True
def duplicate(self, request, *args, **kwargs):
    """Duplicate (make copy of) ``Collection`` models."""
    if not request.user.is_authenticated:
        raise exceptions.NotFound
    ids = self.get_ids(request.data)
    # Restrict to the collections this user may actually view.
    queryset = get_objects_for_user(
        request.user, 'view_collection', Collection.objects.filter(id__in=ids))
    visible_ids = queryset.values_list('id', flat=True)
    missing_ids = list(set(ids) - set(visible_ids))
    if missing_ids:
        raise exceptions.ParseError(
            "Collections with the following ids not found: {}".format(
                ', '.join(map(str, missing_ids))))
    duplicated = queryset.duplicate(contributor=request.user)
    return Response(self.get_serializer(duplicated, many=True).data)
def send(self, path, value, metric_type):
    """Send a metric to Statsd.

    :param list path: The metric path to record
    :param mixed value: The value to record
    :param str metric_type: The metric type
    """
    msg = self._msg_format.format(
        path=self._build_path(path, metric_type),
        value=value,
        metric_type=metric_type)
    payload = msg.encode('ascii')
    LOGGER.debug('Sending %s to %s:%s', payload, self._host, self._port)
    try:
        if self._tcp:
            # Silently drop metrics while the TCP stream is down.
            if self._sock.closed():
                return
            return self._sock.write(payload)
        self._sock.sendto(payload, (self._host, self._port))
    except iostream.StreamClosedError as error:  # pragma: nocover
        LOGGER.warning('Error sending TCP statsd metric: %s', error)
    except (OSError, socket.error) as error:  # pragma: nocover
        LOGGER.exception('Error sending statsd metric: %s', error)
def get_weights(self):
    '''Computes the PLD weights vector :py:obj:`w`.

    .. warning:: Deprecated and not thoroughly tested.
    '''
    log.info("Computing PLD weights...")
    # Loop over all chunks
    weights = [None for i in range(len(self.breakpoints))]
    for b, brkpt in enumerate(self.breakpoints):
        # Masks for current chunk
        m = self.get_masked_chunk(b)
        c = self.get_chunk(b)  # NOTE(review): `c` is never used below
        # This block of the masked covariance matrix
        _mK = GetCovariance(self.kernel, self.kernel_params,
                            self.time[m], self.fraw_err[m])
        # This chunk of the normalized flux
        f = self.fraw[m] - np.nanmedian(self.fraw)
        # Loop over all orders, collecting X.X^T per PLD order that has
        # already been fit (lam_idx gates which orders are active).
        _A = [None for i in range(self.pld_order)]
        for n in range(self.pld_order):
            if self.lam_idx >= n:
                X = self.X(n, m)
                _A[n] = np.dot(X, X.T)
                del X  # free the design matrix immediately; it can be large
        # Compute the weights: solve (K + sum_n lam_n X_n X_n^T) W = f
        A = np.sum([l * a for l, a in zip(self.lam[b], _A)
                    if l is not None], axis=0)
        W = np.linalg.solve(_mK + A, f)
        weights[b] = [l * np.dot(self.X(n, m).T, W)
                      for n, l in enumerate(self.lam[b]) if l is not None]
    self._weights = weights
def bounds(self, pixelbuffer=0):
    """Return Tile boundaries.

    - pixelbuffer: tile buffer in pixels
    """
    left, bottom = self._left, self._bottom
    right, top = self._right, self._top
    if pixelbuffer:
        offset = self.pixel_x_size * float(pixelbuffer)
        left -= offset
        bottom -= offset
        right += offset
        top += offset
    # on global grids clip at northern and southern TilePyramid bound
    if self.tp.grid.is_global:
        top = min([top, self.tile_pyramid.top])
        bottom = max([bottom, self.tile_pyramid.bottom])
    return Bounds(left, bottom, right, top)
def log(message, severity="INFO", print_debug=True):
    """Logs, prints, or raises a message.

    Arguments:
    message -- message to report
    severity -- string of one of these values:
        CRITICAL | ERROR | WARNING | INFO | DEBUG
    """
    printable = ('WARNING', 'INFO', 'DEBUG')
    if severity not in printable:
        # CRITICAL / ERROR (and anything unknown) escalate to an exception.
        raise Exception("{0}: {1}".format(severity, message))
    # DEBUG output is printed only when explicitly enabled.
    if severity != 'DEBUG' or print_debug:
        print("{0}: {1}".format(severity, message))
def mkdir(dirname, overwrite=False):
    """Wraps around os.mkdir(), but checks for existence first.

    :param dirname: path of the directory to create
    :param overwrite: when True, an existing directory is removed and
        recreated; when False an existing directory is left untouched
    :return: True if the directory was (re)created, False if it already
        existed and was left alone
    """
    if op.isdir(dirname):
        if not overwrite:
            # Nothing is changed
            return False
        shutil.rmtree(dirname)
        os.mkdir(dirname)
        logging.debug("Overwrite folder `{0}`.".format(dirname))
    else:
        # os.makedirs also creates missing intermediate directories; this
        # replaces the previous bare `except:` fallback around os.mkdir,
        # which could mask unrelated OS errors.
        os.makedirs(dirname)
        logging.debug("`{0}` not found. Creating new.".format(dirname))
    return True
def print_message(self, message, verbosity_needed=1):
    """Prints the message, if verbosity is high enough."""
    if self.args.verbosity < verbosity_needed:
        return
    print(message)
def is_fp_closed(obj):
    """Checks whether a given file-like object is closed.

    :param obj:
        The file-like object to check.
    """
    try:
        # `isclosed()` first, in case Python3 doesn't set `closed`
        # (GH Issue #928).
        return obj.isclosed()
    except AttributeError:
        pass
    try:
        # The official file-like-object attribute.
        return obj.closed
    except AttributeError:
        pass
    try:
        # A container holding another file-like object that is released
        # on exhaustion (e.g. HTTPResponse).
        return obj.fp is None
    except AttributeError:
        pass
    raise ValueError("Unable to determine whether fp is closed.")
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.