signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def close(self):
    """Close the injector and every Provider instance it opened, including generators.

    Providers are closed in the reverse order in which they were opened, and
    each provider is closed exactly once.  A provider is closed whenever the
    injector accessed it, even if the dependency was never successfully
    provided, so providers must decide for themselves whether any work
    remains to be done in their close method.
    """
    if self.closed:
        raise RuntimeError('{!r} already closed'.format(self))
    # Note: unable to apply injector on close method.
    for finalize in reversed(self.finalizers):
        finalize()
    self.closed = True
    self.instances.clear()
    self.values.clear()
|
def _add_discovery_config(self):
    """Register the Discovery service's own API config in our config map.

    This should only be called while holding self._config_lock; the code
    here assumes the lock is held.
    """
    api_config = discovery_service.DiscoveryService.API_CONFIG
    self._configs[(api_config['name'], api_config['version'])] = api_config
|
def _process_key(self, key):
    """Process the given export key from redis.

    The key's byte prefix selects the handler.  Drivers (non-worker mode)
    only execute ``FunctionsToRun`` exports; workers additionally register
    remote functions and record exported actor classes.

    :param key: raw redis export key (bytes)
    """
    # Handle the driver case first.
    if self.mode != ray.WORKER_MODE:
        if key.startswith(b"FunctionsToRun"):
            with profiling.profile("fetch_and_run_function"):
                self.fetch_and_execute_function_to_run(key)
        # Return because FunctionsToRun are the only things that
        # the driver should import.
        return
    if key.startswith(b"RemoteFunction"):
        with profiling.profile("register_remote_function"):
            (self.worker.function_actor_manager.fetch_and_register_remote_function(key))
    elif key.startswith(b"FunctionsToRun"):
        with profiling.profile("fetch_and_run_function"):
            self.fetch_and_execute_function_to_run(key)
    elif key.startswith(b"ActorClass"):
        # Keep track of the fact that this actor class has been
        # exported so that we know it is safe to turn this worker
        # into an actor of that class.
        self.worker.function_actor_manager.imported_actor_classes.add(key)
    # TODO(rkn): We may need to bring back the case of
    # fetching actor classes here.
    else:
        raise Exception("This code should be unreachable.")
|
def _secret_event_lifecycle_cb(conn, secret, event, detail, opaque):
    '''Handle libvirt secret lifecycle events and forward them to the event bus.'''
    payload = {
        'secret': {'uuid': secret.UUIDString()},
        'event': _get_libvirt_enum_string('VIR_SECRET_EVENT_', event),
        'detail': 'unknown',  # currently unused
    }
    _salt_send_event(opaque, conn, payload)
|
def descriptor_factory(self, type_name, shard=u'lobby', **kwargs):
    """Create a descriptor document and persist it for starting the agent later.

    The first parameter is a type_name selecting the descriptor factory.
    The second parameter is optional (default ``lobby``).  Usage::

        > descriptor_factory('shard_descriptor', 'some shard')
    """
    document = factories.build(type_name, shard=unicode(shard), **kwargs)
    return self._database_connection.save_document(document)
|
def _set_backup(self, v, load=False):
    """Setter method for backup, mapped from YANG variable
    /mpls_state/lsp/backup (container).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_backup is considered as a private method.  Backends looking to
    populate this variable should do so via calling thisObj._set_backup()
    directly.

    YANG Description: MPLS LSP detail backup information
    """
    # Unwrap values that carry their own YANG type constructor.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v, base=backup.backup, is_container='container', presence=False, yang_name="backup", rest_name="backup", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-backup', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)
    except (TypeError, ValueError):
        # Surface the generated type signature so the caller can see what was expected.
        raise ValueError({'error-string': """backup must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=backup.backup, is_container='container', presence=False, yang_name="backup", rest_name="backup", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-backup', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)""", })
    self.__backup = t
    # Generated classes may define _set() to propagate the change upstream.
    if hasattr(self, '_set'):
        self._set()
|
def update_folder_name(self, name, update_folder_data=True):
    """Change this folder's name.

    :param str name: new name to change to
    :param bool update_folder_data: whether or not to re-fetch the data
    :return: Updated or Not
    :rtype: bool
    """
    # Root folders cannot be renamed; an empty name is also rejected.
    if self.root or not name:
        return False
    endpoint = self._endpoints.get('get_folder').format(id=self.folder_id)
    response = self.con.patch(self.build_url(endpoint),
                              data={self._cc('displayName'): name})
    if not response:
        return False
    self.name = name
    if not update_folder_data:
        return True
    payload = response.json()
    self.name = payload.get(self._cc('displayName'), '')
    self.parent_id = payload.get(self._cc('parentFolderId'), None)
    self.child_folders_count = payload.get(self._cc('childFolderCount'), 0)
    self.unread_items_count = payload.get(self._cc('unreadItemCount'), 0)
    self.total_items_count = payload.get(self._cc('totalItemCount'), 0)
    self.updated_at = dt.datetime.now()
    return True
|
def save(self, path):
    """Save the loss configuration as ``loss.json`` under *path*.

    The JSON document records the loss class name and its hyper-parameters.

    :param path: directory in which to write ``loss.json``
    """
    # Bug fix: the file was opened in binary mode ('wb'), but json.dump
    # writes str and fails on a binary file under Python 3; the handle was
    # also never closed.  Open in text mode inside a context manager.
    with open(os.path.join(path, 'loss.json'), 'w') as fh:
        json.dump(dict(loss=self.__class__.__name__, params=self.hparams), fh)
|
def set_simple_fault_geometry_3D(w, src):
    """Build a 3D polygon from a simple-fault source node and add it to *w*."""
    assert "simpleFaultSource" in src.tag
    geometry_index = get_taglist(src).index("simpleFaultGeometry")
    attrs = parse_simple_fault_geometry(src.nodes[geometry_index])
    build_polygon_from_fault_attrs(w, attrs)
|
def repr_new_instance(self, class_data):
    """Render constructor code such as::

        person = Person(name='Jack', person_id=1)
    """
    cls_name = self.formatted_classname(class_data["classname"])
    inst_name = self.formatted_instancename(class_data["classname"])
    metadata = class_data.get("metadata", dict())
    args = ", ".join(
        "%s=%r" % (key, value) for key, value in self.sorted_dict(metadata)
    )
    return "%s = %s(%s)" % (inst_name, cls_name, args)
|
def anno_parser(func):
    "Look at params (annotated with `Param`) in func and return an `ArgumentParser`"
    parser = ArgumentParser(description=func.__doc__)
    for name, sig_param in inspect.signature(func).parameters.items():
        anno = func.__annotations__.get(name, Param())
        kwargs = anno.kwargs
        if sig_param.default != inspect.Parameter.empty:
            kwargs['default'] = sig_param.default
        parser.add_argument(f"{anno.pre}{name}", **kwargs)
    return parser
|
def get_conditional_uni(cls, left_parent, right_parent):
    """Identify the pair of univariate values from the parents.

    Args:
        left_parent (Edge): left parent
        right_parent (Edge): right parent

    Returns:
        tuple[np.ndarray, np.ndarray]: left and right parents univariate.
    """
    left, right, _ = cls._identify_eds_ing(left_parent, right_parent)

    def pick(parent, node):
        # Select the univariate slot matching the identified node.
        return parent.U[0] if parent.L == node else parent.U[1]

    return pick(left_parent, left), pick(right_parent, right)
|
def decrypt(private, ciphertext, output):
    """Decrypt ciphertext with private key.

    Requires PRIVATE key file and the CIPHERTEXT encrypted with
    the corresponding public key.

    :param private: file-like object containing the private key as JSON
    :param ciphertext: source of the encrypted number to decrypt
    :param output: file-like object the decrypted value is printed to
    """
    privatekeydata = json.load(private)
    assert 'pub' in privatekeydata
    pub = load_public_key(privatekeydata['pub'])
    log("Loading private key")
    private_key_error = "Invalid private key"
    # Validate the expected key structure before reconstructing the key.
    # NOTE(review): these asserts are stripped under ``python -O``; consider
    # explicit exceptions if that matters here.
    assert 'key_ops' in privatekeydata, private_key_error
    assert "decrypt" in privatekeydata['key_ops'], private_key_error
    assert 'p' in privatekeydata, private_key_error
    assert 'q' in privatekeydata, private_key_error
    assert privatekeydata['kty'] == 'DAJ', private_key_error
    # Rebuild the Paillier private key from its base64-encoded primes.
    _p = phe.util.base64_to_int(privatekeydata['p'])
    _q = phe.util.base64_to_int(privatekeydata['q'])
    private_key = phe.PaillierPrivateKey(pub, _p, _q)
    log("Decrypting ciphertext")
    enc = load_encrypted_number(ciphertext, pub)
    out = private_key.decrypt(enc)
    print(out, file=output)
|
def attach(self, **kwargs):
    """Attach to this container.

    :py:meth:`logs` is a wrapper around this method, which you can use
    instead if you want to fetch/stream container output without first
    retrieving the entire backlog.

    Args:
        stdout (bool): Include stdout.
        stderr (bool): Include stderr.
        stream (bool): Return container output progressively as an iterator
            of strings, rather than a single string.
        logs (bool): Include the container's previous output.

    Returns:
        By default, the container's output as a single string.
        If ``stream=True``, an iterator of output strings.

    Raises:
        :py:class:`docker.errors.APIError`
            If the server returns an error.
    """
    api_client = self.client.api
    return api_client.attach(self.id, **kwargs)
|
def get_nodes(self, request):
    """Return the menu's nodes for tags."""
    tag_nodes = [NavigationNode(_('Tags'), reverse('zinnia:tag_list'), 'tags')]
    tag_nodes.extend(
        NavigationNode(tag.name,
                       reverse('zinnia:tag_detail', args=[tag.name]),
                       tag.pk, 'tags')
        for tag in tags_published())
    return tag_nodes
|
def getRangeValues(self, vp, verbose=None):
    """Return a list of all available values for the Visual Property ``vp``.

    This method is only for Visual Properties with a Discrete Range, such as
    NODE_SHAPE or EDGE_LINE_TYPE.  Additional details on common Visual
    Properties can be found in the Basic Visual Lexicon JavaDoc API
    (http://chianti.ucsd.edu/cytoscape-3.6.1/API/org/cytoscape/view/presentation/property/BasicVisualLexicon.html)

    :param vp: ID of the Visual Property
    :param verbose: print more
    :returns: 200: successful operation
    """
    request_url = self.___url + 'styles/visualproperties/' + str(vp) + '/values'
    return api(url=request_url, method="GET", verbose=verbose, parse_params=False)
|
def get(self, request, bot_id, id, format=None):
    """Get TelegramBot by id.

    serializer: TelegramBotSerializer
    responseMessages:
        - code: 401
          message: Not authenticated
    """
    # Thin wrapper: delegate straight to the parent detail view.
    return super(TelegramBotDetail, self).get(request, bot_id, id, format)
|
def _parse_canonical_dbpointer(doc):
    """Decode a JSON (deprecated) DBPointer to bson.dbref.DBRef."""
    dbref = doc['$dbPointer']
    if len(doc) != 1:
        raise TypeError('Bad $dbPointer, extra field(s): %s' % (doc,))
    if not isinstance(dbref, DBRef):
        raise TypeError('Bad $dbPointer, expected a DBRef: %s' % (doc,))
    dbref_doc = dbref.as_doc()
    # DBPointer must not contain $db in its value.
    if dbref.database is not None:
        raise TypeError('Bad $dbPointer, extra field $db: %s' % (dbref_doc,))
    if not isinstance(dbref.id, ObjectId):
        raise TypeError('Bad $dbPointer, $id must be an ObjectId: %s' % (dbref_doc,))
    if len(dbref_doc) != 2:
        raise TypeError('Bad $dbPointer, extra field(s) in DBRef: %s' % (dbref_doc,))
    return dbref
|
def ungettext(self):
    """Dispatch to the appropriate ngettext method to handle text objects.

    Under Python 3 this resolves to ``ngettext()``; under Python 2 it is
    ``ungettext()``.  This should not be used with bytestrings.
    """
    # pylint: disable=no-member
    translations = self._translations
    return translations.ungettext if six.PY2 else translations.ngettext
|
def _run_server(self):
    """Start the HTTP server (single-process in debug mode, forked otherwise)."""
    try:
        if __conf__.DEBUG:
            self._webapp.listen(self._port)
        else:
            http_server = HTTPServer(self._webapp)
            http_server.bind(self._port)
            http_server.start(0)
        IOLoop.current().start()
    except KeyboardInterrupt:
        print("exit ...")
|
def add(self, key):
    """Store new key in a new link at the end of the linked list."""
    if key not in self._map:
        self._map[key] = link = _Link()
        root = self._root
        last = root.prev
        # Splice the new link in just before the root sentinel.
        link.prev, link.next, link.key = last, root, key
        # Neighbours reference the new link only via a weak proxy.
        last.next = root.prev = weakref.proxy(link)
|
def makelink(self, tarinfo, targetpath):
    """Make a (symbolic) link called targetpath.

    If it cannot be created (platform limitation), we try to extract a copy
    of the referenced file instead of a link.
    """
    try:
        # For systems that support symbolic and hard links.
        if tarinfo.issym():
            os.symlink(tarinfo.linkname, targetpath)
        else:
            # See extract().
            if os.path.exists(tarinfo._link_target):
                os.link(tarinfo._link_target, targetpath)
            else:
                self._extract_member(self._find_link_target(tarinfo),
                                     targetpath)
    except symlink_exception:
        # Bug fix: the fallback extraction previously lived in the ``else``
        # clause, which runs only when the link WAS created (overwriting it),
        # while this handler computed an unused ``linkpath`` and silently
        # dropped the failure.  Extract the referenced file here instead.
        try:
            self._extract_member(self._find_link_target(tarinfo),
                                 targetpath)
        except KeyError:
            raise ExtractError("unable to resolve link inside archive")
|
def add_edge(self, u, v, weight=None):
    """Add an edge between u and v.

    The nodes u and v will be automatically added if they are
    not already in the graph.

    Parameters
    ----------
    u, v : nodes
        Nodes can be any hashable Python object.
    weight : int, float (default=None)
        The weight of the edge

    Examples
    --------
    >>> from pgmpy.base import DAG
    >>> G = DAG()
    >>> G.add_nodes_from(nodes=['Alice', 'Bob', 'Charles'])
    >>> G.add_edge(u='Alice', v='Bob')
    >>> G.nodes()
    ['Alice', 'Bob', 'Charles']
    >>> G.edges()
    [('Alice', 'Bob')]

    When the node is not already present in the graph:

    >>> G.add_edge(u='Alice', v='Ankur')
    >>> G.nodes()
    ['Alice', 'Ankur', 'Bob', 'Charles']
    >>> G.edges()
    [('Alice', 'Bob'), ('Alice', 'Ankur')]

    Adding edges with weight:

    >>> G.add_edge('Ankur', 'Maria', weight=0.1)
    >>> G.edge['Ankur']['Maria']
    {'weight': 0.1}
    """
    # Delegate to the parent class implementation of add_edge.
    super(DAG, self).add_edge(u, v, weight=weight)
|
def gettrace(self, burn=0, thin=1, chain=-1, slicing=None):
    """Return the trace.

    :Stochastics:
      - burn (int): The number of transient steps to skip.
      - thin (int): Keep one in thin.
      - chain (int): The index of the chain to fetch. If None, return all chains.
      - slicing: A slice, overriding burn and thin assignement.
    """
    window = slice(burn, None, thin) if slicing is None else slicing
    if chain is None:
        return concatenate(list(self._trace.values()))[window]
    if chain < 0:
        # Normalize a negative index against the number of chains.
        chain = range(self.db.chains)[chain]
    return self._trace[chain][window]
|
def get_height_rect(width: int, string: str) -> int:
    """Return the number of lines which would be printed from these parameters.

    `width` is the width of the print boundary.
    `string` is a Unicode string which may include color control characters.

    .. versionadded:: 9.2
    """
    encoded = string.encode("utf-8")  # type: bytes
    return int(lib.get_height_rect2(width, encoded, len(encoded)))
|
def end_datetime(self) -> Optional[datetime.datetime]:
    """Return the end date of the set of intervals, or ``None`` if empty."""
    intervals = self.intervals
    if intervals:
        return max(interval.end for interval in intervals)
    return None
|
def unbounded(self):
    """Whether the solution is unbounded."""
    self._check_valid()
    if self._ret_val == 0:
        # Solver finished: inspect the reported problem status.
        return swiglpk.glp_get_status(self._problem._p) == swiglpk.GLP_UNBND
    return self._ret_val == swiglpk.GLP_ENODFS
|
def validate_create_package(ctx, opts, owner, repo, package_type, skip_errors, **kwargs):
    """Check new package parameters via the API."""
    styled_type = click.style(package_type, bold=True)
    click.echo(
        "Checking %(package_type)s package upload parameters ... "
        % {"package_type": styled_type},
        nl=False,
    )
    context_msg = "Failed to validate upload parameters!"
    with handle_api_exceptions(ctx, opts=opts, context_msg=context_msg,
                               reraise_on_error=skip_errors):
        with maybe_spinner(opts):
            api_validate_create_package(package_format=package_type,
                                        owner=owner, repo=repo, **kwargs)
    click.secho("OK", fg="green")
    return True
|
def show_buff(self, pos):
    """Return the display of the instruction.

    :rtype: string
    """
    parts = [self.get_name() + " ", "%x:" % self.first_key]
    parts.extend(" %x" % target for target in self.targets)
    return "".join(parts)
|
def tuple(self, var, cast=None, default=NOTSET):
    """:rtype: tuple"""
    # Wrap a user-supplied element cast in a 1-tuple; otherwise cast to tuple.
    item_cast = (cast,) if cast else tuple
    return self.get_value(var, cast=item_cast, default=default)
|
def dump_children(self, obj):
    """Dump the siblings of a PID."""
    ordered_children = obj.children.ordered('asc').all()
    data, errors = PIDSchema(many=True).dump(ordered_children)
    return data
|
def _parse_expression(s):
    """Parse boolean expression containing and/or operators.

    Tokenizes *s* with a single regex scanner and builds a tree of ``And`` /
    ``Or`` clauses over ``Variable`` leaves; ``and`` binds tighter than
    ``or``, and ``()`` / ``[]`` may group sub-expressions.

    :param s: expression string to parse
    :raises ParseError: on invalid tokens or unbalanced/mismatched groups
    """
    # Converters for opeartor clauses; ``None`` marks a single-term clause,
    # which evaluates to the term itself.
    operators = {'and': And, 'or': Or, None: lambda *args: args[0]}
    # Pairing of end group symbols with start group symbols
    group_pairs = {')': '(', ']': '['}
    scanner = re.compile(r'''
        (\s+) | # space
        (\(|\[) | # group_start
        (\)|\]) | # group_end
        ((?:or|and)\b) | # operator
        ([^\s\(\)\[\]]+) | # variable
        (\Z) | # end
        (.) # error
        ''', re.DOTALL | re.VERBOSE | re.UNICODE | re.IGNORECASE)
    # Parsed using two states and a stack of open clauses.
    # At state 0 (not expect_operator): Expect variable, or parenthesis group
    # start.
    # At state 1 (expect_operator): Expect operator, parenthesis group end, or
    # end.
    expect_operator = False
    clause_stack = []
    current_clause = []
    clause_operator = None
    clause_symbol = None

    def close():
        # Fold the current clause into its parent and resume the parent's
        # (operator, symbol, terms) context.
        prev_op, prev_symbol, prev_clause = clause_stack.pop()
        prev_clause.append(operators[clause_operator](*current_clause))
        return prev_op, prev_symbol, prev_clause

    for match in re.finditer(scanner, s):
        (space, group_start, group_end, operator, variable, end, error) = match.groups()
        if error is not None:
            raise ParseError('Invalid token in expression string: {}'.format(repr(match.group(0))), span=(match.start(), match.end()))
        elif space is not None:
            continue
        elif expect_operator and operator is not None:
            operator = operator.lower()
            if operator == 'and' and clause_operator != 'and':
                # 'and' binds tighter: open a nested clause seeded with the
                # previous term.
                prev_term = current_clause.pop()
                clause_stack.append((clause_operator, clause_symbol, current_clause))
                current_clause = [prev_term]
            elif operator == 'or' and clause_operator == 'and':
                # Leaving an 'and' clause: close it back into its parent.
                clause_operator, clause_symbol, current_clause = close()
            clause_operator = operator
            expect_operator = False
        elif expect_operator and group_end is not None:
            if clause_operator == 'and':
                clause_operator, clause_symbol, current_clause = close()
            if len(clause_stack) == 0:
                raise ParseError('Unbalanced parenthesis group in expression', span=(match.start(), match.end()))
            if group_pairs[group_end] != clause_symbol:
                raise ParseError('Group started with {} ended with {}'.format(clause_symbol, group_end), span=(match.start(), match.end()))
            clause_operator, clause_symbol, current_clause = close()
        elif expect_operator and end is not None:
            # End of input: close any trailing 'and' clause.
            if clause_operator == 'and':
                clause_operator, clause_symbol, current_clause = close()
        elif not expect_operator and variable is not None:
            current_clause.append(Variable(variable))
            expect_operator = True
        elif not expect_operator and group_start is not None:
            # Open a new group: push the current context and start fresh.
            clause_stack.append((clause_operator, clause_symbol, current_clause))
            current_clause = []
            clause_operator = None
            clause_symbol = group_start
        else:
            raise ParseError('Invalid token in expression string: {!r}'.format(match.group(0)), span=(match.start(), match.end()))
    if len(clause_stack) > 0:
        raise ParseError('Unbalanced parenthesis group in expression')
    expr = operators[clause_operator](*current_clause)
    return expr
|
def populateFromDirectory(self, vcfDirectory):
    """Populate this VariantSet from every ``*.vcf.gz`` file in the directory.

    This is mainly used as a convenience for testing purposes.
    """
    vcfFiles = glob.glob(os.path.join(vcfDirectory, "*.vcf.gz"))
    indexFiles = [vcfFile + ".tbi" for vcfFile in vcfFiles]
    self.populateFromFile(vcfFiles, indexFiles)
|
def commit_channel(self, channel_id):
    """Commit a channel to Kolibri Studio.

    Args:
        channel_id (str): channel's id on Kolibri Studio
    Returns: channel id and link to uploaded channel
    """
    payload = {"channel_id": channel_id, "stage": config.STAGE}
    response = config.SESSION.post(config.finish_channel_url(),
                                   data=json.dumps(payload))
    if response.status_code != 200:
        body = response._content.decode('utf-8')
        config.LOGGER.error("\n\nCould not activate channel: {}\n".format(body))
        if response.status_code == 403:
            config.LOGGER.error("Channel can be viewed at {}\n\n".format(
                config.open_channel_url(channel_id, staging=True)))
        sys.exit()
    response.raise_for_status()
    new_channel = json.loads(response._content.decode("utf-8"))
    channel_link = config.open_channel_url(new_channel['new_channel'])
    return channel_id, channel_link
|
def month_average_temperature(self, older_year=None, newer_year=None, include_yearly=False, minimum_days=23):
    '''>>> station = get_closest_station(38.8572, -77.0369)
    >>> station_data = StationDataGSOD(station)
    >>> station_data.month_average_temperature(1990, 2000, include_yearly=False)
    [276.1599380905833, 277.5375516246206, 281.1881231671554, 286.7367003367004, 291.8689638318671, 296.79545454545456, 299.51868686868687, 298.2097914630174, 294.4116161616162, 288.25883023786247, 282.3188552188553, 277.8282339524275]'''
    # Take years, make them inclusive; add minimum valid days.
    # NOTE(review): older_year/newer_year default to None, but the chained
    # comparison below fails on None under Python 3 — confirm callers always
    # supply both years.
    year_month_averages = {}
    year_month_counts = {}
    for year, data in self.parsed_data.items():
        if not (older_year <= year <= newer_year):
            continue
        # Ignore out-of-range years easily
        year_month_averages[year] = [0.0] * 12
        year_month_counts[year] = [0] * 12
        for i, day in enumerate(data):
            if day is None:
                continue
            # Don't do these comparisons to make it fast
            if day.DATE.year < older_year or day.DATE.year > newer_year:
                continue
            # Ignore out-of-range days as possible
            T = day.TEMP
            if T is None:
                continue
            # Cache these lookups
            year_month_averages[year][day.DATE.month - 1] += T
            year_month_counts[year][day.DATE.month - 1] += 1
        # Convert per-month sums into averages; months with fewer than
        # minimum_days valid readings become None.
        for month in range(12):
            count = year_month_counts[year][month]
            if count < minimum_days:
                ans = None
            else:
                ans = year_month_averages[year][month] / count
            year_month_averages[year][month] = ans
    # Compute the average of the month
    actual_averages = [0.0] * 12
    actual_averages_counts = [0] * 12
    for year, average in year_month_averages.items():
        for month in range(12):
            # NOTE(review): ``average`` is always a list here (never None) and
            # counts start at 0 (never None), so two of the defensive branches
            # below appear unreachable — left untouched.
            if average is not None and average[month] is not None:
                count = actual_averages_counts[month]
                if count is None:
                    count = 1
                else:
                    count += 1
                actual_averages_counts[month] = count
                month_average_sum = actual_averages[month]
                if month_average_sum is None:
                    month_average_sum = average[month]
                else:
                    month_average_sum += average[month]
                actual_averages[month] = month_average_sum
    for month in range(12):
        actual_averages[month] = actual_averages[month] / actual_averages_counts[month]
    # Don't set anything as properties - too many variables used in calculating them
    # Speed is not that important.
    if include_yearly:
        return actual_averages, year_month_averages
    else:
        return actual_averages
|
def messages(self, query, **kwargs):
    """Search messages; see https://api.slack.com/methods/search.messages"""
    self.url = 'https://slack.com/api/search.messages'
    return super(Search, self).search_from_url(query, **kwargs)
|
def scm_find_files(path, scm_files, scm_dirs):
    """setuptools compatible file finder that follows symlinks.

    - path: the root directory from which to search
    - scm_files: set of scm controlled files and symlinks
      (including symlinks to directories)
    - scm_dirs: set of scm controlled directories
      (including directories containing no scm controlled files)

    scm_files and scm_dirs must be absolute with symlinks resolved (realpath),
    with normalized case (normcase).

    Spec here: http://setuptools.readthedocs.io/en/latest/setuptools.html#adding-support-for-revision-control-systems
    """
    realpath = os.path.normcase(os.path.realpath(path))
    seen = set()
    res = []
    for dirpath, dirnames, filenames in os.walk(realpath, followlinks=True):
        # dirpath with symlinks resolved
        realdirpath = os.path.normcase(os.path.realpath(dirpath))

        def _link_not_in_scm(n):
            # True for a symlink in the current directory that the SCM does
            # not control.
            fn = os.path.join(realdirpath, os.path.normcase(n))
            return os.path.islink(fn) and fn not in scm_files

        if realdirpath not in scm_dirs:
            # directory not in scm, don't walk it's content
            dirnames[:] = []
            continue
        if (os.path.islink(dirpath) and not os.path.relpath(realdirpath, realpath).startswith(os.pardir)):
            # a symlink to a directory not outside path:
            # we keep it in the result and don't walk its content
            res.append(os.path.join(path, os.path.relpath(dirpath, path)))
            dirnames[:] = []
            continue
        if realdirpath in seen:
            # symlink loop protection
            dirnames[:] = []
            continue
        # Prune symlinked subdirectories the SCM does not control.
        dirnames[:] = [dn for dn in dirnames if not _link_not_in_scm(dn)]
        for filename in filenames:
            if _link_not_in_scm(filename):
                continue
            # dirpath + filename with symlinks preserved
            fullfilename = os.path.join(dirpath, filename)
            if os.path.normcase(os.path.realpath(fullfilename)) in scm_files:
                res.append(os.path.join(path, os.path.relpath(fullfilename, path)))
        seen.add(realdirpath)
    return res
|
def send(self, event_data):
    """Sends an event data and blocks until acknowledgement is received or
    operation times out.

    :param event_data: The event to be sent.
    :type event_data: ~azure.eventhub.common.EventData
    :raises: ~azure.eventhub.common.EventHubError if the message fails to
        send.
    :return: The outcome of the message send.
    :rtype: ~uamqp.constants.MessageSendResult
    """
    if self.error:
        raise self.error
    if not self.running:
        raise ValueError("Unable to send until client has been started.")
    if event_data.partition_key and self.partition:
        raise ValueError("EventData partition key cannot be used with a partition sender.")
    # The completion callback records the result into self._outcome/_condition.
    event_data.message.on_send_complete = self._on_outcome
    try:
        self._handler.send_message(event_data.message)
        if self._outcome != constants.MessageSendResult.Ok:
            raise Sender._error(self._outcome, self._condition)
    except errors.MessageException as failed:
        # Permanent message failure: wrap, close the sender, surface it.
        error = EventHubError(str(failed), failed)
        self.close(exception=error)
        raise error
    except (errors.TokenExpired, errors.AuthenticationException):
        # Token problems are treated as transient: reconnect instead of failing.
        log.info("Sender disconnected due to token error. Attempting reconnect.")
        self.reconnect()
    except (errors.LinkDetach, errors.ConnectionClose) as shutdown:
        if shutdown.action.retry and self.auto_reconnect:
            log.info("Sender detached. Attempting reconnect.")
            self.reconnect()
        else:
            log.info("Sender detached. Shutting down.")
            error = EventHubError(str(shutdown), shutdown)
            self.close(exception=error)
            raise error
    except errors.MessageHandlerError as shutdown:
        if self.auto_reconnect:
            log.info("Sender detached. Attempting reconnect.")
            self.reconnect()
        else:
            log.info("Sender detached. Shutting down.")
            error = EventHubError(str(shutdown), shutdown)
            self.close(exception=error)
            raise error
    except Exception as e:
        # Any other error is fatal for this sender.
        log.info("Unexpected error occurred (%r). Shutting down.", e)
        error = EventHubError("Send failed: {}".format(e))
        self.close(exception=error)
        raise error
    else:
        # NOTE(review): after a reconnect() path this method returns None
        # rather than retrying the send — confirm that is intended.
        return self._outcome
|
def download_shared_files(job, input_args):
    """Download and store shared input files in the FileStore.

    input_args: dict  Dictionary of input arguments (from main())
    """
    shared_files = ['unc.bed', 'hg19.transcripts.fa', 'composite_exons.bed',
                    'normalize.pl', 'rsem_ref.zip', 'ebwt.zip', 'chromosomes.zip']
    shared_ids = {
        name: job.addChildJobFn(download_from_url, input_args[name]).rv()
        for name in shared_files
    }
    if input_args['config'] or input_args['config_fastq']:
        job.addFollowOnJobFn(parse_config_file, shared_ids, input_args)
    else:
        sample_path = input_args['input']
        uuid = os.path.splitext(os.path.basename(sample_path))[0]
        job.addFollowOnJobFn(download_sample, shared_ids, input_args,
                             (uuid, sample_path))
|
def connection(self):  # pragma: no cover
    """Establish LDAP connection."""
    # self.server allows us to fetch server info
    # (including LDAP schema list) if we wish to
    # add this feature later
    self.server = ldap3.Server(self.host, port=self.port, get_info=ldap3.ALL)
    # lazy=True defers the actual bind until the connection is used.
    # NOTE(review): auto_bind=True raises on bind failure — confirm callers
    # handle ldap3 exceptions.
    self.conn = ldap3.Connection(self.server, user=self.user_dn, password=self.user_pw, auto_bind=True, lazy=True, receive_timeout=1)
|
def get_fields_in_model(instance):
    """Return the list of fields in the given model instance.

    Checks whether to use the official _meta API or the raw field data.
    Many-to-many fields are excluded.

    :param instance: The model instance to get the fields for
    :type instance: Model
    :return: The list of fields for the given model (instance)
    :rtype: list
    """
    assert isinstance(instance, Model)
    meta = instance._meta
    # Django 1.8+ exposes a callable get_fields() on _meta.
    if callable(getattr(meta, 'get_fields', None)):
        return [field for field in meta.get_fields() if track_field(field)]
    return meta.fields
|
def get_selected_elements_of_core_class(self, core_element_type):
    """Return all selected elements having the specified ``core_element_type``
    as state element class.

    :return: Subset of the selection, only containing elements having
        ``core_element_type`` as state element class
    :rtype: set
    :raises RuntimeError: if ``core_element_type`` is not a known core class
    """
    if core_element_type is Outcome:
        return self.outcomes
    elif core_element_type is InputDataPort:
        return self.input_data_ports
    elif core_element_type is OutputDataPort:
        return self.output_data_ports
    elif core_element_type is ScopedVariable:
        return self.scoped_variables
    elif core_element_type is Transition:
        return self.transitions
    elif core_element_type is DataFlow:
        return self.data_flows
    elif core_element_type is State:
        return self.states
    # Bug fix: concatenating a class object onto a str raised TypeError
    # instead of the intended RuntimeError; format the type into the message.
    raise RuntimeError("Invalid core element type: {}".format(core_element_type))
|
def load_metadata_for_topics(self, *topics):
    """Discover topic metadata and brokers.

    Afkak internally calls this method whenever metadata is required.

    :param str topics:
        Topic names to look up. The resulting metadata includes the list of
        topic partitions, brokers owning those partitions, and which
        partitions are in sync.

        Fetching metadata for a topic may trigger auto-creation if that is
        enabled on the Kafka broker.

        When no topic name is given metadata for *all* topics is fetched.
        This is an expensive operation, but it does not trigger topic
        creation.

    :returns:
        :class:`Deferred` for the completion of the metadata fetch.
        This will fire with ``True`` on success, ``None`` on
        cancellation, or fail with an exception on error.

        On success, topic metadata is available from the attributes of
        :class:`KafkaClient`: :data:`~KafkaClient.topic_partitions`,
        :data:`~KafkaClient.topics_to_brokers`, etc.
    """
    # Normalize the requested topic names before encoding the request.
    topics = tuple(_coerce_topic(t) for t in topics)
    log.debug("%r: load_metadata_for_topics(%s)", self, ', '.join(repr(t) for t in topics))
    # An empty topic tuple means "fetch metadata for every topic".
    fetch_all_metadata = not topics

    # create the request
    requestId = self._next_id()
    request = KafkaCodec.encode_metadata_request(self._clientIdBytes, requestId, topics)

    # Callbacks for the request deferred...
    def _handleMetadataResponse(response):
        # Decode the response
        brokers, topics = KafkaCodec.decode_metadata_response(response)
        log.debug("%r: got metadata brokers=%r topics=%r", self, brokers, topics)

        # If we fetched the metadata for all topics, then store away the
        # received metadata for diagnostics.
        if fetch_all_metadata:
            self._brokers = brokers
            self._topics = topics

        # Iff we were fetching for all topics, and we got at least one
        # broker back, then remove brokers when we update our brokers
        ok_to_remove = (fetch_all_metadata and len(brokers))
        # Take the metadata we got back, update our self.clients, and
        # if needed disconnect or connect from/to old/new brokers
        self._update_brokers(brokers.values(), remove=ok_to_remove)

        # Now loop through all the topics/partitions in the response
        # and setup our cache/data-structures
        for topic, topic_metadata in topics.items():
            _, topic_error, partitions = topic_metadata
            # Drop any stale cached metadata for this topic before refilling.
            self.reset_topic_metadata(topic)
            self.topic_errors[topic] = topic_error
            if not partitions:
                log.warning('No partitions for %s, Err:%d', topic, topic_error)
                continue
            self.topic_partitions[topic] = []
            for partition, meta in partitions.items():
                self.topic_partitions[topic].append(partition)
                topic_part = TopicAndPartition(topic, partition)
                self.partition_meta[topic_part] = meta
                if meta.leader == -1:
                    # Leader -1 means the partition is currently leaderless;
                    # record None so a later request can trigger a refresh.
                    log.warning('No leader for topic %s partition %s', topic, partition)
                    self.topics_to_brokers[topic_part] = None
                else:
                    self.topics_to_brokers[topic_part] = brokers[meta.leader]
            self.topic_partitions[topic] = sorted(self.topic_partitions[topic])
        return True

    def _handleMetadataErr(err):
        # This should maybe do more cleanup?
        if err.check(t_CancelledError, CancelledError):
            # Eat the error
            # XXX Shouldn't this return False? The success branch
            # returns True.
            return None
        log.error("Failed to retrieve metadata:%s", err)
        raise KafkaUnavailableError("Unable to load metadata from configured " "hosts: {!r}".format(err))

    # Send the request, add the handlers
    d = self._send_broker_unaware_request(requestId, request)
    d.addCallbacks(_handleMetadataResponse, _handleMetadataErr)
    return d
|
def search_bm25(cls, term, weights=None, with_score=False, score_alias='score', explicit_ordering=False):
    """Full-text search for *term*, ranking the results with the BM25 algorithm.

    Thin wrapper that delegates to ``cls._search`` using ``cls.bm25`` as the
    ranking function.
    """
    ranking = cls.bm25
    return cls._search(term, weights, with_score, score_alias, ranking, explicit_ordering)
|
def clear(self):
    """Reset every segment frame to the widget's background color.

    :return: None
    """
    bg = self._bg_color
    for frame in self._segments.values():
        frame.configure(background=bg)
|
def summary(self):
    """Return a multi-line string summary of this transaction."""
    src_desc = ", ".join(s.account for s in self.src) if self.src else "UNKNOWN"
    dst_desc = ", ".join(d.account for d in self.dst) if self.dst else "UNKNOWN"
    lines = [
        "Transaction:",
        " When: " + self.date.strftime("%a %d %b %Y"),
        " Description: " + self.desc.replace('\n', ' '),
        " For amount: {}".format(self.amount),
        " From: {}".format(src_desc),
        " To: {}".format(dst_desc),
        "",  # trailing newline after join
    ]
    return "\n".join(lines)
|
def get_schema(brain_or_object):
    """Get the schema of the content.

    :param brain_or_object: A single catalog brain or content object
    :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
    :returns: Schema object
    """
    obj = get_object(brain_or_object)
    # The portal root has no schema to return.
    if is_portal(obj):
        fail("get_schema can't return schema of portal root")
    if is_dexterity_content(obj):
        # Dexterity: look the schema up via the FTI of the portal type.
        types_tool = get_tool("portal_types")
        type_info = types_tool.getTypeInfo(obj.portal_type)
        return type_info.lookupSchema()
    if is_at_content(obj):
        # Archetypes content carries its schema directly.
        return obj.Schema()
    fail("{} has no Schema.".format(brain_or_object))
|
def get_dates(feed: "Feed", *, as_date_obj: bool = False) -> List[str]:
    """Return a list of dates for which the given "Feed" is valid.

    The result is the full day-by-day range spanned by the feed's calendar
    and calendar_dates tables, or the empty list if the "Feed" has no
    calendar information.

    Parameters
    ----------
    feed : "Feed"
    as_date_obj : boolean
        If ``True``, then return the dates as ``datetime.date`` objects;
        otherwise return them as strings

    Returns
    -------
    list
        Dates
    """
    candidates = []
    cal = feed.calendar
    if cal is not None and not cal.empty:
        if "start_date" in cal.columns:
            candidates.append(cal["start_date"].min())
        if "end_date" in cal.columns:
            candidates.append(cal["end_date"].max())
    cal_dates = feed.calendar_dates
    if cal_dates is not None and not cal_dates.empty:
        if "date" in cal_dates.columns:
            candidates.extend([cal_dates["date"].min(), cal_dates["date"].max()])
    if not candidates:
        return []
    # Span from the earliest to the latest date seen, one day at a time.
    first, last = map(hp.datestr_to_date, (min(candidates), max(candidates)))
    num_days = (last - first).days
    result = [first + rd.relativedelta(days=+d) for d in range(num_days + 1)]
    # Convert dates back to strings if required
    if not as_date_obj:
        result = [hp.datestr_to_date(x, inverse=True) for x in result]
    return result
|
def byte_list_to_u32le_list(data, pad=0x00):
    """! @brief Convert a list of bytes to a list of 32-bit integers (little endian).

    If the length of the data list is not a multiple of 4, then the pad value
    is used for the additional required bytes.
    """
    word_count = len(data) // 4
    words = [
        data[k] | (data[k + 1] << 8) | (data[k + 2] << 16) | (data[k + 3] << 24)
        for k in range(0, word_count * 4, 4)
    ]
    leftover = len(data) % 4
    if leftover:
        # Pad the trailing bytes out to a full word and convert that word too.
        tail = list(data[-leftover:]) + [pad] * (4 - leftover)
        words += byte_list_to_u32le_list(tail)
    return words
|
def get_deployment_group(self, project, deployment_group_id, action_filter=None, expand=None):
    """GetDeploymentGroup.

    [Preview API] Get a deployment group by its ID.
    :param str project: Project ID or project name
    :param int deployment_group_id: ID of the deployment group.
    :param str action_filter: Get the deployment group only if this action can be performed on it.
    :param str expand: Include these additional details in the returned object.
    :rtype: :class:`<DeploymentGroup> <azure.devops.v5_0.task_agent.models.DeploymentGroup>`
    """
    # Serialize only the arguments that were actually supplied.
    route_values = {}
    if project is not None:
        route_values['project'] = self._serialize.url('project', project, 'str')
    if deployment_group_id is not None:
        route_values['deploymentGroupId'] = self._serialize.url('deployment_group_id', deployment_group_id, 'int')
    query_parameters = {}
    if action_filter is not None:
        query_parameters['actionFilter'] = self._serialize.query('action_filter', action_filter, 'str')
    if expand is not None:
        query_parameters['$expand'] = self._serialize.query('expand', expand, 'str')
    response = self._send(
        http_method='GET',
        location_id='083c4d89-ab35-45af-aa11-7cf66895c53e',
        version='5.0-preview.1',
        route_values=route_values,
        query_parameters=query_parameters,
    )
    return self._deserialize('DeploymentGroup', response)
|
def send_script_async(self, conn_id, data, progress_callback, callback):
    """Asynchronously send a script to this IOTile device.

    Args:
        conn_id (int): A unique identifier that will refer to this connection
        data (bytes): the script to send to the device
        progress_callback (callable): A function to be called with status on our
            progress, called as: progress_callback(done_count, total_count)
        callback (callable): A callback for when we have finished sending the
            script. The callback will be called as
            callback(connection_id, adapter_id, success, failure_reason)
            'connection_id': the connection id
            'adapter_id': this adapter's id
            'success': a bool indicating whether we received a response to our attempted RPC
            'failure_reason': a string with the reason for the failure if success == False
    """
    # Resolve the internal handle that corresponds to this connection id.
    handle = None
    for candidate, info in self._connections.items():
        if info['connection_id'] == conn_id:
            handle = candidate
    if handle is None:
        callback(conn_id, self.id, False, 'Invalid connection_id')
        return
    services = self._connections[handle]['services']
    self._command_task.async_command(
        ['_send_script', handle, services, data, 0, progress_callback],
        self._send_script_finished,
        {'connection_id': conn_id, 'callback': callback},
    )
|
def main():
    """Run ftfy as a command-line utility."""
    import argparse
    parser = argparse.ArgumentParser(description="ftfy (fixes text for you), version %s" % __version__)
    parser.add_argument('filename', default='-', nargs='?', help='The file whose Unicode is to be fixed. Defaults ' 'to -, meaning standard input.')
    parser.add_argument('-o', '--output', type=str, default='-', help='The file to output to. Defaults to -, meaning ' 'standard output.')
    parser.add_argument('-g', '--guess', action='store_true', help="Ask ftfy to guess the encoding of your input. " "This is risky. Overrides -e.")
    parser.add_argument('-e', '--encoding', type=str, default='utf-8', help='The encoding of the input. Defaults to UTF-8.')
    parser.add_argument('-n', '--normalization', type=str, default='NFC', help='The normalization of Unicode to apply. ' 'Defaults to NFC. Can be "none".')
    parser.add_argument('--preserve-entities', action='store_true', help="Leave HTML entities as they are. The default " "is to decode them, as long as no HTML tags " "have appeared in the file.")
    args = parser.parse_args()
    encoding = args.encoding
    if args.guess:
        # encoding=None asks fix_file to auto-detect the input encoding.
        encoding = None
    if args.filename == '-':
        # Get a standard input stream made of bytes, so we can decode it as
        # whatever encoding is necessary.
        file = sys.stdin.buffer
    else:
        file = open(args.filename, 'rb')
    if args.output == '-':
        outfile = sys.stdout
    else:
        # Refuse to write the output over the input file.
        if os.path.realpath(args.output) == os.path.realpath(args.filename):
            sys.stderr.write(SAME_FILE_ERROR_TEXT)
            sys.exit(1)
        outfile = open(args.output, 'w', encoding='utf-8')
    normalization = args.normalization
    if normalization.lower() == 'none':
        normalization = None
    if args.preserve_entities:
        fix_entities = False
    else:
        # 'auto' decodes entities only while no HTML tags have been seen.
        fix_entities = 'auto'
    try:
        for line in fix_file(file, encoding=encoding, fix_entities=fix_entities, normalization=normalization):
            try:
                outfile.write(line)
            except UnicodeEncodeError:
                # The output stream cannot represent the fixed text; print a
                # platform-appropriate hint and bail out.
                if sys.platform == 'win32':
                    sys.stderr.write(ENCODE_ERROR_TEXT_WINDOWS)
                else:
                    sys.stderr.write(ENCODE_ERROR_TEXT_UNIX)
                sys.exit(1)
    except UnicodeDecodeError as err:
        sys.stderr.write(DECODE_ERROR_TEXT % (encoding, err))
        sys.exit(1)
|
def get_name_DID_info(self, name, lastblock=None):
    """Given a name, find its DID (decentralized identifier) information.

    Returns {'address': ..., 'index': ...}
    Returns None if there is no such name
    """
    block_height = self.lastblock if lastblock is None else lastblock
    cursor = self.db.cursor()
    # namedb_get_name_DID_info already yields None for a missing name,
    # so its result can be returned directly.
    return namedb_get_name_DID_info(cursor, name, block_height)
|
def main():
    """Run check.

    anycast-healthchecker is a multi-threaded software and for each
    service check it holds a thread. If a thread dies then the service
    is not monitored anymore and the route for the IP associated with service
    it wont be withdrawn in case service goes down in the meantime.
    """
    arguments = docopt(__doc__)
    config_file = '/etc/anycast-healthchecker.conf'
    config_dir = '/etc/anycast-healthchecker.d'
    # Merge the main config file with every *.conf drop-in.
    config = configparser.ConfigParser()
    config_files = [config_file]
    config_files.extend(glob.glob(os.path.join(config_dir, '*.conf')))
    config.read(config_files)
    try:
        pid = get_processid(config)
    except ValueError as exc:
        # Nagios-style exit code 3 == UNKNOWN.
        print("UNKNOWN: {e}".format(e=exc))
        sys.exit(3)
    else:
        process_up = running(pid)
        if not process_up:
            print("CRITICAL: anycast-healthchecker with pid ({p}) isn't running".format(p=pid))
            sys.exit(3)
        # Every config section except 'daemon' is a service check.
        services = config.sections()
        services.remove('daemon')
        if not services:
            print("UNKNOWN: No service checks are configured")
            sys.exit(3)
        enabled_service_checks = parse_services(config, services)
        if enabled_service_checks == 0:
            print("OK: Number of service checks is zero, no threads are running")
            sys.exit(0)
        else:
            # parent process plus number of threads for each service check
            configured_threads = enabled_service_checks + 1
            # ps -T lists one row per thread of the daemon process.
            cmd = ['/bin/ps', 'h', '-T', '-p', '{n}'.format(n=pid)]
            try:
                if arguments['-v']:
                    print("running {}".format(' '.join(cmd)))
                out = subprocess.check_output(cmd, timeout=1)
            except subprocess.CalledProcessError as exc:
                print("UNKNOWN: running '{c}' failed with return code: {r}".format(c=' '.join(cmd), r=exc.returncode))
                sys.exit(3)
            except subprocess.TimeoutExpired:
                print("UNKNOWN: running '{}' timed out".format(' '.join(cmd)))
                sys.exit(3)
            else:
                output_lines = out.splitlines()
                if arguments['-v']:
                    for line in output_lines:
                        print(line)
                running_threads = len(output_lines)
                if running_threads == configured_threads:
                    print("OK: UP (pid={p}) and all threads ({t}) are running".format(p=pid, t=configured_threads - 1))
                    sys.exit(0)
                elif running_threads - 1 == 0:  # minus parent process
                    print("CRITICAL: No threads are running OpDocs ANYCAST-03")
                    sys.exit(2)
                else:
                    print("CRITICAL: Found {n} running threads while configured " "number of threads is {c} OpDocs ANYCAST-03".format(n=running_threads - 1, c=configured_threads - 1))
                    sys.exit(2)
|
def reconnected(self, conn):
    """Re-subscribe a (re)connected connection and prime its RDY state.

    Subscribes *conn* to this reader's topic/channel and sets an RDY count
    of 1 so it starts receiving messages.
    """
    topic, channel = self._topic, self._channel
    conn.sub(topic, channel)
    conn.rdy(1)
|
def load(self, loc):
    """Load a pickled model from *loc*.

    The pickle is expected to hold a (weights, tagdict, classes) triple,
    which is unpacked onto ``self.model`` / ``self``.

    :param loc: Path of the pickle file.
    :raises MissingCorpusError: if the pickle file cannot be opened.
    :return: None
    """
    try:
        # FIX: the original called pickle.load(open(loc, 'rb')) and never
        # closed the file handle; use a context manager so it is always
        # closed, even if unpickling fails.
        with open(loc, 'rb') as model_file:
            w_td_c = pickle.load(model_file)
    except IOError:
        msg = ("Missing trontagger.pickle file.")
        raise MissingCorpusError(msg)
    self.model.weights, self.tagdict, self.classes = w_td_c
    self.model.classes = self.classes
    return None
|
def authorize(self, authentication_request,  # type: oic.oic.message.AuthorizationRequest
              user_id,  # type: str
              extra_id_token_claims=None  # type: Optional[Union[Mapping[str, Union[str, List[str]]], Callable[[str, str], Mapping[str, Union[str, List[str]]]]]]
              ):
    # type: (...) -> oic.oic.message.AuthorizationResponse
    """Creates an Authentication Response for the specified authentication request and local identifier of the
    authenticated user."""
    # Honor a custom subject identifier stored in userinfo, if any;
    # otherwise derive one (public or pairwise) for this client.
    custom_sub = self.userinfo[user_id].get('sub')
    if custom_sub:
        self.authz_state.subject_identifiers[user_id] = {'public': custom_sub}
        sub = custom_sub
    else:
        sub = self._create_subject_identifier(user_id, authentication_request['client_id'], authentication_request['redirect_uri'])
    self._check_subject_identifier_matches_requested(authentication_request, sub)
    response = AuthorizationResponse()
    # Issue the artifacts requested via response_type ('code', 'token',
    # 'id_token' in any combination).
    authz_code = None
    if 'code' in authentication_request['response_type']:
        authz_code = self.authz_state.create_authorization_code(authentication_request, sub)
        response['code'] = authz_code
    access_token_value = None
    if 'token' in authentication_request['response_type']:
        access_token = self.authz_state.create_access_token(authentication_request, sub)
        access_token_value = access_token.value
        self._add_access_token_to_response(response, access_token)
    if 'id_token' in authentication_request['response_type']:
        if extra_id_token_claims is None:
            extra_id_token_claims = {}
        elif callable(extra_id_token_claims):
            # Allow a callback to compute extra claims per (user, client).
            extra_id_token_claims = extra_id_token_claims(user_id, authentication_request['client_id'])
        requested_claims = self._get_requested_claims_in(authentication_request, 'id_token')
        if len(authentication_request['response_type']) == 1:
            # only id token is issued -> no way of doing userinfo request, so include all claims in ID Token,
            # even those requested by the scope parameter
            requested_claims.update(scope2claims(authentication_request['scope'], extra_scope_dict=self.extra_scopes))
        user_claims = self.userinfo.get_claims_for(user_id, requested_claims)
        # The ID Token binds the code and access token via c_hash/at_hash.
        response['id_token'] = self._create_signed_id_token(authentication_request['client_id'], sub, user_claims, authentication_request.get('nonce'), authz_code, access_token_value, extra_id_token_claims)
        logger.debug('issued id_token=%s from requested_claims=%s userinfo=%s extra_claims=%s', response['id_token'], requested_claims, user_claims, extra_id_token_claims)
    # Echo the client's state back, as required by OAuth2.
    if 'state' in authentication_request:
        response['state'] = authentication_request['state']
    return response
|
def water(target, temperature='pore.temperature', salinity='pore.salinity'):
    r"""Calculate the density of pure water or seawater at atmospheric pressure.

    Implements Eq. (8) given by Sharqawy et al. [1]. Values at temperatures
    higher than the normal boiling temperature are calculated at the
    saturation pressure.

    Parameters
    ----------
    target : OpenPNM Object
        The object for which these values are being calculated. This
        controls the length of the calculated array, and also provides
        access to other necessary thermofluid properties.
    temperature : string
        The dictionary key containing the temperature values. Temperature
        must be in Kelvin for this empirical equation to work.
    salinity : string
        The dictionary key containing the salinity values. Salinity must be
        expressed in g of salt per kg of solution (ppt). Treated as zero
        when the key is absent.

    Returns
    -------
    The density of water/seawater in [kg/m3]

    Notes
    -----
    T must be in K, and S in g of salt per kg of phase, or ppt (parts per
    thousand).
    VALIDITY: 273 < T < 453 K; 0 < S < 160 g/kg; ACCURACY: 0.1 %

    References
    ----------
    [1] Sharqawy M.H., Lienhard J.H., and Zubair, S.M., Desalination and
    Water Treatment, 2010.
    """
    T = target[temperature]
    S = target[salinity] if salinity in target.keys() else 0
    # Pure-water polynomial coefficients (a*) and salinity correction
    # coefficients (b*) from Sharqawy et al., Eq. (8).
    a1, a2, a3, a4, a5 = (9.9992293295E+02, 2.0341179217E-02,
                          -6.1624591598E-03, 2.2614664708E-05,
                          -4.6570659168E-08)
    b1, b2, b3, b4, b5 = (8.0200240891E-01, -2.0005183488E-03,
                          1.6771024982E-05, -3.0600536746E-08,
                          -1.6132224742E-11)
    TC = T - 273.15  # the correlation expects Celsius
    rho_w = a1 + a2 * TC + a3 * TC ** 2 + a4 * TC ** 3 + a5 * TC ** 4
    d_rho = b1 * S + b2 * S * TC + b3 * S * (TC ** 2) + b4 * S * (TC ** 3) + b5 * (S ** 2) * (TC ** 2)
    return rho_w + d_rho
|
def set_cover_tilt_position(self, position, channel=None):
    """Seek a specific tilt value by specifying a float() from 0.0 to 1.0.

    Returns False when *position* cannot be converted to float.
    """
    try:
        tilt = float(position)
    except Exception as err:
        LOG.debug("HelperActorBlindTilt.set_level_2: Exception %s" % (err,))
        return False
    # Remember the current LEVEL, write the tilt (LEVEL_2) first, then
    # re-write LEVEL so the device picks up the updated LEVEL_2.
    current_level = self.getWriteData("LEVEL", channel)
    self.writeNodeData("LEVEL_2", tilt, channel)
    self.writeNodeData("LEVEL", current_level, channel)
|
def start_in_keepedalive_processes(obj, nb_process):
    """Start nb_process worker processes and keep them alive.

    Sends *obj* to the workers as a job multiple times, then asks them to
    stop by sending the 'stop' sentinel.
    """
    processes = []
    readers_pipes = []
    writers_pipes = []
    for i in range(nb_process):
        # Start process with Pipes for communicate
        # One pipe pair per direction: worker -> us and us -> worker.
        local_read_pipe, local_write_pipe = Pipe(duplex=False)
        process_read_pipe, process_write_pipe = Pipe(duplex=False)
        readers_pipes.append(local_read_pipe)
        writers_pipes.append(process_write_pipe)
        p = Process(target=run_keepedalive_process, args=(local_write_pipe, process_read_pipe, obj))
        p.start()
        processes.append(p)
    # Send to process some job to do
    for job in range(3):
        print('send new job to processes:')
        for process_number in range(nb_process):
            # Send data to process
            writers_pipes[process_number].send(obj)
        reader_useds = []
        # Wait response from processes
        while readers_pipes:
            for r in wait(readers_pipes):
                try:
                    r.recv()
                except EOFError:
                    # Worker closed its end; treat like a received response.
                    pass
                finally:
                    reader_useds.append(r)
                    readers_pipes.remove(r)
        # Restore the full reader list for the next round of jobs.
        readers_pipes = reader_useds
    # Kill processes
    for writer_pipe in writers_pipes:
        writer_pipe.send('stop')
|
def write_model_map(self, model_name, name=None):
    """Save the counts model map to a FITS file.

    Parameters
    ----------
    model_name : str
        String that will be appended to the name of the output file.
    name : str
        Name of the component.

    Returns
    -------
    list
        The coadded total map followed by the per-component maps.
    """
    component_maps = [c.write_model_map(model_name, name) for c in self.components]
    outfile = os.path.join(self.workdir, 'mcube_%s.fits' % model_name)
    # Coadd the component maps into a single total map.
    total_map = Map.from_geom(self.geom)
    for component_map in component_maps:
        total_map.coadd(component_map)
    total_map.write(outfile, overwrite=True, conv='fgst-ccube')
    return [total_map] + component_maps
|
def add_interaction(self, u, v, t=None, e=None):
    """Add an interaction between u and v at time t vanishing (optional) at time e.

    The nodes u and v will be automatically added if they are
    not already in the graph.

    Parameters
    ----------
    u, v : nodes
        Nodes can be, for example, strings or numbers.
        Nodes must be hashable (and not None) Python objects.
    t : appearance snapshot id, mandatory
    e : vanishing snapshot id, optional (default=None)

    See Also
    --------
    add_edges_from : add a collection of interaction at time t

    Notes
    -----
    Adding an interaction that already exists but with different snapshot id
    updates the interaction data.

    Examples
    --------
    The following all add the interaction e=(1, 2, 0) to graph G:

    >>> G = dn.DynGraph()
    >>> G.add_interaction(1, 2, 0)  # explicit two-node form
    >>> G.add_interaction([(1, 2)], t=0)  # add interaction from iterable container

    Specify the vanishing of the interaction

    >>>> G.add_interaction(1, 3, t=1, e=10)

    will produce an interaction present in snapshots [0, 9]
    """
    if t is None:
        raise nx.NetworkXError("The t argument must be specified.")
    # Lazily create node/adjacency entries for unseen endpoints.
    if u not in self._node:
        self._adj[u] = self.adjlist_inner_dict_factory()
        self._node[u] = {}
    if v not in self._node:
        self._adj[v] = self.adjlist_inner_dict_factory()
        self._node[v] = {}
    # Normalize t to an [appearance, disappearance] interval.
    if type(t) != list:
        t = [t, t]
    # Record the "+" (appearance) event for the interval start.
    for idt in [t[0]]:
        if self.has_edge(u, v) and not self.edge_removal:
            continue
        else:
            if idt not in self.time_to_edge:
                self.time_to_edge[idt] = {(u, v, "+"): None}
            else:
                if (u, v, "+") not in self.time_to_edge[idt]:
                    self.time_to_edge[idt][(u, v, "+")] = None
    # A vanishing snapshot clamps the interval end and records a "-" event.
    if e is not None and self.edge_removal:
        t[1] = e - 1
        if e not in self.time_to_edge:
            self.time_to_edge[e] = {(u, v, "-"): None}
        else:
            self.time_to_edge[e][(u, v, "-")] = None
    # add the interaction
    datadict = self._adj[u].get(v, self.edge_attr_dict_factory())
    if 't' in datadict:
        # Edge already exists: merge the new interval with the most recent
        # stored presence interval, keeping the +/- event maps consistent.
        # NOTE(review): nesting of the event-map deletions below was
        # reconstructed from flattened source — verify against upstream.
        app = datadict['t']
        max_end = app[-1][1]
        if max_end == app[-1][0] and t[0] == app[-1][0] + 1:
            # Last interval is a single snapshot directly preceding t[0]:
            # extend it in place and drop the now-redundant "+" event.
            app[-1] = [app[-1][0], t[1]]
            if app[-1][0] + 1 in self.time_to_edge and (u, v, "+") in self.time_to_edge[app[-1][0] + 1]:
                del self.time_to_edge[app[-1][0] + 1][(u, v, "+")]
        else:
            if t[0] < app[-1][0]:
                raise ValueError("The specified interaction extension is broader than " "the ones already present for the given nodes.")
            if t[0] <= max_end < t[1]:
                # Overlapping interval: extend the stored end.
                app[-1][1] = t[1]
                if max_end + 1 in self.time_to_edge:
                    if self.edge_removal:
                        del self.time_to_edge[max_end + 1][(u, v, "-")]
                    del self.time_to_edge[t[0]][(u, v, "+")]
            elif max_end == t[0] - 1:
                # Contiguous interval: merge and move the removal event.
                if max_end + 1 in self.time_to_edge and (u, v, "+") in self.time_to_edge[max_end + 1]:
                    del self.time_to_edge[max_end + 1][(u, v, "+")]
                if self.edge_removal:
                    if max_end + 1 in self.time_to_edge and (u, v, '-') in self.time_to_edge[max_end + 1]:
                        del self.time_to_edge[max_end + 1][(u, v, '-')]
                    if t[1] + 1 in self.time_to_edge:
                        self.time_to_edge[t[1] + 1][(u, v, "-")] = None
                    else:
                        self.time_to_edge[t[1] + 1] = {(u, v, "-"): None}
                app[-1][1] = t[1]
            else:
                # Disjoint interval: store it as a new presence span.
                app.append(t)
    else:
        datadict['t'] = [t]
    # Update per-snapshot interaction counters.
    if e is not None:
        span = range(t[0], t[1] + 1)
        for idt in span:
            if idt not in self.snapshots:
                self.snapshots[idt] = 1
            else:
                self.snapshots[idt] += 1
    else:
        for idt in t:
            if idt is not None:
                if idt not in self.snapshots:
                    self.snapshots[idt] = 1
                else:
                    self.snapshots[idt] += 1
    self._adj[u][v] = datadict
    self._adj[v][u] = datadict
|
def match_range(self, el, condition):
    """Match range.

    Behavior is modeled after what we see in browsers. Browsers seem to
    evaluate if the value is out of range, and if not, it is in range. So a
    missing value will not evaluate out of range; therefore, value is in
    range. Personally, I feel like this should evaluate as neither in or out
    of range.
    """
    itype = self.get_attribute_by_name(el, 'type').lower()
    lo = self.get_attribute_by_name(el, 'min', None)
    if lo is not None:
        lo = Inputs.parse_value(itype, lo)
    hi = self.get_attribute_by_name(el, 'max', None)
    if hi is not None:
        hi = Inputs.parse_value(itype, hi)
    # There is no valid min or max, so we cannot evaluate a range
    if lo is None and hi is None:
        return False
    value = self.get_attribute_by_name(el, 'value', None)
    if value is not None:
        value = Inputs.parse_value(itype, value)
    out_of_range = False
    if value is not None:
        if itype in ("date", "datetime-local", "month", "week", "number", "range"):
            out_of_range = (lo is not None and value < lo) or (hi is not None and value > hi)
        elif itype == "time":
            if lo is not None and hi is not None and lo > hi:
                # Time is periodic, so this is a reversed/discontinuous range
                out_of_range = value < lo and value > hi
            else:
                out_of_range = (lo is not None and value < lo) or (hi is not None and value > hi)
    if condition & ct.SEL_IN_RANGE:
        return not out_of_range
    return out_of_range
|
def chk_genes(self, study, pop, assoc=None):
    """Sanity-check the study/population gene sets (and annotations, if given)."""
    # The study set must be a subset of the population set.
    if len(pop) < len(study):
        exit("\nERROR: The study file contains more elements than the population file. "
             "Please check that the study file is a subset of the population file.\n")
    # check the fraction of genomic ids that overlap between study and population
    overlap = self.get_overlap(study, pop)
    if overlap < 0.95:
        sys.stderr.write("\nWARNING: only {} fraction of genes/proteins in study are found in "
                         "the population background.\n\n".format(overlap))
    if overlap <= self.args.min_overlap:
        exit("\nERROR: only {} of genes/proteins in the study are found in the "
             "background population. Please check.\n".format(overlap))
    # Population and associations
    if assoc is not None and pop.isdisjoint(assoc.keys()):
        if self.objanno.name == 'gene2go':
            msg = ('**FATAL: NO POPULATION ITEMS SEEN IN THE NCBI gene2go ANNOTATIONS '
                   'FOR taxid({T}). TRY: --taxid=<taxid number>')
            exit(msg.format(T=next(iter(self.objanno.taxid2asscs.keys()))))
        else:
            exit('**FATAL: NO POPULATION ITEMS SEEN IN THE ANNOTATIONS')
|
def load_example(name):
    """Load an example problem by name.

    Parameters
    ----------
    name : string (e.g. 'airfoil')
        Name of the example to load

    Notes
    -----
    Each example is stored in a dictionary with the following keys:
        - 'A'        : sparse matrix
        - 'B'        : near-nullspace candidates
        - 'vertices' : dense array of nodal coordinates
        - 'elements' : dense array of element indices

    Current example names are: %s

    Examples
    --------
    >>> from pyamg.gallery import load_example
    >>> ex = load_example('knot')
    """
    # Guard clause: reject unknown example names up front.
    if name not in example_names:
        raise ValueError('no example with name (%s)' % name)
    path = os.path.join(example_dir, name + '.mat')
    return loadmat(path, struct_as_record=True)
|
def _get_all_forums ( self ) :
"""Returns all forums ."""
|
if not hasattr ( self , '_all_forums' ) :
self . _all_forums = list ( Forum . objects . all ( ) )
return self . _all_forums
|
def addDataProducts(self, dps):
    """Adds new data products to listview. dps is a list of DP objects.

    Returns True if new (non-quiet) DPs are added, or if existing non-quiet
    dps are updated (this usually tells the main window to wake up).
    """
    busy = Purr.BusyIndicator()
    wakeup = False
    # build up list of items to be inserted
    itemlist = []
    for dp in dps:
        item = self.dpitems.get(dp.sourcepath)
        # If item already exists, it needs to be moved to its new position.
        # If takeTopLevelItem() returns None, then item was already removed
        # (this shouldn't happen, but let's be defensive), and we make a new
        # one anyway.
        if item and self.takeTopLevelItem(self.indexOfTopLevelItem(item)):
            itemlist.append(item)
        else:
            itemlist.append(self._makeDPItem(None, dp))
        # Only a non-ignored, non-quiet DP should wake the main window.
        wakeup = wakeup or not (dp.ignored or dp.quiet)
    # if these DPs were added as a result of a drag-and-drop, we need to
    # insert them in FRONT of the dropped-on item
    if self._dropped_on:
        index = self.indexOfTopLevelItem(self._dropped_on)
    # else insert at end (after=None)
    else:
        index = self.topLevelItemCount()
    if itemlist:
        self.insertTopLevelItems(index, itemlist)
        self.emit(SIGNAL("updated"))
        for item in itemlist:
            # ensure combobox widgets are made
            self._itemComboBox(item, self.ColAction)
            self._itemComboBox(item, self.ColRender)
    return wakeup
|
def main(md=None, filename=None, cols=None, theme=None, c_theme=None, bg=None, c_no_guess=None, display_links=None, from_txt=None, do_html=None, no_colors=None, **kw):
    """Render a markdown string (or file) to ANSI-colored terminal output.

    md is the markdown string; alternatively we use filename and read it.
    Other parameters (all optional):
      cols          -- terminal width override
      theme / c_theme -- markdown / code-hilite theme; 'all' demos every theme
      bg            -- 'light' switches background handling (currently unused)
      c_no_guess    -- disable pygments lexer guessing
      display_links -- sets the module-global show_links flag
      from_txt      -- display only a sub-part starting at this text ("-f")
      do_html       -- return the raw HTML instead of ANSI
      no_colors     -- strip ANSI codes from the result
    """
    # Snapshot all arguments so the 'all'-themes branch can recurse with
    # modified copies of them.
    args = locals()
    if not md:
        if not filename:
            print 'Using sample markdown:'
            make_sample()
            md = args['md'] = md_sample
            print md
            print
            print 'Styling Result'
        else:
            with open(filename) as f:
                md = f.read()
    global term_columns
    # style rollers requested?  ('all' renders the sample in every theme)
    if c_theme == 'all' or theme == 'all':
        # kw cannot be forwarded as-is through main(**args)
        args.pop('kw')
        themes = read_themes()
        for k, v in themes.items():
            if not filename:
                yl = 'You like *%s*, *%s*?' % (k, v['name'])
                args['md'] = md_sample.replace(you_like, yl)
            # visual separator between themes
            print col('%s%s%s' % ('\n\n', '=' * term_columns, '\n'), L)
            # should really create an iterator here:
            if theme == 'all':
                args['theme'] = k
            else:
                args['c_theme'] = k
            print main(**args)
        return ''
    if cols:
        term_columns = int(cols)
    global show_links
    show_links = display_links
    if bg and bg == 'light':
        # not in use right now:
        global background, color
        background = BGL
        color = T
    set_theme(theme)
    global guess_lexer
    guess_lexer = not c_no_guess
    # code theme defaults to the markdown theme (or 'default'); the literal
    # string 'None' explicitly disables code hiliting
    if not c_theme:
        c_theme = theme or 'default'
    if c_theme == 'None':
        c_theme = None
    if c_theme:
        set_theme(c_theme, for_code=1)
    # NOTE(review): `c_guess` is not defined in this function — presumably a
    # module global; verify it exists at runtime.
    if c_theme or c_guess:
        # info:
        if not have_pygments:
            print col('No pygments, can not analyze code for hilite', R)
    # Create an instance of the Markdown class with the new extension
    MD = markdown.Markdown(extensions=[AnsiPrintExtension(), TableExtension(), fenced_code.FencedCodeExtension()])
    # html?
    the_html = MD.convert(md)
    if do_html:
        return the_html
    # who wants html, here is our result:
    try:
        ansi = MD.ansi
    except:
        # NOTE(review): `html` is not defined in this function — presumably a
        # module global; verify, else this branch raises NameError.
        if html:
            # can this happen? At least show:
            print "we have markdown result but no ansi."
            print html
        else:
            ansi = 'n.a. (no parsing result)'
    # The RAW html within source, incl. fenced code blocks:
    # phs are numbered like this in the md, we replace back:
    PH = markdown.util.HTML_PLACEHOLDER
    stash = MD.htmlStash
    nr = -1
    tags = Tags()
    # substitute each raw-HTML placeholder with its ANSI-rendered content
    for ph in stash.rawHtmlBlocks:
        nr += 1
        raw = html_parser.unescape(ph[0])
        pre = '<pre><code'
        if raw.startswith(pre):
            pre, raw = raw.split(pre, 1)
            raw = raw.split('>', 1)[1].rsplit('</code>', 1)[0]
            if 'class="' in pre:
                # language:
                lang = pre.split('class="', 1)[1].split('"')[0]
            else:
                lang = ''
            raw = tags.code(raw.strip(), from_fenced_block=1, lang=lang)
        ansi = ansi.replace(PH % nr, raw)
    # don't want these: gone through the extension now:
    # ansi = ansi.replace('```', '')
    # sub part display (the -f feature)
    if from_txt:
        if not from_txt.split(':', 1)[0] in ansi:
            # display from top then:
            from_txt = ansi.strip()[1]
        from_txt, mon_lines = (from_txt + ':%s' % (term_rows - 6)).split(':')[:2]
        mon_lines = int(mon_lines)
        pre, post = ansi.split(from_txt, 1)
        post = '\n'.join(post.split('\n')[:mon_lines])
        ansi = '\n(...)%s%s%s' % ('\n'.join(pre.rsplit('\n', 2)[-2:]), from_txt, post)
    ansi = set_hr_widths(ansi) + '\n'
    if no_colors:
        return clean_ansi(ansi)
    return ansi + '\n'
|
def ensureiterable(value, iterable=list, exclude=None):
    """Coerce *value* into the requested iterable type.

    :param object value: object to convert
    :param type iterable: iterable type to apply (default: list)
    :param type/tuple exclude: types to not convert

    :Example:

        >>> ensureiterable('test', exclude=str)
        ['test']
        >>> ensureiterable('test')
        ['t', 'e', 's', 't']
    """
    # Iterable values are converted directly; anything else is first wrapped
    # in a single-element list so the conversion always succeeds.
    if isiterable(value, exclude=exclude):
        return iterable(value)
    return iterable([value])
|
def load(*fps, missing=Missing.silent):
    """Read a `.Configuration` instance from file-like objects.

    :param fps: file-like objects (supporting ``.read()``)
    :param missing: policy to be used when a configured key is missing, either
        as a `.Missing` instance or a default value
    :return: a `.Configuration` instance providing values from *fps*
    :rtype: `.Configuration`
    """
    # Parse each source lazily; Configuration consumes them in order.
    parsed_sources = (yaml.safe_load(fp.read()) for fp in fps)
    return Configuration(*parsed_sources, missing=missing)
|
def countRemovedDataProducts(self):
    """Returns number of data products whose policy is set to "remove"."""
    return sum(1 for _item, dp in self.wdplv.getItemDPList() if dp.policy == "remove")
|
def mat2euler(rmat, axes="sxyz"):
    """Converts given rotation matrix to euler angles in radian.

    Args:
        rmat: 3x3 rotation matrix
        axes: One of 24 axis sequences as string or encoded tuple

    Returns:
        converted euler angles in radian vec3 float
    """
    try:
        # axes given as a string key such as "sxyz"
        firstaxis, parity, repetition, frame = _AXES2TUPLE[axes.lower()]
    except (AttributeError, KeyError):
        # axes already given as the encoded (firstaxis, parity, repetition, frame) tuple
        firstaxis, parity, repetition, frame = axes
    i = firstaxis
    j = _NEXT_AXIS[i + parity]
    k = _NEXT_AXIS[i - parity + 1]
    # NOTE(review): np.array(..., copy=False) raises under NumPy >= 2.0 when a
    # copy is required — consider np.asarray if upgrading NumPy.
    M = np.array(rmat, dtype=np.float32, copy=False)[:3, :3]
    if repetition:
        # repeated first/last axis sequence (e.g. "sxyx")
        sy = math.sqrt(M[i, j] * M[i, j] + M[i, k] * M[i, k])
        if sy > EPS:
            ax = math.atan2(M[i, j], M[i, k])
            ay = math.atan2(sy, M[i, i])
            az = math.atan2(M[j, i], -M[k, i])
        else:
            # degenerate (gimbal) case: last angle is unrecoverable, pin to 0
            ax = math.atan2(-M[j, k], M[j, j])
            ay = math.atan2(sy, M[i, i])
            az = 0.0
    else:
        cy = math.sqrt(M[i, i] * M[i, i] + M[j, i] * M[j, i])
        if cy > EPS:
            ax = math.atan2(M[k, j], M[k, k])
            ay = math.atan2(-M[k, i], cy)
            az = math.atan2(M[j, i], M[i, i])
        else:
            # degenerate (gimbal) case: last angle is unrecoverable, pin to 0
            ax = math.atan2(-M[j, k], M[j, j])
            ay = math.atan2(-M[k, i], cy)
            az = 0.0
    if parity:
        ax, ay, az = -ax, -ay, -az
    if frame:
        # rotating frame: swap first and last angles
        ax, az = az, ax
    return vec((ax, ay, az))
|
def export_csv(self, path, idx=None, header=None, formatted=False, sort_idx=True, fmt='%.18e'):
    """Export variable values to a csv file.

    Parameters
    ----------
    path : str
        path of the csv file to save
    idx : None or array-like, optional
        the indices of the variables to export. Export all by default
    header : None or array-like, optional
        customized header if not `None`. Use the names from the lst file
        by default
    formatted : bool, optional
        Use LaTeX-formatted header. Does not apply when using customized
        header
    sort_idx : bool, optional
        Sort by idx or not,  # TODO: implement sort
    fmt : str
        cell formatter passed to ``numpy.savetxt``
    """
    # Bug fix: compare against None explicitly. The previous ``if not idx``
    # treated an explicitly-passed empty selection as "use default" and
    # raised ValueError for NumPy index arrays (ambiguous truth value).
    if idx is None:
        idx = self._idx
    if header is None:
        header = self.get_header(idx, formatted=formatted)
    assert len(idx) == len(header), "Idx length does not match header length"
    body = self.get_values(idx)
    with open(path, 'w') as fd:
        fd.write(','.join(header) + '\n')
        np.savetxt(fd, body, fmt=fmt, delimiter=',')
|
def add_region_location(self, region, locations=None, use_live=True):
    # type: (str, Optional[List[str]], bool) -> bool
    """Add all countries in a region. If a 3 digit UNStats M49 region code is
    not provided, value is parsed as a region name. If any country is already
    added, it is ignored.

    Args:
        region (str): M49 region, intermediate region or subregion to add
        locations (Optional[List[str]]): Valid locations list. Defaults to list downloaded from HDX.
        use_live (bool): Try to get use latest country data from web rather than file in package. Defaults to True.

    Returns:
        bool: True if all countries in region added or False if any already present.
    """
    # Resolve the region to its member countries, then delegate to the
    # bulk country-adding helper.
    countries = Country.get_countries_in_region(region, exception=HDXError, use_live=use_live)
    return self.add_country_locations(countries, locations=locations)
|
def _dropout_sparse_coo_matrix ( sparse_matrix , rate , min_dropout_rate , max_dropout_rate ) :
"""Drop values from a sparse matrix encoded as a SciPy coo matrix .
Args :
sparse _ matrix : a SciPy coo sparse matrix .
rate : if rate > 0 then non - zero elements of the input matrix
will be droped uniformly at random .
min _ dropout _ rate : minimum value for the dropout rate . If None
FLAGS . min _ dropout _ rate is used . If dropout _ rate is lower than
min _ dropout _ rate it will clipped to min _ dropout _ rate .
max _ dropout _ rate : minimum value for the dropout rate . If None
FLAGS . max _ dropout _ rate is used . If dropout _ rate is greater than
max _ dropout _ rate it will clipped to max _ dropout _ rate .
Returns :
A SciPy coo matrix containing those non zero elements that have not been
dropped out ."""
|
if min_dropout_rate is None :
min_dropout_rate = FLAGS . min_dropout_rate
if max_dropout_rate is None :
max_dropout_rate = FLAGS . max_dropout_rate
if min_dropout_rate > max_dropout_rate :
raise ValueError ( "min_dropout_rate (%f) should be less or equal to " "max_dropout_rate (%f)" % ( min_dropout_rate , max_dropout_rate ) )
max_frac = 1.0 - min_dropout_rate
min_frac = 1.0 - max_dropout_rate
sampling_rate = 1.0 - rate
sampled_fraction = min ( max ( sampling_rate , min_frac ) , max_frac )
if sampled_fraction != sampling_rate :
logging . warning ( "Minimum sampling rate is %2f." , min_frac )
logging . warning ( "Maximum sampling rate is %2f." , max_frac )
logging . warning ( "Desired sampling rate is %2f." , sampling_rate )
logging . warning ( "Desired sampling rate %2f clipped to %2f." , sampling_rate , sampled_fraction )
num_sampled = min ( max ( int ( sparse_matrix . nnz * sampled_fraction ) , 1 ) , sparse_matrix . nnz )
sampled_indices = np . random . choice ( sparse_matrix . nnz , size = num_sampled , replace = False )
return sparse . coo_matrix ( ( sparse_matrix . data [ sampled_indices ] , ( sparse_matrix . row [ sampled_indices ] , sparse_matrix . col [ sampled_indices ] ) ) , shape = sparse_matrix . shape )
|
def isnat(val):
    """Checks if the value is a NaT. Should only be called on datetimelike objects.

    Handles NumPy datetime64/timedelta64 scalars and datetime arrays, plus
    pandas NaT and datetime-like values when pandas is importable.
    """
    if (isinstance(val, (np.datetime64, np.timedelta64)) or (isinstance(val, np.ndarray) and val.dtype.kind == 'M')):
        # NOTE(review): this is a lexicographic string comparison — '1.9'
        # sorts after '1.13'; verify numpy_version is normalized upstream.
        if numpy_version >= '1.13':
            # np.isnat was added in NumPy 1.13
            return np.isnat(val)
        else:
            # older NumPy: NaT is represented by a sentinel int64 value
            return val.view('i8') == nat_as_integer
    elif pd and val is pd.NaT:
        return True
    elif pd and isinstance(val, pandas_datetime_types + pandas_timedelta_types):
        return pd.isna(val)
    else:
        return False
|
def _cleanup_channel ( self , channel_id ) :
"""Remove the the channel from the list of available channels .
: param int channel _ id : Channel id
: return :"""
|
with self . lock :
if channel_id not in self . _channels :
return
del self . _channels [ channel_id ]
|
def _reroot(self):
    '''Run the re-rooting algorithm in the Rerooter class.'''
    # Replace our tree with a copy re-rooted to match the reference tree.
    self.tree = Rerooter().reroot_by_tree(self.reference_tree, self.tree)
|
def move_user(self, user_id, group_id):
    """Move user(s) to another group.

    See: http://mp.weixin.qq.com/wiki/0/56d992c605a97245eb7e617854b169fc.html

    :param user_id: a single user id, or a list of ids to move in batch
    :param group_id: target group id
    :return: decoded JSON response data

    Usage::

        from wechatpy import WeChatClient

        client = WeChatClient('appid', 'secret')
        res = client.group.move_user('openid', 1234)
    """
    payload = {'to_groupid': group_id}
    if isinstance(user_id, (tuple, list)):
        # batch move: list of openids
        endpoint = 'groups/members/batchupdate'
        payload['openid_list'] = user_id
    else:
        # single-user move
        endpoint = 'groups/members/update'
        payload['openid'] = user_id
    return self._post(endpoint, data=payload)
|
def sphericalAngSep(ra0, dec0, ra1, dec1, radians=False):
    """Compute the spherical angular separation between two points on the sky.

    Uses the haversine formula
    (taken from http://www.movable-type.co.uk/scripts/gis-faq-5.1.html),
    which is numerically stable for small separations.

    NB: For small distances you can probably use
    sqrt(dDec**2 + cos^2(dec)*dRa)
    where dDec = dec1 - dec0, dRa = ra1 - ra0, and dec ~ dec1 ~ dec0.
    """
    # Work internally in radians.
    if radians == False:
        ra0, dec0, ra1, dec1 = (np.radians(x) for x in (ra0, dec0, ra1, dec1))
    hav = haversine(dec1 - dec0)
    hav += np.cos(dec0) * np.cos(dec1) * haversine(ra1 - ra0)
    # Guard against round off error pushing the arcsin argument above 1.
    sep = 2 * np.arcsin(min(1, np.sqrt(hav)))
    # Convert back to degrees if necessary.
    if radians == False:
        sep = np.degrees(sep)
    return sep
|
def get_handler(progname, address=None, proto=None, facility=None, fmt=None, datefmt=None, **_):
    """Helper function to create a Syslog handler.

    See `ulogger.syslog.SyslogHandlerBuilder` for arguments and
    supported keyword arguments.

    Returns:
        (obj): Instance of `logging.SysLogHandler`
    """
    # Build and immediately consume the builder; extra kwargs are ignored.
    return SyslogHandlerBuilder(
        progname, address=address, proto=proto,
        facility=facility, fmt=fmt, datefmt=datefmt).get_handler()
|
def dispatch(self):
    """Dispatch http request to registered commands.

    Example::

        slack = Slack(app)
        app.add_url_rule('/', view_func=slack.dispatch)
    """
    from flask import request
    method = request.method
    # GET parameters by default; Slack posts form data for POST requests
    data = request.args
    if method == 'POST':
        data = request.form
    token = data.get('token')
    team_id = data.get('team_id')
    # slash commands send 'command'; outgoing webhooks send 'trigger_word'
    command = data.get('command') or data.get('trigger_word')
    if isinstance(command, string_types):
        # normalize "/foo" -> "foo"
        command = command.strip().lstrip('/')
    try:
        self.validate(command, token, team_id, method)
    except SlackError as e:
        return self.response(e.msg)
    func, _, _, kwargs = self._commands[(team_id, command)]
    # merge request parameters into the registered default kwargs
    kwargs.update(data.to_dict())
    return func(**kwargs)
|
def topk(self, column_name, k=10, reverse=False):
    """Get top k rows according to the given column. Result is according to and
    sorted by `column_name` in the given order (default is descending).
    When `k` is small, `topk` is more efficient than `sort`.

    Parameters
    ----------
    column_name : string
        The column to sort on
    k : int, optional
        The number of rows to return
    reverse : bool, optional
        If True, return the top k rows in ascending order, otherwise, in
        descending order.

    Returns
    -------
    out : SFrame
        an SFrame containing the top k rows sorted by column_name.

    See Also
    --------
    sort

    Examples
    --------
    >>> sf = turicreate.SFrame({'id': range(1000)})
    >>> sf['value'] = -sf['id']
    >>> sf.topk('id', k=3)
    """
    # isinstance (rather than an exact type() check) also accepts str
    # subclasses, following normal Python typing conventions.
    if not isinstance(column_name, str):
        raise TypeError("column_name must be a string")
    # select the top-k rows first, then sort only those k rows
    sf = self[self[column_name].is_topk(k, reverse)]
    return sf.sort(column_name, ascending=reverse)
|
def inFootprint(self, pixels, nside=None):
    """Open each valid filename for the set of pixels and determine the set
    of subpixels with valid data.

    Returns a boolean array aligned with *pixels*: True where the pixel
    contains subpixels with valid data in both masks.
    """
    if np.isscalar(pixels):
        pixels = np.array([pixels])
    if nside is None:
        nside = self.nside_likelihood
    inside = np.zeros(len(pixels), dtype='bool')
    if not self.nside_catalog:
        catalog_pix = [0]
    else:
        # restrict to catalog super-pixels that actually exist
        catalog_pix = superpixel(pixels, nside, self.nside_catalog)
        catalog_pix = np.intersect1d(catalog_pix, self.catalog_pixels)
    for filenames in self.filenames[catalog_pix]:
        # ADW: Need to replace with healpix functions...
        # logger.debug("Loading %s"%filenames['mask_1'])
        # subpix_1,val_1 = ugali.utils.skymap.readSparseHealpixMap(filenames['mask_1'],'MAGLIM',construct_map=False)
        _n, subpix_1, val_1 = read_partial_map(filenames['mask_1'], 'MAGLIM', fullsky=False)
        # logger.debug("Loading %s"%filenames['mask_2'])
        # subpix_2,val_2 = ugali.utils.skymap.readSparseHealpixMap(filenames['mask_2'],'MAGLIM',construct_map=False)
        _n, subpix_2, val_2 = read_partial_map(filenames['mask_2'], 'MAGLIM', fullsky=False)
        # valid data must be present in BOTH masks
        subpix = np.intersect1d(subpix_1, subpix_2)
        # promote valid subpixels back up to the requested resolution
        superpix = np.unique(superpixel(subpix, self.nside_pixel, nside))
        inside |= np.in1d(pixels, superpix)
    return inside
|
def xsrf_token(self) -> bytes:
    """The XSRF-prevention token for the current user/session.

    To prevent cross-site request forgery, we set an '_xsrf' cookie
    and include the same '_xsrf' value as an argument with all POST
    requests. If the two do not match, we reject the form submission
    as a potential forgery.

    See http://en.wikipedia.org/wiki/Cross-site_request_forgery

    This property is of type `bytes`, but it contains only ASCII
    characters. If a character string is required, there is no
    need to base64-encode it; just decode the byte string as
    UTF-8.

    .. versionchanged:: 3.2.2
       The xsrf token will now be have a random mask applied in every
       request, which makes it safe to include the token in pages
       that are compressed. See http://breachattack.com for more
       information on the issue fixed by this change. Old (version 1)
       cookies will be converted to version 2 when this method is called
       unless the ``xsrf_cookie_version`` `Application` setting is
       set to 1.

    .. versionchanged:: 4.3
       The ``xsrf_cookie_kwargs`` `Application` setting may be
       used to supply additional cookie options (which will be
       passed directly to `set_cookie`). For example,
       ``xsrf_cookie_kwargs=dict(httponly=True, secure=True)``
       will set the ``secure`` and ``httponly`` flags on the
       ``_xsrf`` cookie.
    """
    # Compute lazily and memoize on the handler instance.
    if not hasattr(self, "_xsrf_token"):
        version, token, timestamp = self._get_raw_xsrf_token()
        output_version = self.settings.get("xsrf_cookie_version", 2)
        cookie_kwargs = self.settings.get("xsrf_cookie_kwargs", {})
        if output_version == 1:
            # legacy v1 format: just the hex-encoded raw token
            self._xsrf_token = binascii.b2a_hex(token)
        elif output_version == 2:
            # v2 format: b"2|<hex mask>|<hex masked token>|<timestamp>".
            # A fresh 4-byte mask is XORed over the token per request so the
            # emitted bytes differ every time (BREACH mitigation).
            mask = os.urandom(4)
            self._xsrf_token = b"|".join([b"2", binascii.b2a_hex(mask), binascii.b2a_hex(_websocket_mask(mask, token)), utf8(str(int(timestamp))), ])
        else:
            raise ValueError("unknown xsrf cookie version %d", output_version)
        if version is None:
            # no valid cookie was seen on this request; set one now
            if self.current_user and "expires_days" not in cookie_kwargs:
                cookie_kwargs["expires_days"] = 30
            self.set_cookie("_xsrf", self._xsrf_token, **cookie_kwargs)
    return self._xsrf_token
|
def set(self, status_item, status):
    """Sets the status item to the passed in parameters.

    args:
        status_item: the name of the item to set
        status: boolean value to set the item
    """
    lg = logging.getLogger("%s.%s" % (self.ln, inspect.stack()[0][3]))
    lg.setLevel(self.log_level)
    # SPARQL update: delete any existing value for this status flag, then
    # insert the new boolean. Doubled braces are literal braces, surviving
    # str.format below.
    sparql = '''
DELETE {{
kdr:{0} kds:{1} ?o
}}
INSERT {{
kdr:{0} kds:{1} "{2}"^^xsd:boolean
}}
WHERE {{
OPTIONAL {{ kdr:{0} kds:{1} ?o }}
}}'''
    # str(status).lower() renders Python True/False as xsd:boolean true/false
    return self.conn.query(sparql=sparql.format(self.group, status_item, str(status).lower()), mode='update')
|
def GetAmi(ec2, ami_spec):
    """Get the boto ami object given a AmiSpecification object.

    Returns the first image owned by ``ami_spec.owner_id`` whose name equals
    ``ami_spec.ami_name``, or None when no such image exists.
    """
    candidates = ec2.get_all_images(owners=[ami_spec.owner_id])
    return next((image for image in candidates if image.name == ami_spec.ami_name), None)
|
def query_balance(self, asset: str, b58_address: str) -> int:
    """This interface is used to query the account's ONT or ONG balance.

    :param asset: a string which is used to indicate which asset we want to check the balance.
    :param b58_address: a base58 encode account address.
    :return: account balance; 0 when the pre-executed query raises SDKException.
    """
    raw_address = Address.b58decode(b58_address).to_bytes()
    contract_address = self.get_asset_address(asset)
    # build a native-contract "balanceOf" invocation for the asset contract
    invoke_code = build_native_invoke_code(contract_address, b'\x00', "balanceOf", raw_address)
    # NOTE(review): 0xd1 appears to be the invoke-transaction type and the
    # zero fields zero fees/nonce — confirm against the SDK's Transaction
    # constructor documentation.
    tx = Transaction(0, 0xd1, int(time()), 0, 0, None, invoke_code, bytearray(), list())
    # pre-execute: evaluates the invocation without committing to the chain
    response = self.__sdk.rpc.send_raw_transaction_pre_exec(tx)
    try:
        balance = ContractDataParser.to_int(response['Result'])
        return balance
    except SDKException:
        # treat a failed/empty result as a zero balance
        return 0
|
def cmd(send, msg, _):
    """Gets a slogan.

    Syntax: {command} [text]
    """
    # Fall back to a random word when no text was supplied.
    topic = msg if msg else textutils.gen_word()
    send(textutils.gen_slogan(topic))
|
def find(max_depth=3):
    """Returns the path of a Pipfile in parent directories.

    Walks up from the current working directory, inspecting at most
    ``max_depth - 1`` levels, and returns the first existing ``Pipfile``.

    :raises RuntimeError: if no Pipfile is found within the depth limit.
    """
    for depth, (c, d, f) in enumerate(walk_up(os.getcwd()), start=1):
        if depth >= max_depth:
            # depth exhausted; the original kept iterating uselessly here,
            # we stop early instead (same set of directories inspected)
            break
        # Bug fix: the original tested the constant string 'Pipfile'
        # (always truthy) instead of membership in the file listing.
        if 'Pipfile' in f:
            p = os.path.join(c, 'Pipfile')
            if os.path.isfile(p):
                return p
    raise RuntimeError('No Pipfile found!')
|
def get(self, r):
    """Returns precomputed value of the given expression"""
    if r is None:
        return None
    # stack top is addressed as "(sp)"
    if r.lower() == '(sp)' and self.stack:
        return self.stack[-1]
    # parenthesized operand = memory reference (original case preserved)
    if r.startswith('('):
        return self.mem[r[1:-1]]
    lowered = r.lower()
    if is_number(lowered):
        # normalize numeric literals through valnum
        return str(valnum(lowered))
    if is_register(lowered):
        return self.regs[lowered]
    return None
|
def tachogram(data, sample_rate, signal=False, in_seconds=False, out_seconds=False):
    """Function for generation of ECG Tachogram.

    Parameters
    ----------
    data : list
        ECG signal or R peak list. When the input is a raw signal the input
        flag signal should be True.
    sample_rate : int
        Sampling frequency.
    signal : boolean
        If True, then the data argument contains the set of the ECG acquired
        samples.
    in_seconds : boolean
        If the R peaks list defined as the input argument "data" contains the
        sample numbers where the R peaks occur, then in_seconds needs to be
        False.
    out_seconds : boolean
        If True then each sample of the returned time axis is expressed in
        seconds.

    Returns
    -------
    out : list, list
        List of tachogram samples. List of instants where each cardiac cycle
        ends.
    """
    if signal is False:
        # data is already a list of R peak positions
        time_axis = numpy.array(data)
        if out_seconds is True and in_seconds is False:
            # convert sample numbers to seconds
            time_axis = time_axis / sample_rate
    else:
        # data is a raw ECG signal: detect the R peaks first
        time_axis = detect_r_peaks(data, sample_rate, time_units=out_seconds, volts=False, resolution=None, plot_result=False)[0]
    # RR intervals, and the instant at which each cycle ends
    tachogram_data = numpy.diff(time_axis)
    tachogram_time = time_axis[1:]
    return tachogram_data, tachogram_time
|
def example(self) -> str:
    """Same as str(self), except the color codes are actually used."""
    # 24-bit (true color) escape when in RGB mode, 256-color escape otherwise.
    escape = ('\033[38;2;{};{};{}m'.format(*self.rgb)
              if self.rgb_mode
              else '\033[38;5;{}m'.format(self.code))
    return '{}{}\033[0m'.format(escape, self)
|
def _ensure_session(self, session=None):
    """If provided session is None, lend a temporary session."""
    if session:
        return session
    try:
        # Don't make implicit sessions causally consistent. Applications
        # should always opt-in.
        return self.__start_session(True, causal_consistency=False)
    except (ConfigurationError, InvalidOperation):
        # Sessions not supported, or multiple users authenticated.
        return None
|
def _execute_after_prepare(self, host, connection, pool, response):
    """Handle the response to our attempt to prepare a statement.

    If it succeeded, run the original query again against the same host.
    """
    # give the connection back to its pool before inspecting the response
    if pool:
        pool.return_connection(connection)
    if self._final_exception:
        # the overall request has already failed; nothing left to do
        return
    if isinstance(response, ResultMessage):
        if response.kind == RESULT_KIND_PREPARED:
            if self.prepared_statement:
                # result metadata is the only thing that could have
                # changed from an alter
                (_, _, _, self.prepared_statement.result_metadata, new_metadata_id) = response.results
                if new_metadata_id is not None:
                    self.prepared_statement.result_metadata_id = new_metadata_id
            # use self._query to re-use the same host and
            # at the same time properly borrow the connection
            request_id = self._query(host)
            if request_id is None:
                # this host errored out, move on to the next
                self.send_request()
        else:
            # a successful prepare must yield RESULT_KIND_PREPARED
            self._set_final_exception(ConnectionException("Got unexpected response when preparing statement " "on host %s: %s" % (host, response)))
    elif isinstance(response, ErrorMessage):
        # server-side error: surface it as the final result
        if hasattr(response, 'to_exception'):
            self._set_final_exception(response.to_exception())
        else:
            self._set_final_exception(response)
    elif isinstance(response, ConnectionException):
        log.debug("Connection error when preparing statement on host %s: %s", host, response)
        # try again on a different host, preparing again if necessary
        self._errors[host] = response
        self.send_request()
    else:
        self._set_final_exception(ConnectionException("Got unexpected response type when preparing " "statement on host %s: %s" % (host, response)))
|
def _get_full_model_smt_script ( self , constraints = ( ) , variables = ( ) ) :
"""Returns a SMT script that declare all the symbols and constraint and checks
their satisfiability ( check - sat )
: param extra - constraints : list of extra constraints that we want to evaluate only
in the scope of this call
: return string : smt - lib representation of the script that checks the satisfiability"""
|
smt_script = '(set-logic ALL)\n'
smt_script += '(set-option :produce-models true)\n'
smt_script += self . _smtlib_exprs ( variables )
smt_script += self . _smtlib_exprs ( constraints )
smt_script += '(check-sat)\n'
smt_script += '(get-model)\n'
return smt_script
|
def describe_table(cls, db: DATABASE_SUPPORTER_FWD_REF, table: str) -> List[List[Any]]:
    """Returns details on a specific table.

    Abstract in this generic base: always raises. Database-flavour
    subclasses are expected to override it.

    :raises RuntimeError: always, with the "no flavour" message.
    """
    raise RuntimeError(_MSG_NO_FLAVOUR)
|
def stash(self, storage, url):
    """Stores the uploaded file in a temporary storage location.

    :param storage: storage backend used to save the file
    :param url: base URL passed through to ``serialize_upload``
    :return: dict with ``filename``, ``url`` and ``stored`` keys when the
        form validates; empty dict otherwise.
    """
    result = {}
    if self.is_valid():
        upload = self.cleaned_data['upload']
        name = storage.save(upload.name, upload)
        result['filename'] = os.path.basename(name)
        try:
            result['url'] = storage.url(name)
        except NotImplementedError:
            # some storage backends cannot produce URLs
            result['url'] = None
        result['stored'] = serialize_upload(name, storage, url)
    return result
|
def get_translations(self):
    """Returns the correct gettext translations that should be used for
    this request. This will never fail and return a dummy translation
    object if used outside of the request or if a translation cannot be
    found.
    """
    ctx = stack.top
    if ctx is None:
        # outside of any application/request context: dummy translations
        return NullTranslations()
    locale = get_locale()
    # translations are cached on the context, keyed by locale string
    cache = self.get_translations_cache(ctx)
    translations = cache.get(str(locale))
    if translations is None:
        translations_dir = self.get_translations_path(ctx)
        translations = Translations.load(translations_dir, locale, domain=self.domain)
        # Load plugins translations
        if isinstance(translations, Translations):
            # Load core extensions translations
            from wtforms.i18n import messages_path
            wtforms_translations = Translations.load(messages_path(), locale, domain='wtforms')
            translations.merge(wtforms_translations)
            import flask_security
            flask_security_translations = Translations.load(join(flask_security.__path__[0], 'translations'), locale, domain='flask_security')
            translations.merge(flask_security_translations)
            # merge translations shipped by each registered plugin
            for pkg in entrypoints.get_roots(current_app):
                package = pkgutil.get_loader(pkg)
                path = join(package.filename, 'translations')
                # one gettext domain per .pot file shipped by the plugin
                domains = [f.replace(path, '').replace('.pot', '')[1:] for f in iglob(join(path, '*.pot'))]
                for domain in domains:
                    translations.merge(Translations.load(path, locale, domain=domain))
            # Allows the theme to provide or override translations
            from . import theme
            theme_translations_dir = join(theme.current.path, 'translations')
            if exists(theme_translations_dir):
                domain = theme.current.identifier
                theme_translations = Translations.load(theme_translations_dir, locale, domain=domain)
                translations.merge(theme_translations)
        cache[str(locale)] = translations
    return translations
|
def _combine_sets(self, sets, final_set):
    """Given a list of set, combine them to create the final set that will be
    used to make the final redis call.

    If we have at least one sorted set, use zinterstore instead of
    sunionstore (which is delegated to the parent implementation).
    """
    if self._has_sortedsets:
        # sorted sets require an intersection-with-scores store
        self.cls.get_connection().zinterstore(final_set, list(sets))
    else:
        final_set = super(ExtendedCollectionManager, self)._combine_sets(sets, final_set)
    return final_set
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.