signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def writeline(self, line=b'', sep=b'\n', echo=None):
    """Write a byte sequence to the channel and terminate it with the
    separator (line feed by default).

    Args:
        line (bytes): The line to send.
        sep (bytes): The separator to use after each line.
        echo (bool): Whether to echo the written data to stdout.

    Raises:
        EOFError: If the channel was closed before all data was sent.
    """
    # Delegate to the batch writer with a single-element list.
    self.writelines([line], sep, echo)
def parse(url):
    """Split *url* into its components and return a ``URL`` named tuple.

    >>> parse('http://example.com/foo/')
    URL(scheme='http', ..., domain='example', tld='com', ..., path='/foo/', ...)
    """
    pieces = split(url)
    if not pieces.scheme:
        # No scheme means no netloc to decompose.
        username = password = subdomain = domain = tld = port = ''
    else:
        username, password, host, port = split_netloc(pieces.netloc)
        subdomain, domain, tld = split_host(host)
    return URL(pieces.scheme, username, password, subdomain, domain, tld,
               port, pieces.path, pieces.query, pieces.fragment, url)
def streamer(frontend, backend):
    """Run a simple push/pull zeromq streamer (blocking proxy).

    Binds a PULL socket on *frontend* and a PUSH socket on *backend*, then
    blocks inside ``zmq.proxy`` forwarding messages between them.

    :param int frontend: frontend zeromq port
    :param int backend: backend zeromq port
    """
    # Pre-initialize so the finally-block cleanup is safe even if setup
    # fails part-way; the original raised NameError here and masked the
    # real error.
    context = None
    front_pull = None
    back_push = None
    try:
        context = zmq.Context()
        front_pull = context.socket(zmq.PULL)
        # Unlimited high-water mark: never drop inbound messages.
        front_pull.set_hwm(0)
        front_pull.bind("tcp://*:%d" % frontend)
        back_push = context.socket(zmq.PUSH)
        back_push.bind("tcp://*:%d" % backend)
        print("streamer started, backend on port : %d\tfrontend on port: %d" % (backend, frontend))
        zmq.proxy(front_pull, back_push)
    except Exception as e:
        print(e)
    finally:
        if front_pull is not None:
            front_pull.close()
        if back_push is not None:
            back_push.close()
        if context is not None:
            context.term()
def load_config(json_path):
    """Load config info from a .json file and return it.

    Args:
        json_path: Path to the JSON configuration file.

    Returns:
        The parsed configuration (normally a dict).

    Raises:
        ValueError: If the config fails the sanity check.
    """
    with open(json_path, 'r') as json_file:
        # json.load reads directly from the file object; no need for
        # read() + loads().
        config = json.load(json_file)
    # Sanity-test the config. Raise instead of assert so the check is not
    # stripped when running under ``python -O``.
    if config['tree'][0]['page'] != 'index':
        raise ValueError("config['tree'][0]['page'] must be 'index'")
    return config
def load(self, session_id=None):
    """Load the session from the store.

    session_id can be:
      - None: load from cookie
      - '': create a new cookieless session_id
      - a string which is the session_id to be used.
    """
    if session_id is None:
        # No explicit id given: fall back to the session cookie.
        cookie_name = self._config.cookie_name
        self._data["session_id"] = web.cookies().get(cookie_name)
        self._data["cookieless"] = False
    else:
        if session_id == '':
            # Empty string requests a brand new cookieless session; the
            # actual id is generated further down.
            self._data["session_id"] = None
            # will be created
        else:
            self._data["session_id"] = session_id
        self._data["cookieless"] = True
    # protection against session_id tampering
    if self._data["session_id"] and not self._valid_session_id(self._data["session_id"]):
        self._data["session_id"] = None
    self._check_expiry()
    if self._data["session_id"]:
        # Existing session: pull the stored data and verify the client IP.
        d = self.store[self._data["session_id"]]
        self.update(d)
        self._validate_ip()
    if not self._data["session_id"]:
        # New (or invalidated) session: generate an id and seed defaults.
        self._data["session_id"] = self._generate_session_id()
        if self._initializer:
            if isinstance(self._initializer, dict):
                # deepcopy so later mutation of the session cannot leak
                # back into the shared initializer dict.
                self.update(deepcopy(self._initializer))
            elif hasattr(self._initializer, '__call__'):
                self._initializer()
    self._data["ip"] = web.ctx.ip
def get_media_descriptions_metadata(self):
    """Gets the metadata for all media descriptions.

    return: (osid.Metadata) - metadata for the media descriptions
    *compliance: mandatory -- This method must be implemented.*
    """
    existing = [t['text'] for t in self.my_osid_object_form._my_map['mediaDescriptions']]
    md = dict(self._media_descriptions_metadata)
    md['existing_string_values'] = existing
    return Metadata(**md)
def _dfs_preorder ( node , visited ) :
"""Iterate through nodes in DFS pre - order .""" | if node not in visited :
visited . add ( node )
yield node
if node . lo is not None :
yield from _dfs_preorder ( node . lo , visited )
if node . hi is not None :
yield from _dfs_preorder ( node . hi , visited ) |
def mergeTablets(self, login, tableName, startRow, endRow):
    """Merge the tablets of *tableName* within [startRow, endRow].

    Thrift RPC wrapper: sends the request, then blocks on the reply.

    Parameters:
     - login
     - tableName
     - startRow
     - endRow
    """
    self.send_mergeTablets(login, tableName, startRow, endRow)
    self.recv_mergeTablets()
def do_join(self, cmdargs, nick, msgtype, send, c):
    """Join a channel.

    | Checks if bot is already joined to channel.
    """
    if not cmdargs:
        send("Join what?")
        return
    if cmdargs == '0':
        send("I'm sorry, Dave. I'm afraid I can't do that.")
        return
    # Default to a '#' channel prefix when none was supplied.
    if not cmdargs.startswith(('#', '+', '@')):
        cmdargs = '#' + cmdargs
    # FIXME: use argparse
    parts = cmdargs.split()
    channel = parts[0]
    forced = len(parts) > 1 and parts[1] == "force"
    if channel in self.channels and not forced:
        send("%s is already a member of %s" % (self.config['core']['nick'], channel))
        return
    c.join(channel)
    self.send(channel, nick, "Joined at the request of " + nick, msgtype)
def _tag_type_to_explicit_implicit ( params ) :
"""Converts old - style " tag _ type " and " tag " params to " explicit " and " implicit "
: param params :
A dict of parameters to convert from tag _ type / tag to explicit / implicit""" | if 'tag_type' in params :
if params [ 'tag_type' ] == 'explicit' :
params [ 'explicit' ] = ( params . get ( 'class' , 2 ) , params [ 'tag' ] )
elif params [ 'tag_type' ] == 'implicit' :
params [ 'implicit' ] = ( params . get ( 'class' , 2 ) , params [ 'tag' ] )
del params [ 'tag_type' ]
del params [ 'tag' ]
if 'class' in params :
del params [ 'class' ] |
def browse_httpauth_write_apis(request, database_name=None, collection_name=None):
    """Deprecated"""
    page_title = "Write APIs Using HTTPAuth Authentication"
    if database_name and collection_name:
        wapis = WriteAPIHTTPAuth.objects.filter(database_name=database_name,
                                                collection_name=collection_name)
    else:
        wapis = WriteAPIHTTPAuth.objects.all()
    return render(request, 'djmongo/console/browse-httpauth-write-apis.html', {
        'name': page_title,
        'wapis': wapis,
        'database_name': database_name,
        'collection_name': collection_name,
    })
def parse_partition(rule):
    '''Parse the partition line'''
    rules = shlex.split(rule)
    rules.pop(0)  # drop the leading command keyword itself
    parser = argparse.ArgumentParser()
    parser.add_argument('mntpoint')
    # Options that take a value.
    for opt in ('--size', '--maxsize', '--fsprofile', '--fstype',
                '--fsoptions', '--label', '--onbiosdisk', '--passphrase',
                '--escrowcert', '--backupphrase'):
        parser.add_argument(opt, dest=opt[2:], action='store')
    # Value-taking options with an alias.
    parser.add_argument('--onpart', '--usepart', dest='onpart', action='store')
    parser.add_argument('--ondisk', '--ondrive', dest='ondisk', action='store')
    # Boolean flags.
    for opt in ('--grow', '--noformat', '--asprimary', '--recommended',
                '--encrypted'):
        parser.add_argument(opt, dest=opt[2:], action='store_true')
    args = clean_args(vars(parser.parse_args(rules)))
    parser = None
    return args
def find_lines_wo_child(config=None, config_path=None, parent_regex=None, child_regex=None, ignore_ws=False, saltenv='base'):
    '''Return a list of parent ``ciscoconfparse.IOSCfgLine`` lines as text, which
    matched the ``parent_regex`` and whose children did *not* match ``child_regex``.
    Only the parent ``ciscoconfparse.IOSCfgLine`` text lines will be returned.
    For simplicity, this method only finds oldest ancestors without immediate
    children that match.

    config
        The configuration sent as text.

        .. note::
            This argument is ignored when ``config_path`` is specified.

    config_path
        The absolute or remote path to the file with the configuration to be
        parsed. This argument supports the usual Salt filesystem URIs, e.g.,
        ``salt://``, ``https://``, ``ftp://``, ``s3://``, etc.

    parent_regex
        The regular expression to match the parent lines against.

    child_regex
        The regular expression to match the child lines against.

    ignore_ws: ``False``
        Whether to ignore the white spaces.

    saltenv: ``base``
        Salt fileserver environment from which to retrieve the file. This
        argument is ignored when ``config_path`` is not a ``salt://`` URL.

    CLI Example:

    .. code-block:: bash

        salt '*' ciscoconfparse.find_lines_wo_child config_path=https://bit.ly/2mAdq7z parent_line='line con' child_line='stopbits'
    '''
    matched_objects = find_objects_wo_child(config=config,
                                            config_path=config_path,
                                            parent_regex=parent_regex,
                                            child_regex=child_regex,
                                            ignore_ws=ignore_ws,
                                            saltenv=saltenv)
    return [obj.text for obj in matched_objects]
def append_items(self, items, **kwargs):
    """Append data to multiple :class:`~.Item` objects.

    This method differs from the normal :meth:`append_multi` in that each
    ``Item``'s ``value`` field is updated in place with the appended
    fragment upon successful completion of the operation.

    :param items: The item dictionary. The value for each key should
        contain a ``fragment`` field containing the object to append to
        the value on the server.
    :type items: :class:`~couchbase.items.ItemOptionDict`.

    The rest of the options are passed verbatim to :meth:`append_multi`.

    .. seealso:: :meth:`append_multi`, :meth:`append`
    """
    rv = self.append_multi(items, **kwargs)
    # Assume this is an 'ItemOptionDict'
    for item, options in items.dict.items():
        if item.success:
            item.value += options['fragment']
    return rv
def _GetDistinctValues ( self , field_name ) :
"""Query database for unique field types .
Args :
field _ name ( str ) : name of the filed to retrieve .
Returns :
dict [ str , int ] : counts of field types by name .""" | self . _cursor . execute ( 'SELECT {0:s}, COUNT({0:s}) FROM log2timeline GROUP BY {0:s}' . format ( field_name ) )
result = { }
row = self . _cursor . fetchone ( )
while row :
if row [ 0 ] :
result [ row [ 0 ] ] = row [ 1 ]
row = self . _cursor . fetchone ( )
return result |
def _notify_mutated(self, obj, old, hint=None):
    '''A method to call when a container is mutated "behind our back"
    and we detect it with our |PropertyContainer| wrappers.

    Args:
        obj (HasProps):
            The object whose container value was mutated

        old (object):
            The "old" value of the container

            In this case, somewhat weirdly, ``old`` is a copy and the
            new value should already be set unless we change it due to
            validation.

        hint (event hint or None, optional):
            An optional update event hint, e.g. ``ColumnStreamedEvent``
            (default: None)

            Update event hints are usually used at times when better
            update performance can be obtained by special-casing in
            some way (e.g. streaming or patching column data sources)

    Returns:
        None
    '''
    value = self.__get__(obj, obj.__class__)
    # re-validate because the contents of 'old' have changed,
    # in some cases this could give us a new object for the value
    value = self.property.prepare_value(obj, self.name, value)
    self._real_set(obj, old, value, hint=hint)
def _attach_file_handler(name, log_path, level, mode):
    """Attach a FileHandler writing to *log_path* (if given) to logger *name*
    and return the logger."""
    logger = logging.getLogger(name)
    if log_path:
        logger.addHandler(logging.FileHandler(log_path, mode))
        logger.setLevel(level)
    return logger


def default_logging(grab_log=None,  # '/tmp/grab.log'
                    network_log=None,  # '/tmp/grab.network.log'
                    level=logging.DEBUG, mode='a',
                    propagate_network_logger=False):
    """Customize logging output to display all log messages
    except grab network logs.

    Redirect grab network logs into file.

    Args:
        grab_log: optional path of the file receiving general grab logs.
        network_log: optional path of the file receiving grab network logs.
        level: logging level used for basicConfig and the file handlers.
        mode: open mode for the log files.
        propagate_network_logger: whether 'grab.network' records propagate
            to ancestor loggers (and hence the console).
    """
    logging.basicConfig(level=level)
    # Both loggers get the same file-handler treatment; only the network
    # logger additionally has its propagation toggled.
    network_logger = _attach_file_handler('grab.network', network_log, level, mode)
    network_logger.propagate = propagate_network_logger
    _attach_file_handler('grab', grab_log, level, mode)
def get_ceph_nodes(relation='ceph'):
    """Query named relation to determine current nodes."""
    # Flatten every related unit of every relation id into one host list.
    return [relation_get('private-address', unit=unit, rid=r_id)
            for r_id in relation_ids(relation)
            for unit in related_units(r_id)]
def _compute_diff ( configured , expected ) :
'''Computes the differences between the actual config and the expected config''' | diff = { 'add' : { } , 'update' : { } , 'remove' : { } }
configured_users = set ( configured . keys ( ) )
expected_users = set ( expected . keys ( ) )
add_usernames = expected_users - configured_users
remove_usernames = configured_users - expected_users
common_usernames = expected_users & configured_users
add = dict ( ( username , expected . get ( username ) ) for username in add_usernames )
remove = dict ( ( username , configured . get ( username ) ) for username in remove_usernames )
update = { }
for username in common_usernames :
user_configuration = configured . get ( username )
user_expected = expected . get ( username )
if user_configuration == user_expected :
continue
update [ username ] = { }
for field , field_value in six . iteritems ( user_expected ) :
if user_configuration . get ( field ) != field_value :
update [ username ] [ field ] = field_value
diff . update ( { 'add' : add , 'update' : update , 'remove' : remove } )
return diff |
def getbit(self, name, offset):
    """Returns a boolean indicating the value of ``offset`` in ``name``.

    Like **Redis.GETBIT**

    :param string name: the key name
    :param int offset: the bit position
    :return: the bit at the ``offset``, ``False`` if key doesn't exist or
        offset exceeds the string length.
    :rtype: bool

    >>> ssdb.set('bit_test', 1)
    True
    >>> ssdb.getbit('bit_test', 0)
    True
    >>> ssdb.getbit('bit_test', 1)
    False
    """
    # Validate that the offset is a non-negative integer before sending
    # the command to the server.
    offset = get_positive_integer('offset', offset)
    return self.execute_command('getbit', name, offset)
def Rohsenow(rhol, rhog, mul, kl, Cpl, Hvap, sigma, Te=None, q=None, Csf=0.013, n=1.7):
    r'''Calculates heat transfer coefficient for an evaporator operating
    in the nucleate boiling regime according to [2]_ as presented in [1]_.

    Either heat flux `q` or excess temperature `Te` is required.

    With `Te` specified:

    .. math::
        h = \mu_L \Delta H_{vap} \left[\frac{g(\rho_L-\rho_v)}{\sigma}
        \right]^{0.5}\left[\frac{C_{p,L}\Delta T_e^{2/3}}{C_{sf}
        \Delta H_{vap} Pr_L^n}\right]^3

    With `q` specified:

    .. math::
        h = \left(\mu_L \Delta H_{vap} \left[\frac{g(\rho_L-\rho_v)}{\sigma}
        \right]^{0.5}\left[\frac{C_{p,L}}{C_{sf}\Delta H_{vap} Pr_L^n}
        \right]^3\right)^{1/3} q^{2/3}

    Parameters
    ----------
    rhol : float
        Density of the liquid [kg/m^3]
    rhog : float
        Density of the produced gas [kg/m^3]
    mul : float
        Viscosity of liquid [Pa*s]
    kl : float
        Thermal conductivity of liquid [W/m/K]
    Cpl : float
        Heat capacity of liquid [J/kg/K]
    Hvap : float
        Heat of vaporization of the fluid at P, [J/kg]
    sigma : float
        Surface tension of liquid [N/m]
    Te : float, optional
        Excess wall temperature, [K]
    q : float, optional
        Heat flux, [W/m^2]
    Csf : float
        Rohsenow coefficient specific to fluid and metal [-]
    n : float
        Constant, 1 for water, 1.7 (default) for other fluids usually [-]

    Returns
    -------
    h : float
        Heat transfer coefficient [W/m^2/K]

    Notes
    -----
    No further work is required on this correlation. Multiple sources confirm
    its form and rearrangement.

    Examples
    --------
    h for water at atmospheric pressure on oxidized aluminum.

    >>> Rohsenow(rhol=957.854, rhog=0.595593, mul=2.79E-4, kl=0.680, Cpl=4217,
    ... Hvap=2.257E6, sigma=0.0589, Te=4.9, Csf=0.011, n=1.26)
    3723.655267067467

    References
    ----------
    .. [1] Cao, Eduardo. Heat Transfer in Process Engineering.
       McGraw Hill Professional, 2009.
    .. [2] Rohsenow, Warren M. "A Method of Correlating Heat Transfer Data for
       Surface Boiling of Liquids." Technical Report. Cambridge, Mass.: M.I.T.
       Division of Industrial Cooporation, 1951
    '''
    # (Cpl*mul/kl) is the liquid Prandtl number Pr_L.
    if Te:
        return mul*Hvap*(g*(rhol - rhog)/sigma)**0.5*(
            Cpl*Te**(2/3.)/Csf/Hvap/(Cpl*mul/kl)**n)**3
    elif q:
        A = mul*Hvap*(g*(rhol - rhog)/sigma)**0.5*(
            Cpl/Csf/Hvap/(Cpl*mul/kl)**n)**3
        return A**(1/3.)*q**(2/3.)
    else:
        # ValueError is more specific than the bare Exception previously
        # raised; still caught by any ``except Exception`` caller.
        raise ValueError('Either q or Te is needed for this correlation')
def start(self, *args, **kw):
    """Start the daemon."""
    pid = None
    # An existing pidfile means a previous daemon instance may be running.
    if os.path.exists(self.pidfile):
        with open(self.pidfile, 'r') as fp:
            pid = int(fp.read().strip())
    if pid:
        sys.stderr.write('pidfile (%s) exists. Daemon already running?\n' % self.pidfile)
        sys.exit(1)
    self.daemonize()
    self.run(*args, **kw)
def gpio_properties(self):
    """Returns the properties of the user-controllable GPIOs.

    Provided the device supports user-controllable GPIOs, they will be
    returned by this method.

    Args:
        self (JLink): the ``JLink`` instance

    Returns:
        A list of ``JLinkGPIODescriptor`` instances totalling the number of
        requested properties.

    Raises:
        JLinkException: on error.
    """
    # First call with NULL buffer: the DLL returns the GPIO count (or a
    # negative error code).
    res = self._dll.JLINK_EMU_GPIO_GetProps(0, 0)
    if res < 0:
        raise errors.JLinkException(res)
    num_props = res
    # Second call fills a ctypes array with the actual descriptors.
    buf = (structs.JLinkGPIODescriptor * num_props)()
    res = self._dll.JLINK_EMU_GPIO_GetProps(ctypes.byref(buf), num_props)
    if res < 0:
        raise errors.JLinkException(res)
    return list(buf)
def _get_node(template, context, name):
    '''Render and return the named ``{% block %}`` node from a template.

    taken originally from
    http://stackoverflow.com/questions/2687173/django-how-can-i-get-a-block-from-a-template

    Returns "" when no matching block is found.
    '''
    for node in template:
        if isinstance(node, BlockNode) and node.name == name:
            return node.nodelist.render(context)
        elif isinstance(node, ExtendsNode):
            # NOTE(review): this returns the recursive result immediately,
            # so nodes after an ExtendsNode are never inspected -- confirm
            # that is intended (extends is normally first in a template).
            return _get_node(node.nodelist, context, name)
    # raise Exception("Node '%s' could not be found in template." % name)
    return ""
def register_view(self, view):
    """Called when the View was registered"""
    super(MenuBarController, self).register_view(view)
    # Restore the persisted toggle states from the runtime configuration.
    for widget_name, config_key, default in (
            ("data_flow_mode", "DATA_FLOW_MODE", False),
            ("show_data_flows", "SHOW_DATA_FLOWS", True),
            ("show_data_values", "SHOW_DATA_FLOW_VALUE_LABELS", True),
            ("show_aborted_preempted", "SHOW_ABORTED_PREEMPTED", False)):
        view[widget_name].set_active(global_runtime_config.get_config_value(config_key, default))
    view["expert_view"].hide()
    view["grid"].hide()
    # use dedicated function to connect the buttons to be able to access the handler id later on
    for button_name, signal_name, handler in (
            ('new', 'activate', self.on_new_activate),
            ('open', 'activate', self.on_open_activate),
            ('save', 'activate', self.on_save_activate),
            ('save_as', 'activate', self.on_save_as_activate),
            ('save_as_copy', 'activate', self.on_save_as_copy_activate),
            ('menu_preferences', 'activate', self.on_menu_preferences_activate),
            ('refresh_all', 'activate', self.on_refresh_all_activate),
            ('refresh_libraries', 'activate', self.on_refresh_libraries_activate),
            ('bake_state_machine', 'activate', self.on_bake_state_machine_activate),
            ('quit', 'activate', self.on_quit_activate),
            ('cut', 'activate', self.on_cut_selection_activate),
            ('copy', 'activate', self.on_copy_selection_activate),
            ('paste', 'activate', self.on_paste_clipboard_activate),
            ('delete', 'activate', self.on_delete_activate),
            ('is_start_state', 'activate', self.on_toggle_is_start_state_active),
            ('add', 'activate', self.on_add_state_activate),
            ('group', 'activate', self.on_group_states_activate),
            ('ungroup', 'activate', self.on_ungroup_state_activate),
            ('substitute_state', 'activate', self.on_substitute_selected_state_activate),
            ('save_state_as', 'activate', self.on_save_selected_state_as_activate),
            ('undo', 'activate', self.on_undo_activate),
            ('redo', 'activate', self.on_redo_activate),
            ('grid', 'activate', self.on_grid_toggled),
            ('data_flow_mode', 'toggled', self.on_data_flow_mode_toggled),
            ('show_data_flows', 'toggled', self.on_show_data_flows_toggled),
            ('show_data_values', 'toggled', self.on_show_data_values_toggled),
            ('show_aborted_preempted', 'toggled', self.on_show_aborted_preempted_toggled),
            ('expert_view', 'activate', self.on_expert_view_activate),
            ('full_screen', 'toggled', self.on_full_screen_mode_toggled),
            ('start', 'activate', self.on_start_activate),
            ('start_from_selected', 'activate', self.on_start_from_selected_state_activate),
            ('run_to_selected', 'activate', self.on_run_to_selected_state_activate),
            ('pause', 'activate', self.on_pause_activate),
            ('stop', 'activate', self.on_stop_activate),
            ('step_mode', 'activate', self.on_step_mode_activate),
            ('step_into', 'activate', self.on_step_into_activate),
            ('step_over', 'activate', self.on_step_over_activate),
            ('step_out', 'activate', self.on_step_out_activate),
            ('backward_step', 'activate', self.on_backward_step_activate),
            ('about', 'activate', self.on_about_activate)):
        self.connect_button_to_function(button_name, signal_name, handler)
    self.full_screen_window.connect('key_press_event', self.on_escape_key_press_event_leave_full_screen)
    self.view['menu_edit'].connect('select', self.check_edit_menu_items_status)
    self.registered_view = True
    self._update_recently_opened_state_machines()
    # do not move next line - here to show warning in GUI debug console
    self.create_logger_warning_if_shortcuts_are_overwritten_by_menu_bar()
def to_dict(self) -> Dict:
    """Export the CAG to a dict that can be serialized to JSON."""
    # NOTE(review): lmap/export_edge are project helpers; Δt is the
    # model's time step attribute.
    return {
        "name": self.name,
        "dateCreated": str(self.dateCreated),
        "variables": lmap(lambda n: self.export_node(n), self.nodes(data=True)),
        "timeStep": str(self.Δt),
        "edge_data": lmap(export_edge, self.edges(data=True)),
    }
def _Open(self, path_spec, mode='rb'):
    """Opens the file system defined by path specification.

    Args:
        path_spec (PathSpec): a path specification.
        mode (Optional[str]): file access mode. The default is 'rb' read-only
            binary.

    Raises:
        AccessError: if the access to open the file was denied.
        IOError: if the file system object could not be opened.
        PathSpecError: if the path specification is incorrect.
        ValueError: if the path specification is invalid.
    """
    if not path_spec.HasParent():
        raise errors.PathSpecError('Unsupported path specification without parent.')
    file_object = resolver.Resolver.OpenFileObject(path_spec.parent, resolver_context=self._resolver_context)
    try:
        fsapfs_container = pyfsapfs.container()
        fsapfs_container.open_file_object(file_object)
    except:
        # Close the file object on any failure and re-raise the original
        # error; the bare except is deliberate so cleanup always runs.
        file_object.close()
        raise
    self._file_object = file_object
    self._fsapfs_container = fsapfs_container
def set_param(self, idx):
    """Adds the parameter to the conversion modifier.

    :param idx: Provides the ending index of the parameter string.
    """
    # The parameter text spans from the recorded start position up to
    # (but not including) idx within the format string.
    self.modifier.set_param(self.format[self.param_begin:idx])
def node_path(self, node):
    """Return two lists describing the path from this node to another.

    Parameters
    ----------
    node : instance of Node
        The other node.

    Returns
    -------
    p1 : list
        Path from this node up to and including the common parent.
    p2 : list
        Remainder of the path from the common parent down to *node*.

    Notes
    -----
    For the scenegraph::

        A --- B --- C --- D
                --- E --- F

    calling ``D.node_path(F)`` returns ``([D, C, B], [E, F])``.
    """
    chain_here = self.parent_chain()
    chain_there = node.parent_chain()
    # First ancestor of self that also appears in the other node's chain.
    common = next((p for p in chain_here if p in chain_there), None)
    if common is None:
        raise RuntimeError("No single-path common parent between nodes %s "
                           "and %s." % (self, node))
    up = chain_here[:chain_here.index(common) + 1]
    down = chain_there[:chain_there.index(common)][::-1]
    return up, down
def _get_path ( self , file ) :
"""Creates the cache directory if it doesn ' t already exist . Returns the
full path to the specified file inside the cache directory .""" | dir = self . _cache_directory ( )
if not os . path . exists ( dir ) :
os . makedirs ( dir )
return os . path . join ( dir , file ) |
def load_or_create_client_key(pem_path):
    """Load the client key from a directory, creating it if it does not exist.

    .. note:: The client key that will be created will be a 2048-bit RSA key.

    :type pem_path: ``twisted.python.filepath.FilePath``
    :param pem_path: The certificate directory
        to use, as with the endpoint.
    """
    acme_key_file = pem_path.asTextMode().child(u'client.key')
    if acme_key_file.exists():
        # Reuse the previously generated key.
        key = serialization.load_pem_private_key(acme_key_file.getContent(), password=None, backend=default_backend())
    else:
        # Generate a fresh RSA key and persist it as unencrypted PEM.
        key = generate_private_key(u'rsa')
        acme_key_file.setContent(key.private_bytes(encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=serialization.NoEncryption()))
    return JWKRSA(key=key)
def parsePoint(line):
    """Parse a line of text into an MLlib LabeledPoint object."""
    values = list(map(float, line.split(' ')))
    # MLlib expects 0/1 labels; convert the -1 convention to 0.
    label = 0 if values[0] == -1 else values[0]
    return LabeledPoint(label, values[1:])
def root(self: BoardT) -> BoardT:
    """Returns a copy of the root position."""
    if not self._stack:
        # No moves have been played: the current position is the root.
        return self.copy(stack=False)
    board = type(self)(None, chess960=self.chess960)
    self._stack[0].restore(board)
    return board
def generate(env):
    """Add Builders and construction variables for Visual Age linker to
    an Environment."""
    # Start from the generic link tool, then layer the smart-link flags.
    link.generate(env)
    env['SMARTLINKFLAGS'] = smart_linkflags
    env['LINKFLAGS'] = SCons.Util.CLVar('$SMARTLINKFLAGS')
    # -qmkshrobj builds shared objects; -qsuppress silences message 1501-218.
    env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -qmkshrobj -qsuppress=1501-218')
    # Shared libraries conventionally use the .a suffix on this platform.
    env['SHLIBSUFFIX'] = '.a'
def decompress(self, image_path, quiet=True):
    '''decompress will (properly) decompress an image'''
    if not os.path.exists(image_path):
        bot.exit("Cannot find image %s" % image_path)
    # Name of the file gzip will produce (".gz" stripped).
    extracted_file = image_path.replace('.gz', '')
    # run_command exits if return code != 0
    self.run_command(['gzip', '-d', '-f', image_path], quiet=quiet)
    return extracted_file
def long_to_bytes(n, blocksize=0):
    """long_to_bytes(n:long, blocksize:int) : string
    Convert a long integer to a byte string.

    If optional blocksize is given and greater than zero, pad the front of
    the byte string with binary zeros so that the length is a multiple of
    blocksize.
    """
    # after much testing, this algorithm was deemed to be the fastest
    # (The dead Python-2 `long(n)` branch was removed: the surrounding file
    # already uses Python-3-only syntax such as `yield from`.)
    s = b''
    pack = struct.pack
    while n > 0:
        # Emit 32 bits at a time, most significant chunk first.
        s = pack(b'>I', n & 0xffffffff) + s
        n = n >> 32
    # strip off leading zeros
    for i in range(len(s)):
        if s[i] != b'\000'[0]:
            break
    else:
        # only happens when n == 0
        s = b'\000'
        i = 0
    s = s[i:]
    # add back some pad bytes. this could be done more efficiently w.r.t. the
    # de-padding being done above, but sigh...
    if blocksize > 0 and len(s) % blocksize:
        s = (blocksize - len(s) % blocksize) * b'\000' + s
    return s
def getScoreProperties(self):
    """Returns the accidental dignity score of the object as dict.

    Each key holds the contribution of one accidental-dignity factor;
    positive values are favorable, negative unfavorable.
    """
    obj = self.obj
    score = {}
    # Peregrine
    isPeregrine = essential.isPeregrine(obj.id, obj.sign, obj.signlon)
    score['peregrine'] = -5 if isPeregrine else 0
    # Ruler-Ruler and Exalt-Exalt mutual receptions
    mr = self.eqMutualReceptions()
    score['mr_ruler'] = +5 if 'ruler' in mr else 0
    score['mr_exalt'] = +4 if 'exalt' in mr else 0
    # House scores
    score['house'] = self.houseScore()
    # Joys
    score['joy_sign'] = +3 if self.inSignJoy() else 0
    score['joy_house'] = +2 if self.inHouseJoy() else 0
    # Relations with sun (the Sun itself is excluded from 'no_under_sun')
    score['cazimi'] = +5 if self.isCazimi() else 0
    score['combust'] = -6 if self.isCombust() else 0
    score['under_sun'] = -4 if self.isUnderSun() else 0
    score['no_under_sun'] = 0
    if obj.id != const.SUN and not self.sunRelation():
        score['no_under_sun'] = +5
    # Light (not applicable to the Sun)
    score['light'] = 0
    if obj.id != const.SUN:
        score['light'] = +1 if self.isAugmentingLight() else -1
    # Orientality: sign of the bonus flips between the two planet groups
    score['orientality'] = 0
    if obj.id in [const.SATURN, const.JUPITER, const.MARS]:
        score['orientality'] = +2 if self.isOriental() else -2
    elif obj.id in [const.VENUS, const.MERCURY, const.MOON]:
        score['orientality'] = -2 if self.isOriental() else +2
    # Moon nodes
    score['north_node'] = -3 if self.isConjNorthNode() else 0
    score['south_node'] = -5 if self.isConjSouthNode() else 0
    # Direction and speed (direction not applicable to the luminaries)
    score['direction'] = 0
    if obj.id not in [const.SUN, const.MOON]:
        score['direction'] = +4 if obj.isDirect() else -5
    score['speed'] = +2 if obj.isFast() else -2
    # Aspects to benefics
    aspBen = self.aspectBenefics()
    score['benefic_asp0'] = +5 if const.CONJUNCTION in aspBen else 0
    score['benefic_asp120'] = +4 if const.TRINE in aspBen else 0
    score['benefic_asp60'] = +3 if const.SEXTILE in aspBen else 0
    # Aspects to malefics
    aspMal = self.aspectMalefics()
    score['malefic_asp0'] = -5 if const.CONJUNCTION in aspMal else 0
    score['malefic_asp180'] = -4 if const.OPPOSITION in aspMal else 0
    score['malefic_asp90'] = -3 if const.SQUARE in aspMal else 0
    # Auxily and Surround
    score['auxilied'] = +5 if self.isAuxilied() else 0
    score['surround'] = -5 if self.isSurrounded() else 0
    # Voc and Feral (void of course only counts when not feral)
    score['feral'] = -3 if self.isFeral() else 0
    score['void'] = -2 if (self.isVoc() and score['feral'] == 0) else 0
    # Haiz
    haiz = self.haiz()
    score['haiz'] = 0
    if haiz == HAIZ:
        score['haiz'] = +3
    elif haiz == CHAIZ:
        score['haiz'] = -2
    # Moon via combusta
    score['viacombusta'] = 0
    if obj.id == const.MOON and viaCombusta(obj):
        score['viacombusta'] = -2
    return score
def create_title(article, language, title, slug=None, description=None, page_title=None, menu_title=None, meta_description=None, creation_date=None, image=None):
    """Create and return a Title for *article* in *language*.

    When no slug is supplied one is derived from the title via
    ``CMS_ARTICLES_SLUG_FORMAT`` and suffixed with ``-1``, ``-2``, ...
    until it is unique among existing titles in this language.
    """
    # validate article
    assert isinstance(article, Article)
    # validate language
    assert language in get_language_list(article.tree.node.site_id)
    # validate creation date
    if creation_date:
        assert isinstance(creation_date, datetime.date)
    # derive a default slug when none was given
    if not slug:
        slug = settings.CMS_ARTICLES_SLUG_FORMAT.format(
            now=creation_date or now(),
            slug=slugify(title),
        )
    # collect every existing slug sharing this prefix, then append a
    # counter until the candidate is unused
    base_slug = slug
    taken = [
        s
        for s in Title.objects.filter(language=language).values_list('slug', flat=True)
        if s.startswith(base_slug)
    ]
    counter = 1
    while slug in taken:
        slug = '%s-%s' % (base_slug, counter)
        counter += 1
    return Title.objects.create(
        article=article,
        language=language,
        title=title,
        slug=slug,
        description=description,
        page_title=page_title,
        menu_title=menu_title,
        meta_description=meta_description,
        image=image,
    )
def _set_launcher_property(self, driver_arg_key, spark_property_key):
    """Synchronise a setting that exists both as a launcher argument and
    as a spark conf entry.

    The launcher argument wins; otherwise the spark conf value is used.
    When neither is set this is a noop (the standard spark defaults
    apply). Whichever value is found is written back to BOTH places, so
    that settings like ``spark.driver.memory`` are not ignored when
    starting the driver JVM in client mode (standalone, local,
    yarn-client or mesos-client).

    Parameters
    ----------
    driver_arg_key : string
        Eg: "driver-memory"
    spark_property_key : string
        Eg: "spark.driver.memory"
    """
    conf_fallback = self.conf._conf_dict.get(spark_property_key)
    resolved = self._spark_launcher_args.get(driver_arg_key, conf_fallback)
    if not resolved:
        return
    self._spark_launcher_args[driver_arg_key] = resolved
    self.conf[spark_property_key] = resolved
def FIELD_DECL(self, cursor):
    """Handle a clang FIELD_DECL cursor and return a typedesc.Field.

    Resolves the field's name (synthesizing ``_<index>`` for anonymous
    fields), its bit offset within the parent record, its width in bits
    (explicit for bitfields), and a description of its type, recursing
    into the type parser when needed.

    Returns None when the field type cannot be represented.
    """
    parent = cursor.semantic_parent
    name = cursor.spelling
    # Anonymous field, or an unnamed bitfield member (which clang does
    # NOT report as is_anonymous()): synthesize a name from the field's
    # position in the parent record, e.g. _0, _1, ...
    if cursor.is_anonymous() or (name == '' and cursor.is_bitfield()):
        # No name to look up, so ask the cursor for the offset directly.
        offset = cursor.get_field_offsetof()
        prev = fieldnum = -1
        for i, _f in enumerate(parent.type.get_fields()):
            if _f == cursor:
                fieldnum = i
                break
            prev = _f
        if fieldnum == -1:
            raise ValueError("Anonymous field was not found in get_fields()")
        name = "_%d" % fieldnum
        log.debug("FIELD_DECL: anonymous field renamed to %s", name)
    else:
        # Named field: the parent record knows its bit offset.
        offset = parent.type.get_offset(name)
        if offset < 0:
            # A negative offset means an incomplete record definition
            # (clang bindings quirk; possibly a C++ class) -- log and
            # keep going.
            log.error('FIELD_DECL: BAD RECORD, Bad offset: %d for %s', offset, name)
    log.debug('FIELD_DECL: field offset is %d', offset)
    # Width in bits: explicit for bitfields, otherwise type size * 8.
    bits = None
    if cursor.is_bitfield():
        log.debug('FIELD_DECL: field is part of a bitfield')
        bits = cursor.get_bitfield_width()
    else:
        bits = cursor.type.get_size() * 8
        if bits < 0:
            # Negative size from clang indicates malformed source.
            log.warning('Bad source code, bitsize == %d <0 on %s', bits, name)
            bits = 0
    log.debug('FIELD_DECL: field is %d bits', bits)
    # Try to get a representation of the field's type.
    _type = None
    _canonical_type = cursor.type.get_canonical()
    _decl = cursor.type.get_declaration()
    if (self.is_array_type(_canonical_type) or self.is_fundamental_type(_canonical_type) or self.is_pointer_type(_canonical_type)):
        # Simple cases: parse the canonical type directly.
        _type = self.parse_cursor_type(_canonical_type)
    else:
        children = list(cursor.get_children())
        log.debug('FIELD_DECL: we now look for the declaration name.' 'kind %s', _decl.kind)
        if len(children) > 0 and _decl.kind == CursorKind.NO_DECL_FOUND:
            # e.g. constant array of typedef of pointer, and other cases:
            # take the name from the first child cursor instead.
            _decl_name = self.get_unique_name(list(cursor.get_children())[0])
        else:
            _decl_name = self.get_unique_name(_decl)
        log.debug('FIELD_DECL: the declaration name %s', _decl_name)
        if self.is_registered(_decl_name):
            # Type already parsed earlier: reuse the cached description.
            log.debug('FIELD_DECL: used type from cache: %s', _decl_name)
            _type = self.get_registered(_decl_name)
        else:
            # Not cached: recurse into the type parser.
            log.debug("FIELD_DECL: name:'%s'", _decl_name)
            log.debug("FIELD_DECL: %s: nb children:%s", cursor.type.kind, len(children))
            _type = self.parse_cursor_type(_canonical_type)
            if _type is None:
                log.warning("Field %s is an %s type - ignoring field type", name, _canonical_type.kind.name)
                return None
            if cursor.is_anonymous():
                # Re-key the registration under the declaration name so
                # later lookups by that name hit the cache.
                self.parser.remove_registered(_type.name)
                _type.name = _decl_name
                self.register(_decl_name, _type)
    return typedesc.Field(name, _type, offset, bits, is_bitfield=cursor.is_bitfield(), is_anonymous=cursor.is_anonymous())
def invoke_editor(before_editing, cluster_name):
    """Launch $EDITOR on a temporary YAML copy of the configuration.

    :param before_editing: human representation before editing
    :param cluster_name: used to build the temporary file name
    :returns: tuple of (edited text, parsed YAML datastructure)
    :raises PatroniCtlException: if EDITOR is unset or exits non-zero
    """
    editor = os.environ.get('EDITOR')
    if not editor:
        raise PatroniCtlException('EDITOR environment variable is not set')
    with temporary_file(contents=before_editing.encode('utf-8'),
                        suffix='.yaml',
                        prefix='{0}-config-'.format(cluster_name)) as tmpfile:
        rc = subprocess.call([editor, tmpfile])
        if rc:
            raise PatroniCtlException("Editor exited with return code {0}".format(rc))
        # Read the file back after the editor has finished with it.
        with codecs.open(tmpfile, encoding='utf-8') as fd:
            after_editing = fd.read()
        return after_editing, yaml.safe_load(after_editing)
def importDirectory(self, login, tableName, importDir, failureDir, setTime):
    """Synchronous Thrift RPC wrapper: send the importDirectory request
    and block until the reply arrives.

    Parameters:
     - login
     - tableName
     - importDir
     - failureDir
     - setTime
    """
    # Thrift request/response pair: fire the request, then wait for
    # (and validate) the server's reply.
    self.send_importDirectory(login, tableName, importDir, failureDir, setTime)
    self.recv_importDirectory()
def is_double_reversed_equal(num: int) -> bool:
    """Return True if reversing num's digits twice yields num again.

    The digits of ``num`` are reversed to get ``reversed1``, then
    ``reversed1`` is reversed to get ``reversed2``. Leading zeros are
    dropped by the int() round-trip, so any number with trailing zeros
    (other than 0 itself) comes back different. The sign is kept aside
    while reversing, so negative inputs are handled instead of raising
    ValueError (``int("625-")``) as the previous implementation did.

    Parameters:
        num (int): The integer to be reversed.

    Returns:
        bool: True if num equals its double-reversed version, False otherwise.

    Examples:
        >>> is_double_reversed_equal(526)
        True
        >>> is_double_reversed_equal(1800)
        False
    """
    sign = -1 if num < 0 else 1
    # Reverse the digit string of the magnitude, re-applying the sign.
    reversed1 = sign * int(str(abs(num))[::-1])
    reversed2 = sign * int(str(abs(reversed1))[::-1])
    return reversed2 == num
def possible_parameter(nb, jsonable_parameter=True, end_cell_index=None):
    """Find the possible parameters of a jupyter notebook (python3 only).

    Candidates are discovered by parsing the AST of the python code
    generated from the notebook: a variable qualifies when it is defined
    in a cell containing only comments/assignments and its name is not
    otherwise used in that cell or earlier.

    Parameters
    ----------
    nb : str, nbformat.notebooknode.NotebookNode
        Jupyter notebook path or its content as a NotebookNode object.
    jsonable_parameter : bool, optional
        Consider only jsonable parameters.
    end_cell_index : int, optional
        End cell index used to slice the notebook when searching.

    Returns
    -------
    list[collections.namedtuple]
        Fields are ('name', 'value', 'cell_index') when
        jsonable_parameter is true, else ('name', 'cell_index').
        Sorted by parameter name.
    """
    helper = _JupyterNotebookHelper(nb, jsonable_parameter, end_cell_index)
    if jsonable_parameter is True:
        PossibleParameter = collections.namedtuple('PossibleParameter', ['name', 'value', 'cell_index'])
        candidates = [
            PossibleParameter(name=name, value=helper.param_value[name], cell_index=idx)
            for name, idx in helper.param_cell_index.items()
        ]
    else:
        PossibleParameter = collections.namedtuple('PossibleParameter', ['name', 'cell_index'])
        candidates = [
            PossibleParameter(name=name, cell_index=idx)
            for name, idx in helper.param_cell_index.items()
        ]
    return sorted(candidates, key=lambda p: p.name)
def update_index_for_direction(self, index_direction, index):
    """Clamp *index* into the valid range for *index_direction*, apply it
    to every slice model bound to that direction, then reload the data.

    :type index_direction: SliceDirection
    :type index: int
    """
    valid_indexes = self._slice_data_source.indexes_for_direction(index_direction)
    # Clamp: negatives snap to 0, overshoot snaps to the last index.
    if index < 0:
        clamped = 0
    elif index >= len(valid_indexes):
        clamped = len(valid_indexes) - 1
    else:
        clamped = index
    for model in self._available_slice_models:
        if model.index_direction == index_direction:
            model.index = clamped
        if model.x_index_direction == index_direction:
            model.x_index = clamped
        if model.y_index_direction == index_direction:
            model.y_index = clamped
    self.load_data()
def list_of_lists_to_dict(l):
    """Group a list of [key, value] pairs into a dict of value lists.

    [['id', 1], ['id', 2], ['id', 3], ['foo', 4]]
    -> {'id': [1, 2, 3], 'foo': [4]}
    """
    grouped = {}
    for key, val in l:
        if key in grouped:
            grouped[key].append(val)
        else:
            grouped[key] = [val]
    return grouped
def minimum_needs_unit(field, feature, parent):
    """Retrieve the unit string of the given minimum needs field name.

    For instance:
    * minimum_needs_unit('minimum_needs__clean_water') -> 'l/weekly'
    """
    _ = feature, parent  # NOQA
    field_definition = definition(field, 'field_name')
    if not field_definition:
        return None
    unit_abbreviation = None
    frequency = None
    # Prefer the need parameter's own unit/frequency, falling back to the
    # field definition's unit dict.
    if field_definition.get('need_parameter'):
        need = field_definition['need_parameter']
        if isinstance(need, ResourceParameter):
            unit_abbreviation = need.unit.abbreviation
            frequency = need.frequency
    elif field_definition.get('unit'):
        unit_abbreviation = field_definition.get('unit').get('abbreviation')
    if field_definition.get('frequency') and not frequency:
        frequency = field_definition.get('frequency')
    if not unit_abbreviation:
        unit_abbreviation = exposure_unit['plural_name']
    # Some needs (e.g. toilet counts) are provided once, not per period,
    # so no frequency suffix is appended for them.
    if not frequency or field_definition['key'] in ['minimum_needs__toilets_count_field']:
        return unit_abbreviation.lower()
    return '{unit_abbreviation}/{frequency}'.format(
        unit_abbreviation=unit_abbreviation, frequency=frequency).lower()
def validate_filters(self, branchset_node, uncertainty_type, filters):
    """See superclass' method for description and signature specification.

    Checks that the following conditions are met:

    * "sourceModel" uncertainties can not have filters.
    * At most one filter is allowed per branchset.
    * Absolute uncertainties must have only one filter --
      "applyToSources", with only one source id.
    * Dip uncertainties must define "applyToSources" or
      "applyToSourceType".
    * Filter "applyToSources" must mention only source ids that
      exist in source models.
    * Filter "applyToTectonicRegionType" must mention only tectonic
      region types that exist in source models.
    * Filter "applyToSourceType" must mention only source types
      that exist in source models.

    Raises LogicTreeError when any check fails.
    """
    # A sourceModel uncertainty replaces the whole model, so filtering
    # it makes no sense.
    if uncertainty_type == 'sourceModel' and filters:
        raise LogicTreeError(branchset_node, self.filename, 'filters are not allowed on source model uncertainty')
    if len(filters) > 1:
        raise LogicTreeError(branchset_node, self.filename, "only one filter is allowed per branchset")
    if 'applyToTectonicRegionType' in filters:
        if not filters['applyToTectonicRegionType'] in self.tectonic_region_types:
            raise LogicTreeError(branchset_node, self.filename, "source models don't define sources of tectonic region " "type '%s'" % filters['applyToTectonicRegionType'])
    # Absolute uncertainties replace a value wholesale, so they must
    # target exactly one source id.
    if uncertainty_type in ('abGRAbsolute', 'maxMagGRAbsolute', 'simpleFaultGeometryAbsolute', 'complexFaultGeometryAbsolute'):
        if not filters or not list(filters) == ['applyToSources'] or not len(filters['applyToSources'].split()) == 1:
            raise LogicTreeError(branchset_node, self.filename, "uncertainty of type '%s' must define 'applyToSources' " "with only one source id" % uncertainty_type)
    if uncertainty_type in ('simpleFaultDipRelative', 'simpleFaultDipAbsolute'):
        if not filters or (not ('applyToSources' in filters.keys()) and not ('applyToSourceType' in filters.keys())):
            raise LogicTreeError(branchset_node, self.filename, "uncertainty of type '%s' must define either" "'applyToSources' or 'applyToSourceType'" % uncertainty_type)
    if 'applyToSourceType' in filters:
        if not filters['applyToSourceType'] in self.source_types:
            raise LogicTreeError(branchset_node, self.filename, "source models don't define sources of type '%s'" % filters['applyToSourceType'])
    if 'applyToSources' in filters:
        for source_id in filters['applyToSources'].split():
            # NOTE(review): this raises as soon as the id is missing from
            # ANY one source model's id list, i.e. each referenced source
            # must exist in EVERY source model. Confirm this is intended
            # rather than "exists in at least one model".
            for source_ids in self.source_ids.values():
                if source_id not in source_ids:
                    raise LogicTreeError(branchset_node, self.filename, "source with id '%s' is not defined in source " "models" % source_id)
def delete(self, key):
    """Remove a value from the `DotDict`.

    The `key` parameter can either be a regular string key,
    e.g. "foo", or it can be a string key with dot notation,
    e.g. "foo.bar.baz", to signify a nested element.

    Args:
        key (str): The key to remove.

    Raises:
        KeyError: If an intermediate element of the dotted path is not
            a dict/DotDict. Whether a missing final key raises depends
            on DotDict's ``__delitem__`` (the original docstring claimed
            it continues silently -- confirm).
    """
    dct = self
    keys = key.split('.')
    # Walk down to the *parent* of the final key. The previous version
    # compared each path element to the last key's string value
    # (`if k == last_key`), which deleted too early whenever an
    # intermediate key shared the final key's name (e.g. "foo.bar.foo"
    # removed self["foo"] entirely). Iterating keys[:-1] positionally
    # fixes that while keeping single-key behavior identical.
    for k in keys[:-1]:
        # Bypass DotDict.__getitem__ so the traversal does not
        # re-interpret dotted strings; fall back to the plain dict
        # lookup otherwise.
        if isinstance(dct, DotDict):
            dct = super(DotDict, dct).__getitem__(k)
        else:
            dct = dct.__getitem__(k)
        if not isinstance(dct, (DotDict, dict)):
            raise KeyError('Subkey "{}" in "{}" invalid for deletion'.format(k, key))
    del dct[keys[-1]]
def import_from_dict(cls, session, dict_rep, parent=None, recursive=True, sync=[]):
    """Import obj from a dictionary.

    Looks up an existing row matching ``dict_rep`` (via the parent's
    foreign keys and the class's unique constraints): creates it when
    absent, otherwise updates its columns. Optionally recurses into
    ``export_children``; for child collections named in ``sync`` it
    deletes rows that were not part of this import.

    NOTE(review): the mutable default ``sync=[]`` is shared across
    calls; it is only read here, but confirm no caller mutates it.
    """
    parent_refs = cls._parent_foreign_key_mappings()
    export_fields = set(cls.export_fields) | set(parent_refs.keys())
    # Snapshot the child payloads before stripping non-exported keys.
    new_children = {c: dict_rep.get(c) for c in cls.export_children if c in dict_rep}
    unique_constrains = cls._unique_constrains()
    filters = []
    # Using these filters to check if obj already exists.
    # Remove fields that should not get imported.
    for k in list(dict_rep):
        if k not in export_fields:
            del dict_rep[k]
    if not parent:
        if cls.export_parent:
            # Top-level import of a parented class: the FK fields must
            # already be present in the payload.
            for p in parent_refs.keys():
                if p not in dict_rep:
                    raise RuntimeError('{0}: Missing field {1}'.format(cls.__name__, p))
    else:
        # Set foreign keys to parent obj.
        for k, v in parent_refs.items():
            dict_rep[k] = getattr(parent, v)
        # Add filter for parent obj.
        filters.extend([getattr(cls, k) == dict_rep.get(k) for k in parent_refs.keys()])
    # Add filter for unique constraints (any constraint fully matching
    # identifies the same row).
    ucs = [and_(*[getattr(cls, k) == dict_rep.get(k) for k in cs if dict_rep.get(k) is not None]) for cs in unique_constrains]
    filters.append(or_(*ucs))
    # Check if object already exists in DB, fail if more than one is found.
    try:
        obj_query = session.query(cls).filter(and_(*filters))
        obj = obj_query.one_or_none()
    except MultipleResultsFound as e:
        logging.error('Error importing %s \n %s \n %s', cls.__name__, str(obj_query), yaml.safe_dump(dict_rep))
        raise e
    if not obj:
        is_new_obj = True
        # Create new DB object.
        obj = cls(**dict_rep)
        logging.info('Importing new %s %s', obj.__tablename__, str(obj))
        if cls.export_parent and parent:
            setattr(obj, cls.export_parent, parent)
        session.add(obj)
    else:
        is_new_obj = False
        logging.info('Updating %s %s', obj.__tablename__, str(obj))
        # Update columns in place.
        for k, v in dict_rep.items():
            setattr(obj, k, v)
    # Recursively create children.
    if recursive:
        for c in cls.export_children:
            child_class = cls.__mapper__.relationships[c].argument.class_
            added = []
            for c_obj in new_children.get(c, []):
                added.append(child_class.import_from_dict(session=session, dict_rep=c_obj, parent=obj, sync=sync))
            # If children should get synced, delete the ones that did not
            # get updated (only meaningful for pre-existing parents).
            if c in sync and not is_new_obj:
                back_refs = child_class._parent_foreign_key_mappings()
                delete_filters = [getattr(child_class, k) == getattr(obj, back_refs.get(k)) for k in back_refs.keys()]
                to_delete = set(session.query(child_class).filter(and_(*delete_filters))).difference(set(added))
                for o in to_delete:
                    logging.info('Deleting %s %s', c, str(obj))
                    session.delete(o)
    return obj
def get_list_dimensions(_list):
    """Return the size of each nested dimension of *_list*.

    Dimensions are probed along the first element of each level, so the
    input is assumed to be rectangular. A non-sequence argument yields
    []. An empty list/tuple reports a dimension of size 0 instead of
    raising IndexError as the previous implementation did.

    >>> get_list_dimensions([[1, 2, 3], [4, 5, 6]])
    [2, 3]
    """
    if isinstance(_list, (list, tuple)):
        if not _list:
            # Empty sequence: this level exists with size 0, but there
            # is no element to recurse into.
            return [0]
        return [len(_list)] + get_list_dimensions(_list[0])
    return []
def closePanel(self):
    """Close a full view panel.

    Refuses (returns False) when any contained widget vetoes the close
    via canClose(). Otherwise closes every child view and collapses the
    surrounding splitter hierarchy so the sibling widget takes this
    panel's place; when this is the last panel, it stays and shows the
    hint label instead.

    :return: True if the panel was closed (or kept as the last panel).
    """
    # make sure we can close all the widgets in the view first
    for i in range(self.count()):
        if not self.widget(i).canClose():
            return False
    container = self.parentWidget()
    viewWidget = self.viewWidget()
    # close all the child views (iterate backwards so indexes stay valid)
    for i in xrange(self.count() - 1, -1, -1):
        self.widget(i).close()
    self.tabBar().clear()
    if isinstance(container, XSplitter):
        parent_container = container.parentWidget()
        if container.count() == 2:
            if isinstance(parent_container, XSplitter):
                # Replace the two-way splitter with its surviving child,
                # preserving the outer splitter's size distribution.
                sizes = parent_container.sizes()
                widget = container.widget(int(not container.indexOf(self)))
                index = parent_container.indexOf(container)
                parent_container.insertWidget(index, widget)
                container.setParent(None)
                container.close()
                container.deleteLater()
                parent_container.setSizes(sizes)
            elif parent_container.parentWidget() == viewWidget:
                # Splitter sits directly inside the view widget: promote
                # the surviving child to be the view's single widget.
                widget = container.widget(int(not container.indexOf(self)))
                widget.setParent(viewWidget)
                # PySide exposes takeWidget(); under PyQt the old widget
                # must be detached and destroyed manually.
                if projexui.QT_WRAPPER == 'PySide':
                    _ = viewWidget.takeWidget()
                else:
                    old_widget = viewWidget.widget()
                    old_widget.setParent(None)
                    old_widget.close()
                    old_widget.deleteLater()
                    QtGui.QApplication.instance().processEvents()
                viewWidget.setWidget(widget)
        else:
            # More than two panes: just drop this container.
            container.setParent(None)
            container.close()
            container.deleteLater()
    else:
        # Last remaining panel: keep it and show the hint label.
        self.setFocus()
        self._hintLabel.setText(self.hint())
        self._hintLabel.show()
    return True
def pixy_value_update(blocks):
    """Drive the pan/tilt servos toward the first detected Pixy block.

    Feeds the block's offset from the frame centre into the pan/tilt PID
    loops and writes the resulting servo angles, using the async board
    API when called from inside a running event loop.
    """
    if not blocks:
        return
    # PID updates from the block's distance to the frame centre.
    pan_loop.update(X_CENTER - blocks[0]["x"])
    tilt_loop.update(blocks[0]["y"] - Y_CENTER)
    pan_angle = int(pan_loop.position * 180 / 1000)
    tilt_angle = int(tilt_loop.position * 180 / 1000)
    loop = asyncio.get_event_loop()
    if loop.is_running():
        # We are inside a callback on the running loop, so schedule the
        # coroutine versions rather than calling board methods directly.
        asyncio.ensure_future(board.core.analog_write(PIN_PAN_SERVO, pan_angle))
        asyncio.ensure_future(board.core.analog_write(PIN_TILT_SERVO, tilt_angle))
    else:
        board.analog_write(PIN_PAN_SERVO, pan_angle)
        board.analog_write(PIN_TILT_SERVO, tilt_angle)
def build_specfile(target, source, env):
    """Builds a RPM specfile from a dictionary with string metadata and
    by analyzing a tree of nodes.

    Raises SCons.Errors.UserError when a required RPM package field is
    missing from *env* (surfaced as a KeyError by the helpers).
    """
    try:
        # ``with`` guarantees the handle is closed even when one of the
        # build_specfile_* helpers raises; the previous version leaked
        # the open file on error (and shadowed the ``file`` builtin).
        with open(target[0].get_abspath(), 'w') as spec_file:
            spec_file.write(build_specfile_header(env))
            spec_file.write(build_specfile_sections(env))
            spec_file.write(build_specfile_filesection(env, source))
        # call a user specified function
        if 'CHANGE_SPECFILE' in env:
            env['CHANGE_SPECFILE'](target, source)
    except KeyError as e:
        raise SCons.Errors.UserError('"%s" package field for RPM is missing.' % e.args[0])
def enable_broadcasting(self):
    """Begin accumulating broadcast reports received from all devices.

    Allocates (or reuses) a queue that is filled asynchronously as
    broadcast reports arrive; a pre-existing queue is drained first.

    Returns:
        queue.Queue: A queue that will be filled with broadcast reports.
    """
    if self._broadcast_reports is None:
        self._broadcast_reports = queue.Queue()
        return self._broadcast_reports
    # Already enabled: drop any stale reports and hand back the same queue.
    _clear_queue(self._broadcast_reports)
    return self._broadcast_reports
def _checkAndConvertIndex(self, index):
    """Normalise an integer block index, translating from-the-end
    (negative) notation, and raise IndexError when out of range."""
    normalised = index if index >= 0 else len(self) + index
    # Valid range is [0, blockCount) after conversion.
    if not (0 <= normalised < self._doc.blockCount()):
        raise IndexError('Invalid block index', normalised)
    return normalised
def Deserialize(self, reader):
    """Deserialize full object.

    Reads, in order: the contract script, the parameter list and the
    return type byte.

    Args:
        reader (neo.IO.BinaryReader):
    """
    # Order matters: the reader is consumed sequentially.
    script = reader.ReadVarBytes()
    parameter_list = reader.ReadVarBytes()
    return_type = reader.ReadByte()
    self.Script = script
    self.ParameterList = parameter_list
    self.ReturnType = return_type
def _get_proj4_name(self, projection):
    """Map a CF grid_mapping_name to its PROJ.4 projection name, falling
    back to the CF name itself when no mapping is known."""
    cf_to_proj4 = {
        'geostationary': 'geos',
        'lambert_conformal_conic': 'lcc',
        'polar_stereographic': 'stere',
        'mercator': 'merc',
    }
    gmap_name = projection.attrs['grid_mapping_name']
    return cf_to_proj4.get(gmap_name, gmap_name)
def build_class_graph(modules, klass=None, graph=None):
    """Build a directed graph of the DictCell subclass structure.

    Walks the classes defined in *modules*: every Referent subclass
    contributes edges parent -> child, and each node stores a pointer
    to its class object under the 'class' attribute.

    NOTE(review): uses the networkx 1.x ``graph.node`` attribute API;
    networkx >= 2 renamed it to ``graph.nodes`` -- confirm the pinned
    networkx version.
    """
    if klass is None:
        # Top-level call: create the graph and seed the recursion with
        # every Referent subclass found in the modules.
        class_graph = nx.DiGraph()
        for name, classmember in inspect.getmembers(modules, inspect.isclass):
            if issubclass(classmember, Referent) and classmember is not Referent:
                TaxonomyCell.build_class_graph(modules, classmember, class_graph)
        return class_graph
    else:
        # Recursive call: link klass to each base and walk upward until
        # Referent itself is reached.
        parents = getattr(klass, '__bases__')
        for parent in parents:
            if parent != Referent:
                graph.add_edge(parent.__name__, klass.__name__)
                # store pointer to classes in property 'class'
                graph.node[parent.__name__]['class'] = parent
                graph.node[klass.__name__]['class'] = klass
                if issubclass(parent, Referent):
                    TaxonomyCell.build_class_graph(modules, parent, graph)
def load_units(self):
    """Build the set of systemd units that Ellis will watch.

    This set is used to filter journald entries so that only entries
    produced by these units are processed, which should result in
    better performance. If any rule lacks a ``systemd_unit`` option the
    filter set is cleared entirely and every journald entry will be
    examined.
    """
    # Of course, we only consider valid Rules.
    for rule in self.rules:
        try:
            systemd_unit = self.config.get(rule.name, 'systemd_unit')
        except configparser.NoOptionError:
            warnings.warn("Rule '{0}' doesn't have a `systemd_unit` " "option set.\nThe filters will be checked " "against all journald entries, which will " "probably result in poor performance." . format(rule.name))
            # This rule forces examining every entry anyway, so the unit
            # filter is useless: drop it and stop looking at other rules.
            self.units.clear()
            break
        # Append ".service" if not present. Note that we don't check
        # whether the service actually exists.
        if not systemd_unit.endswith(".service"):
            systemd_unit += ".service"
        self.units.add(systemd_unit)
    return self
def _volumes(self):
    """Return a map {volume_id: {serial:, vendor_id:, product_id:, tty:}}.

    Scans the macOS IORegistry (via ``ioreg``) for USB disks that look
    like mbed volumes and associates each with the serial number and
    tty of the same composite device.
    """
    # to find all the possible mbed volumes, we look for registry entries
    # under all possible USB tree which have a "BSD Name" that starts with
    # "disk" (i.e. this is a USB disk), and have a IORegistryEntryName that
    # matches /\cmbed/
    # Once we've found a disk, we can search up for a parent with a valid
    # serial number, and then search down again to find a tty that's part
    # of the same composite device
    # ioreg -a -r -n <usb_controller_name> -l
    usb_controllers = [
        # Leaving these here for reference. The code nominally scanned each controller,
        # but a bug(?) caused it to only pay attention to the last one. That seems to
        # work fine, so the others are commented out.
        # "AppleUSBXHCI",
        # "AppleUSBUHCI",
        # "AppleUSBEHCI",
        # "AppleUSBOHCI",
        "IOUSBHostDevice",
    ]
    cmp_par = "-n"
    # For El Capitan we need to list all the instances of (-c) rather than
    # compare names (-n).
    # NOTE(review): mac_version is compared as a float, where numerically
    # 10.9 > 10.11 -- confirm how mac_version is encoded upstream.
    if self.mac_version >= 10.11:
        cmp_par = "-c"
    for usb_controller in usb_controllers:
        ioreg_usb = subprocess.Popen(["ioreg", "-a", "-r", cmp_par, usb_controller, "-l"], stdout=subprocess.PIPE,)
        usb_tree = _plist_from_popen(ioreg_usb)
        # NOTE(review): `r` is re-initialised on every controller
        # iteration, so only the last controller's results survive --
        # per the comment above, this is (apparently) relied upon.
        r = {}
        for name, obj in enumerate(usb_tree):
            # Keep only the keys we need before walking the subtree.
            pruned_obj = _prune(obj, ["USB Serial Number", "idVendor", "BSD Name", "IORegistryEntryName", "idProduct",],)
            r.update(_dfs_usb_info(pruned_obj, []))
    return r
def calculate(self, order, transaction):
    """Pay commission based on dollar value of shares.

    commission = |shares| * price * cost_per_dollar
    """
    dollar_volume = abs(transaction.amount) * transaction.price
    return dollar_volume * self.cost_per_dollar
def create_namedlayer(self, name):
    """Create a L{NamedLayer} in this SLD.

    @type name: string
    @param name: The name of the layer.
    @rtype: L{NamedLayer}
    @return: The named layer, attached to this SLD.
    """
    layer = self.get_or_create_element('sld', 'NamedLayer')
    layer.Name = name
    return layer
def clean_markup(self, markup, parser=None):
    """Apply ``Cleaner`` to markup and return the cleaned result in the
    same form it was given: bytes in -> bytes out, text in -> text out,
    document in -> cleaned document copy out."""
    result_type = type(markup)
    if isinstance(markup, six.string_types):
        doc = fromstring(markup, parser=parser)
    else:
        # Work on a copy so the caller's document is left untouched.
        doc = copy.deepcopy(markup)
    self(doc)
    if issubclass(result_type, six.binary_type):
        return tostring(doc, encoding='utf-8')
    if issubclass(result_type, six.text_type):
        return tostring(doc, encoding='unicode')
    return doc
def send_keyevents_long_press(self, keyevent: int) -> None:
    '''Simulates a long press of the given keyevent via adb shell input.'''
    adb_args = ('-s', self.device_sn, 'shell', 'input',
                'keyevent', '--longpress', str(keyevent))
    self._execute(*adb_args)
def move_to(self, target, position='first-child'):
    """Move this page relative to *target*, invalidating caches first.

    :param target: the page to move under/next to.
    :param position: tree position relative to target
        (default 'first-child').
    """
    # Invalidate both in case position matters,
    # otherwise only target is needed.
    self.invalidate()
    target.invalidate()
    super(Page, self).move_to(target, position=position)
def get_parents(obj, **kwargs):
    """Return the MRO of an object as a formatted string.

    Each entry is passed through extract_type() to strip the
    "<class ...>" wrapper. At most ``num_of_mro`` entries (default 5)
    are included.
    """
    limit = kwargs.get("num_of_mro", 5)
    names = [extract_type(str(t)) for t in getmro(obj.__class__)[:limit]]
    return "Hierarchy: {}".format(', '.join(names))
def _compute_head_process_tail(self, audio_file_mfcc):
    """Set the audio file head or tail,
    by either reading the explicit values
    from the Task configuration,
    or using SD (speech detection) to determine them within the
    configured min/max bounds.

    This function returns the lengths, in seconds,
    of the (head, process, tail); process may be None when it is left
    to be inferred downstream.

    :param audio_file_mfcc: MFCC representation of the task's audio
    :rtype: tuple (float, float, float)
    """
    # Explicit values, if any, from the task configuration.
    head_length = self.task.configuration["i_a_head"]
    process_length = self.task.configuration["i_a_process"]
    tail_length = self.task.configuration["i_a_tail"]
    head_max = self.task.configuration["i_a_head_max"]
    head_min = self.task.configuration["i_a_head_min"]
    tail_max = self.task.configuration["i_a_tail_max"]
    tail_min = self.task.configuration["i_a_tail_min"]
    if ((head_length is not None) or (process_length is not None) or (tail_length is not None)):
        # Any explicit value short-circuits detection entirely.
        self.log(u"Setting explicit head process tail")
    else:
        # No explicit values: run speech detection, but only for the
        # ends whose min/max bounds were configured.
        self.log(u"Detecting head tail...")
        sd = SD(audio_file_mfcc, self.task.text_file, rconf=self.rconf, logger=self.logger)
        head_length = TimeValue("0.000")
        process_length = None
        tail_length = TimeValue("0.000")
        if (head_min is not None) or (head_max is not None):
            self.log(u"Detecting HEAD...")
            head_length = sd.detect_head(head_min, head_max)
            self.log([u"Detected HEAD: %.3f", head_length])
            self.log(u"Detecting HEAD... done")
        if (tail_min is not None) or (tail_max is not None):
            self.log(u"Detecting TAIL...")
            tail_length = sd.detect_tail(tail_min, tail_max)
            self.log([u"Detected TAIL: %.3f", tail_length])
            self.log(u"Detecting TAIL... done")
        self.log(u"Detecting head tail... done")
    self.log([u"Head: %s", gf.safe_float(head_length, None)])
    self.log([u"Process: %s", gf.safe_float(process_length, None)])
    self.log([u"Tail: %s", gf.safe_float(tail_length, None)])
    return (head_length, process_length, tail_length)
def save_plain_image_as_file(self, filepath, format='png', quality=90):
    """Save the plain rendered image (no overlaid graphics) to *filepath*.

    Used for generating thumbnails. The quality option is only passed
    for JPEG output.
    """
    options = {'quality': str(quality)} if format == 'jpeg' else {}
    self.get_plain_image_as_pixbuf().save(filepath, format, options)
def bootstrap_alert(visitor, items):
    """Render bootstrap alert boxes.

    Format:
    [[alert(class=error)]]:
        message
    """
    pieces = []
    for item in items:
        css = item['kwargs'].get('class', '')
        if css:
            css = 'alert-%s' % css
        pieces.append('<div class="alert %s">' % css)
        # Optional dismiss button.
        if 'close' in item['kwargs']:
            pieces.append('<button class="close" data-dismiss="alert">×</button>')
        pieces.append(visitor.parse_text(item['body'], 'article'))
        pieces.append('</div>')
    return '\n'.join(pieces)
def __get_config(self, data_sources=None):
    """Build a dictionary with the Report configuration with the data sources
    and metrics to be included in each section of the report.

    :param data_sources: list of data sources to be included in the report
    :return: a dict with the data sources and metrics to be included in the report
    """
    if not data_sources:
        # For testing
        data_sources = ["gerrit", "git", "github_issues", "mls"]
    # In new_config a dict with all the metrics for all data sources is created
    new_config = {}
    for ds in data_sources:
        # Each data-source class exposes its metrics grouped by report section.
        ds_config = self.ds2class[ds].get_section_metrics()
        for section in ds_config:
            if section not in new_config:
                # Just create the section with the data for the ds.
                # NOTE(review): this aliases the dict returned by the class
                # rather than copying it, so the "+=" below mutates that
                # shared object — confirm this is intended.
                new_config[section] = ds_config[section]
            else:
                for metric_section in ds_config[section]:
                    if ds_config[section][metric_section] is not None:
                        if (metric_section not in new_config[section] or new_config[section][metric_section] is None):
                            # First data source providing this metric list wins the slot.
                            new_config[section][metric_section] = ds_config[section][metric_section]
                        else:
                            # Later data sources extend the existing metric list.
                            new_config[section][metric_section] += ds_config[section][metric_section]
    # Fields that are not linked to a data source
    new_config['overview']['activity_file_csv'] = "data_source_evolution.csv"
    new_config['overview']['efficiency_file_csv'] = "efficiency.csv"
    new_config['project_process']['time_to_close_title'] = "Days to close (median and average)"
    new_config['project_process']['time_to_close_review_title'] = "Days to close review (median and average)"
    # Expose each data source's activity metrics under a positional key
    # (ds1_metrics, ds2_metrics, ...) consumed by the report template.
    for i in range(0, len(data_sources)):
        ds = data_sources[i]
        ds_config = self.ds2class[ds].get_section_metrics()
        activity_metrics = ds_config['project_activity']['metrics']
        new_config['project_activity']['ds' + str(i + 1) + "_metrics"] = activity_metrics
    return new_config
def close(self):
    """Write final shp, shx, and dbf headers, then close the opened files
    (only when the writer owns them via a filepath target)."""
    # A stream counts as open unless it exposes a truthy ``closed`` flag.
    def still_open(stream):
        return stream and not (hasattr(stream, 'closed') and stream.closed)

    shp_open = still_open(self.shp)
    shx_open = still_open(self.shx)
    dbf_open = still_open(self.dbf)
    # Balance if already not balanced
    if self.shp and shp_open and self.dbf and dbf_open:
        if self.autoBalance:
            self.balance()
        if self.recNum != self.shpNum:
            raise ShapefileException(
                "When saving both the dbf and shp file, "
                "the number of records (%s) must correspond "
                "with the number of shapes (%s)" % (self.recNum, self.shpNum))
    # Fill in the blank headers
    if self.shp and shp_open:
        self.__shapefileHeader(self.shp, headerType='shp')
    if self.shx and shx_open:
        self.__shapefileHeader(self.shx, headerType='shx')
    # Update the dbf header with final length etc
    if self.dbf and dbf_open:
        self.__dbfHeader()
    # Close files, if target is a filepath
    if self.target:
        for stream in (self.shp, self.shx, self.dbf):
            if hasattr(stream, 'close'):
                try:
                    stream.close()
                except IOError:
                    pass
def UpdateFlows(self, client_id_flow_id_pairs, pending_termination=db.Database.unchanged):
    """Updates flow objects in the database.

    Pairs that reference an unknown flow are skipped silently; the
    remaining pairs are still processed.
    """
    for pair in client_id_flow_id_pairs:
        client_id, flow_id = pair
        try:
            self.UpdateFlow(client_id, flow_id, pending_termination=pending_termination)
        except db.UnknownFlowError:
            continue
def _make_fast_url_quote(charset="utf-8", errors="strict", safe="/:", unsafe=""):
    """Precompile the translation table for a URL encoding function.

    Unlike :func:`url_quote`, the generated function only takes the
    string to quote.

    :param charset: The charset to encode the result with.
    :param errors: How to handle encoding errors.
    :param safe: An optional sequence of safe characters to never encode.
    :param unsafe: An optional sequence of unsafe characters to always encode.
    """
    if isinstance(safe, text_type):
        safe = safe.encode(charset, errors)
    if isinstance(unsafe, text_type):
        unsafe = unsafe.encode(charset, errors)
    # Byte values that pass through unescaped: explicit safe set plus the
    # always-safe alphabet, minus anything explicitly marked unsafe.
    allowed = (frozenset(bytearray(safe)) | _always_safe) - frozenset(bytearray(unsafe))
    table = ["%%%02X" % byte if byte not in allowed else chr(byte) for byte in range(256)]
    if PY2:
        def quote(string):
            return "".join([table[c] for c in bytearray(string)])
    else:
        def quote(string):
            return "".join([table[c] for c in string])
    return quote
def find_n_nearest(self, lat, lng, n=5, radius=None):
    """Find the ``n`` nearest points, optionally within ``radius`` of a point.

    :param lat: latitude of center point (degrees).
    :param lng: longitude of center point (degrees).
    :param n: max number of records to return.
    :param radius: only search points within ``radius`` distance
        (same unit as ``great_circle`` returns — presumably miles given the
        69.172 miles-per-degree constant below; TODO confirm).
    :return: list of ``(distance, data)`` tuples, nearest first.
    """
    from math import radians  # local import: only ``cos`` is in module scope

    engine, t_point = self.engine, self.t_point
    if radius:
        # Use a simple bounding-box filter to minimize candidates.
        # Approximate length of one degree of latitude / longitude.
        dist_btwn_lat_deg = 69.172
        # BUG FIX: math.cos expects radians; the original passed degrees.
        dist_btwn_lon_deg = cos(radians(lat)) * 69.172
        lat_degr_rad = abs(radius * 1.05 / dist_btwn_lat_deg)
        lon_degr_rad = abs(radius * 1.05 / dist_btwn_lon_deg)
        lat_lower = lat - lat_degr_rad
        lat_upper = lat + lat_degr_rad
        lng_lower = lng - lon_degr_rad
        lng_upper = lng + lon_degr_rad
        # BUG FIX: the longitude bounds must filter the ``lng`` column; the
        # original compared ``lat`` against both longitude bounds and used
        # ``>=`` for the upper bound, making the box filter wrong.
        filters = [
            t_point.c.lat >= lat_lower,
            t_point.c.lat <= lat_upper,
            t_point.c.lng >= lng_lower,
            t_point.c.lng <= lng_upper,
        ]
    else:
        # No radius given: accept everything (sentinel larger than any
        # great-circle distance on Earth).
        radius = 999999.9
        filters = []
    s = select([t_point]).where(and_(*filters))
    heap = list()
    for row in engine.execute(s):
        dist = great_circle((lat, lng), (row.lat, row.lng))
        if dist <= radius:
            heap.append((dist, row.data))
    # Use heap sort to find the top-K nearest.
    n_nearest = heapq.nsmallest(n, heap, key=lambda x: x[0])
    return n_nearest
def get_output_files(self):
    """Return the list of files output by the job, querying the server if
    necessary.  If the job has output no files, an empty list is returned."""
    # Job info is loaded lazily; fetch it on first use.
    if self.info is None:
        self.get_info()
    if 'outputFiles' not in self.info:
        return []
    entries = self.info['outputFiles']
    return [GPFile(self.server_data, entry['link']['href']) for entry in entries]
def iter_stack_frames(frames=None):
    """Given an optional list of frames (defaults to the current stack),
    iterates over all frames that do not contain the ``__traceback_hide__``
    local variable."""
    if not frames:
        frames = inspect.stack()[1:]
    for record in reversed(frames):
        frame, lineno = record[0], record[2]
        frame_locals = getattr(frame, 'f_locals', {})
        if not _getitem_from_frame(frame_locals, '__traceback_hide__'):
            yield frame, lineno
def row(self, content='', align='left'):
    """Render one row of the menu: left margin, outer vertical borders, and
    the formatted content between them.

    Returns:
        str: A row of this menu component with the specified content.
    """
    margin = ' ' * self.margins.left
    vertical = self.border_style.outer_vertical
    body = self._format_content(content, align)
    return u"{lm}{vert}{cont}{vert}".format(lm=margin, vert=vertical, cont=body)
def _zero_many(self, i, j):
    """Set the value at each (i, j) to zero, preserving the existing
    sparsity structure.  Here (i, j) index major and minor axes
    respectively."""
    i, j, M, N = self._prepare_indices(i, j)
    count = len(i)
    offsets = np.empty(count, dtype=self.indices.dtype)
    status = _sparsetools.csr_sample_offsets(M, N, self.indptr, self.indices, count, i, j, offsets)
    if status == 1:
        # Duplicates present: canonicalize, then recompute the offsets
        # against the updated index arrays.
        self.sum_duplicates()
        _sparsetools.csr_sample_offsets(M, N, self.indptr, self.indices, count, i, j, offsets)
    # Only assign zeros to entries already present in the sparsity
    # structure (offset -1 marks a missing entry).
    found = offsets[offsets > -1]
    self.data[list(found)] = 0
def load_p2th_privkey_into_local_node(provider: RpcNode, prod: bool = True) -> None:
    '''Load PeerAssets P2TH privkey into the local node.'''
    assert isinstance(provider, RpcNode), {"error": "Import only works with local node."}
    error = {"error": "Loading P2TH privkey failed."}
    pa_params = param_query(provider.network)
    # Pick the production or test key material.
    if prod:
        wif, label, addr = pa_params.P2TH_wif, "PAPROD", pa_params.P2TH_addr
    else:
        wif, label, addr = pa_params.test_P2TH_wif, "PATEST", pa_params.test_P2TH_addr
    provider.importprivkey(wif, label)
    # now verify the node owns the address (ismine == True)
    if not provider.validateaddress(addr)['ismine']:
        raise P2THImportFailed(error)
def string_to_34_array(sou=None, pin=None, man=None, honors=None):
    """Convert the one-line string tiles format into the 34-tile array.

    Exists to increase the readability of our tests.
    """
    tiles_136 = TilesConverter.string_to_136_array(sou, pin, man, honors)
    return TilesConverter.to_34_array(tiles_136)
def cudaMemcpy_dtoh(dst, src, count):
    """Copy memory from device to host.

    Parameters
    ----------
    dst : ctypes pointer
        Host memory pointer.
    src : ctypes pointer
        Device memory pointer.
    count : int
        Number of bytes to copy.
    """
    nbytes = ctypes.c_size_t(count)
    status = _libcudart.cudaMemcpy(dst, src, nbytes, cudaMemcpyDeviceToHost)
    cudaCheckStatus(status)
def getPlayAreaRect(self):
    """Return the 4 corner positions of the Play Area (formerly Soft Bounds).

    Corners are in counter-clockwise order; standing center (0, 0, 0) is the
    center of the rectangular Play Area.  Two sides are parallel to the X
    axis and two to the Z axis, and every corner lies on the floor (Y == 0).
    """
    rect = HmdQuad_t()
    result = self.function_table.getPlayAreaRect(byref(rect))
    return result, rect
def Nu_horizontal_cylinder_Morgan(Pr, Gr):
    r'''Calculate the Nusselt number for natural convection around a
    horizontal isothermal cylinder according to the Morgan [1]_
    correlations, a product of a very large review of the literature.
    Sufficiently common as to be shown in [2]_.

    .. math::
        Nu_D = C Ra_D^n

    The coefficients (C, n) depend on the Rayleigh number Ra = Gr*Pr:
    below 1E-2: (0.675, 0.058); up to 1E2: (1.02, 0.148); up to 1E4:
    (0.850, 0.188); up to 1E7: (0.480, 0.250); above: (0.125, 0.333).

    Parameters
    ----------
    Pr : float
        Prandtl number [-]
    Gr : float
        Grashof number [-]

    Returns
    -------
    Nu : float
        Nusselt number, [-]

    Notes
    -----
    Discontinuous at the jumps in range.  Blindly runs outside the upper
    and lower limits without warning.

    Examples
    --------
    >>> Nu_horizontal_cylinder_Morgan(0.69, 2.63E9)
    151.3881997228419

    References
    ----------
    .. [1] Morgan, V. T., The Overall Convective Heat Transfer from Smooth
       Circular Cylinders, in Advances in Heat Transfer, eds. T. F. Irvin
       and J. P. Hartnett, V 11, 199-264, 1975.
    .. [2] Boetcher, Sandra K. S. "Natural Convection Heat Transfer From
       Vertical Cylinders." In Natural Convection from Circular Cylinders,
       23-42. Springer, 2014.
    '''
    Ra = Pr * Gr
    # (upper Ra bound, C, n); the open-ended top range is handled by the
    # for/else fallthrough.
    for limit, C, n in ((1E-2, 0.675, 0.058),
                        (1E2, 1.02, 0.148),
                        (1E4, 0.850, 0.188),
                        (1E7, 0.480, 0.250)):
        if Ra < limit:
            break
    else:
        C, n = 0.125, 0.333  # up to 1E12
    return C * Ra ** n
def get_voltage(self, channel, unit='V'):
    '''Read the calibrated voltage of a channel, scaled to the given unit
    ('raw', 'V' or 'mV').'''
    adc_kwargs = self._ch_map[channel]['ADCV']
    raw = self._get_adc_value(**adc_kwargs)
    cal = self._ch_cal[channel]['ADCV']
    # Apply the per-channel calibration: subtract offset, divide by gain.
    volts = (raw - cal['offset']) / cal['gain']
    if unit == 'raw':
        return raw
    if unit == 'V':
        return volts
    if unit == 'mV':
        return volts * 1000
    raise TypeError("Invalid unit type.")
def make_step_rcont(transition):
    """Return a ufunc-like step function that is right-continuous.

    The returned callable evaluates to 1 if ``x >= transition`` and 0
    otherwise, preserving the dtype of its input and returning a Python
    scalar for scalar input.

    :param transition: finite number at which the step occurs.
    :raises ValueError: if ``transition`` is not a finite number.
    """
    if not np.isfinite(transition):
        raise ValueError('"transition" argument must be finite number; got %r' % transition)

    def step_rcont(x):
        x = np.asarray(x)
        x1 = np.atleast_1d(x)
        r = (x1 >= transition).astype(x.dtype)
        if x.ndim == 0:
            # np.asscalar() was deprecated in NumPy 1.16 and removed in
            # 1.23; ndarray.item() is the supported replacement.
            return r.item()
        return r

    step_rcont.__doc__ = ('Right-continuous step function. Returns 1 if x >= ' '%g, 0 otherwise.') % (transition,)
    return step_rcont
def load_cert_chain(self, certfile, keyfile=None):
    """Load a private key and the corresponding certificate.

    The *certfile* string must be the path to a single file in PEM format
    containing the certificate as well as any number of CA certificates
    needed to establish the certificate's authenticity.  The *keyfile*
    string, if present, must point to a file containing the private key;
    otherwise the private key will be taken from *certfile* as well.
    """
    # Paths are only recorded here; they are consumed when the context is used.
    self._keyfile = keyfile
    self._certfile = certfile
def GetHashObject(self):
    """Returns a `Hash` object with appropriate fields filled-in."""
    result = rdf_crypto.Hash()
    result.num_bytes = self._bytes_read
    # One attribute per configured hash algorithm, holding its digest.
    for name, hasher in self._hashers.items():
        setattr(result, name, hasher.digest())
    return result
def iterate(self, params, repetition, iteration):
    """For each iteration try to infer the object selected by the
    'iteration' parameter.

    :param params: Specific parameters for this iteration. See 'experiments.cfg'
        for list of parameters
    :param repetition: Current repetition
    :param iteration: Use the iteration to select the object to infer
    :return: the inference statistics dict for this object (includes the
        object name under the "name" key) — note the original docstring
        wrongly claimed a boolean return
    """
    # list() is required on Python 3, where dict.items() returns a view
    # that does not support indexing.
    objname, sensations = list(self.objects.items())[iteration]
    # Select a random subset of sensations to infer.
    np.random.shuffle(sensations[0])
    sensations = [sensations[0][:self.numOfSensations]]
    self.network.sendReset()
    # Collect all statistics for every inference.
    # See L246aNetwork._updateInferenceStats
    stats = defaultdict(list)
    self.network.infer(sensations=sensations, stats=stats, objname=objname)
    stats.update({"name": objname})
    return stats
def get_gradebook_column_gradebook_assignment_session(self, proxy):
    """Gets the session for assigning gradebook column to gradebook mappings.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.grading.GradebookColumnGradebookAssignmentSession)
            - a ``GradebookColumnGradebookAssignmentSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_gradebook_column_gradebook_assignment()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_gradebook_column_gradebook_assignment()`` is
    ``true``.*
    """
    if self.supports_gradebook_column_gradebook_assignment():
        # pylint: disable=no-member
        return sessions.GradebookColumnGradebookAssignmentSession(proxy=proxy, runtime=self._runtime)
    raise errors.Unimplemented()
def values(service, id, ranges):
    """Fetch and return spreadsheet cell values with the Google Sheets API."""
    # Fixed rendering options plus the caller-supplied target and ranges.
    params = {
        'majorDimension': 'ROWS',
        'valueRenderOption': 'UNFORMATTED_VALUE',
        'dateTimeRenderOption': 'FORMATTED_STRING',
        'spreadsheetId': id,
        'ranges': ranges,
    }
    response = service.spreadsheets().values().batchGet(**params).execute()
    return response['valueRanges']
def _bnot8(ins):
    """Negates (BITWISE NOT) top of the stack (8 bits in AF)."""
    output = _8bit_oper(ins.quad[2])
    # Gives carry only if A = 0
    output.extend(['cpl', 'push af'])
    return output
def combine(line, left, intersect, right):
    """Zip borders between items in `line`.

    e.g. ('l', '1', 'c', '2', 'c', '3', 'r')

    :param iter line: List to iterate.
    :param left: Left border.
    :param intersect: Column separator.
    :param right: Right border.
    :return: Yields combined objects.
    """
    if left:
        yield left
    if intersect:
        try:
            last = len(line) - 1
        except TypeError:
            # ``line`` is a generator (no len); peek one item ahead so the
            # separator is only emitted between items.
            try:
                current = next(line)
            except StopIteration:
                pass  # Was empty all along.
            else:
                while True:
                    yield current
                    try:
                        current = next(line)
                    except StopIteration:
                        break
                    yield intersect
        else:
            for position, item in enumerate(line):
                yield item
                if position != last:
                    yield intersect
    else:
        for item in line:
            yield item
    if right:
        yield right
def activate():
    """Usage:
    containment activate"""
    # This is derived from the clone; run the standard build-and-run steps
    # in order.
    cli = CommandLineInterface()
    for step in (cli.ensure_config, cli.write_dockerfile, cli.build, cli.run):
        step()
def clean(self):
    """Make sure at least one translation has been filled in.  If a default
    language has been specified, make sure that it exists amongst the
    translations."""
    # First make sure the super's clean method is called upon.
    super(TranslationFormSet, self).clean()
    if settings.HIDE_LANGUAGE:
        return
    if not self.forms:
        raise forms.ValidationError(_('At least one translation should be provided.'))
    # If a default language has been provided, make sure a translation is
    # available.  Don't bother validating the formset unless each form is
    # valid on its own.  Reference:
    # http://docs.djangoproject.com/en/dev/topics/forms/formsets/#custom-formset-validation
    if settings.DEFAULT_LANGUAGE and not any(self.errors):
        for form in self.forms:
            language_code = form.cleaned_data.get('language_code', None)
            if language_code == settings.DEFAULT_LANGUAGE:
                # All is good, don't bother checking any further.
                return
        raise forms.ValidationError(_('No translation provided for default language \'%s\'.') % settings.DEFAULT_LANGUAGE)
def libvlc_audio_set_format_callbacks(mp, setup, cleanup):
    '''Set decoded audio format. This only works in combination with
    L{libvlc_audio_set_callbacks}().
    @param mp: the media player.
    @param setup: callback to select the audio format (cannot be NULL).
    @param cleanup: callback to release any allocated resources (or NULL).
    @version: LibVLC 2.0.0 or later.
    '''
    # Bind the C function lazily on first use.
    f = _Cfunctions.get('libvlc_audio_set_format_callbacks', None)
    if f is None:
        f = _Cfunction('libvlc_audio_set_format_callbacks',
                       ((1,), (1,), (1,),), None,
                       None, MediaPlayer, AudioSetupCb, AudioCleanupCb)
    return f(mp, setup, cleanup)
def halt(self, subid, params=None):
    '''/v1/server/halt
    POST - account
    Halt a virtual machine. This is a hard power off (basically, unplugging
    the machine). The data on the machine will not be modified, and you
    will still be billed for the machine. To completely delete a
    machine, see v1/server/destroy
    Link: https://www.vultr.com/api/#server_halt'''
    merged = update_params(params, {'SUBID': subid})
    return self.request('/v1/server/halt', merged, 'POST')
def _import_data(self):
    """Import data from a stat file.

    Parses the file header for location metadata (city, country, source,
    WMO station id, coordinates, time zone, elevation) and the body for
    climate zones, extreme/typical weeks, annual design days and monthly
    statistics, storing everything on private attributes.
    """
    # set default state to ironpython for very old ironpython (2.7.0)
    iron_python = True
    try:
        iron_python = True if platform.python_implementation() == 'IronPython' else False
    except ValueError as e:
        # older versions of IronPython fail to parse version correctly
        # failed to parse IronPython sys.version: '2.7.5 (IronPython 2.7.5 (2.7.5.0)
        # on .NET 4.0.30319.42000 (64-bit))'
        if 'IronPython' in str(e):
            iron_python = True
    if iron_python:
        # NOTE(review): presumably the IronPython codecs.open lacks the
        # encoding/errors keyword support used below — confirm.
        statwin = codecs.open(self.file_path, 'r')
    else:
        statwin = codecs.open(self.file_path, 'r', encoding='utf-8', errors='ignore')
    try:
        line = statwin.readline()
        # import header with location (first 10 lines of the file)
        self._header = [line] + [statwin.readline() for i in xrange(9)]
        self._body = statwin.read()
    except Exception as e:
        import traceback
        # Re-raise with the full traceback text embedded in the message.
        raise Exception('{}\n{}'.format(e, traceback.format_exc()))
    else:
        # import location data
        loc_name = self._header[2].strip().replace('Location -- ', '')
        if ' - ' in loc_name:
            city = ' '.join(loc_name.split(' - ')[:-1])
        else:
            # for US stat files it is full name separated by spaces
            city = ' '.join(loc_name.split()[:-2])
        country = loc_name.split(' ')[-1]
        source = self._header[6].strip().replace('Data Source -- ', '')
        station_id = self._header[8].strip().replace('WMO Station ', '')
        # Coordinates appear as e.g. "{N 41° 58'}"; IronPython needs the
        # degree sign replaced before its regex engine can match it.
        if iron_python:
            # IronPython
            coord_pattern = re.compile(r"{([NSEW])(\s*\d*)deg(\s*\d*)")
            matches = coord_pattern.findall(self._header[3].replace('\xb0', 'deg'))
        else:
            # CPython
            coord_pattern = re.compile(r"{([NSEW])(\s*\d*) (\s*\d*)")
            matches = coord_pattern.findall(self._header[3])
        # Degrees + minutes/60, signed by hemisphere.
        lat_sign = -1 if matches[0][0] == 'S' else 1
        latitude = lat_sign * (float(matches[0][1]) + (float(matches[0][2]) / 60))
        lon_sign = -1 if matches[1][0] == 'W' else 1
        longitude = lon_sign * (float(matches[1][1]) + (float(matches[1][2]) / 60))
        time_zone = self._regex_check(r"{GMT\s*(\S*)\s*Hours}", self._header[3])
        elev_pattern = re.compile(r"Elevation\s*[-]*\s*(\d*)m\s*(\S*)")
        elev_matches = elev_pattern.findall(self._header[4])
        if len(elev_matches) == 0:
            # some files have a space between the number and the 'm'
            elev_pattern = re.compile(r"Elevation\s*[-]*\s*(\d*)\s*m\s*(\S*)")
            elev_matches = elev_pattern.findall(self._header[4])
        elev_sign = -1 if elev_matches[0][-1].lower() == 'below' else 1
        elevation = elev_sign * float(elev_matches[0][0])
        self._location = Location()
        self._location.city = city
        self._location.country = country
        self._location.source = source
        self._location.station_id = station_id
        self._location.latitude = latitude
        self._location.longitude = longitude
        self._location.time_zone = time_zone
        self._location.elevation = elevation
        # pull out individual properties
        self._stand_press_at_elev = self._regex_check(r"Elevation\s*[-]*\s*(\d*)", self._header[5])
        self._ashrae_climate_zone = self._regex_check(r'Climate type\s"(\S*)"\s\(A', self._body)
        self._koppen_climate_zone = self._regex_check(r'Climate type\s"(\S*)"\s\(K', self._body)
        # pull out extreme and seasonal weeks.
        self._extreme_hot_week = self._regex_week_parse(r"Extreme Hot Week Period selected:" "\s*(\w{3})\s*(\d{1,2}):\s*(\w{3})\s*(\d{1,2}),")
        self._extreme_cold_week = self._regex_week_parse(r"Extreme Cold Week Period selected:" "\s*(\w{3})\s*(\d{1,2}):\s*(\w{3})\s*(\d{1,2}),")
        self._typical_weeks = self._regex_typical_week_parse()
        # pull out annual design days
        winter_vals = self._regex_parse(r"Heating\s(\d.*)")
        for key, val in zip(DesignDay.heating_keys, winter_vals):
            self._winter_des_day_dict[key] = val
        summer_vals = self._regex_parse(r"Cooling\s(\d.*)")
        for key, val in zip(DesignDay.cooling_keys, summer_vals):
            self._summer_des_day_dict[key] = val
        # Pull out relevant monthly information
        self._monthly_tau_beam = self._regex_parse(r"taub \(beam\)(.*)")
        self._monthly_tau_diffuse = self._regex_parse(r"taud \(diffuse\)(.*)")
        self._monthly_db_50 = self._regex_parse(r"Drybulb 5.0%(.*)")
        self._monthly_wb_50 = self._regex_parse(r"Coincident Wetbulb 5.0%(.*)")
        self._monthly_db_100 = self._regex_parse(r"Drybulb 10.%(.*)")
        self._monthly_wb_100 = self._regex_parse(r"Coincident Wetbulb 10.%(.*)")
        self._monthly_db_20 = self._regex_parse(r"Drybulb 2.0%(.*)")
        self._monthly_wb_20 = self._regex_parse(r"Coincident Wetbulb 2.0%(.*)")
        self._monthly_db_04 = self._regex_parse(r"Drybulb 0.4%(.*)")
        self._monthly_wb_04 = self._regex_parse(r"Coincident Wetbulb 0.4%(.*)")
        self._monthly_db_range_50 = self._regex_parse(r"Drybulb range - DB 5%(.*)")
        self._monthly_wb_range_50 = self._regex_parse(r"Wetbulb range - DB 5%(.*)")
        self._monthly_wind = self._regex_parse(r"Monthly Statistics for Wind Speed[\s\S]*Daily Avg(.*)")
        # One row of monthly percentages per compass direction.
        for direction in self._wind_dir_names:
            re_string = r"Monthly Wind Direction %[\s\S]*" + direction + r"\s(.*)"
            dirs = self._regex_parse(re_string)
            if dirs != []:
                self._monthly_wind_dirs.append(dirs)
        if self._monthly_wind_dirs == []:
            # no per-direction data found; default to eight all-zero rows
            self._monthly_wind_dirs = [[0] * 12 for i in xrange(8)]
    finally:
        statwin.close()
def connect(self, signal, slot, transform=None, condition=None):
    """Define a connection between this object's signal and another object's slot.

    signal: the signal this class will emit, to cause the slot method to be called
    slot: the slot method to call
    transform: an optional value override to pass into the slot method as the first variable
    condition: only call the slot if the value emitted matches the required value
        or calling the condition returns True
    """
    if signal not in self.signals:
        print("WARNING: {0} is trying to connect a slot to an undefined signal: {1}".format(self.__class__.__name__, str(signal)))
        return
    # Connections are stored lazily as {signal: {condition: {slot: transform}}}.
    if not hasattr(self, 'connections'):
        self.connections = {}
    self.connections.setdefault(signal, {}).setdefault(condition, {})[slot] = transform
def get_min_vertex_distance(coor, guess):
    """Can miss the minimum, but is enough for our purposes.

    Estimate the minimum pairwise distance between vertices by sorting on
    the x coordinate and running the naive O(k^2) search on overlapping
    x-slabs of width ``guess``.

    NOTE: Python 2 code (uses print statements).
    """
    # Sort by x.
    ix = nm.argsort(coor[:, 0])
    scoor = coor[ix]
    mvd = 1e16
    # Get mvd in chunks potentially smaller than guess.
    n_coor = coor.shape[0]
    print n_coor
    i0 = i1 = 0
    x0 = scoor[i0, 0]
    while 1:
        # Grow the slab [i0, i1] until it spans at least ``guess`` in x
        # or reaches the end of the array.
        while ((scoor[i1, 0] - x0) < guess) and (i1 < (n_coor - 1)):
            i1 += 1
        aim, aa1, aa2, aux = get_min_vertex_distance_naive(scoor[i0:i1 + 1])
        if aux < mvd:
            # Remember the indices (relative to scoor) of the best pair.
            im, a1, a2 = aim, aa1 + i0, aa2 + i0
        mvd = min(mvd, aux)
        # Advance to the midpoint of the current slab so successive slabs
        # overlap and boundary pairs are not missed.
        # NOTE(review): if this lands past n_coor - 1 the scoor lookup
        # below would raise IndexError — confirm inputs make that impossible.
        i0 = i1 = int(0.5 * (i1 + i0)) + 1
        x0 = scoor[i0, 0]
        if i1 == n_coor - 1:
            break
    print im, ix[a1], ix[a2], a1, a2, scoor[a1], scoor[a2]
    return mvd
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.