signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def _diff_text(left, right, verbose=False):
    """Return the explanation for the diff between text or bytes.

    Unless --verbose is used this will skip leading and trailing
    characters which are identical to keep the diff minimal.

    If the input are bytes they will be safely converted to text.
    """
    from difflib import ndiff
    explanation = []
    if isinstance(left, py.builtin.bytes):
        left = u(repr(left)[1:-1]).replace(r'\n', '\n')
    if isinstance(right, py.builtin.bytes):
        right = u(repr(right)[1:-1]).replace(r'\n', '\n')
    if not verbose:
        i = 0  # just in case left or right has zero length
        for i in range(min(len(left), len(right))):
            if left[i] != right[i]:
                break
        if i > 42:
            i -= 10  # Provide some context
            explanation = [u('Skipping %s identical leading '
                             'characters in diff, use -v to show') % i]
            left = left[i:]
            right = right[i:]
        if len(left) == len(right):
            # BUG FIX: the original compared left[-i] != right[-i]; at i == 0
            # that indexes the FIRST character (-0 == 0), so the scan for an
            # identical trailing run was off by one and typically bailed out
            # immediately.  Index from the end with -(i + 1) instead.
            for i in range(len(left)):
                if left[-i - 1] != right[-i - 1]:
                    break
            if i > 42:
                i -= 10  # Provide some context
                explanation += [u('Skipping %s identical trailing '
                                  'characters in diff, use -v to show') % i]
                left = left[:-i]
                right = right[:-i]
    keepends = True
    explanation += [line.strip('\n')
                    for line in ndiff(left.splitlines(keepends),
                                      right.splitlines(keepends))]
    return explanation
|
def _prepare_wsdl_objects(self):
    """Prepare the data structure used to create a shipment.

    Builds the RequestedShipment WSDL object and populates it with
    sensible defaults so it is ready for the processShipment request.
    """
    # Transit information is not requested by default.
    self.ReturnTransitAndCommit = False

    # Primary data structure for processShipment requests.
    self.RequestedShipment = self.client.factory.create('RequestedShipment')
    self.RequestedShipment.ShipTimestamp = datetime.datetime.now()

    # TotalWeight covers the whole shipment (which may contain several
    # packages): start empty, defaulting to pounds.
    shipment_weight = self.client.factory.create('Weight')
    shipment_weight.Value = 0.0
    shipment_weight.Units = 'LB'
    self.RequestedShipment.TotalWeight = shipment_weight

    # Shipper party with its address/contact sub-structures.
    shipper_party = self.client.factory.create('Party')
    shipper_party.Address = self.client.factory.create('Address')
    shipper_party.Contact = self.client.factory.create('Contact')
    self.RequestedShipment.Shipper = shipper_party

    # Recipient party with its contact/address sub-structures.
    recipient = self.client.factory.create('Party')
    recipient.Contact = self.client.factory.create('Contact')
    recipient.Address = self.client.factory.create('Address')
    self.RequestedShipment.Recipient = recipient

    # The sender is responsible for payment by default.
    self.RequestedShipment.ShippingChargesPayment = self.create_wsdl_object_of_type('Payment')
    self.RequestedShipment.ShippingChargesPayment.PaymentType = 'SENDER'

    # No packages yet -- the user must add them.
    self.RequestedShipment.PackageCount = 0
    self.RequestedShipment.RequestedPackageLineItems = []

    # Handy for reviewing what the resulting data structure looks like.
    self.logger.debug(self.RequestedShipment)
|
async def get_vm(self, vm_id):
    """Dummy get_vm func"""
    # Guard clause: unknown ids raise the dummy-IaaS not-found error.
    if vm_id in self._vms:
        return self._vms[vm_id]
    raise DummyIaasVmNotFound()
|
def h6_mahe(simulated_array, observed_array, k=1, replace_nan=None, replace_inf=None, remove_neg=False, remove_zero=False):
    """Compute the H6 mean absolute error.

    Parameters
    ----------
    simulated_array : one dimensional ndarray
        Simulated data from the time series.
    observed_array : one dimensional ndarray
        Observed data from the time series.
    k : int or float
        Exponent of the H6 error formula (default 1).
    replace_nan : float, optional
        Value to substitute for NaNs; if None, paired entries containing a
        NaN are dropped before the computation.
    replace_inf : float, optional
        Value to substitute for Infs; if None, paired entries containing an
        Inf are dropped before the computation.
    remove_neg : bool, optional
        If True, drop paired entries where either value is negative.
    remove_zero : bool, optional
        If True, drop paired entries where either value is zero.

    Returns
    -------
    float
        The mean absolute H6 error.

    References
    ----------
    - Tornquist, L., Vartia, P., Vartia, Y.O., 1985. How Should Relative
      Changes be Measured? The American Statistician 43-46.
    """
    # Clean the paired series before computing the metric.
    simulated_array, observed_array = treat_values(
        simulated_array, observed_array, replace_nan=replace_nan,
        replace_inf=replace_inf, remove_neg=remove_neg,
        remove_zero=remove_zero)
    ratio = simulated_array / observed_array
    # H6 relative error: (ratio - 1) over a generalized power mean of
    # {1, ratio} with exponent k.
    h = (ratio - 1) / np.power(0.5 * (1 + np.power(ratio, k)), 1 / k)
    return np.mean(np.abs(h))
|
def runQuery(statDict, query):
    """Filters for the given query."""
    tokens = [token.strip() for token in OPERATOR.split(query)]
    # Either a bare key, or "key <op> value".
    assert len(tokens) in (1, 3)
    queryKey = tokens[0]
    matches = {}
    for key, value in six.iteritems(statDict):
        if key == queryKey:
            if len(tokens) == 3:
                compare = OPERATORS[tokens[1]]
                try:
                    # Coerce the query literal to the stat's own type when
                    # the stat value is truthy.
                    queryValue = type(value)(tokens[2]) if value else tokens[2]
                except (TypeError, ValueError):
                    continue
                if not compare(value, queryValue):
                    continue
            matches[key] = value
        elif isinstance(value, (scales.StatContainer, dict)):
            # Recurse into nested containers; keep only non-empty results.
            nested = runQuery(value, query)
            if nested:
                matches[key] = nested
    return matches
|
def cross_product_compare(start, candidate1, candidate2):
    """Compare two relative changes by their cross-product.

    Intended as a way to determine which vector is more "inside"
    relative to ``start``.

    .. note::
        This is a helper for :func:`_simple_convex_hull`.

    Args:
        start (numpy.ndarray): The start vector (1D, 2 elements).
        candidate1 (numpy.ndarray): The first candidate vector (1D, 2 elements).
        candidate2 (numpy.ndarray): The second candidate vector (1D, 2 elements).

    Returns:
        float: The cross product of the two differences.
    """
    return cross_product(candidate1 - start, candidate2 - start)
|
def create(controller_id, name):
    """Turn class into a kervi controller"""
    def _decorator(cls):
        class _ControllerClass(cls, Controller):
            def __init__(self):
                Controller.__init__(self, controller_id, name)
                # Register every KerviValue attribute declared on the
                # wrapped class as an input or an output.
                for attr_name, attr_value in cls.__dict__.items():
                    if isinstance(attr_value, KerviValue):
                        target = self.inputs if attr_value.is_input else self.outputs
                        target._add_internal(attr_name, attr_value)
                cls.__init__(self)
        return _ControllerClass
    return _decorator
|
def similarity(self, other):
    """Compare two objects for similarity.

    @param self: first object to compare
    @param other: second object to compare
    @return: L{Similarity} result of comparison
    """
    sim = self.Similarity()
    total = 0.0
    cname = self.__class__.__name__
    # Weighted similarity over every registered attribute.
    for aname, weight in self.attributes.items():
        attr1 = getattr(self, aname, None)
        attr2 = getattr(other, aname, None)
        self.log(attr1, attr2, '%', cname=cname, aname=aname)
        if attr1 is None and attr2 is None:
            # Ignored entirely when absent on both sides.
            self.log(attr1, attr2, '%', cname=cname, aname=aname, result="attributes are both None")
            continue
        if not (isinstance(attr1, Comparable) and isinstance(attr2, Comparable)):
            # Non-comparable attributes contribute zero similarity but
            # still count toward the total weight.
            self.log(attr1, attr2, '%', cname=cname, aname=aname, result="attributes not Comparable")
            total += weight
            continue
        attr_sim = attr1 % attr2
        self.log(attr1, attr2, '%', cname=cname, aname=aname, result=attr_sim)
        sim += attr_sim * weight
        total += weight
    # Scale the similarity so the total weight is 1.0.
    if total:
        sim *= (1.0 / total)
    return sim
|
def get_varied_cfg_lbls(cfg_list, default_cfg=None, mainkey='_cfgname', checkname=False):
    r"""Build short labels for the varied portion of each config dict.

    Args:
        cfg_list (list): config dicts, each optionally carrying ``mainkey``.
        default_cfg (None): baseline config used to strip unvaried keys.
        mainkey (str): key holding each config's name (default '_cfgname').
        checkname (bool): if True removes names if they are all the same.

    Returns:
        list: cfglbl_list
    """
    import utool as ut
    try:
        name_list = [cfg[mainkey] for cfg in cfg_list]
    except KeyError:
        # Any config missing its name -> no names at all.
        name_list = [''] * len(cfg_list)
    # Keep only the keys that actually vary across the configs.
    varied_cfgs = partition_varied_cfg_list(cfg_list, default_cfg)[1]
    if checkname and ut.allsame(name_list):
        # All names identical -> drop them from the labels.
        name_list = [None] * len(name_list)
    lbls = [get_cfg_lbl(cfg, name) for cfg, name in zip(varied_cfgs, name_list)]
    if checkname:
        lbls = [lbl.lstrip(':') for lbl in lbls]
    return lbls
|
def _parseSpecType(self, classString):
    """Parse a stellar spectral type string (e.g. 'A5V', 'K7.5III').

    Sets self.classLetter, self.classNumber and self.lumType from the
    parsed pieces.  Returns True on (possibly partial) success and False
    when the string cannot be interpreted as a spectral type.

    It should probably use more advanced matching with regex.
    """
    try:
        classString = str(classString)
    except UnicodeEncodeError:
        # This is for the benefit of 1RXS1609 which currently has the
        # spectral type 'K7 \pm 1V'.
        # TODO add unicode support and handling for this case / amend the target
        return False
    # some initial cases
    if classString == '' or classString == 'nan':
        return False
    possNumbers = range(10)
    # Luminosity classes, in order of unique matches.
    possLType = ('III', 'II', 'Iab', 'Ia0', 'Ia', 'Ib', 'IV', 'V')
    # remove spaces, normalise separators, keep only the first alternative
    classString = classString.replace(' ', '')
    classString = classString.replace('-', '/')
    classString = classString.replace('\\', '/')
    classString = classString.split('/')[0]
    # TODO we do not consider slashed classes yet (intermediates)
    # check first 3 chars for spectral types
    stellarClass = classString[:3]
    if stellarClass in _possSpectralClasses:
        self.classLetter = stellarClass
    elif stellarClass[:2] in _possSpectralClasses:  # needed because A5V wouldnt match before
        self.classLetter = stellarClass[:2]
    elif stellarClass[0] in _possSpectralClasses:
        self.classLetter = stellarClass[0]
    else:
        return False  # assume a non standard class and fail
    # get number
    try:
        numIndex = len(self.classLetter)
        classNum = int(classString[numIndex])
        if classNum in possNumbers:
            self.classNumber = int(classNum)
            # don't consider decimals here, done at the type check
            typeString = classString[numIndex + 1:]
        else:
            return False  # invalid number received
    except IndexError:  # reached the end of the string
        return True
    except ValueError:  # i.e. its a letter - fail  # TODO multi letter checking
        # NOTE(review): slices from index 1 rather than numIndex, so a
        # multi-letter class letter would lose characters here -- confirm.
        typeString = classString[1:]
    if typeString == '':  # ie there is no more information as in 'A8'
        return True
    # Now check for a decimal and handle those cases
    if typeString[0] == '.':
        # handle decimal cases: collect each digit as a string, then convert
        # to float and add to the original number
        decimalNumbers = '.'
        for number in typeString[1:]:
            try:
                if int(number) in possNumbers:
                    decimalNumbers += number
                else:
                    # TODO replace with logging
                    print('Something went wrong in decimal checking')
                    return False  # somethings gone wrong
            except ValueError:
                break  # received a non-number (probably L class)
        # add decimal to classNum
        try:
            self.classNumber += float(decimalNumbers)
        except ValueError:  # probably trying to convert '.' to a float
            pass
        typeString = typeString[len(decimalNumbers):]
    # BUG FIX: original used 'len(typeString) is 0', an identity comparison
    # with an int literal (implementation-dependent, SyntaxWarning on modern
    # Python); use equality instead.
    if len(typeString) == 0:
        return True
    # Handle luminosity class
    for possL in possLType:  # match each possible case in turn (in order of uniqueness)
        # match from the front with length to minimise matching say IV in '<3 CIV'
        Lcase = typeString[:len(possL)]
        if possL == Lcase:
            self.lumType = possL
            return True
    # NOTE(review): self.classNumber may be unset on the ValueError path
    # above unless initialised elsewhere on the instance -- confirm.
    if not self.classNumber == '':
        return True
    else:  # if there is no number assume we have a name ie 'Catac. var.'
        self.classLetter = ''
        self.classNumber = ''
        self.lumType = ''
        return False
|
def process_data(self, file_info):
    """expects FileInfo"""
    if self._has_reached_stop_limit():
        self.log.info("Remaining bytes in quota (%d) has reached minimum to request stop (%d)", self._quota.remaining, self._stop_on_remaining)
        self.fire(events.TransmissionQuotaReached())
    elif self._fits_in_quota(file_info):
        # Accepted: hand the file back without firing FilteredFile.
        return file_info
    else:
        self.log.debug("File would exceed quota. Won't process '%s'", str(file_info))
    # Both rejection paths fall through to announce the filtered file.
    self.fire(events.FilteredFile(file_info))
|
def vector_drive(self, vx, vy, vw, tm_diff):
    """Call this from your :func:`PhysicsEngine.update_sim` function.

    Updates the robot's position on the simulation field using a velocity
    vector relative to the robot instead of speed/rotation speed.

    :param vx: Speed in x direction relative to robot in ft/s
    :param vy: Speed in y direction relative to robot in ft/s
    :param vw: Clockwise rotational speed in rad/s
    :param tm_diff: Amount of time speed was traveled
    """
    # A disabled robot does not move.
    if not self.robot_enabled:
        return
    angle = vw * tm_diff
    dx = vx * tm_diff
    dy = vy * tm_diff
    # Project the robot-relative translation onto the field.
    # NOTE(review): both components use +sin/+cos, which is not a standard
    # rotation matrix -- kept as-is to preserve behaviour.
    field_x = dx * math.sin(angle) + dy * math.cos(angle)
    field_y = dx * math.cos(angle) + dy * math.sin(angle)
    self.distance_drive(field_x, field_y, angle)
|
def _search_mapred_emu ( self , index , query ) :
"""Emulates a search request via MapReduce . Used in the case
where the transport supports MapReduce but has no native
search capability ."""
|
phases = [ ]
if not self . phaseless_mapred ( ) :
phases . append ( { 'language' : 'erlang' , 'module' : 'riak_kv_mapreduce' , 'function' : 'reduce_identity' , 'keep' : True } )
mr_result = self . mapred ( { 'module' : 'riak_search' , 'function' : 'mapred_search' , 'arg' : [ index , query ] } , phases )
result = { 'num_found' : len ( mr_result ) , 'max_score' : 0.0 , 'docs' : [ ] }
for bucket , key , data in mr_result :
if u'score' in data and data [ u'score' ] [ 0 ] > result [ 'max_score' ] :
result [ 'max_score' ] = data [ u'score' ] [ 0 ]
result [ 'docs' ] . append ( { u'id' : key } )
return result
|
def makedirs(name, mode=0o777, exist_ok=False):
    """Cheapo replacement for py3 makedirs with support for exist_ok.

    :param name: directory path to create (parents are created as needed)
    :param mode: permission bits passed to os.makedirs
    :param exist_ok: if True, an already-existing path is tolerated
    :raises FileExistsError: if *name* already exists and exist_ok is False

    BUG FIX: the original checked os.path.exists() first and then called
    os.makedirs(), a check-then-act race (TOCTOU) -- another process could
    create the directory in between and crash the call.  This version
    attempts the creation first and classifies the failure afterwards,
    while preserving the original's observable contract (an existing path
    with exist_ok=True is tolerated silently, even if it is a file).
    """
    try:
        os.makedirs(name, mode)
    except OSError:
        if os.path.exists(name):
            # Path already exists: only an error when exist_ok is False.
            if not exist_ok:
                raise FileExistsError("File exists: " + name)
        else:
            # Genuine failure (e.g. permissions); propagate unchanged.
            raise
|
def resolve_multiple_existing_paths(paths):
    """Resolve each path in *paths* to its object, batching where possible.

    :param paths: A list of paths to items that need to be resolved
    :type paths: list
    :returns: A dictionary mapping a specified path to either its resolved
              object or Nones, if the object could not be resolved
    :rtype: dict

    The return value looks like:
    {<path1>: <resolved_object1>, <path2>: <resolved_object2>, ...}

    If entity_id is a DX ID that can be described,
        <resolved_object*> ::= {"project": None, "folder": None,
                                "name": {"id": <id>, "describe": <describe_output>}}
    Else if a general resolution (or search) method resolves the entity,
        <resolved_object*> ::= {"project": <project>, "folder": None,
                                "name": {"project": <project>, "id": <resolved_id>}}
    Else if <project> is a job ID,
        <resolved_object*> ::= {"project": None, "folder": None,
                                "name": {"project": <project>, "id": <resolved_id>}}
    Else if the path refers to a folder instead of a data object,
        <resolved_object*> ::= {"project": <project>, "folder": <folder>, "name": None}
    Else if description or resolution fails,
        <resolved_object*> ::= {"project": None, "folder": None, "name": None}
    """
    done_objects = {}  # Return value
    to_resolve_in_batch_paths = []  # Paths to resolve
    to_resolve_in_batch_inputs = []  # Project, folderpath, and entity name
    for path in paths:
        project, folderpath, entity_name = resolve_path(path, expected='entity')
        try:
            must_resolve, project, folderpath, entity_name = _check_resolution_needed(
                path, project, folderpath, entity_name)
        except Exception:
            # BUG FIX: was a bare 'except:', which also swallowed
            # SystemExit/KeyboardInterrupt; narrowed to Exception while
            # keeping the best-effort behaviour (failure => no resolution).
            must_resolve = False
        if must_resolve:
            if is_glob_pattern(entity_name):
                # TODO: Must call findDataObjects because resolveDataObjects
                # does not support glob patterns
                try:
                    find_results = _resolve_global_entity(project, folderpath, entity_name)
                    done_objects[path] = _format_resolution_output(path, project, folderpath, entity_name, find_results)
                except ResolutionError:
                    # Catches any ResolutionError thrown by _resolve_global_entity
                    done_objects[path] = {"project": None, "folder": None, "name": None}
            else:
                # Prepare batch call for resolveDataObjects
                to_resolve_in_batch_paths.append(path)
                to_resolve_in_batch_inputs.append({"project": project, "folder": folderpath, "name": entity_name})
        else:
            # No need to resolve
            done_objects[path] = {"project": project, "folder": folderpath, "name": entity_name}
    # Call resolveDataObjects once for everything still needing resolution
    resolution_results = dxpy.resolve_data_objects(to_resolve_in_batch_inputs)
    for path, inputs, result in zip(to_resolve_in_batch_paths, to_resolve_in_batch_inputs, resolution_results):
        done_objects[path] = _format_resolution_output(path, inputs["project"], inputs["folder"], inputs["name"], result)
    return done_objects
|
def layout_spring(self, num_dims=2, spring_constant=None, iterations=50, initial_temp=0.1, initial_layout=None):
    '''Position vertices using the Fruchterman-Reingold (spring) algorithm.

    num_dims : int (default=2)
        Number of dimensions to embed vertices in.
    spring_constant : float (default=None)
        Optimal distance between nodes. If None the distance is set to
        1/sqrt(n) where n is the number of nodes. Increase this value
        to move nodes farther apart.
    iterations : int (default=50)
        Number of iterations of spring-force relaxation
    initial_temp : float (default=0.1)
        Largest step-size allowed in the dynamics, decays linearly.
        Must be positive, should probably be less than 1.
    initial_layout : array-like of shape (n, num_dims)
        If provided, serves as the initial placement of vertex coordinates.
    '''
    if initial_layout is None:
        # Random initial placement in the unit cube.
        X = np.random.random((self.num_vertices(), num_dims))
    else:
        X = np.array(initial_layout, dtype=float, copy=True)
        assert X.shape == (self.num_vertices(), num_dims)
    if spring_constant is None:
        # default to sqrt(area_of_viewport / num_vertices)
        spring_constant = X.shape[0] ** -0.5
    S = self.matrix('csr', 'csc', 'coo', copy=True)
    S.data[:] = 1. / S.data
    # Convert to similarity
    ii, jj = S.nonzero()
    # cache nonzero indices
    # simple cooling scheme, linearly steps down
    cooling_scheme = np.linspace(initial_temp, 0, iterations + 2)[:-2]
    # this is still O(V^2)
    # could use multilevel methods to speed this up significantly
    for t in cooling_scheme:
        # Pairwise difference vectors between every pair of positions.
        delta = X[:, None] - X[None]
        distance = _bounded_norm(delta, 1e-8)
        # repulsion from all vertices
        force = spring_constant ** 2 / distance
        # attraction from connected vertices
        force[ii, jj] -= S.data * distance[ii, jj] ** 2 / spring_constant
        displacement = np.einsum('ijk,ij->ik', delta, force)
        # update positions; step length is capped by the current temperature t
        length = _bounded_norm(displacement, 1e-2)
        X += displacement * t / length[:, None]
    return X
|
def send_rpc_message(self, method, request):
    '''Sends a Hadoop RPC request to the NameNode.

    The IpcConnectionContextProto, RpcPayloadHeaderProto and
    HadoopRpcRequestProto should already be serialized in the right way
    (delimited or not) before they are passed in this method.

    Wire format for a request:
    | Length of the next three parts (4 bytes / 32 bit int)            |
    | Delimited serialized RpcRequestHeaderProto (varint len + header) |
    | Delimited serialized RequestHeaderProto (varint len + header)    |
    | Delimited serialized Request (varint len + request)              |
    '''
    log.debug("############## SENDING ##############")
    # The three message parts, in wire order.
    rpc_request_header = self.create_rpc_request_header()  # 0. RpcRequestHeaderProto
    request_header = self.create_request_header(method)    # 1. RequestHeaderProto
    param = request.SerializeToString()                    # 2. Param
    if log.getEffectiveLevel() == logging.DEBUG:
        log_protobuf_message("Request", request)
    # Total length of the three delimited parts, each counted as its
    # varint length prefix plus its payload.
    parts = (rpc_request_header, request_header, param)
    rpc_message_length = sum(len(part) + encoder._VarintSize(len(part)) for part in parts)
    if log.getEffectiveLevel() == logging.DEBUG:
        log.debug("RPC message length: %s (%s)" % (rpc_message_length, format_bytes(struct.pack('!I', rpc_message_length))))
    self.write(struct.pack('!I', rpc_message_length))
    for part in parts:
        self.write_delimited(part)
|
def serialize(obj):
    """JSON serializer for objects not serializable by default json code"""
    if isinstance(obj, datetime.datetime):
        return obj.isoformat(sep='T')
    if isinstance(obj, uuid.UUID):
        return str(obj)
    try:
        # Most custom objects serialize fine as their attribute dict.
        return obj.__dict__
    except AttributeError:
        # No __dict__ (builtins, slotted objects): fall back to str().
        return str(obj)
    except Exception as e:
        # Extremely defensive last resort: never let serialization raise.
        strval, exceptval = 'unknown obj', 'unknown err'
        try:
            strval = str(obj)
            exceptval = repr(e)
        except Exception:
            pass
        return 'json fail {} {}'.format(exceptval, strval)
|
def update_pypsa_storage(pypsa, storages, storages_lines):
    """Adds storages and their lines to pypsa representation of the edisgo graph.

    This function effects the following attributes of the pypsa network:
    components('StorageUnit'), storage_units, storage_units_t (p_set, q_set),
    buses, lines

    Parameters
    ----------
    pypsa : :pypsa:`pypsa.Network<network>`
    storages : :obj:`list`
        List with storages of type :class:`~.grid.components.Storage` to add
        to pypsa network.
    storages_lines : :obj:`list`
        List with lines of type :class:`~.grid.components.Line` that connect
        storages to the grid.
    """
    # Column dicts collected per component, later turned into DataFrames
    # for the pypsa import calls below.
    bus = {'name': [], 'v_nom': [], 'x': [], 'y': []}
    line = {'name': [], 'bus0': [], 'bus1': [], 'type': [], 'x': [], 'r': [], 's_nom': [], 'length': []}
    storage = {'name': [], 'bus': [], 'p_nom': [], 'state_of_charge_initial': [], 'efficiency_store': [], 'efficiency_dispatch': [], 'standing_loss': []}
    # One new bus per storage, plus the storage unit itself.
    for s in storages:
        bus_name = '_'.join(['Bus', repr(s)])
        storage['name'].append(repr(s))
        storage['bus'].append(bus_name)
        # Divided by 1e3 -- presumably converting kW to MW; confirm units.
        storage['p_nom'].append(s.nominal_power / 1e3)
        storage['state_of_charge_initial'].append(s.soc_initial)
        storage['efficiency_store'].append(s.efficiency_in)
        storage['efficiency_dispatch'].append(s.efficiency_out)
        storage['standing_loss'].append(s.standing_loss)
        bus['name'].append(bus_name)
        bus['v_nom'].append(s.grid.voltage_nom)
        bus['x'].append(s.geom.x)
        bus['y'].append(s.geom.y)
    # Angular frequency for a 50 Hz grid, used for reactance below.
    omega = 2 * pi * 50
    for l in storages_lines:
        line['name'].append(repr(l))
        adj_nodes = l.grid.graph.nodes_from_line(l)
        # Station nodes need the side-specific repr so the line attaches to
        # the correct (LV or MV) bus of the station.
        if isinstance(l.grid, LVGrid):
            if isinstance(adj_nodes[0], LVStation):
                line['bus0'].append('_'.join(['Bus', adj_nodes[0].__repr__(side='lv')]))
            else:
                line['bus0'].append('_'.join(['Bus', repr(adj_nodes[0])]))
            if isinstance(adj_nodes[1], LVStation):
                line['bus1'].append('_'.join(['Bus', adj_nodes[1].__repr__(side='lv')]))
            else:
                line['bus1'].append('_'.join(['Bus', repr(adj_nodes[1])]))
        else:
            if isinstance(adj_nodes[0], LVStation):
                line['bus0'].append('_'.join(['Bus', adj_nodes[0].__repr__(side='mv')]))
            elif isinstance(adj_nodes[0], MVStation):
                # NOTE(review): MVStation uses side='lv' here -- confirm this
                # is the intended side for storage connection lines.
                line['bus0'].append('_'.join(['Bus', adj_nodes[0].__repr__(side='lv')]))
            else:
                line['bus0'].append('_'.join(['Bus', repr(adj_nodes[0])]))
            if isinstance(adj_nodes[1], LVStation):
                line['bus1'].append('_'.join(['Bus', adj_nodes[1].__repr__(side='mv')]))
            elif isinstance(adj_nodes[1], MVStation):
                line['bus1'].append('_'.join(['Bus', adj_nodes[1].__repr__(side='lv')]))
            else:
                line['bus1'].append('_'.join(['Bus', repr(adj_nodes[1])]))
        line['type'].append("")
        # Reactance from per-length inductance times omega; the /1e3 is
        # presumably a unit conversion (e.g. mH -> H) -- confirm.
        line['x'].append(l.type['L'] * omega / 1e3 * l.length)
        line['r'].append(l.type['R'] * l.length)
        # Three-phase apparent power rating: sqrt(3) * I * U (scaled by 1e3).
        line['s_nom'].append(sqrt(3) * l.type['I_max_th'] * l.type['U_n'] / 1e3)
        line['length'].append(l.length)
    # import new components to pypsa
    pypsa.import_components_from_dataframe(pd.DataFrame(bus).set_index('name'), 'Bus')
    pypsa.import_components_from_dataframe(pd.DataFrame(storage).set_index('name'), 'StorageUnit')
    pypsa.import_components_from_dataframe(pd.DataFrame(line).set_index('name'), 'Line')
    # import time series of storages and buses to pypsa
    timeseries_storage_p = pd.DataFrame()
    timeseries_storage_q = pd.DataFrame()
    for s in storages:
        timeseries_storage_p[repr(s)] = s.pypsa_timeseries('p').loc[pypsa.storage_units_t.p_set.index]
        timeseries_storage_q[repr(s)] = s.pypsa_timeseries('q').loc[pypsa.storage_units_t.q_set.index]
    import_series_from_dataframe(pypsa, timeseries_storage_p, 'StorageUnit', 'p_set')
    import_series_from_dataframe(pypsa, timeseries_storage_q, 'StorageUnit', 'q_set')
|
def check_status(status):
    """Check the status of an mkl function and raise a Python exception if
    there is an error."""
    if not status:
        # Zero/falsy status means success.
        return
    # Look up the library's error message and surface it as a RuntimeError.
    message = ctypes.c_char_p(lib.DftiErrorMessage(status)).value
    raise RuntimeError(message)
|
def get_screen_size(self, screen_no):
    """Returns the size of the given screen number"""
    # NOTE(review): 'extname' is not defined in this function -- presumably
    # a module-level global naming the X extension; confirm, otherwise this
    # raises NameError when called.
    return GetScreenSize(display=self.display, opcode=self.display.get_extension_major(extname), window=self.id, screen=screen_no, )
|
def pspawn_wrapper(self, sh, escape, cmd, args, env):
    """Wrapper function for handling piped spawns.

    This looks to the calling interface (in Action.py) like a "normal"
    spawn, but associates the call with the PSPAWN variable from
    the construction environment and with the streams to which we
    want the output logged.  This gets slid into the construction
    environment as the SPAWN variable so Action.py doesn't have to
    know or care whether it's spawning a piped command or not.
    """
    # Both stdout and stderr are routed to the same log stream.
    stream = self.logstream
    return self.pspawn(sh, escape, cmd, args, env, stream, stream)
|
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    """See :meth:`superclass method
    <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
    for spec of input and result values."""
    # Override site conditions: vs30 fixed to 700 (reference rock) for all
    # sites before delegating to the parent GMPE.
    sites.vs30 = 700 * np.ones(len(sites.vs30))
    mean, stddevs = super().get_mean_and_stddevs(sites, rup, dists, imt, stddev_types)
    C = CauzziFaccioli2008SWISS01.COEFFS
    tau_ss = 'tau'
    log_phi_ss = np.log(10)
    # Apply the Swiss-specific rock / single-station sigma adjustments.
    mean, stddevs = _apply_adjustments(C, self.COEFFS_FS_ROCK[imt], tau_ss, mean, stddevs, sites, rup, dists.rhypo, imt, stddev_types, log_phi_ss)
    # log(10**x) == x * ln(10): converts the stddevs from base-10 log units
    # to natural log units.
    return mean, np.log(10 ** np.array(stddevs))
|
def temp_unit(self):
    """Get unit of temp."""
    # Check Fahrenheit first, then Celsius, mirroring the status string.
    for unit in (CONST.UNIT_FAHRENHEIT, CONST.UNIT_CELSIUS):
        if unit in self._get_status(CONST.TEMP_STATUS_KEY):
            return unit
    return None
|
def create_action(self):
    """Create actions associated with Annotations.

    All actions are collected in a dict keyed by a short identifier and
    stored on ``self.action`` at the end.
    """
    actions = {}
    # --- annotation file management ---
    act = QAction('New Annotations', self)
    act.triggered.connect(self.new_annot)
    actions['new_annot'] = act
    act = QAction('Load Annotations', self)
    act.triggered.connect(self.load_annot)
    actions['load_annot'] = act
    act = QAction('Clear Annotations...', self)
    act.triggered.connect(self.clear_annot)
    actions['clear_annot'] = act
    # --- rater management ---
    act = QAction('New...', self)
    act.triggered.connect(self.new_rater)
    actions['new_rater'] = act
    act = QAction('Rename...', self)
    act.triggered.connect(self.rename_rater)
    actions['rename_rater'] = act
    act = QAction('Delete...', self)
    act.triggered.connect(self.delete_rater)
    actions['del_rater'] = act
    # --- bookmarks and event types ---
    act = QAction(QIcon(ICON['bookmark']), 'New Bookmark', self)
    act.setCheckable(True)
    actions['new_bookmark'] = act
    act = QAction(QIcon(ICON['new_eventtype']), 'New Event Type', self)
    act.triggered.connect(self.new_eventtype)
    actions['new_eventtype'] = act
    act = QAction(QIcon(ICON['del_eventtype']), 'Delete Event Type', self)
    act.triggered.connect(self.delete_eventtype)
    actions['del_eventtype'] = act
    act = QAction('Rename Event Type', self)
    act.triggered.connect(self.rename_eventtype)
    actions['rename_eventtype'] = act
    # --- marker-to-event conversion ---
    act = QAction('New Name', self)
    act.triggered.connect(self.markers_to_events)
    actions['m2e_newname'] = act
    act = QAction('Keep Marker Names', self)
    act.triggered.connect(partial(self.markers_to_events, True))
    actions['m2e_keepname'] = act
    act = QAction('Merge Events...', self)
    act.triggered.connect(self.parent.show_merge_dialog)
    actions['merge_events'] = act
    # --- event mode (checkable, mutually exclusive with bookmark mode) ---
    act = QAction(QIcon(ICON['event']), 'Event Mode', self)
    act.setCheckable(True)
    actions['new_event'] = act
    # checking one of the two checkable modes unchecks the other
    uncheck_new_event = lambda: actions['new_event'].setChecked(False)
    uncheck_new_bookmark = lambda: actions['new_bookmark'].setChecked(False)
    actions['new_event'].triggered.connect(uncheck_new_bookmark)
    actions['new_bookmark'].triggered.connect(uncheck_new_event)
    # --- sleep-stage scoring: one shortcut-bound action per stage ---
    act = {}
    for one_stage, one_shortcut in zip(STAGE_NAME, STAGE_SHORTCUT):
        act[one_stage] = QAction('Score as ' + one_stage, self.parent)
        act[one_stage].setShortcut(one_shortcut)
        stage_idx = STAGE_NAME.index(one_stage)
        act[one_stage].triggered.connect(partial(self.get_sleepstage, stage_idx))
        self.addAction(act[one_stage])
    actions['stages'] = act
    # --- signal-quality scoring: one shortcut-bound action per qualifier ---
    act = {}
    for one_qual, one_shortcut in zip(QUALIFIERS, QUALITY_SHORTCUT):
        act[one_qual] = QAction('Score as ' + one_qual, self.parent)
        act[one_qual].setShortcut(one_shortcut)
        qual_idx = QUALIFIERS.index(one_qual)
        act[one_qual].triggered.connect(partial(self.get_quality, qual_idx))
        self.addAction(act[one_qual])
    actions['quality'] = act
    # --- sleep-cycle markers ---
    act = QAction('Set Cycle Start', self)
    act.setShortcut('Ctrl+[')
    act.triggered.connect(self.get_cycle_mrkr)
    actions['cyc_start'] = act
    act = QAction('Set Cycle End', self)
    act.setShortcut('Ctrl+]')
    act.triggered.connect(partial(self.get_cycle_mrkr, end=True))
    actions['cyc_end'] = act
    act = QAction('Remove Cycle Marker', self)
    act.triggered.connect(self.remove_cycle_mrkr)
    actions['remove_cyc'] = act
    act = QAction('Clear Cycle Markers', self)
    act.triggered.connect(self.clear_cycle_mrkrs)
    actions['clear_cyc'] = act
    # --- staging import: one action per supported external format ---
    act = QAction('Domino', self)
    act.triggered.connect(partial(self.import_staging, 'domino'))
    actions['import_domino'] = act
    act = QAction('Alice', self)
    act.triggered.connect(partial(self.import_staging, 'alice'))
    actions['import_alice'] = act
    act = QAction('Sandman', self)
    act.triggered.connect(partial(self.import_staging, 'sandman'))
    actions['import_sandman'] = act
    act = QAction('RemLogic', self)
    act.triggered.connect(partial(self.import_staging, 'remlogic'))
    actions['import_remlogic'] = act
    act = QAction('Compumedics', self)
    act.triggered.connect(partial(self.import_staging, 'compumedics'))
    actions['import_compumedics'] = act
    act = QAction('PRANA', self)
    act.triggered.connect(partial(self.import_staging, 'prana'))
    actions['import_prana'] = act
    act = QAction('DeltaMed', self)
    act.triggered.connect(partial(self.import_staging, 'deltamed'))
    actions['import_deltamed'] = act
    act = QAction('FASST', self)
    act.triggered.connect(self.import_fasst)
    actions['import_fasst'] = act
    # --- staging import as signal quality (as_qual=True variants) ---
    act = QAction('Domino', self)
    act.triggered.connect(partial(self.import_staging, 'domino', as_qual=True))
    actions['import_domino_qual'] = act
    act = QAction('Alice', self)
    act.triggered.connect(partial(self.import_staging, 'alice', as_qual=True))
    actions['import_alice_qual'] = act
    act = QAction('Sandman', self)
    act.triggered.connect(partial(self.import_staging, 'sandman', as_qual=True))
    actions['import_sandman_qual'] = act
    act = QAction('RemLogic', self)
    act.triggered.connect(partial(self.import_staging, 'remlogic', as_qual=True))
    actions['import_remlogic_qual'] = act
    act = QAction('Compumedics', self)
    act.triggered.connect(partial(self.import_staging, 'compumedics', as_qual=True))
    actions['import_compumedics_qual'] = act
    act = QAction('PRANA', self)
    act.triggered.connect(partial(self.import_staging, 'prana', as_qual=True))
    actions['import_prana_qual'] = act
    act = QAction('DeltaMed', self)
    act.triggered.connect(partial(self.import_staging, 'deltamed', as_qual=True))
    actions['import_deltamed_qual'] = act
    # --- event import ---
    act = QAction('Wonambi', self)
    act.triggered.connect(partial(self.import_events, 'wonambi'))
    actions['import_events_wonambi'] = act
    act = QAction('RemLogic', self)
    act.triggered.connect(partial(self.import_events, 'remlogic'))
    actions['import_events_remlogic'] = act
    # --- export ---
    act = QAction('CSV', self)
    act.triggered.connect(partial(self.export, xformat='csv'))
    actions['export_to_csv'] = act
    act = QAction('RemLogic', self)
    act.triggered.connect(partial(self.export, xformat='remlogic'))
    actions['export_to_remlogic'] = act
    act = QAction('RemLogic FR', self)
    act.triggered.connect(partial(self.export, xformat='remlogic_fr'))
    actions['export_to_remlogic_fr'] = act
    act = QAction('Export Events...', self)
    act.triggered.connect(self.parent.show_export_events_dialog)
    actions['export_events'] = act
    # --- analysis dialogs (created disabled; presumably enabled once a
    #     dataset/annotation is loaded -- TODO confirm where) ---
    act = QAction('Spindle...', self)
    act.triggered.connect(self.parent.show_spindle_dialog)
    act.setShortcut('Ctrl+Shift+s')
    act.setEnabled(False)
    actions['spindle'] = act
    act = QAction('Slow Wave...', self)
    act.triggered.connect(self.parent.show_slow_wave_dialog)
    act.setShortcut('Ctrl+Shift+w')
    act.setEnabled(False)
    actions['slow_wave'] = act
    act = QAction('Analysis Console', self)
    act.triggered.connect(self.parent.show_analysis_dialog)
    act.setShortcut('Ctrl+Shift+a')
    act.setEnabled(False)
    actions['analyze'] = act
    act = QAction('Sleep Statistics', self)
    act.triggered.connect(self.export_sleeps_stats)
    actions['export_sleepstats'] = act
    self.action = actions
|
def pairwise(iterable):
    """From itertools cookbook: s -> (s0, s1), (s1, s2), (s2, s3), ..."""
    left, right = tee(iterable)
    # advance the second iterator by one so the two streams are offset
    next(right, None)
    return zip(left, right)
|
def register(self, schema):
    """Register input schema class.

    When registering a schema, all inner schemas are registered as well.

    :param Schema schema: schema to register.
    :return: old registered schema with the same uuid, or None.
    :rtype: type
    """
    result = None
    uuid = schema.uuid
    # remember any schema previously registered under the same uuid
    if uuid in self._schbyuuid:
        result = self._schbyuuid[uuid]
    if result != schema:
        self._schbyuuid[uuid] = schema
        name = schema.name
        schemas = self._schbyname.setdefault(name, set())
        schemas.add(schema)
        # recursively register inner schemas that are not yet known.
        # BUG FIX: this was a bare `register(innerschema)` call, which
        # raised NameError -- inner schemas must go through this method.
        for innername, innerschema in schema.getschemas().items():
            if innerschema.uuid not in self._schbyuuid:
                self.register(innerschema)
    return result
|
def get_checksum(file):
    """Get SHA256 hash from the contents of a given file.

    The file is read in fixed-size chunks so arbitrarily large files can
    be hashed without loading them entirely into memory.

    :param file: path of the file to hash
    :return: hex-encoded SHA256 digest of the file contents
    """
    digest = hashlib.sha256()
    with open(file, 'rb') as FH:
        # 64 KiB chunks keep memory bounded regardless of file size
        for chunk in iter(lambda: FH.read(65536), b''):
            digest.update(chunk)
    return digest.hexdigest()
|
def fetch(self):
    """Fetch & return a new `Tag` object representing the tag's current state

    :rtype: Tag
    :raises DOAPIError: if the API endpoint replies with an error (e.g., if
        the tag no longer exists)
    """
    api = self.doapi_manager
    # re-request the tag's URL and wrap the payload in a fresh Tag object
    payload = api.request(self.url)
    return api._tag(payload["tag"])
|
async def _async_get_data(self, resource, id=None):
    """Get the data from the resource.

    :param resource: name of the SPC resource (e.g. areas, zones)
    :param id: optional id of a single item to fetch
    :return: the parsed resource data (single item when ``id`` is given,
        otherwise a list), or ``False`` when the request returned nothing
    """
    if id:
        url = urljoin(self._api_url, "spc/{}/{}".format(resource, id))
    else:
        url = urljoin(self._api_url, "spc/{}".format(resource))
    data = await async_request(self._session.get, url)
    if not data:
        return False
    resource_data = data['data'][resource]
    if id and isinstance(resource_data, list):
        # for some reason the gateway returns an array with a single
        # element for areas but not for zones...
        return resource_data[0]
    if id:
        return resource_data
    # previously copied via a list comprehension; list() is equivalent
    return list(resource_data)
|
def remove(self, observableElement):
    """Remove an observable element.

    :param str observableElement: the name of the observable element
    """
    known = self._observables
    # silently ignore elements that were never registered
    if observableElement not in known:
        return
    known.remove(observableElement)
|
def set_accuracy(self, acc):
    """Set the accuracy order of the finite difference scheme.

    If this FinDiff object is a composition of derivatives rather than a
    raw partial derivative, the accuracy order is propagated to the child
    operators.
    """
    self.acc = acc
    child = self.child
    if child:
        # propagate recursively down the operator chain
        child.set_accuracy(acc)
|
def type(self):
    """Return the dtype name of the data array associated with this file."""
    hdu = self.open()
    dtype_name = hdu.data.dtype.name
    # release the file handle unless the data is kept in memory
    if not self.inmemory:
        self.close()
    del hdu
    return dtype_name
|
def delete_port_binding(self, port, host):
    """Enqueue port binding delete"""
    # ports with no recognized instance type are ignored
    if not self.get_instance_type(port):
        return
    binding_keys = self._get_binding_keys(port, host)
    for binding_key in binding_keys:
        resource = MechResource(binding_key, a_const.PORT_BINDING_RESOURCE, a_const.DELETE)
        self.provision_queue.put(resource)
|
def make_bag(bag_dir, bag_info=None, processes=1, checksum=None):
    """Convert a given directory into a bag. You can pass in arbitrary
    key/value pairs to put into the bag-info.txt metadata file as
    the bag_info dictionary.

    :param bag_dir: directory to convert in place into a bag
    :param bag_info: optional dict of metadata for bag-info.txt
    :param processes: number of processes used when hashing the payload
    :param checksum: list of checksum algorithm names (defaults to ['md5'])
    :return: a Bag object for the newly created bag
    :raises RuntimeError: if bag_dir does not exist
    :raises BagError: if files/folders cannot be moved or read
    """
    bag_dir = os.path.abspath(bag_dir)
    logger.info("creating bag for directory %s", bag_dir)
    # assume md5 checksum if not specified
    if not checksum:
        checksum = ['md5']
    if not os.path.isdir(bag_dir):
        logger.error("no such bag directory %s", bag_dir)
        raise RuntimeError("no such bag directory %s" % bag_dir)
    # all the bagging work happens with bag_dir as the current directory;
    # remember where we were so the finally block can restore it
    old_dir = os.path.abspath(os.path.curdir)
    os.chdir(bag_dir)
    try:
        # every file/dir must be movable into the payload directory
        unbaggable = _can_bag(os.curdir)
        if unbaggable:
            logger.error("no write permissions for the following directories and files: \n%s", unbaggable)
            raise BagError("Not all files/folders can be moved.")
        unreadable_dirs, unreadable_files = _can_read(os.curdir)
        if unreadable_dirs or unreadable_files:
            if unreadable_dirs:
                logger.error("The following directories do not have read permissions: \n%s", unreadable_dirs)
            if unreadable_files:
                logger.error("The following files do not have read permissions: \n%s", unreadable_files)
            raise BagError("Read permissions are required to calculate file fixities.")
        else:
            logger.info("creating data dir")
            # move the original contents into a temp dir first, then rename
            # the temp dir to 'data' (the bag payload directory)
            cwd = os.getcwd()
            temp_data = tempfile.mkdtemp(dir=cwd)
            for f in os.listdir('.'):
                # skip the temp dir itself so it is not moved into itself
                if os.path.abspath(f) == temp_data:
                    continue
                new_f = os.path.join(temp_data, f)
                logger.info("moving %s to %s", f, new_f)
                os.rename(f, new_f)
            logger.info("moving %s to %s", temp_data, 'data')
            os.rename(temp_data, 'data')
            # permissions for the payload directory should match those of the
            # original directory
            os.chmod('data', os.stat(cwd).st_mode)
            # one payload manifest per requested checksum algorithm
            for c in checksum:
                logger.info("writing manifest-%s.txt", c)
                Oxum = _make_manifest('manifest-%s.txt' % c, 'data', processes, c)
            logger.info("writing bagit.txt")
            txt = """BagIt-Version: 0.97\nTag-File-Character-Encoding: UTF-8\n"""
            with open("bagit.txt", "w") as bagit_file:
                bagit_file.write(txt)
            logger.info("writing bag-info.txt")
            if bag_info is None:
                bag_info = {}
            # allow 'Bagging-Date' and 'Bag-Software-Agent' to be overidden
            if 'Bagging-Date' not in bag_info:
                bag_info['Bagging-Date'] = date.strftime(date.today(), "%Y-%m-%d")
            if 'Bag-Software-Agent' not in bag_info:
                bag_info['Bag-Software-Agent'] = 'bagit.py <http://github.com/libraryofcongress/bagit-python>'
            # Oxum from the manifest pass above (same for every algorithm)
            bag_info['Payload-Oxum'] = Oxum
            _make_tag_file('bag-info.txt', bag_info)
            # tag manifests cover the tag files themselves
            for c in checksum:
                _make_tagmanifest_file(c, bag_dir)
    except Exception:
        logger.exception("An error occurred creating the bag")
        raise
    finally:
        os.chdir(old_dir)
    return Bag(bag_dir)
|
def discard(self, item):
    '''Remove *item*.'''
    position = self._index(item)
    # a negative index means the item is absent; removal is a no-op
    if position < 0:
        return
    del self._members[position]
|
def ckan_extension_template(name, target):
    """Create ckanext-(name) in target directory."""
    setup_dir = '{0}/ckanext-{1}theme'.format(target, name)
    ext_dir = setup_dir + '/ckanext/{0}theme'.format(name)
    template_dir = ext_dir + '/templates/'
    static_dir = ext_dir + '/static/datacats'
    makedirs(template_dir + '/home/snippets')
    makedirs(static_dir)
    # ship the bundled images along with the generated extension
    image_dir = dirname(__file__) + '/images'
    copyfile(image_dir + '/chart.png', static_dir + '/chart.png')
    copyfile(image_dir + '/datacats-footer.png', static_dir + '/datacats-footer.png')
    # (destination path, template text) pairs, written with ##name## expanded
    file_contents = [
        (setup_dir + '/setup.py', SETUP_PY),
        (setup_dir + '/.gitignore', DOT_GITIGNORE),
        (setup_dir + '/ckanext/__init__.py', NAMESPACE_PACKAGE),
        (ext_dir + '/__init__.py', ''),
        (ext_dir + '/plugins.py', PLUGINS_PY),
        (template_dir + '/home/snippets/promoted.html', PROMOTED_SNIPPET),
        (template_dir + '/footer.html', FOOTER_HTML),
    ]
    for path, template in file_contents:
        with open(path, 'w') as out:
            out.write(template.replace('##name##', name))
|
def AgregarCTG(self, nro_ctg=None, nro_carta_porte=None, porcentaje_secado_humedad=None, importe_secado=None, peso_neto_merma_secado=None, tarifa_secado=None, importe_zarandeo=None, peso_neto_merma_zarandeo=None, tarifa_zarandeo=None, peso_neto_confirmado_definitivo=None, **kwargs):
    "Add the data for one CTG to the primary certification (extra kwargs are ignored)"
    ctg = {
        'nroCTG': nro_ctg,
        'nroCartaDePorte': nro_carta_porte,
        'pesoNetoConfirmadoDefinitivo': peso_neto_confirmado_definitivo,
        'porcentajeSecadoHumedad': porcentaje_secado_humedad,
        'importeSecado': importe_secado,
        'pesoNetoMermaSecado': peso_neto_merma_secado,
        'tarifaSecado': tarifa_secado,
        'importeZarandeo': importe_zarandeo,
        'pesoNetoMermaZarandeo': peso_neto_merma_zarandeo,
        'tarifaZarandeo': tarifa_zarandeo,
    }
    self.certificacion['primaria']['ctg'].append(ctg)
    return True
|
def eth_getLogs(self, from_block=BLOCK_TAG_LATEST, to_block=BLOCK_TAG_LATEST, address=None, topics=None):
    """https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getlogs

    :param from_block: Block tag or number (optional)
    :type from_block: int or BLOCK_TAGS
    :param to_block: Block tag or number (optional)
    :type to_block: int or BLOCK_TAGS
    :param address: Contract address (optional)
    :type address: str
    :param topics: Topics (optional)
    :type topics: list
    :return: logs
    :rtype: list
    """
    # filter object per the JSON-RPC spec; validate_block normalizes
    # block tags/numbers (and raises on invalid values)
    obj = {'fromBlock': validate_block(from_block), 'toBlock': validate_block(to_block), 'address': address, 'topics': topics}
    # generator-based coroutine: delegate the RPC call and return its result
    result = yield from self.rpc_call('eth_getLogs', [obj])
    return result
|
def gen_code_api(self):
    """Prepare the Sphinx config for API docs and run sphinx-apidoc.

    Edits the Sphinx ``conf.py`` so the source directory is importable
    (uncommenting the ``os``/``sys`` imports and the ``sys.path.insert``
    line, pointing it at ``self.code_fdpath``) and switches the HTML theme
    to ``default``, then invokes ``sphinx-apidoc`` to (re)generate the
    reST files from the source code.
    """
    # edit config file
    conf_editor = Editor(self.conf_fpath)
    # insert code path for searching
    conf_editor.editline_with_regex(r'^# import os', 'import os')
    conf_editor.editline_with_regex(r'^# import sys', 'import sys')
    conf_editor.editline_with_regex(r'^# sys\.path\.insert', 'sys.path.insert(0, "{}")'.format(self.code_fdpath))
    # the replacement has no placeholder, so no .format() call is needed
    conf_editor.editline_with_regex(r"""html_theme = 'alabaster'""", "html_theme = 'default'")
    conf_editor.finish_writing()
    # sphinx-apidoc to generate rst from source code (force regenerate)
    subprocess.call(self._sphinx_apidoc_cmd)
|
def run_bafRegress(filenames, out_prefix, extract_filename, freq_filename, options):
    """Runs the bafRegress function.

    :param filenames: the set of all sample files.
    :param out_prefix: the output prefix.
    :param extract_filename: the name of the markers to extract.
    :param freq_filename: the name of the file containing the frequency.
    :param options: the other options.

    :type filenames: set
    :type out_prefix: str
    :type extract_filename: str
    :type freq_filename: str
    :type options: argparse.Namespace
    """
    # The command
    command = ["bafRegress.py", "estimate", "--freqfile", freq_filename, "--freqcol", "2,5", "--extract", extract_filename, "--colsample", options.colsample, "--colmarker", options.colmarker, "--colbaf", options.colbaf, "--colab1", options.colab1, "--colab2", options.colab2, ]
    command.extend(filenames)
    output = None
    try:
        # universal_newlines=True makes check_output return str instead of
        # bytes (Python 3), so it can be written to a text-mode file and
        # interpolated into error messages cleanly
        output = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=False, universal_newlines=True)
    except subprocess.CalledProcessError as exc:
        raise ProgramError("bafRegress.py: couldn't run " "bafRegress.py\n{}".format(exc.output))
    # Saving the output
    try:
        with open(out_prefix + ".bafRegress", "w") as o_file:
            o_file.write(output)
    except IOError:
        raise ProgramError("{}: cannot write file".format(out_prefix + ".bafRegress", ))
|
def check_error_code(self):
    """For CredSSP version of 3 or newer, the server can respond with an
    NtStatus error code with details of what error occurred. This method
    will check if the error code exists and throws an NTStatusException
    if it is not STATUS_SUCCESS."""
    # start off with STATUS_SUCCESS as a baseline
    status = NtStatusCodes.STATUS_SUCCESS
    error_code = self['errorCode']
    # .isValue is False when the optional ASN.1 field was absent
    if error_code.isValue:
        # ASN.1 Integer is stored as a signed integer, we need to
        # convert it to an unsigned integer
        # (presumably the ASN.1 Integer coerces to int for ctypes --
        # NOTE(review): confirm against the pyasn1 type in use)
        status = ctypes.c_uint32(error_code).value
    if status != NtStatusCodes.STATUS_SUCCESS:
        raise NTStatusException(status)
|
def get_sigma(database_file_name='', e_min=np.nan, e_max=np.nan, e_step=np.nan, t_kelvin=None):
    """retrieve the Energy and sigma axis for the given isotope

    Defaults use ``np.nan`` (``np.NaN`` was removed in NumPy 2.0; the
    value is identical).

    :param database_file_name: path/to/file with extension
    :type database_file_name: string
    :param e_min: left energy range in eV of new interpolated data
    :type e_min: float
    :param e_max: right energy range in eV of new interpolated data
    :type e_max: float
    :param e_step: energy step in eV for interpolation
    :type e_step: float
    :param t_kelvin: temperature in Kelvin
    :type t_kelvin: float
    :return: {'energy': np.array, 'sigma': np.array}
    :rtype: dict
    :raises ValueError: if t_kelvin is given (not yet supported)
    :raises IOError: if the file extension is not '.csv'
    """
    # Doppler broadening (t_kelvin given) is not implemented yet
    if t_kelvin is not None:
        raise ValueError("Doppler broadened cross-section is not yet supported in current version.")
    # only '.csv' files are supported
    file_extension = os.path.splitext(database_file_name)[1]
    if file_extension != '.csv':
        raise IOError("Cross-section File type must be '.csv'")
    _df = get_database_data(file_name=database_file_name)
    _dict = get_interpolated_data(df=_df, e_min=e_min, e_max=e_max, e_step=e_step)
    return {'energy_eV': _dict['x_axis'], 'sigma_b': _dict['y_axis']}
|
def del_all_host_comments(self, host):
    """Delete all host comments
    Format of the line that triggers function call::
        DEL_ALL_HOST_COMMENTS;<host_name>

    :param host: host to edit
    :type host: alignak.objects.host.Host
    :return: None
    """
    # iterate over a snapshot of the keys: del_comment mutates the dict
    for comment_uuid in list(host.comments.keys()):
        host.del_comment(comment_uuid)
    # broadcast the host's updated status once all comments are gone
    self.send_an_element(host.get_update_status_brok())
|
def mark_flags_as_required(flag_names, flag_values=_flagvalues.FLAGS):
    """Ensures that flags are not None during program execution.

    Recommended usage:

        if __name__ == '__main__':
            flags.mark_flags_as_required(['flag1', 'flag2', 'flag3'])
            app.run()

    Args:
        flag_names: Sequence[str], names of the flags.
        flag_values: flags.FlagValues, optional FlagValues instance where the
            flags are defined.

    Raises:
        AttributeError: If any flag name has not already been defined as a flag.
    """
    # delegate to the single-flag helper for each name in turn
    for name in flag_names:
        mark_flag_as_required(name, flag_values)
|
def get_vnetwork_dvs_output_vnetwork_dvs_interface_type(self, **kwargs):
    """Auto Generated Code"""
    config = ET.Element("config")
    # the element actually handed to the callback is get_vnetwork_dvs
    get_vnetwork_dvs = ET.Element("get_vnetwork_dvs")
    config = get_vnetwork_dvs
    output = ET.SubElement(get_vnetwork_dvs, "output")
    vnetwork_dvs = ET.SubElement(output, "vnetwork-dvs")
    interface_type = ET.SubElement(vnetwork_dvs, "interface-type")
    interface_type.text = kwargs.pop('interface_type')
    # an explicit callback kwarg overrides the instance default
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
def create_instances_from_matrices(x, y=None, name="data"):
    """Allows the generation of an Instances object from a 2-dimensional
    matrix for X and a 1-dimensional matrix for Y (optional).

    All data must be numerical. Attributes can be converted to nominal with
    the weka.filters.unsupervised.attribute.NumericToNominal filter.

    :param x: the input variables
    :type x: ndarray
    :param y: the output variable (optional)
    :type y: ndarray
    :param name: the name of the dataset
    :type name: str
    :return: the generated dataset
    :rtype: Instances
    """
    has_y = y is not None
    if has_y and len(x) != len(y):
        raise Exception("Dimensions of x and y differ: " + str(len(x)) + " != " + str(len(y)))
    # header: one numeric attribute per input column, plus the output column
    atts = [Attribute.create_numeric("x" + str(col + 1)) for col in range(len(x[0]))]
    if has_y:
        atts.append(Attribute.create_numeric("y"))
    result = Instances.create_instances(name, atts, len(x))
    # one instance per input row, with the output value appended when present
    for row in range(len(x)):
        values = list(x[row])
        if has_y:
            values.append(y[row])
        result.add_instance(Instance.create_instance(values))
    return result
|
def confirm_login_allowed(self, user):
    """Controls whether the given User may log in. This is a policy setting,
    independent of end-user authentication. This default behavior is to
    allow login by active users, and reject login by inactive users.

    If the given user cannot log in, this method should raise a
    ``forms.ValidationError``.

    If the given user may log in, this method should return None.
    """
    if user.is_active:
        return
    raise forms.ValidationError(self.error_messages['inactive'], code='inactive', )
|
def waypoint_request_list_send(self):
    '''wrapper for waypoint_request_list_send'''
    # MAVLink 1.0 renamed the waypoint messages to "mission" messages
    send = (self.mav.mission_request_list_send
            if self.mavlink10()
            else self.mav.waypoint_request_list_send)
    send(self.target_system, self.target_component)
|
def multi_h1(cls, a_dict: Dict[str, Any], bins=None, **kwargs) -> "HistogramCollection":
    """Create a collection from multiple datasets.

    A common binning is computed over the concatenation of all datasets so
    that every histogram in the collection shares the same bins.

    :param a_dict: mapping of histogram name -> 1D data
    :param bins: binning specification, forwarded to calculate_bins
    :param kwargs: 'title' and 'name' apply to the collection itself;
        everything else is forwarded to calculate_bins
    """
    from physt.binnings import calculate_bins
    # Pop collection-level metadata *before* computing the binning, so that
    # 'title'/'name' are not forwarded to calculate_bins (which does not
    # understand them). Previously they were popped afterwards and leaked
    # into the calculate_bins call.
    title = kwargs.pop("title", None)
    name = kwargs.pop("name", None)
    mega_values = np.concatenate(list(a_dict.values()))
    binning = calculate_bins(mega_values, bins, **kwargs)
    collection = HistogramCollection(binning=binning, title=title, name=name)
    for key, value in a_dict.items():
        collection.create(key, value)
    return collection
|
def get_next_first(intersection, intersections, to_end=True):
    """Get the next node along the current (first) edge.

    .. note::
        This is a helper used only by :func:`get_next`, which in turn is
        only used by :func:`basic_interior_combine`, which itself is only
        used by :func:`combine_intersections`. **Very** similar to
        :func:`get_next_second`, but this works with the first curve while
        the other function works with the second.

    Args:
        intersection (.Intersection): The current intersection.
        intersections (List[.Intersection]): List of all detected
            intersections, provided as a reference for potential points to
            arrive at.
        to_end (Optional[bool]): Indicates if the next node should just be
            the end of the first edge or :data:`None`.

    Returns:
        Optional[.Intersection]: The "next" point along a surface of
        intersection: the closest intersection further along the current
        (first) edge, or the end of the edge, or :data:`None` when
        ``to_end`` is :data:`False` and no intersection remains on the
        edge.
    """
    edge_index = intersection.index_first
    current_s = intersection.s
    # intersections strictly further along the same first edge
    candidates = [
        other for other in intersections
        if other.index_first == edge_index and other.s > current_s
    ]
    if candidates:
        # min() keeps the first of equal candidates, matching the original
        # strict-less-than scan
        return min(candidates, key=lambda other: other.s)
    if to_end:
        # no intersection remains on this edge; synthesize its endpoint
        return _intersection_helpers.Intersection(
            edge_index, 1.0, None, None,
            interior_curve=CLASSIFICATION_T.FIRST,
        )
    return None
|
def _TerminateProcess(self, process):
    """Terminate a process.

    Args:
        process (MultiProcessBaseProcess): process to terminate.
    """
    pid = process.pid
    logger.warning('Terminating process: (PID: {0:d}).'.format(pid))
    process.terminate()
    # give the process a chance to exit cleanly before escalating
    process.join(timeout=self._PROCESS_JOIN_TIMEOUT)
    if not process.is_alive():
        return
    # still running after the join timeout: kill it outright
    logger.warning('Killing process: (PID: {0:d}).'.format(pid))
    self._KillProcess(pid)
|
def plot_diagram(ax, x, y, label="S", title="syntenic", gradient=True):
    """Part of the diagrams that are re-used. (x, y) marks the center of the
    diagram. Label determines the modification to the "S" graph."""
    # half vertical gap between tracks and half-length of each track,
    # in axes coordinates
    trackgap = .06
    tracklen = .12
    xa, xb = x - tracklen, x + tracklen
    ya, yb = y + trackgap, y - trackgap
    # three HSP pairs as ((top_start, top_end), (bottom_start, bottom_end)),
    # expressed in an abstract 0..400 coordinate range (see mrange below)
    hsps = (((60, 150), (50, 130)), ((190, 225), (200, 240)), ((330, 280), (360, 310)))
    # draw the two horizontal track lines
    for yy in (ya, yb):
        ax.plot((xa, xb), (yy, yy), "-", color="gray", lw=2, zorder=1)
    ytip = .015
    mrange = 400
    # map abstract 0..400 coordinates onto the track's x extent
    m = lambda t: xa + t * 1. / mrange * tracklen * 2
    for i, ((a, b), (c, d)) in enumerate(hsps):
        fb = False
        if label == "FB" and i == 1:
            # FB variant: shrink the middle bottom gene
            c, d = 270, 280
            fb = True
        if label == "G" and i == 0:
            # G variant: replace the first bottom gene coordinates
            c, d = 120, 65
        a, b, c, d = [m(t) for t in (a, b, c, d)]
        # the middle pair is drawn green, the outer pairs red
        color = "g" if i == 1 else "r"
        GeneGlyph(ax, a, b, ya, 2 * ytip, fc=color, gradient=gradient, zorder=10)
        if i == 1 and label in ("F", "G", "FN"):
            # these variants omit the middle bottom gene entirely
            pass
        else:
            if fb:
                GeneGlyph(ax, c, d, yb, 2 * ytip, fc='w', tip=0, gradient=gradient, zorder=10)
            else:
                GeneGlyph(ax, c, d, yb, 2 * ytip, fc='r', gradient=gradient, zorder=10)
        # translucent quadrilateral connecting the top and bottom genes
        r = Polygon(((a, ya - ytip), (c, yb + ytip), (d, yb + ytip), (b, ya - ytip)), fc='r', alpha=.2)
        # some variants suppress the connector for particular pairs
        if i == 1 and label not in ("S", "FB"):
            pass
        elif i == 0 and label == "G":
            pass
        else:
            ax.add_patch(r)
    if label == "FN":
        # mark the gap with a run of Ns
        ax.text(x + .005, yb, "NNNNN", ha="center", size=7)
    title = "{0}: {1}".format(label, title)
    ax.text(x, ya + 5 * ytip, title, size=8, ha="center")
|
def app(self):
    """Internal method that will supply the app to use internally.

    Returns:
        flask.Flask: The app to use within the component.

    Raises:
        RuntimeError: This is raised if no app was provided to the
            component and the method is being called outside of an
            application context.
    """
    candidate = self._app or current_app
    if not in_app_context(candidate):
        raise RuntimeError("This component hasn't been initialized yet "
                           "and an app context doesn't exist.")
    # If current_app is the app, this must be used in order for their IDs
    # to be the same, as current_app will wrap the app in a proxy.
    unwrap = getattr(candidate, '_get_current_object', None)
    if unwrap is not None:
        candidate = unwrap()
    return candidate
|
def _proxy ( self ) :
"""Generate an instance context for the instance , the context is capable of
performing various actions . All instance actions are proxied to the context
: returns : FaxMediaContext for this FaxMediaInstance
: rtype : twilio . rest . fax . v1 . fax . fax _ media . FaxMediaContext"""
|
if self . _context is None :
self . _context = FaxMediaContext ( self . _version , fax_sid = self . _solution [ 'fax_sid' ] , sid = self . _solution [ 'sid' ] , )
return self . _context
|
def delete(self, key, sort_key):
    """Delete an element from the dictionary.

    Removes the serialized entry stored under ``<key>:<sort_key>``, drops
    ``sort_key`` from the primary key's sort-key set (when given), and
    cleans up any secondary-index references to the entry.

    :param key: primary key of the element
    :param sort_key: sort key of the element (may be None)
    :return: True
    """
    primary_key = key
    key = self.prefixed('{}:{}'.format(key, sort_key))
    self.logger.debug('Storage - delete {}'.format(key))
    if sort_key is not None:
        self.cache[self.prefixed(primary_key)].remove(sort_key)
    if self._secondary_indexes:
        # parse the stored document once (it was previously re-parsed per
        # index) and drop it from every index covering one of its fields
        obj = json.loads(self.cache[key])
        for index in self._secondary_indexes:
            if index in obj:
                self.cache['secondary_indexes'][index][obj[index]].remove(key)
    del self.cache[key]
    return True
|
def _parse_create_args ( client , args ) :
"""Converts CLI arguments to args for VSManager . create _ instance .
: param dict args : CLI arguments"""
|
data = { "hourly" : args . get ( 'billing' , 'hourly' ) == 'hourly' , "cpus" : args . get ( 'cpu' , None ) , "ipv6" : args . get ( 'ipv6' , None ) , "disks" : args . get ( 'disk' , None ) , "os_code" : args . get ( 'os' , None ) , "memory" : args . get ( 'memory' , None ) , "flavor" : args . get ( 'flavor' , None ) , "domain" : args . get ( 'domain' , None ) , "host_id" : args . get ( 'host_id' , None ) , "private" : args . get ( 'private' , None ) , "hostname" : args . get ( 'hostname' , None ) , "nic_speed" : args . get ( 'network' , None ) , "boot_mode" : args . get ( 'boot_mode' , None ) , "dedicated" : args . get ( 'dedicated' , None ) , "post_uri" : args . get ( 'postinstall' , None ) , "datacenter" : args . get ( 'datacenter' , None ) , "public_vlan" : args . get ( 'vlan_public' , None ) , "private_vlan" : args . get ( 'vlan_private' , None ) , "public_subnet" : args . get ( 'subnet_public' , None ) , "private_subnet" : args . get ( 'subnet_private' , None ) , }
# The primary disk is included in the flavor and the local _ disk flag is not needed
# Setting it to None prevents errors from the flag not matching the flavor
if not args . get ( 'san' ) and args . get ( 'flavor' ) :
data [ 'local_disk' ] = None
else :
data [ 'local_disk' ] = not args . get ( 'san' )
if args . get ( 'image' ) :
if args . get ( 'image' ) . isdigit ( ) :
image_mgr = SoftLayer . ImageManager ( client )
image_details = image_mgr . get_image ( args . get ( 'image' ) , mask = "id,globalIdentifier" )
data [ 'image_id' ] = image_details [ 'globalIdentifier' ]
else :
data [ 'image_id' ] = args [ 'image' ]
if args . get ( 'userdata' ) :
data [ 'userdata' ] = args [ 'userdata' ]
elif args . get ( 'userfile' ) :
with open ( args [ 'userfile' ] , 'r' ) as userfile :
data [ 'userdata' ] = userfile . read ( )
# Get the SSH keys
if args . get ( 'key' ) :
keys = [ ]
for key in args . get ( 'key' ) :
resolver = SoftLayer . SshKeyManager ( client ) . resolve_ids
key_id = helpers . resolve_id ( resolver , key , 'SshKey' )
keys . append ( key_id )
data [ 'ssh_keys' ] = keys
if args . get ( 'public_security_group' ) :
pub_groups = args . get ( 'public_security_group' )
data [ 'public_security_groups' ] = [ group for group in pub_groups ]
if args . get ( 'private_security_group' ) :
priv_groups = args . get ( 'private_security_group' )
data [ 'private_security_groups' ] = [ group for group in priv_groups ]
if args . get ( 'tag' , False ) :
data [ 'tags' ] = ',' . join ( args [ 'tag' ] )
if args . get ( 'host_id' ) :
data [ 'host_id' ] = args [ 'host_id' ]
if args . get ( 'placementgroup' ) :
resolver = SoftLayer . managers . PlacementManager ( client ) . resolve_ids
data [ 'placement_id' ] = helpers . resolve_id ( resolver , args . get ( 'placementgroup' ) , 'PlacementGroup' )
return data
|
def render(self, context, instance, placeholder):
    """Allows this plugin to use templates designed for a list of locations.

    Wraps the single configured location in a one-element list so that
    list-oriented templates can be reused unchanged.
    """
    context = super(LocationPlugin, self).render(context, instance, placeholder)
    context['location_list'] = [instance.location]
    return context
|
def to_index(self, index_type, index_name, includes=None):
    """Create an index field from this field."""
    # Propagate this field's name and data type into the new index field.
    return IndexField(
        self.name,
        self.data_type,
        index_type,
        index_name,
        includes,
    )
|
def _initialize ( self , funs_to_tally , length ) :
    """Create a group named ``chain#`` to store all data for this chain.

    :param funs_to_tally: mapping of name -> zero-argument function whose
        return value is tallied; values are assumed array-like with a
        non-object dtype (enforced by the assert below).
    :param length: forwarded to each Trace's ``_initialize``.
    """
    chain = self . nchains
    # One HDF5 group per chain, e.g. /chain0, /chain1, ...
    self . _chains [ chain ] = self . _h5file . create_group ( '/' , 'chain%d' % chain , 'chain #%d' % chain )
    for name , fun in six . iteritems ( funs_to_tally ) :
        arr = np . asarray ( fun ( ) )
        assert arr . dtype != np . dtype ( 'object' )
        # Extendable array: leading dimension 0 grows as samples are tallied.
        # NOTE(review): mixes new-style create_group with old-style
        # createEArray -- presumably intentional for the PyTables version in
        # use; confirm before modernizing.
        array = self . _h5file . createEArray ( self . _chains [ chain ] , name , tables . Atom . from_dtype ( arr . dtype ) , ( 0 , ) + arr . shape , filters = self . filter )
        self . _arrays [ chain , name ] = array
        self . _traces [ name ] = Trace ( name , getfunc = fun , db = self )
        self . _traces [ name ] . _initialize ( self . chains , length )
    self . trace_names . append ( list ( funs_to_tally . keys ( ) ) )
|
def process_parsed_coords(coords):
    """Convert parsed coordinates, which arrive as an array of strings,
    into a numpy float array of shape ``(len(coords), 3)``."""
    geometry = np.zeros(shape=(len(coords), 3), dtype=float)
    for row_idx, entry in enumerate(coords):
        # Only the first three components of each entry are used.
        geometry[row_idx, :] = [float(entry[axis]) for axis in range(3)]
    return geometry
|
def __fillablebox ( msg , title = "" , default = "" , mask = None , image = None , root = None ) :
    """Show a box in which a user can enter some text.

    You may optionally specify some default text, which will appear in the
    enterbox when it is displayed.

    Returns the text that the user entered, or None if he cancels the
    operation.

    :param msg: message text displayed above the entry widget
    :param title: window title
    :param default: initial text placed into the entry widget
    :param mask: if given, the character echoed instead of typed text
        (password-style entry)
    :param image: optional path to an image file displayed above the message
    :param root: optional existing Tk root; when given, a Toplevel is
        created under it instead of a fresh Tk instance
    """
    if sys . platform == 'darwin' :
        _bring_to_front ( )
    # Module-level state shared with the OK/Cancel event handlers.
    global boxRoot , __enterboxText , __enterboxDefaultText
    global cancelButton , entryWidget , okButton
    if title is None :
        title = ""
    if default is None :
        default = ""
    __enterboxDefaultText = default
    __enterboxText = __enterboxDefaultText
    if root :
        root . withdraw ( )
        boxRoot = Toplevel ( master = root )
        boxRoot . withdraw ( )
    else :
        boxRoot = Tk ( )
        boxRoot . withdraw ( )
    boxRoot . protocol ( 'WM_DELETE_WINDOW' , denyWindowManagerClose )
    boxRoot . title ( title )
    boxRoot . iconname ( 'Dialog' )
    boxRoot . geometry ( rootWindowPosition )
    boxRoot . bind ( "<Escape>" , __enterboxCancel )
    # - - - - - define the messageFrame - - - - -
    messageFrame = Frame ( master = boxRoot )
    messageFrame . pack ( side = TOP , fill = BOTH )
    # - - - - - define the imageFrame - - - - -
    tk_Image = None
    if image :
        imageFilename = os . path . normpath ( image )
        junk , ext = os . path . splitext ( imageFilename )
        if os . path . exists ( imageFilename ) :
            # These formats Tk can display natively; anything else needs PIL.
            if ext . lower ( ) in [ ".gif" , ".pgm" , ".ppm" ] :
                tk_Image = PhotoImage ( master = boxRoot , file = imageFilename )
            else :
                if PILisLoaded :
                    try :
                        pil_Image = PILImage . open ( imageFilename )
                        tk_Image = PILImageTk . PhotoImage ( pil_Image , master = boxRoot )
                    except :
                        msg += ImageErrorMsg % ( imageFilename , "\nThe Python Imaging Library (PIL) could not convert this file to a displayable image." "\n\nPIL reports:\n" + exception_format ( ) )
                else : # PIL is not loaded
                    msg += ImageErrorMsg % ( imageFilename , "\nI could not import the Python Imaging Library (PIL) to display the image.\n\n" "You may need to install PIL\n" "(http://www.pythonware.com/products/pil/)\n" "to display " + ext + " image files." )
        else :
            msg += ImageErrorMsg % ( imageFilename , "\nImage file not found." )
    if tk_Image :
        imageFrame = Frame ( master = boxRoot )
        imageFrame . pack ( side = TOP , fill = BOTH )
        label = Label ( imageFrame , image = tk_Image )
        label . image = tk_Image
        # keep a reference !
        label . pack ( side = TOP , expand = YES , fill = X , padx = '1m' , pady = '1m' )
    # - - - - - define the buttonsFrame - - - - -
    buttonsFrame = Frame ( master = boxRoot )
    buttonsFrame . pack ( side = TOP , fill = BOTH )
    # - - - - - define the entryFrame - - - - -
    entryFrame = Frame ( master = boxRoot )
    entryFrame . pack ( side = TOP , fill = BOTH )
    # - - - - - define the buttonsFrame - - - - -
    # NOTE(review): buttonsFrame is created twice; the buttons below are
    # packed into this second frame. Confirm whether the first creation
    # above is intentional (it leaves an empty frame in the layout).
    buttonsFrame = Frame ( master = boxRoot )
    buttonsFrame . pack ( side = TOP , fill = BOTH )
    # - - - - - the msg widget - - - - -
    messageWidget = Message ( messageFrame , width = "4.5i" , text = msg )
    messageWidget . configure ( font = ( PROPORTIONAL_FONT_FAMILY , PROPORTIONAL_FONT_SIZE ) )
    messageWidget . pack ( side = RIGHT , expand = 1 , fill = BOTH , padx = '3m' , pady = '3m' )
    # - - - - - entryWidget - - - - -
    entryWidget = Entry ( entryFrame , width = 40 )
    bindArrows ( entryWidget )
    entryWidget . configure ( font = ( PROPORTIONAL_FONT_FAMILY , TEXT_ENTRY_FONT_SIZE ) )
    if mask :
        # Password-style entry: echo the mask character instead of the input.
        entryWidget . configure ( show = mask )
    entryWidget . pack ( side = LEFT , padx = "3m" )
    entryWidget . bind ( "<Return>" , __enterboxGetText )
    entryWidget . bind ( "<Escape>" , __enterboxCancel )
    # put text into the entryWidget
    entryWidget . insert ( 0 , __enterboxDefaultText )
    # - - - - - ok button - - - - -
    okButton = Button ( buttonsFrame , takefocus = 1 , text = "OK" )
    bindArrows ( okButton )
    okButton . pack ( expand = 1 , side = LEFT , padx = '3m' , pady = '3m' , ipadx = '2m' , ipady = '1m' )
    # for the commandButton , bind activation events to the activation event handler
    commandButton = okButton
    handler = __enterboxGetText
    for selectionEvent in STANDARD_SELECTION_EVENTS :
        commandButton . bind ( "<%s>" % selectionEvent , handler )
    # - - - - - cancel button - - - - -
    cancelButton = Button ( buttonsFrame , takefocus = 1 , text = "Cancel" )
    bindArrows ( cancelButton )
    cancelButton . pack ( expand = 1 , side = RIGHT , padx = '3m' , pady = '3m' , ipadx = '2m' , ipady = '1m' )
    # for the commandButton , bind activation events to the activation event handler
    commandButton = cancelButton
    handler = __enterboxCancel
    for selectionEvent in STANDARD_SELECTION_EVENTS :
        commandButton . bind ( "<%s>" % selectionEvent , handler )
    # - - - - - time for action ! - - - - -
    entryWidget . focus_force ( )
    # put the focus on the entryWidget
    boxRoot . deiconify ( )
    # Blocks until a handler destroys the window (OK / Cancel / Escape).
    boxRoot . mainloop ( )
    # run it !
    # - - - - - after the run has completed - - - - -
    if root :
        root . deiconify ( )
    boxRoot . destroy ( )
    # button _ click didn ' t destroy boxRoot , so we do it now
    return __enterboxText
|
def generate ( env ) :
    """Add Builders and construction variables for cyglink to an Environment.

    Starts from the gnulink tool and overrides the pieces that differ on
    Cygwin: import-library naming, DLL prefixes/suffixes, and the versioned
    shared-library callbacks.
    """
    gnulink . generate ( env )
    env [ 'LINKFLAGS' ] = SCons . Util . CLVar ( '-Wl,-no-undefined' )
    env [ 'SHLINKCOM' ] = shlib_action
    env [ 'LDMODULECOM' ] = ldmod_action
    env . Append ( SHLIBEMITTER = [ shlib_emitter ] )
    env . Append ( LDMODULEEMITTER = [ ldmod_emitter ] )
    # Cygwin DLL naming convention: cygfoo.dll with import library libfoo.dll.a
    env [ 'SHLIBPREFIX' ] = 'cyg'
    env [ 'SHLIBSUFFIX' ] = '.dll'
    env [ 'IMPLIBPREFIX' ] = 'lib'
    env [ 'IMPLIBSUFFIX' ] = '.dll.a'
    # Variables used by versioned shared libraries
    env [ '_SHLIBVERSIONFLAGS' ] = '$SHLIBVERSIONFLAGS'
    env [ '_LDMODULEVERSIONFLAGS' ] = '$LDMODULEVERSIONFLAGS'
    # SHLIBVERSIONFLAGS and LDMODULEVERSIONFLAGS are same as in gnulink . . .
    # LINKCALLBACKS are NOT inherited from gnulink
    env [ 'LINKCALLBACKS' ] = { 'VersionedShLibSuffix' : _versioned_lib_suffix , 'VersionedLdModSuffix' : _versioned_lib_suffix , 'VersionedImpLibSuffix' : _versioned_lib_suffix , 'VersionedShLibName' : link . _versioned_shlib_name , 'VersionedLdModName' : link . _versioned_ldmod_name , 'VersionedShLibImpLibName' : lambda * args : _versioned_implib_name ( * args , libtype = 'ShLib' ) , 'VersionedLdModImpLibName' : lambda * args : _versioned_implib_name ( * args , libtype = 'LdMod' ) , 'VersionedShLibImpLibSymlinks' : lambda * args : _versioned_implib_symlinks ( * args , libtype = 'ShLib' ) , 'VersionedLdModImpLibSymlinks' : lambda * args : _versioned_implib_symlinks ( * args , libtype = 'LdMod' ) , }
    # these variables were set by gnulink but are not used in cyglink
    try :
        del env [ '_SHLIBSONAME' ]
    except KeyError :
        pass
    try :
        del env [ '_LDMODULESONAME' ]
    except KeyError :
        pass
|
def folderitem(self, obj, item, index):
    """Service triggered each time an item is iterated in folderitems.

    The use of this service prevents the extra-loops in child objects.

    :obj: the instance of the class to be foldered
    :item: dict containing the properties of the object to be used by
        the template
    :index: current index of the item
    """
    url = obj.absolute_url()
    item["replace"]["Title"] = get_link(url, value=obj.Title())
    item["Description"] = obj.Description()
    sample_types = obj.getSampleTypes()
    if not sample_types:
        item["SampleTypes"] = ""
    else:
        links = [get_link(st.absolute_url(), value=st.Title(), css_class="link")
                 for st in sample_types]
        item["replace"]["SampleTypes"] = ", ".join(links)
    parent = obj.aq_parent
    if parent.portal_type == "Client":
        # Client-owned: show the client's parent as owner, link to the client.
        item["Owner"] = parent.aq_parent.Title()
        item["replace"]["Owner"] = get_link(parent.absolute_url(), value=parent.getName())
    else:
        item["Owner"] = self.context.bika_setup.laboratory.Title()
    return item
|
def add_subplot(self, x, y, n, margin=0.05):
    """Creates a div child subplot in a matplotlib.figure.add_subplot style.

    Parameters
    ----------
    x : int
        The number of rows in the grid.
    y : int
        The number of columns in the grid.
    n : int
        The cell number in the grid, counted from 1 to x*y.
    margin : float
        Fraction of each cell left empty on every side.

    Example:
        >>> fig.add_subplot(3, 2, 5)
        # Create a div in the 5th cell of a 3-rows x 2-columns
        # grid (bottom-left corner).
    """
    cell_w = 1. / y
    cell_h = 1. / x
    # Cells are numbered row-major starting at 1.
    col = (n - 1) % y
    row = (n - 1) // y
    left = col * cell_w + cell_w * margin
    top = row * cell_h + cell_h * margin
    width = cell_w * (1 - 2. * margin)
    height = cell_h * (1 - 2. * margin)
    div = Div(
        position='absolute',
        width='{}%'.format(100. * width),
        height='{}%'.format(100. * height),
        left='{}%'.format(100. * left),
        top='{}%'.format(100. * top),
    )
    self.add_child(div)
    return div
|
def from_api_repr(cls, resource, zone):
    """Factory: construct a change set given its API representation.

    :type resource: dict
    :param resource: change set representation returned from the API.

    :type zone: :class:`google.cloud.dns.zone.ManagedZone`
    :param zone: A zone which holds zero or more change sets.

    :rtype: :class:`google.cloud.dns.changes.Changes`
    :returns: RRS parsed from ``resource``.
    """
    instance = cls(zone=zone)
    instance._set_properties(resource)
    return instance
|
def get_ssl_context(private_key, certificate):
    """Get an ssl context from private key and certificate paths.

    The return value is used when calling Flask,
    i.e. ``app.run(ssl_context=get_ssl_context(...))``.

    Returns ``None`` unless both paths are given and exist on disk.
    """
    have_cert = certificate and os.path.isfile(certificate)
    have_key = private_key and os.path.isfile(private_key)
    if not (have_cert and have_key):
        return None
    context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
    context.load_cert_chain(certificate, private_key)
    return context
|
def all_build_jobs(self):
    """Similar to build_jobs, but uses the default manager to return
    archived experiments as well."""
    from db.models.build_jobs import BuildJob
    manager = BuildJob.all  # default manager: includes archived rows
    return manager.filter(project=self)
|
def wrap_handler(cls, handler, protocol, **kwargs):
    """Wrap a request handler with the matching protocol handler.

    :param handler: the request handler to wrap
    :param protocol: one of ``Resource.Protocol.{http,websocket,amqp}``
    :returns: a wrapper callable that instantiates the resource and
        dispatches to the protocol-specific wrapper
    :raises Exception: if ``protocol`` is not one of the known values
    """
    def _wrapper(request, *args, **kwargs):
        instance = cls(request=request, **kwargs)
        if protocol == Resource.Protocol.http:
            return instance._wrap_http(handler, request=request, **kwargs)
        elif protocol == Resource.Protocol.websocket:
            return instance._wrap_ws(handler, request=request, **kwargs)
        elif protocol == Resource.Protocol.amqp:
            # BUG FIX: this branch previously referenced an undefined name
            # (view_type), raising NameError on every AMQP request. Dispatch
            # the wrapped handler the same way the other protocols do.
            return instance._wrap_amqp(handler, request=request, **kwargs)
        else:
            raise Exception('Communication protocol not specified')
    return _wrapper
|
def pf_post_lopf ( network , args , extra_functionality , add_foreign_lopf ) :
    """Function that prepares and runs non-linear load flow using PyPSA pf.

    If network has been extendable, a second lopf with reactances adapted to
    new s_nom is needed.
    If crossborder lines are DC-links, pf is only applied on german network.
    Crossborder flows are still considered due to the active behavior of links.
    To return a network containing the whole grid, the optimised solution of
    the foreign components can be added afterwards.

    Parameters
    ----------
    network : :class:`pypsa.Network`
        Overall container of PyPSA
    args : dict
        Contains calculation settings of appl.py
    extra_functionality : function or NoneType
        Adds constraint to optimization (e.g. when applied snapshot clustering)
    add_foreign_lopf : boolean
        Choose if foreign results of lopf should be added to the network when
        foreign lines are DC

    Returns
    -------
    pf_solve : pandas.DataFrame
        Contains information about convergency and calculation time of pf
    """
    # NOTE(review): network_pf aliases (does not copy) network, so the
    # mutations below also affect the caller's network object.
    network_pf = network
    # Update x of extended lines and transformers
    if network_pf . lines . s_nom_extendable . any ( ) or network_pf . transformers . s_nom_extendable . any ( ) :
        # Remember extendability flags and nominal ratings so they can be
        # restored after the second (fixed-capacity) lopf.
        storages_extendable = network_pf . storage_units . p_nom_extendable . copy ( )
        lines_extendable = network_pf . lines . s_nom_extendable . copy ( )
        links_extendable = network_pf . links . p_nom_extendable . copy ( )
        trafos_extendable = network_pf . transformers . s_nom_extendable . copy ( )
        storages_p_nom = network_pf . storage_units . p_nom . copy ( )
        lines_s_nom = network_pf . lines . s_nom . copy ( )
        links_p_nom = network_pf . links . p_nom . copy ( )
        trafos_s_nom = network_pf . transformers . s_nom . copy ( )
        # Scale series impedance down and shunt admittance up in proportion
        # to the capacity expansion chosen by the first lopf.
        network_pf . lines . x [ network . lines . s_nom_extendable ] = network_pf . lines . x * network . lines . s_nom / network_pf . lines . s_nom_opt
        network_pf . lines . r [ network . lines . s_nom_extendable ] = network_pf . lines . r * network . lines . s_nom / network_pf . lines . s_nom_opt
        network_pf . lines . b [ network . lines . s_nom_extendable ] = network_pf . lines . b * network . lines . s_nom_opt / network_pf . lines . s_nom
        network_pf . lines . g [ network . lines . s_nom_extendable ] = network_pf . lines . g * network . lines . s_nom_opt / network_pf . lines . s_nom
        network_pf . transformers . x [ network . transformers . s_nom_extendable ] = network_pf . transformers . x * network . transformers . s_nom / network_pf . transformers . s_nom_opt
        # Freeze capacities at the optimised values and re-run lopf.
        network_pf . lines . s_nom_extendable = False
        network_pf . transformers . s_nom_extendable = False
        network_pf . storage_units . p_nom_extendable = False
        network_pf . links . p_nom_extendable = False
        network_pf . lines . s_nom = network . lines . s_nom_opt
        network_pf . transformers . s_nom = network . transformers . s_nom_opt
        network_pf . storage_units . p_nom = network_pf . storage_units . p_nom_opt
        network_pf . links . p_nom = network_pf . links . p_nom_opt
        network_pf . lopf ( network . snapshots , solver_name = args [ 'solver' ] , solver_options = args [ 'solver_options' ] , extra_functionality = extra_functionality )
        # Restore the original extendability flags and nominal ratings.
        network_pf . storage_units . p_nom_extendable = storages_extendable
        network_pf . lines . s_nom_extendable = lines_extendable
        network_pf . links . p_nom_extendable = links_extendable
        network_pf . transformers . s_nom_extendable = trafos_extendable
        network_pf . storage_units . p_nom = storages_p_nom
        network_pf . lines . s_nom = lines_s_nom
        network_pf . links . p_nom = links_p_nom
        network_pf . transformers . s_nom = trafos_s_nom
    # For the PF , set the P to the optimised P
    network_pf . generators_t . p_set = network_pf . generators_t . p_set . reindex ( columns = network_pf . generators . index )
    network_pf . generators_t . p_set = network_pf . generators_t . p
    network_pf . storage_units_t . p_set = network_pf . storage_units_t . p_set . reindex ( columns = network_pf . storage_units . index )
    network_pf . storage_units_t . p_set = network_pf . storage_units_t . p
    network_pf . links_t . p_set = network_pf . links_t . p_set . reindex ( columns = network_pf . links . index )
    network_pf . links_t . p_set = network_pf . links_t . p0
    # if foreign lines are DC , execute pf only on sub _ network in Germany
    if ( args [ 'foreign_lines' ] [ 'carrier' ] == 'DC' ) or ( ( args [ 'scn_extension' ] != None ) and ( 'BE_NO_NEP 2035' in args [ 'scn_extension' ] ) ) :
        # The largest sub-network (by bus count) is assumed to be Germany.
        n_bus = pd . Series ( index = network . sub_networks . index )
        for i in range ( 0 , len ( network . sub_networks . index ) - 1 ) :
            n_bus [ i ] = len ( network . buses . index [ network . buses . sub_network . astype ( int ) == i ] )
        sub_network_DE = n_bus . index [ n_bus == n_bus . max ( ) ]
        foreign_bus = network . buses [ network . buses . sub_network != sub_network_DE . values [ 0 ] ]
        # Collect foreign components and their time series so they can be
        # removed for the pf and optionally re-added afterwards.
        foreign_comp = { 'Bus' : network . buses [ network . buses . sub_network != sub_network_DE . values [ 0 ] ] , 'Generator' : network . generators [ network . generators . bus . isin ( foreign_bus . index ) ] , 'Load' : network . loads [ network . loads . bus . isin ( foreign_bus . index ) ] , 'Transformer' : network . transformers [ network . transformers . bus0 . isin ( foreign_bus . index ) ] , 'StorageUnit' : network . storage_units [ network . storage_units . bus . isin ( foreign_bus . index ) ] }
        foreign_series = { 'Bus' : network . buses_t . copy ( ) , 'Generator' : network . generators_t . copy ( ) , 'Load' : network . loads_t . copy ( ) , 'Transformer' : network . transformers_t . copy ( ) , 'StorageUnit' : network . storage_units_t . copy ( ) }
        for comp in sorted ( foreign_series ) :
            attr = sorted ( foreign_series [ comp ] )
            for a in attr :
                if not foreign_series [ comp ] [ a ] . empty :
                    if a != 'p_max_pu' :
                        foreign_series [ comp ] [ a ] = foreign_series [ comp ] [ a ] [ foreign_comp [ comp ] . index ]
                    else :
                        # p_max_pu only exists for intermittent carriers.
                        foreign_series [ comp ] [ a ] = foreign_series [ comp ] [ a ] [ foreign_comp [ comp ] [ foreign_comp [ comp ] [ 'carrier' ] . isin ( [ 'solar' , 'wind_onshore' , 'wind_offshore' , 'run_of_river' ] ) ] . index ]
        # Drop the foreign buses and everything attached to them.
        network . buses = network . buses . drop ( foreign_bus . index )
        network . generators = network . generators [ network . generators . bus . isin ( network . buses . index ) ]
        network . loads = network . loads [ network . loads . bus . isin ( network . buses . index ) ]
        network . transformers = network . transformers [ network . transformers . bus0 . isin ( network . buses . index ) ]
        network . storage_units = network . storage_units [ network . storage_units . bus . isin ( network . buses . index ) ]
        # Set slack bus
        network = set_slack ( network )
    # execute non - linear pf
    pf_solution = network_pf . pf ( network . snapshots , use_seed = True )
    # if selected , copy lopf results of neighboring countries to network
    if ( ( args [ 'foreign_lines' ] [ 'carrier' ] == 'DC' ) or ( ( args [ 'scn_extension' ] != None ) and ( 'BE_NO_NEP 2035' in args [ 'scn_extension' ] ) ) ) and add_foreign_lopf :
        for comp in sorted ( foreign_series ) :
            network . import_components_from_dataframe ( foreign_comp [ comp ] , comp )
            for attr in sorted ( foreign_series [ comp ] ) :
                network . import_series_from_dataframe ( foreign_series [ comp ] [ attr ] , comp , attr )
    # Summarise convergence information per snapshot.
    pf_solve = pd . DataFrame ( index = pf_solution [ 'converged' ] . index )
    pf_solve [ 'converged' ] = pf_solution [ 'converged' ] . values
    pf_solve [ 'error' ] = pf_solution [ 'error' ] . values
    pf_solve [ 'n_iter' ] = pf_solution [ 'n_iter' ] . values
    if not pf_solve [ ~ pf_solve . converged ] . count ( ) . max ( ) == 0 :
        logger . warning ( "PF of %d snapshots not converged." , pf_solve [ ~ pf_solve . converged ] . count ( ) . max ( ) )
    return pf_solve
|
def load(filename):
    """Load a CameraIntrinsics object from a file.

    Parameters
    ----------
    filename : :obj:`str`
        The .intr file to load the object from.

    Returns
    -------
    :obj:`CameraIntrinsics`
        The CameraIntrinsics object loaded from the file.

    Raises
    ------
    ValueError
        If filename does not have the .intr extension.
    """
    _, file_ext = os.path.splitext(filename)
    if file_ext.lower() != INTR_EXTENSION:
        raise ValueError('Extension %s not supported for CameraIntrinsics. Must be stored with extension %s' % (file_ext, INTR_EXTENSION))
    # Use a context manager so the file handle is released even when
    # json.load raises (previously the handle leaked on parse errors).
    with open(filename, 'r') as f:
        ci = json.load(f)
    return OrthographicIntrinsics(frame=ci['_frame'], vol_height=ci['_vol_height'], vol_width=ci['_vol_width'], vol_depth=ci['_vol_depth'], plane_height=ci['_plane_height'], plane_width=ci['_plane_width'], depth_scale=ci['_depth_scale'])
|
def _do_else ( self , rule , p_selectors , p_parents , p_children , scope , media , c_lineno , c_property , c_codestr , code , name ) :
    """Implements @else.

    Consumes the result that a preceding @if recorded in the rule's OPTIONS
    and runs this block's children only when that result was falsy.
    """
    # NOTE(review): an @else without a preceding @if is reported but not
    # aborted -- the pop below then defaults to True, so the block is skipped.
    if '@if' not in rule [ OPTIONS ] :
        log . error ( "@else with no @if (%s" , rule [ INDEX ] [ rule [ LINENO ] ] )
    # Popping means a chain of @else blocks cannot re-trigger on the same @if.
    val = rule [ OPTIONS ] . pop ( '@if' , True )
    if not val :
        rule [ CODESTR ] = c_codestr
        self . manage_children ( rule , p_selectors , p_parents , p_children , scope , media )
|
def _ensure_values(data: Mapping[str, Any]) -> Tuple[Dict[str, Any], bool]:
    """Make sure we have appropriate keys and say if we should write."""
    sanitized: Dict[str, Any] = {}
    needs_write = False
    for keyname, typekind, default in REQUIRED_DATA:
        # Missing key: fall back to the default and flag a rewrite.
        if keyname not in data:
            LOG.debug(f"Defaulted config value {keyname} to {default}")
            sanitized[keyname] = default
            needs_write = True
            continue
        value = data[keyname]
        if isinstance(value, typekind):
            sanitized[keyname] = value
        else:
            # Wrong type: keep the default instead and flag a rewrite.
            LOG.warning(f"Config value {keyname} was {type(data[keyname])} not" f" {typekind}, defaulted to {default}")
            sanitized[keyname] = default
            needs_write = True
    return sanitized, needs_write
|
def load_wc(cls, stream):
    """Return a `Wilson` instance initialized by a WCxf file-like object."""
    parsed = wcxf.WC.load(stream)
    return cls.from_wc(parsed)
|
def plot(self, wavelengths=None, **kwargs):  # pragma: no cover
    """Plot the spectrum.

    .. note:: Uses ``matplotlib``.

    Parameters
    ----------
    wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None`
        Wavelength values for sampling.
        If not a Quantity, assumed to be in Angstrom.
        If `None`, `waveset` is used.

    title : str
        Plot title.

    xlog, ylog : bool
        Plot X and Y axes, respectively, in log scale.
        Default is linear scale.

    left, right : `None` or number
        Minimum and maximum wavelengths to plot.
        If `None`, uses the whole range. If a number is given,
        must be in Angstrom.

    bottom, top : `None` or number
        Minimum and maximum flux/throughput to plot.
        If `None`, uses the whole range. If a number is given,
        must be in internal unit.

    save_as : str
        Save the plot to an image file. The file type is
        automatically determined by given file extension.

    Raises
    ------
    synphot.exceptions.SynphotError
        Invalid inputs.
    """
    # Sample the spectrum, then delegate all plotting options unchanged.
    wave, flux = self._get_arrays(wavelengths)
    self._do_plot(wave, flux, **kwargs)
|
def set_dwelling_current(self, settings):
    """Sets the amperage of each motor for when it is dwelling.

    Values are initialized from the `robot_config.log_current` values,
    and can then be changed through this method by other parts of the API.
    For example, `Pipette` setting the dwelling-current of it's pipette,
    depending on what model pipette it is.

    settings
        Dict with axes as keys (e.g.: 'X', 'Y', 'Z', 'A', 'B', or 'C')
        and floating point number for current (generally between 0.1 and 2)
    """
    now = self._dwelling_current_settings['now']
    now.update(settings)
    # Any axis that is currently dwelling must pick up the new value
    # immediately, so re-apply the current for axes whose amperage changed.
    stale = {}
    for axis, amps in now.items():
        if self._active_axes.get(axis) is False and self.current[axis] != amps:
            stale[axis] = amps
    if stale:
        self._save_current(stale, axes_active=False)
|
def _parse_supl ( content ) :
"""Parse supplemental measurements data .
Parameters
content : str
Data to parse
Returns
: class : ` pandas . DataFrame ` containing the data"""
|
col_names = [ 'year' , 'month' , 'day' , 'hour' , 'minute' , 'hourly_low_pressure' , 'hourly_low_pressure_time' , 'hourly_high_wind' , 'hourly_high_wind_direction' , 'hourly_high_wind_time' ]
col_units = { 'hourly_low_pressure' : 'hPa' , 'hourly_low_pressure_time' : None , 'hourly_high_wind' : 'meters/second' , 'hourly_high_wind_direction' : 'degrees' , 'hourly_high_wind_time' : None , 'time' : None }
df = pd . read_table ( StringIO ( content ) , comment = '#' , na_values = 'MM' , names = col_names , sep = r'\s+' )
df [ 'time' ] = pd . to_datetime ( df [ [ 'year' , 'month' , 'day' , 'hour' , 'minute' ] ] , utc = True )
df [ 'hours' ] = np . floor ( df [ 'hourly_low_pressure_time' ] / 100 )
df [ 'minutes' ] = df [ 'hourly_low_pressure_time' ] - df [ 'hours' ] * 100
df [ 'hours' ] = df [ 'hours' ] . replace ( 99 , np . nan )
df [ 'minutes' ] = df [ 'minutes' ] . replace ( 99 , np . nan )
df [ 'hourly_low_pressure_time' ] = pd . to_datetime ( df [ [ 'year' , 'month' , 'day' , 'hours' , 'minutes' ] ] , utc = True )
df [ 'hours' ] = np . floor ( df [ 'hourly_high_wind_time' ] / 100 )
df [ 'minutes' ] = df [ 'hourly_high_wind_time' ] - df [ 'hours' ] * 100
df [ 'hours' ] = df [ 'hours' ] . replace ( 99 , np . nan )
df [ 'minutes' ] = df [ 'minutes' ] . replace ( 99 , np . nan )
df [ 'hourly_high_wind_time' ] = pd . to_datetime ( df [ [ 'year' , 'month' , 'day' , 'hours' , 'minutes' ] ] , utc = True )
df = df . drop ( columns = [ 'year' , 'month' , 'day' , 'hour' , 'minute' , 'hours' , 'minutes' ] )
df . units = col_units
return df
|
def read(self, n):
    """Read *n* bytes from the subprocess' output channel.

    Args:
        n (int): The number of bytes to read.

    Returns:
        bytes: *n* bytes of output.

    Raises:
        EOFError: If the process exited.
    """
    collected = b''
    remaining = n
    while remaining:
        try:
            chunk = self._process.stdout.read(remaining)
        except ValueError:
            # A closed pipe raises ValueError; treat it like an empty read.
            chunk = None
        if not chunk:
            # Empty read means the child is gone; reap it and signal EOF.
            self._process.poll()
            raise EOFError('Process ended')
        collected += chunk
        remaining -= len(chunk)
    return collected
|
def dewpoint_rh(temperature, rh):
    r"""Calculate the ambient dewpoint given air temperature and relative humidity.

    Parameters
    ----------
    temperature : `pint.Quantity`
        Air temperature
    rh : `pint.Quantity`
        Relative humidity expressed as a ratio in the range 0 < rh <= 1

    Returns
    -------
    `pint.Quantity`
        The dew point temperature

    See Also
    --------
    dewpoint, saturation_vapor_pressure
    """
    # Values well above 1 suggest the caller passed percent, not a ratio.
    if np.any(rh > 1.2):
        warnings.warn('Relative humidity >120%, ensure proper units.')
    vapor_pressure = rh * saturation_vapor_pressure(temperature)
    return dewpoint(vapor_pressure)
|
def describe_tile(self, index):
    """Get the registration information for the tile at the given index."""
    tiles = self.tile_manager.registered_tiles
    if index >= len(tiles):
        # Out-of-range indexes yield a placeholder "invalid" tile.
        tile = TileInfo.CreateInvalid()
    else:
        tile = tiles[index]
    return tile.registration_packet()
|
def unicast(self, socket_id, event, data):
    """Sends an event to a single socket.

    Returns `True` if that worked or `False` if not.
    """
    payload = self._server.serialize_event(event, data)
    target = self._server.sockets.get(socket_id)
    if target is None:
        return False
    target.socket.send(payload)
    return True
|
def _to_datalibrary_safe ( fname , gi , folder_name , sample_info , config ) :
    """Upload with retries for intermittent JSON failures.

    Re-attempts the upload when the Galaxy client raises a JSON decode or
    connection error, sleeping 5 seconds between attempts, and re-raises
    once more than ``max_tries`` failures have occurred.
    """
    num_tries = 0
    max_tries = 5
    while 1 :
        try :
            _to_datalibrary ( fname , gi , folder_name , sample_info , config )
            break
        except ( simplejson . scanner . JSONDecodeError , bioblend . galaxy . client . ConnectionError ) as e :
            num_tries += 1
            # Give up only after the retry budget is exhausted (6 attempts total).
            if num_tries > max_tries :
                raise
            print ( "Retrying upload, failed with:" , str ( e ) )
            time . sleep ( 5 )
|
def _put_subject ( self , subject_id , body ) :
"""Update a subject for the given subject id . The body is not
a list but a dictionary of a single resource ."""
|
assert isinstance ( body , ( dict ) ) , "PUT requires body to be dict."
# subject _ id could be a path such as ' / asset / 123 ' so quote
uri = self . _get_subject_uri ( guid = subject_id )
return self . service . _put ( uri , body )
|
def complete(self, msg):
    """Called to complete a transaction, usually when ProcessIO has
    shipped the IOCB off to some other thread or function."""
    if _debug:
        IOCB._debug("complete(%d) %r", self.ioID, msg)
    if not self.ioController:
        # No controller attached: just fill in the result data ourselves.
        self.ioState = COMPLETED
        self.ioResponse = msg
        self.trigger()
    else:
        # Let the owning controller finish the transaction.
        self.ioController.complete_io(self, msg)
|
def restore(self):
    """Restore the saved value for the attribute of the object."""
    if self.proxy_object is not None:
        # A proxied module-level object: rebind the original in its module.
        setattr(sys.modules[self.orig_object.__module__], self.orig_object.__name__, self.orig_object)
        return
    if self.getter:
        setattr(self.getter_class, self.attr_name, self.getter)
    elif self.is_local:
        setattr(self.orig_object, self.attr_name, self.orig_value)
    else:
        # Was not a local, safe to delete:
        delattr(self.orig_object, self.attr_name)
|
def copy(self, existing_inputs):
    """Create a copy of this policy graph that reuses *existing_inputs*.

    Rebuilds a ``PPOPolicyGraph`` with the same observation space,
    action space, and config as this instance, passing the supplied
    input placeholders instead of creating fresh ones (presumably so
    the copy can share the caller's placeholders -- confirm against
    ``PPOPolicyGraph.__init__``).

    :param existing_inputs: input placeholders to reuse in the copy.
    :return: a new ``PPOPolicyGraph`` instance.
    """
    return PPOPolicyGraph(self.observation_space, self.action_space, self.config, existing_inputs=existing_inputs)
|
def add_members(current):
    """Subscribe member(s) to a channel.

    Expects ``current.input`` to carry ``channel_key``, ``read_only``
    (true for a broadcast channel, false for a normal chat room) and a
    ``members`` list of user keys. Fills ``current.output`` with the
    keys that were already subscribed ('existing'), the keys subscribed
    now ('newly_added'), plus 'status' and 'code' fields.
    """
    channel_key = current.input['channel_key']
    read_only = current.input['read_only']
    newly_added = []
    existing = []
    for member_key in current.input['members']:
        _, created = Subscriber(current).objects.get_or_create(user_id=member_key, read_only=read_only, channel_id=channel_key)
        # Route the key to the right bucket depending on whether the
        # subscription already existed.
        bucket = newly_added if created else existing
        bucket.append(member_key)
    current.output = {'existing': existing, 'newly_added': newly_added, 'status': 'OK', 'code': 201}
|
def _print_fields(self, fields):
    """Print *fields*, padding names and types so the columns line up.

    Each field is a tuple whose second and third items are the name and
    the type; column widths are derived from the longest of each plus
    the instance's configured extra padding.
    """
    widest_name = max(fields, key=lambda f: len(f[1]))[1]
    widest_type = max(fields, key=lambda f: len(f[2]))[2]
    name_width = len(widest_name) + self._padding_after_name
    type_width = len(widest_type) + self._padding_after_type
    template = '%s%-{}s %-{}s %s'.format(name_width, type_width)
    for entry in fields:
        self._print(template % entry)
|
def minify(compiled):
    """Perform basic minifications.

    Strips comments and trailing whitespace and compresses each
    tabideal-sized indent down to a single space. Fails on non-tabideal
    indentation or a string containing a ``#``.
    """
    compiled = compiled.strip()
    if not compiled:
        return compiled
    minified = []
    for raw_line in compiled.splitlines():
        # Drop any comment and trailing whitespace; skip now-empty lines.
        code = raw_line.split("#", 1)[0].rstrip()
        if not code:
            continue
        indent = 0
        while code.startswith(" "):
            code = code[1:]
            indent += 1
        internal_assert(indent % tabideal == 0, "invalid indentation in", code)
        minified.append(" " * (indent // tabideal) + code)
    return "\n".join(minified) + "\n"
|
def Run ( self ) :
"Execute the action"
|
inputs = self . GetInput ( )
return SendInput ( len ( inputs ) , ctypes . byref ( inputs ) , ctypes . sizeof ( INPUT ) )
|
def RepositoryName(self):
    """FullName after removing the local path to the repository.

    If we have a real absolute path name here we can try to do something
    smart: detecting the root of the checkout and truncating
    /path/to/checkout from the name so that we get header guards that
    don't include things like "C:\\Documents and Settings\\..." or
    "/home/username/..." in them, and thus people on different computers
    who have checked the source out to different locations won't see
    bogus errors.
    """
    fullname = self.FullName()
    if os.path.exists(fullname):
        project_dir = os.path.dirname(fullname)
        # SVN <= 1.6 kept a .svn directory in every checkout directory,
        # so walk upward while .svn is present to find the checkout root.
        if os.path.exists(os.path.join(project_dir, ".svn")):
            root_dir = project_dir
            one_up_dir = os.path.dirname(root_dir)
            while os.path.exists(os.path.join(one_up_dir, ".svn")):
                root_dir = os.path.dirname(root_dir)
                one_up_dir = os.path.dirname(one_up_dir)
            prefix = os.path.commonprefix([root_dir, project_dir])
            # +1 strips the path separator after the common prefix.
            return fullname[len(prefix) + 1:]
        # Not SVN <= 1.6? Try to find a git, hg, or svn top level
        # directory by searching up from the current path; the loop
        # stops at the filesystem root, where dirname(root) == root.
        root_dir = os.path.dirname(fullname)
        while (root_dir != os.path.dirname(root_dir) and not os.path.exists(os.path.join(root_dir, ".git")) and not os.path.exists(os.path.join(root_dir, ".hg")) and not os.path.exists(os.path.join(root_dir, ".svn"))):
            root_dir = os.path.dirname(root_dir)
        if (os.path.exists(os.path.join(root_dir, ".git")) or os.path.exists(os.path.join(root_dir, ".hg")) or os.path.exists(os.path.join(root_dir, ".svn"))):
            prefix = os.path.commonprefix([root_dir, project_dir])
            return fullname[len(prefix) + 1:]
    # Don't know what to do; header guard warnings may be wrong...
    return fullname
|
def integerize(self):
    """Round both co-ordinates to whole numbers and store them as ints."""
    self.x, self.y = int(round(self.x)), int(round(self.y))
|
def __get_gaas_hmac_headers(self, method, url, date=None, body=None, secret=None, userId=None):
    """Note: this documentation was copied from the Java client for GP.
    Generate GaaS HMAC credentials used for the HTTP Authorization header.

    GaaS HMAC uses the HMAC-SHA1 algorithm, signing a message composed of:
        (HTTP method)[LF]   (in UPPERCASE)
        (Target URL)[LF]
        (RFC1123 date)[LF]
        (Request body)
    If the request body is empty it is simply omitted, so the message
    then ends with the new-line code [LF].

    The format for the HTTP Authorization header is:
        "Authorization: GaaS-HMAC (user ID):(HMAC above)"

    For example, with user "MyUser" and secret "MySecret", the method
    "POST", the URL "https://example.com/gaas", the date
    "Mon, 30 Jun 2014 00:00:00 GMT" and the body '{"param":"value"}',
    the text to be signed is:
        POST
        https://example.com/gaas
        Mon, 30 Jun 2014 00:00:00 GMT
        {"param":"value"}
    and the resulting headers are:
        Authorization: GaaS-HMAC MyUser:ONBJapYEveDZfsPFdqZHQ64GDgc=
        Date: Mon, 30 Jun 2014 00:00:00 GMT

    The HTTP Date header, matching the one included in the signed
    message, is required for GaaS HMAC authentication: the server checks
    the Date value and rejects the request if it is too old.
    """
    # Default the Date header to "now" in RFC 1123 format; the server
    # rejects requests whose Date header is too old.
    if not date:
        date = self.__get_RFC1123_date()
    # Message to sign: METHOD \n URL \n DATE \n [BODY]
    message = str(method) + '\n' + str(url) + '\n' + str(date) + '\n'
    if body:
        message += str(body)
    # Fall back to the service account's stored credentials when the
    # caller does not supply an explicit secret / user id.
    if not secret:
        secret = self.__serviceAccount.get_password()
    secret = bytes(secret.encode(self.__ENCODINGFORMAT))
    message = bytes(message.encode(self.__ENCODINGFORMAT))
    digest = hmac.new(secret, message, sha1).digest()
    urlSafeHmac = base64.b64encode(digest).strip()
    if not userId:
        userId = self.__serviceAccount.get_user_id()
    urlSafeHmac = urlSafeHmac.strip().decode(self.__ENCODINGFORMAT)
    # NOTE(review): the docstring (copied from the Java client) says the
    # scheme is "GaaS-HMAC", but this code emits "GP-HMAC" -- confirm
    # against the service before "fixing" either side.
    authorizationValue = 'GP-HMAC ' + userId + ':' + urlSafeHmac
    headers = {self.__AUTHORIZATION_HEADER_KEY: str(authorizationValue), self.__DATE_HEADER_KEY: str(date)}
    return headers
|
def crop(stream, x, y, width, height, **kwargs):
    """Crop the input video.

    Args:
        x: The horizontal position, in the input video, of the left edge
            of the output video.
        y: The vertical position, in the input video, of the top edge of
            the output video.
        width: The width of the output video. Must be greater than 0.
        height: The height of the output video. Must be greater than 0.

    Official documentation: `crop <https://ffmpeg.org/ffmpeg-filters.html#crop>`__
    """
    node = FilterNode(stream, crop.__name__, args=[width, height, x, y], kwargs=kwargs)
    return node.stream()
|
def get_prep_value(self, value):
    """Return the integer value to store in the database for *value*.

    Hex strings are converted to their unsigned integer value; when the
    backend uses signed storage the result is mapped into signed range.
    ``None`` and the empty string are stored as NULL.
    """
    if value is None:
        return None
    if value == "":
        return None
    prepared = value
    if isinstance(prepared, six.string_types):
        prepared = _hex_string_to_unsigned_integer(prepared)
    if _using_signed_storage():
        prepared = _unsigned_to_signed_integer(prepared)
    return prepared
|
def distance_similarity(a, b, p, T=CLOSE_DISTANCE_THRESHOLD):
    """Compute the distance similarity between a line segment and a point.

    Args:
        a ([float, float]): x and y coordinates, line start
        b ([float, float]): x and y coordinates, line end
        p ([float, float]): x and y coordinates, point to score
        T (float): distance at (and beyond) which similarity reaches 0
    Returns:
        float: between 0 and 1, where 1 is very similar and 0 is
            completely different
    """
    dist = distance_to_line(a, b, p)
    # Linear falloff: 1 at zero distance, 0 once |dist| reaches T.
    score = (-1 / float(T)) * abs(dist) + 1
    return score if score > 0 else 0
|
def query_all():
    '''Query all major-category tag records.

    Selects the TabTag rows whose uid ends with '00' (presumably the
    convention marking a top-level category -- confirm against the uid
    scheme), ordered by uid.
    '''
    recs = TabTag.select().where(TabTag.uid.endswith('00')).order_by(TabTag.uid)
    return recs
|
def message(subject, message, access_token, all_members=False, project_member_ids=None, base_url=OH_BASE_URL):
    """Send an email to individual users or in bulk.

    To learn more about Open Humans OAuth2 projects, go to:
    https://www.openhumans.org/direct-sharing/oauth2-features/

    :param subject: This field is the subject of the email.
    :param message: This field is the body of the email.
    :param access_token: This is a user-specific access token / master token.
    :param all_members: Boolean; send the email to all members of the project.
    :param project_member_ids: List of project_member_id values to target.
    :param base_url: It is this URL `https://www.openhumans.org`.
    :raises ValueError: if both all_members and project_member_ids are given.
    """
    url = urlparse.urljoin(base_url, '/api/direct-sharing/project/message/?{}'.format(urlparse.urlencode({'access_token': access_token})))
    if all_members and project_member_ids:
        raise ValueError("One (and only one) of the following must be specified: " "project_members_id or all_members is set to True.")
    if all_members or project_member_ids:
        response = requests.post(url, data={'all_members': all_members, 'project_member_ids': project_member_ids, 'subject': subject, 'message': message})
    else:
        # Neither recipient option given: the token itself identifies
        # the member, so post only subject and message.
        response = requests.post(url, data={'subject': subject, 'message': message})
    handle_error(response, 200)
    return response
|
def _at(self, t):
    """Compute the GCRS position and velocity of this Topos at time `t`."""
    # Geocentric position/velocity of the site in the Earth-fixed frame,
    # computed from latitude/longitude/elevation and Greenwich apparent
    # sidereal time.
    pos, vel = terra(self.latitude.radians, self.longitude.radians, self.elevation.au, t.gast)
    # Rotate from the Earth-fixed frame into GCRS with the matrix t.MT;
    # the einsum pattern applies the 3x3 rotation along leading axes.
    pos = einsum('ij...,j...->i...', t.MT, pos)
    vel = einsum('ij...,j...->i...', t.MT, vel)
    # self.x / self.y look like small pole-offset angles in arcseconds
    # (scaled by ASEC2RAD) -- confirm; applied to position only.
    if self.x:
        R = rot_y(self.x * ASEC2RAD)
        pos = einsum('ij...,j...->i...', R, pos)
    if self.y:
        R = rot_x(self.y * ASEC2RAD)
        pos = einsum('ij...,j...->i...', R, pos)
    # TODO: also rotate velocity
    # Four-tuple return: the position is deliberately repeated as the
    # third element (presumably the geocentric position expected by the
    # caller -- verify against the caller's contract).
    return pos, vel, pos, None
|
def helical_turbulent_Nu_Schmidt(Re, Pr, Di, Dc):
    r'''Calculate the Nusselt number for turbulent flow inside a curved
    pipe such as a helical coil, using the correlation of Schmidt [1]_,
    also shown in [2]_, [3]_, and [4]_.

    For :math:`Re_{crit} < Re < 2.2\times 10^4`:

    .. math::
        Nu = 0.023\left[1 + 14.8\left(1 + \frac{D_i}{D_c}\right)\left(
        \frac{D_i}{D_c}\right)^{1/3}\right] Re^{0.8-0.22\left(\frac{D_i}{D_c}
        \right)^{0.1}} Pr^{1/3}

    For :math:`2.2\times 10^4 < Re < 1.5\times 10^5`:

    .. math::
        Nu = 0.023\left[1 + 3.6\left(1 - \frac{D_i}{D_c}\right)\left(\frac{D_i}
        {D_c}\right)^{0.8}\right] Re^{0.8} Pr^{1/3}

    Parameters
    ----------
    Re : float
        Reynolds number with `D = Di`, [-]
    Pr : float
        Prandtl number with bulk properties, [-]
    Di : float
        Inner diameter of the coil, [m]
    Dc : float
        Diameter of the helix/coil measured from the center of the tube on
        one side to the center of the tube on the other side, [m]

    Returns
    -------
    Nu : float
        Nusselt number with respect to `Di`, [-]

    Notes
    -----
    For very low curvatures, reasonable results are returned by both
    Reynolds-number branches.

    Examples
    --------
    >>> helical_turbulent_Nu_Schmidt(2E5, 0.7, 0.01, .2)
    466.2569996832083

    References
    ----------
    .. [1] Schmidt, Eckehard F. "Wärmeübergang Und Druckverlust in
       Rohrschlangen." Chemie Ingenieur Technik 39, no. 13 (1967): 781-89.
       doi:10.1002/cite.330391302.
    .. [2] El-Genk, Mohamed S., and Timothy M. Schriener. "A Review and
       Correlations for Convection Heat Transfer and Pressure Losses in
       Toroidal and Helically Coiled Tubes." Heat Transfer Engineering
       (2016): 1-28. doi:10.1080/01457632.2016.1194693.
    .. [3] Hardik, B. K., P. K. Baburajan, and S. V. Prabhu. "Local Heat
       Transfer Coefficient in Helical Coils with Single Phase Flow."
       International Journal of Heat and Mass Transfer 89 (2015): 522-38.
       doi:10.1016/j.ijheatmasstransfer.2015.05.069.
    .. [4] Rohsenow, Warren and James Hartnett and Young Cho. Handbook of
       Heat Transfer, 3E. New York: McGraw-Hill, 1998.
    '''
    ratio = Di / Dc
    Pr_term = Pr ** (1 / 3.)
    if Re <= 2.2E4:
        # Low-Re branch: curvature raises both the prefactor and the
        # Reynolds exponent.
        geometry = 1. + 14.8 * (1. + ratio) * ratio ** (1 / 3.)
        exponent = 0.8 - 0.22 * ratio ** 0.1
        return 0.023 * geometry * Re ** exponent * Pr_term
    # High-Re branch: fixed 0.8 Reynolds exponent.
    geometry = 1. + 3.6 * (1. - ratio) * ratio ** 0.8
    return 0.023 * geometry * Re ** 0.8 * Pr_term
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.