signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def connectExec(connection, protocol, commandLine):
    """Attach *protocol* to a new SSH session that execs *commandLine*.

    Opens a session channel on *connection* and, once it is ready,
    issues an exec request for the given command line.

    :return: Deferred that fires when the exec request completes.
    """
    session_deferred = connectSession(connection, protocol)

    def _request_exec(session):
        # The session channel is open; ask the server to run the command.
        return session.requestExec(commandLine)

    session_deferred.addCallback(_request_exec)
    return session_deferred
|
def orient(mag_azimuth, field_dip, or_con):
    """Convert field orientation measurements to laboratory azimuth and plunge.

    Parameters
    ----------
    mag_azimuth : float
        Azimuth measured in the field; -999 is the missing-value sentinel.
    field_dip : float
        Dip measured in the field.
    or_con : int or str
        Orientation convention number, "1" through "7".

    Returns
    -------
    tuple
        (lab_azimuth, lab_plunge); ("", "") when mag_azimuth is the -999
        sentinel.  For an unrecognized convention an error message is
        printed and None is returned (unchanged legacy behavior).
    """
    or_con = str(or_con)
    if mag_azimuth == -999:
        # Missing measurement sentinel.
        return "", ""
    # (lab_azimuth, lab_plunge) per orientation convention.
    conversions = {
        "1": (mag_azimuth, -field_dip),              # lab_az = az;      plunge = -dip
        "2": (mag_azimuth - 90., -field_dip),        # lab_az = az - 90; plunge = -dip
        "3": (mag_azimuth, 90. - field_dip),         # lab_az = az;      plunge = 90 - dip
        "4": (mag_azimuth, field_dip),               # lab_az = az;      plunge = dip
        "5": (mag_azimuth, field_dip - 90.),         # lab_az = az;      plunge = dip - 90
        "6": (mag_azimuth - 90., 90. - field_dip),   # lab_az = az - 90; plunge = 90 - dip
        # NOTE: the original inline comment claimed lab_az = az for
        # convention 7, but the code has always used az - 90 (same as 6).
        "7": (mag_azimuth - 90., 90. - field_dip),   # lab_az = az - 90; plunge = 90 - dip
    }
    if or_con in conversions:
        return conversions[or_con]
    print("Error in orientation convention")
    # Explicit None return; previously the function fell off the end,
    # returning None implicitly.
    return None
|
def _Viscosity(rho, T, fase=None, drho=None):
    """Equation for the viscosity of ordinary water substance (IAPWS 2008).

    Parameters
    ----------
    rho : float
        Density, [kg/m3]
    T : float
        Temperature, [K]
    fase : optional
        Phase-properties object; required together with *drho* to compute
        the critical enhancement term.  NOTE(review): the original
        docstring says "dict" but the code reads the attribute
        ``fase.drhodP_T`` — confirm the expected type with callers.
    drho : float, optional
        [drho/dP]T at the reference state; required together with *fase*
        for the critical enhancement.

    Returns
    -------
    mu : float
        Viscosity, [Pa*s]

    Examples
    --------
    >>> _Viscosity(998, 298.15)
    0.0008897351001498108
    >>> _Viscosity(600, 873.15)
    7.743019522728247e-05

    References
    ----------
    IAPWS, Release on the IAPWS Formulation 2008 for the Viscosity of
    Ordinary Water Substance, http://www.iapws.org/relguide/viscosity.html

    NOTE(review): Tc, rhoc, Pc and the math functions (exp, acos, tan,
    log, atan, sin) come from module scope — assumed to match the IAPWS
    reference constants; confirm against the module header.
    """
    # Reduced temperature and density.
    Tr = T / Tc
    Dr = rho / rhoc

    # Viscosity in the dilute-gas limit, Eq 11
    H = [1.67752, 2.20462, 0.6366564, -0.241605]
    mu0 = 100 * Tr ** 0.5 / sum([Hi / Tr ** i for i, Hi in enumerate(H)])

    # Contribution due to finite density, Eq 12 (double sum over i, j)
    I = [0, 1, 2, 3, 0, 1, 2, 3, 5, 0, 1, 2, 3, 4, 0, 1, 0, 3, 4, 3, 5]
    J = [0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 4, 4, 5, 6, 6]
    Hij = [0.520094, 0.850895e-1, -0.108374e1, -0.289555, 0.222531, 0.999115,
           0.188797e1, 0.126613e1, 0.120573, -0.281378, -0.906851, -0.772479,
           -0.489837, -0.257040, 0.161913, 0.257399, -0.325372e-1, 0.698452e-1,
           0.872102e-2, -0.435673e-2, -0.593264e-3]
    mu1 = exp(Dr * sum([(1 / Tr - 1) ** i * h * (Dr - 1) ** j
                        for i, j, h in zip(I, J, Hij)]))

    # Critical enhancement; only computed when the caller supplies the
    # phase data needed for it, otherwise the factor is 1.
    if fase and drho:
        qc = 1 / 1.9
        qd = 1 / 1.1

        # Eq 21
        DeltaX = Pc * Dr ** 2 * (fase.drhodP_T / rho - drho / rho * 1.5 / Tr)
        if DeltaX < 0:
            # Clamp: the correlation is only defined for non-negative DeltaX.
            DeltaX = 0

        # Eq 20
        X = 0.13 * (DeltaX / 0.06) ** (0.63 / 1.239)

        if X <= 0.3817016416:
            # Eq 15 (small-X branch)
            Y = qc / 5 * X * (qd * X) ** 5 * (1 - qc * X + (qc * X) ** 2 - 765. / 504 * (qd * X) ** 2)
        else:
            Fid = acos((1 + qd ** 2 * X ** 2) ** -0.5)
            # Eq 17
            w = abs((qc * X - 1) / (qc * X + 1)) ** 0.5 * tan(Fid / 2)

            # Eq 19 / Eq 18: logarithmic form when qc*X > 1, arctangent otherwise
            if qc * X > 1:
                Lw = log((1 + w) / (1 - w))
            else:
                Lw = 2 * atan(abs(w))

            # Eq 16 (large-X branch)
            Y = sin(3 * Fid) / 12 - sin(2 * Fid) / 4 / qc / X + (1 - 5 / 4 * (qc * X) ** 2) / (qc * X) ** 2 * sin(Fid) - ((1 - 3 / 2 * (qc * X) ** 2) * Fid - abs((qc * X) ** 2 - 1) ** 1.5 * Lw) / (qc * X) ** 3

        # Eq 14
        mu2 = exp(0.068 * Y)
    else:
        mu2 = 1

    # Eq 10; result is in microPa*s, converted to Pa*s on return.
    mu = mu0 * mu1 * mu2
    return mu * 1e-6
|
def _build_http(http=None):
    """Construct an http client suitable for googleapiclient usage w/ user agent."""
    if not http:
        http = httplib2.Http(timeout=HTTP_REQUEST_TIMEOUT,
                             ca_certs=HTTPLIB_CA_BUNDLE)
    agent = 'Python-httplib2/{} (gzip), {}/{}'.format(
        httplib2.__version__, 'custodian-gcp', '0.1')
    return set_user_agent(http, agent)
|
def get_stoplist(language):
    """Return the built-in stop-list for *language* as a frozenset of words.

    Words are read from the package data file ``stoplists/<language>.txt``,
    decoded as UTF-8 and lowercased.
    """
    path = os.path.join("stoplists", "%s.txt" % language)
    try:
        raw = pkgutil.get_data("justext", path)
    except IOError:
        raise ValueError(
            "Stoplist for language '%s' is missing. "
            "Please use function 'get_stoplists' for complete list of stoplists "
            "and feel free to contribute by your own stoplist." % language
        )
    return frozenset(line.decode("utf8").lower() for line in raw.splitlines())
|
def to_unicode(data, encoding='UTF-8'):
    """Convert a number of different types of objects to unicode.

    Handles, in order: text that is already unicode (returned unchanged),
    byte strings (decoded with *encoding*), and iterables.  Iterables are
    probed with ``dict(data)``: a ValueError means a one-dimensional
    sequence (a generator of converted items is returned); success means
    a mapping / key-value sequence (a dict with both keys and values
    recursively converted is returned); a TypeError means neither, and
    the object falls through and is returned as-is.

    NOTE(review): the ``dict(data)`` probe consumes one-shot iterators
    (generators) before the conversion happens — verify callers only
    pass reusable sequences.

    :param data: object to convert
    :param encoding: codec used to decode byte strings (default UTF-8)
    """
    if isinstance(data, unicode_type):
        return data
    if isinstance(data, bytes):
        return unicode_type(data, encoding=encoding)
    if hasattr(data, '__iter__'):
        try:
            dict(data)
        except TypeError:
            # Not convertible to a mapping (e.g. a sequence of
            # non-iterable items); fall through to `return data`.
            pass
        except ValueError:
            # Assume it's a one dimensional data structure
            return (to_unicode(i, encoding) for i in data)
        else:
            # We support 2.6 which lacks dict comprehensions
            if hasattr(data, 'items'):
                data = data.items()
            return dict(((to_unicode(k, encoding), to_unicode(v, encoding)) for k, v in data))
    return data
|
def setxy(self):
    """Compute all vertex coordinates (x, y) using
    an algorithm by Brandes & Kopf.

    Runs the vertical-alignment / horizontal-compaction passes once for
    each of the four direction combinations (dirvh = 0..3), assigns each
    vertex the mean of the two median x-candidates, and stacks the layers
    vertically using each layer's maximum node half-height plus
    ``self.yspace``.
    """
    self._edge_inverter()
    self._detect_alignment_conflicts()
    inf = float('infinity')
    # initialize vertex coordinates attributes:
    for l in self.layers:
        for v in l:
            self.grx[v].root = v       # per Brandes & Kopf: block root
            self.grx[v].align = v      # next vertex in the alignment block
            self.grx[v].sink = v       # sink of the alignment class
            self.grx[v].shift = inf    # class shift; infinity means unset
            self.grx[v].X = None       # x-coordinate of the current pass
            self.grx[v].x = [0.0] * 4  # one candidate x per direction pass
    curvh = self.dirvh
    # save current dirvh value, run the four directional passes
    for dirvh in xrange(4):
        self.dirvh = dirvh
        self._coord_vertical_alignment()
        self._coord_horizontal_compact()
    self.dirvh = curvh
    # restore it
    # vertical coordinate assigment of all nodes:
    Y = 0
    for l in self.layers:
        dY = max([v.view.h / 2. for v in l])
        for v in l:
            vx = sorted(self.grx[v].x)
            # mean of the 2 medians out of the 4 x-coord computed above:
            avgm = (vx[1] + vx[2]) / 2.
            # final xy-coordinates :
            v.view.xy = (avgm, Y + dY)
        Y += 2 * dY + self.yspace
    self._edge_inverter()
|
def _prepare_forms(self):
    """Prepare request content for paramType=form parameters.

    :return: tuple of (content type, urlencoded formData payload)
    :raises errs.SchemaError: when the operation declares a consumes list
        that does not include application/x-www-form-urlencoded
    """
    form_type = 'application/x-www-form-urlencoded'
    consumes = self.__op.consumes
    if consumes and form_type not in consumes:
        raise errs.SchemaError(
            'content type {0} does not present in {1}'.format(form_type, consumes))
    payload = six.moves.urllib.parse.urlencode(self.__p['formData'])
    return form_type, payload
|
def generate(env):
    """Add default tools."""
    # Instantiate and apply every tool the platform advertises.
    tool_names = SCons.Tool.tool_list(env['PLATFORM'], env)
    for tool_name in tool_names:
        SCons.Tool.Tool(tool_name)(env)
|
def set_syslog_config(host, username, password, syslog_config, config_value,
                      protocol=None, port=None, firewall=True,
                      reset_service=True, esxi_hosts=None, credstore=None):
    '''
    Set the specified syslog configuration parameter. By default, this function will
    reset the syslog service after the configuration is set.

    host
        ESXi or vCenter host to connect to.

    username
        User to connect as, usually root.

    password
        Password to connect with.

    syslog_config
        Name of parameter to set (corresponds to the command line switch for
        esxcli without the double dashes (--)).

        Valid syslog_config values are ``logdir``, ``loghost``, ``default-rotate``,
        ``default-size``, ``default-timeout``, and ``logdir-unique``.

    config_value
        Value for the above parameter. For ``loghost``, URLs or IP addresses to
        use for logging. Multiple log servers can be specified by listing them,
        comma-separated, but without spaces before or after commas.

        (reference: https://blogs.vmware.com/vsphere/2012/04/configuring-multiple-syslog-servers-for-esxi-5.html)

    protocol
        Optionally set to alternate protocol if the host is not using the default
        protocol. Default protocol is ``https``.

    port
        Optionally set to alternate port if the host is not using the default
        port. Default port is ``443``.

    firewall
        Enable the firewall rule set for syslog. Defaults to ``True``.

    reset_service
        After a successful parameter set, reset the service. Defaults to ``True``.

    esxi_hosts
        If ``host`` is a vCenter host, then use esxi_hosts to execute this function
        on a list of one or more ESXi machines.

    credstore
        Optionally set to path to the credential store file.

    :return: Dictionary keyed per host; each entry indicates which steps
        ('enable_firewall' and the individual parameter set) succeeded or
        failed on that host.

    CLI Example:

    .. code-block:: bash

        # Used for ESXi host connection information
        salt '*' vsphere.set_syslog_config my.esxi.host root bad-password loghost ssl://localhost:5432,tcp://10.1.0.1:1514

        # Used for connecting to a vCenter Server
        salt '*' vsphere.set_syslog_config my.vcenter.location root bad-password loghost ssl://localhost:5432,tcp://10.1.0.1:1514 esxi_hosts='[esxi-1.host.com, esxi-2.host.com]'
    '''
    ret = {}

    # First, enable the syslog firewall ruleset, for each host, if needed.
    if firewall and syslog_config == 'loghost':
        if esxi_hosts:
            if not isinstance(esxi_hosts, list):
                raise CommandExecutionError('\'esxi_hosts\' must be a list.')

            for esxi_host in esxi_hosts:
                response = enable_firewall_ruleset(host, username, password,
                                                   ruleset_enable=True,
                                                   ruleset_name='syslog',
                                                   protocol=protocol, port=port,
                                                   esxi_hosts=[esxi_host],
                                                   credstore=credstore).get(esxi_host)
                if response['retcode'] != 0:
                    # Record the failure but keep going for the other hosts.
                    ret.update({esxi_host: {'enable_firewall': {
                        'message': response['stdout'],
                        'success': False}}})
                else:
                    ret.update({esxi_host: {'enable_firewall': {'success': True}}})
        else:
            # Handles a single host or a vCenter connection when no esxi_hosts are provided.
            response = enable_firewall_ruleset(host, username, password,
                                               ruleset_enable=True,
                                               ruleset_name='syslog',
                                               protocol=protocol, port=port,
                                               credstore=credstore).get(host)
            if response['retcode'] != 0:
                ret.update({host: {'enable_firewall': {
                    'message': response['stdout'],
                    'success': False}}})
            else:
                ret.update({host: {'enable_firewall': {'success': True}}})

    # Set the config value on each esxi_host, if provided.
    if esxi_hosts:
        if not isinstance(esxi_hosts, list):
            raise CommandExecutionError('\'esxi_hosts\' must be a list.')

        for esxi_host in esxi_hosts:
            response = _set_syslog_config_helper(host, username, password,
                                                 syslog_config, config_value,
                                                 protocol=protocol, port=port,
                                                 reset_service=reset_service,
                                                 esxi_host=esxi_host,
                                                 credstore=credstore)
            # Ensure we don't overwrite any dictionary data already set
            # By updating the esxi_host directly.
            if ret.get(esxi_host) is None:
                ret.update({esxi_host: {}})
            ret[esxi_host].update(response)
    else:
        # Handles a single host or a vCenter connection when no esxi_hosts are provided.
        response = _set_syslog_config_helper(host, username, password,
                                             syslog_config, config_value,
                                             protocol=protocol, port=port,
                                             reset_service=reset_service,
                                             credstore=credstore)
        # Ensure we don't overwrite any dictionary data already set
        # By updating the host directly.
        if ret.get(host) is None:
            ret.update({host: {}})
        ret[host].update(response)

    return ret
|
def grab_focus(self):
    """Grab the window's focus.

    Keyboard and scroll events will be forwarded to the sprite holding
    the focus.  Check the sprite's 'focused' property in the on-render
    event to decide how to render it (say, add an outline when
    focused=true).
    """
    current_scene = self.get_scene()
    if not current_scene:
        return
    if current_scene._focus_sprite != self:
        current_scene._focus_sprite = self
|
def _update_partition_dci_id(self, tenant_name, dci_id, vrf_prof=None, part_name=None):
    """Function to update DCI ID of partition.

    Thin wrapper that delegates to the DCNM client's ``update_project``.

    :param tenant_name: name of the tenant owning the partition
    :param dci_id: new DCI ID to set on the partition
    :param vrf_prof: optional VRF profile, passed through unchanged
    :param part_name: optional partition name, passed through unchanged
    """
    self.dcnm_obj.update_project(tenant_name, part_name, dci_id=dci_id, vrf_prof=vrf_prof)
|
def _get_model_cost ( self , formula , model ) :
"""Given a WCNF formula and a model , the method computes the MaxSAT
cost of the model , i . e . the sum of weights of soft clauses that are
unsatisfied by the model .
: param formula : an input MaxSAT formula
: param model : a satisfying assignment
: type formula : : class : ` . WCNF `
: type model : list ( int )
: rtype : int"""
|
model_set = set ( model )
cost = 0
for i , cl in enumerate ( formula . soft ) :
cost += formula . wght [ i ] if all ( l not in model_set for l in filter ( lambda l : abs ( l ) <= self . formula . nv , cl ) ) else 0
return cost
|
def fetch_json_by_name(name):
    """Fetch json based on the element name.

    First gets the href based on a search by name, then makes a
    second query to obtain the element json.

    :method: GET
    :param str name: element name
    :return: :py:class:`smc.api.web.SMCResult`
    """
    meta = fetch_meta_by_name(name)
    if not meta.href:
        # No match found; hand back the (empty) search result unchanged.
        return meta
    return fetch_json_by_href(meta.href)
|
def draw(self):
    """Renders the class balance chart on the specified axes from support."""
    # Number of colors is either number of classes or 2
    colors = resolve_colors(len(self.support_))

    if self._mode == BALANCE:
        positions = np.arange(len(self.support_))
        self.ax.bar(positions, self.support_, color=colors,
                    align='center', width=0.5)
        return self.ax

    # Compare mode: paired train/test bars, offset by bar_width.
    bar_width = 0.35
    labels = ["train", "test"]
    for idx, support in enumerate(self.support_):
        index = np.arange(len(self.classes_))
        if idx > 0:
            index = index + bar_width
        self.ax.bar(index, support, bar_width,
                    color=colors[idx], label=labels[idx])
    return self.ax
|
def concat(self, *args, **kwargs):
    """Append positional and keyword text fragments to this builder.

    :type args: FormattedText
    :type kwargs: FormattedText
    :return: self, to allow chaining
    """
    is_compatible = self.formatted_text._is_compatible
    for fragment in args:
        assert is_compatible(fragment), "Cannot concat text with different modes"
        self.format_args.append(fragment.text)
    for name, fragment in kwargs.items():
        assert is_compatible(fragment), "Cannot concat text with different modes"
        self.format_kwargs[name] = fragment.text
    return self
|
def copy_clip_rectangle_list(self):
    """Return the current clip region as a list of rectangles
    in user coordinates.

    :return:
        A list of rectangles,
        as ``(x, y, width, height)`` tuples of floats.

    :raises:
        :exc:`CairoError`
        if the clip region cannot be represented as a list
        of user-space rectangles.
    """
    rect_list = cairo.cairo_copy_clip_rectangle_list(self._pointer)
    _check_status(rect_list.status)
    rects = rect_list.rectangles
    result = [
        (rects[i].x, rects[i].y, rects[i].width, rects[i].height)
        for i in range(rect_list.num_rectangles)
    ]
    # Free the C-allocated list once the values are copied out.
    cairo.cairo_rectangle_list_destroy(rect_list)
    return result
|
def point_probability(self, threshold):
    """Determine the probability of exceeding a threshold at a grid point based
    on the ensemble forecasts at that point.

    Args:
        threshold: If >= threshold assigns a 1 to member, otherwise 0.

    Returns:
        EnsembleConsensus
    """
    point_prob = np.zeros(self.data.shape[1:])
    for step in range(self.data.shape[1]):
        exceed = np.where(self.data[:, step] >= threshold, 1.0, 0.0)
        point_prob[step] = exceed.mean(axis=0)
    variable_name = self.variable + "_{0:0.2f}_{1}".format(
        threshold, self.units.replace(" ", "_"))
    return EnsembleConsensus(point_prob, "point_probability",
                             self.ensemble_name, self.run_date,
                             variable_name, self.start_date,
                             self.end_date, "")
|
def _xray_register_type_fix ( wrapped , instance , args , kwargs ) :
"""Send the actual connection or curser to register type ."""
|
our_args = list ( copy . copy ( args ) )
if len ( our_args ) == 2 and isinstance ( our_args [ 1 ] , ( XRayTracedConn , XRayTracedCursor ) ) :
our_args [ 1 ] = our_args [ 1 ] . __wrapped__
return wrapped ( * our_args , ** kwargs )
|
def create_estimator(model_name, hparams, run_config,
                     schedule="train_and_evaluate", decode_hparams=None,
                     use_tpu=False, use_tpu_estimator=False, use_xla=False):
    """Create a T2T Estimator.

    Args:
      model_name: registered T2T model name, used to build the model_fn.
      hparams: hyperparameters; must carry a ``problem`` when the TPU path
        is taken.
      run_config: estimator RunConfig; its ``tpu_config`` is consulted on
        the TPU path.
      schedule: experiment schedule string; eval batch size is only set
        when it contains "eval".
      decode_hparams: optional decoding hyperparameters; may override the
        predict batch size and receives ``iterations_per_loop``.
      use_tpu: build the model_fn for TPU execution.
      use_tpu_estimator: force the TPUEstimator path even if use_tpu is
        False.
      use_xla: accepted for signature compatibility; ignored here.

    Returns:
      A tf.contrib.tpu.TPUEstimator (TPU path) or a tf.estimator.Estimator.
    """
    model_fn = t2t_model.T2TModel.make_estimator_model_fn(
        model_name, hparams, decode_hparams=decode_hparams, use_tpu=use_tpu)

    del use_xla  # unused by this builder
    if use_tpu or use_tpu_estimator:
        problem = hparams.problem
        # Global batch size = per-shard batch size * number of TPU shards.
        batch_size = (problem.tpu_batch_size_per_shard(hparams) *
                      run_config.tpu_config.num_shards)
        mlperf_log.transformer_print(key=mlperf_log.INPUT_BATCH_SIZE,
                                     value=batch_size)
        if getattr(hparams, "mtf_mode", False):
            # Mesh-TensorFlow mode uses the per-shard size directly.
            batch_size = problem.tpu_batch_size_per_shard(hparams)
        predict_batch_size = batch_size
        if decode_hparams and decode_hparams.batch_size:
            predict_batch_size = decode_hparams.batch_size
        if decode_hparams and run_config.tpu_config:
            decode_hparams.add_hparam("iterations_per_loop",
                                      run_config.tpu_config.iterations_per_loop)
        estimator = tf.contrib.tpu.TPUEstimator(
            model_fn=model_fn,
            model_dir=run_config.model_dir,
            config=run_config,
            use_tpu=use_tpu,
            train_batch_size=batch_size,
            # Only configure eval batching when the schedule actually evals.
            eval_batch_size=batch_size if "eval" in schedule else None,
            predict_batch_size=predict_batch_size,
            experimental_export_device_assignment=True)
    else:
        estimator = tf.estimator.Estimator(
            model_fn=model_fn,
            model_dir=run_config.model_dir,
            config=run_config,
        )
    return estimator
|
def word_freq(word: str, domain: str = "all") -> int:
    """**Not officially supported.**

    Get word frequency of a word by domain.

    This function will make a query to the server of Thai National Corpus.
    Internet connection is required.

    **IMPORTANT:** Currently (as of 29 April 2019) always return 0,
    as the service URL has been changed and the code is not updated yet.

    :param string word: word
    :param string domain: domain
    """
    domain_codes = {
        "all": "",
        "imaginative": "1",
        "natural-pure-science": "2",
        "applied-science": "3",
        "social-science": "4",
        "world-affairs-history": "5",
        "commerce-finance": "6",
        "arts": "7",
        "belief-thought": "8",
        "leisure": "9",
        "others": "0",
    }
    url = "http://www.arts.chula.ac.th/~ling/TNCII/corp.php"
    # New URL is http://www.arts.chula.ac.th/~ling/tnc3/
    payload = {
        "genre[]": "",
        "domain[]": domain_codes[domain],
        "sortby": "perc",
        "p": word,
    }
    response = requests.post(url, data=payload)
    total_pattern = re.compile(r'TOTAL</font>(?s).*?#ffffff">(.*?)</font>')
    found = total_pattern.search(response.text)
    if not found:
        return 0
    return int(found.group(1).strip())
|
def _new_page(self):
    """Helper function to start a new page. Not intended for external use."""
    page = Drawing(*self._pagesize)
    if self._bgimage:
        page.add(self._bgimage)
    self._current_page = page
    self._pages.append(page)
    self.page_count += 1
    # Reset the drawing cursor for the fresh page.
    self._position = [1, 0]
|
def _auth(self, load):
    '''
    Authenticate the client, use the sent public key to encrypt the AES key
    which was generated at start up.

    This method fires an event over the master event manager. The event is
    tagged "auth" and returns a dict with information about the auth
    event.

    Overall flow:
      - verify that the key we are receiving matches the stored key
      - store the key if it is not there
      - make an RSA key with the pub key
      - encrypt the AES key as an encrypted salt.payload
      - package the return and return it

    :param dict load: the minion's auth payload; must contain 'id' and 'pub'
    :return: dict reply for the minion ('enc'/'load' clear reply on
        failure, or the encrypted auth reply on success)
    '''
    # Reject ids that fail validation (e.g. path-traversal attempts).
    if not salt.utils.verify.valid_id(self.opts, load['id']):
        log.info('Authentication request from invalid id %s', load['id'])
        return {'enc': 'clear', 'load': {'ret': False}}
    log.info('Authentication request from %s', load['id'])

    # 0 is default which should be 'unlimited'
    if self.opts['max_minions'] > 0:
        # use the ConCache if enabled, else use the minion utils
        if self.cache_cli:
            minions = self.cache_cli.get_cached()
        else:
            minions = self.ckminions.connected_ids()
            if len(minions) > 1000:
                log.info('With large numbers of minions it is advised '
                         'to enable the ConCache with \'con_cache: True\' '
                         'in the masters configuration file.')

        if not len(minions) <= self.opts['max_minions']:
            # we reject new minions, minions that are already
            # connected must be allowed for the mine, highstate, etc.
            if load['id'] not in minions:
                msg = ('Too many minions connected (max_minions={0}). '
                       'Rejecting connection from id '
                       '{1}'.format(self.opts['max_minions'], load['id']))
                log.info(msg)
                eload = {'result': False,
                         'act': 'full',
                         'id': load['id'],
                         'pub': load['pub']}
                if self.opts.get('auth_events') is True:
                    self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
                return {'enc': 'clear', 'load': {'ret': 'full'}}

    # Check if key is configured to be auto-rejected/signed
    auto_reject = self.auto_key.check_autoreject(load['id'])
    auto_sign = self.auto_key.check_autosign(load['id'], load.get(u'autosign_grains', None))

    # Paths representing the four possible states of this minion's key.
    pubfn = os.path.join(self.opts['pki_dir'], 'minions', load['id'])
    pubfn_pend = os.path.join(self.opts['pki_dir'], 'minions_pre', load['id'])
    pubfn_rejected = os.path.join(self.opts['pki_dir'], 'minions_rejected', load['id'])
    pubfn_denied = os.path.join(self.opts['pki_dir'], 'minions_denied', load['id'])

    if self.opts['open_mode']:
        # open mode is turned on, nuts to checks and overwrite whatever
        # is there
        pass
    elif os.path.isfile(pubfn_rejected):
        # The key has been rejected, don't place it in pending
        log.info('Public key rejected for %s. Key is present in '
                 'rejection key dir.', load['id'])
        eload = {'result': False,
                 'id': load['id'],
                 'pub': load['pub']}
        if self.opts.get('auth_events') is True:
            self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
        return {'enc': 'clear', 'load': {'ret': False}}

    elif os.path.isfile(pubfn):
        # The key has been accepted, check it
        with salt.utils.files.fopen(pubfn, 'r') as pubfn_handle:
            if pubfn_handle.read().strip() != load['pub'].strip():
                log.error('Authentication attempt from %s failed, the public '
                          'keys did not match. This may be an attempt to compromise '
                          'the Salt cluster.', load['id'])
                # put denied minion key into minions_denied
                with salt.utils.files.fopen(pubfn_denied, 'w+') as fp_:
                    fp_.write(load['pub'])
                eload = {'result': False,
                         'id': load['id'],
                         'act': 'denied',
                         'pub': load['pub']}
                if self.opts.get('auth_events') is True:
                    self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
                return {'enc': 'clear', 'load': {'ret': False}}

    elif not os.path.isfile(pubfn_pend):
        # The key has not been accepted, this is a new minion
        if os.path.isdir(pubfn_pend):
            # The key path is a directory, error out
            log.info('New public key %s is a directory', load['id'])
            eload = {'result': False,
                     'id': load['id'],
                     'pub': load['pub']}
            if self.opts.get('auth_events') is True:
                self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
            return {'enc': 'clear', 'load': {'ret': False}}

        if auto_reject:
            key_path = pubfn_rejected
            log.info('New public key for %s rejected via autoreject_file', load['id'])
            key_act = 'reject'
            key_result = False
        elif not auto_sign:
            key_path = pubfn_pend
            log.info('New public key for %s placed in pending', load['id'])
            key_act = 'pend'
            key_result = True
        else:
            # The key is being automatically accepted, don't do anything
            # here and let the auto accept logic below handle it.
            key_path = None

        if key_path is not None:
            # Write the key to the appropriate location
            with salt.utils.files.fopen(key_path, 'w+') as fp_:
                fp_.write(load['pub'])
            ret = {'enc': 'clear',
                   'load': {'ret': key_result}}
            eload = {'result': key_result,
                     'act': key_act,
                     'id': load['id'],
                     'pub': load['pub']}
            if self.opts.get('auth_events') is True:
                self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
            return ret

    elif os.path.isfile(pubfn_pend):
        # This key is in the pending dir and is awaiting acceptance
        if auto_reject:
            # We don't care if the keys match, this minion is being
            # auto-rejected. Move the key file from the pending dir to the
            # rejected dir.
            try:
                shutil.move(pubfn_pend, pubfn_rejected)
            except (IOError, OSError):
                pass
            log.info('Pending public key for %s rejected via '
                     'autoreject_file', load['id'])
            ret = {'enc': 'clear',
                   'load': {'ret': False}}
            eload = {'result': False,
                     'act': 'reject',
                     'id': load['id'],
                     'pub': load['pub']}
            if self.opts.get('auth_events') is True:
                self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
            return ret

        elif not auto_sign:
            # This key is in the pending dir and is not being auto-signed.
            # Check if the keys are the same and error out if this is the
            # case. Otherwise log the fact that the minion is still
            # pending.
            with salt.utils.files.fopen(pubfn_pend, 'r') as pubfn_handle:
                if pubfn_handle.read() != load['pub']:
                    log.error('Authentication attempt from %s failed, the public '
                              'key in pending did not match. This may be an '
                              'attempt to compromise the Salt cluster.', load['id'])
                    # put denied minion key into minions_denied
                    with salt.utils.files.fopen(pubfn_denied, 'w+') as fp_:
                        fp_.write(load['pub'])
                    eload = {'result': False,
                             'id': load['id'],
                             'act': 'denied',
                             'pub': load['pub']}
                    if self.opts.get('auth_events') is True:
                        self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
                    return {'enc': 'clear', 'load': {'ret': False}}
                else:
                    log.info('Authentication failed from host %s, the key is in '
                             'pending and needs to be accepted with salt-key '
                             '-a %s', load['id'], load['id'])
                    eload = {'result': True,
                             'act': 'pend',
                             'id': load['id'],
                             'pub': load['pub']}
                    if self.opts.get('auth_events') is True:
                        self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
                    return {'enc': 'clear', 'load': {'ret': True}}

        else:
            # This key is in pending and has been configured to be
            # auto-signed. Check to see if it is the same key, and if
            # so, pass on doing anything here, and let it get automatically
            # accepted below.
            with salt.utils.files.fopen(pubfn_pend, 'r') as pubfn_handle:
                if pubfn_handle.read() != load['pub']:
                    log.error('Authentication attempt from %s failed, the public '
                              'keys in pending did not match. This may be an '
                              'attempt to compromise the Salt cluster.', load['id'])
                    # put denied minion key into minions_denied
                    with salt.utils.files.fopen(pubfn_denied, 'w+') as fp_:
                        fp_.write(load['pub'])
                    eload = {'result': False,
                             'id': load['id'],
                             'pub': load['pub']}
                    if self.opts.get('auth_events') is True:
                        self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
                    return {'enc': 'clear', 'load': {'ret': False}}
                else:
                    os.remove(pubfn_pend)

    else:
        # Something happened that I have not accounted for, FAIL!
        log.warning('Unaccounted for authentication failure')
        eload = {'result': False,
                 'id': load['id'],
                 'pub': load['pub']}
        if self.opts.get('auth_events') is True:
            self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
        return {'enc': 'clear', 'load': {'ret': False}}

    log.info('Authentication accepted from %s', load['id'])
    # only write to disk if you are adding the file, and in open mode,
    # which implies we accept any key from a minion.
    if not os.path.isfile(pubfn) and not self.opts['open_mode']:
        with salt.utils.files.fopen(pubfn, 'w+') as fp_:
            fp_.write(load['pub'])
    elif self.opts['open_mode']:
        disk_key = ''
        if os.path.isfile(pubfn):
            with salt.utils.files.fopen(pubfn, 'r') as fp_:
                disk_key = fp_.read()
        if load['pub'] and load['pub'] != disk_key:
            log.debug('Host key change detected in open mode.')
            with salt.utils.files.fopen(pubfn, 'w+') as fp_:
                fp_.write(load['pub'])
        elif not load['pub']:
            log.error('Public key is empty: %s', load['id'])
            return {'enc': 'clear', 'load': {'ret': False}}

    pub = None

    # the con_cache is enabled, send the minion id to the cache
    if self.cache_cli:
        self.cache_cli.put_cache([load['id']])

    # The key payload may sometimes be corrupt when using auto-accept
    # and an empty request comes in
    try:
        pub = salt.crypt.get_rsa_pub_key(pubfn)
    except (ValueError, IndexError, TypeError) as err:
        log.error('Corrupt public key "%s": %s', pubfn, err)
        return {'enc': 'clear', 'load': {'ret': False}}

    if not HAS_M2:
        # PyCrypto path: build an OAEP cipher around the minion's pub key.
        cipher = PKCS1_OAEP.new(pub)
    ret = {'enc': 'pub',
           'pub_key': self.master_key.get_pub_str(),
           'publish_port': self.opts['publish_port']}

    # sign the master's pubkey (if enabled) before it is
    # sent to the minion that was just authenticated
    if self.opts['master_sign_pubkey']:
        # append the pre-computed signature to the auth-reply
        if self.master_key.pubkey_signature():
            log.debug('Adding pubkey signature to auth-reply')
            log.debug(self.master_key.pubkey_signature())
            ret.update({'pub_sig': self.master_key.pubkey_signature()})
        else:
            # the master has its own signing-keypair, compute the master.pub's
            # signature and append that to the auth-reply
            # get the key_pass for the signing key
            key_pass = salt.utils.sdb.sdb_get(self.opts['signing_key_pass'], self.opts)
            log.debug("Signing master public key before sending")
            pub_sign = salt.crypt.sign_message(self.master_key.get_sign_paths()[1],
                                               ret['pub_key'], key_pass)
            ret.update({'pub_sig': binascii.b2a_base64(pub_sign)})

    if not HAS_M2:
        mcipher = PKCS1_OAEP.new(self.master_key.key)
    if self.opts['auth_mode'] >= 2:
        if 'token' in load:
            try:
                if HAS_M2:
                    mtoken = self.master_key.key.private_decrypt(load['token'],
                                                                 RSA.pkcs1_oaep_padding)
                else:
                    mtoken = mcipher.decrypt(load['token'])
                aes = '{0}_|-{1}'.format(salt.master.SMaster.secrets['aes']['secret'].value, mtoken)
            except Exception:
                # Token failed to decrypt, send back the salty bacon to
                # support older minions
                pass
        else:
            aes = salt.master.SMaster.secrets['aes']['secret'].value

        if HAS_M2:
            ret['aes'] = pub.public_encrypt(aes, RSA.pkcs1_oaep_padding)
        else:
            ret['aes'] = cipher.encrypt(aes)
    else:
        if 'token' in load:
            try:
                if HAS_M2:
                    mtoken = self.master_key.key.private_decrypt(load['token'],
                                                                 RSA.pkcs1_oaep_padding)
                    ret['token'] = pub.public_encrypt(mtoken, RSA.pkcs1_oaep_padding)
                else:
                    mtoken = mcipher.decrypt(load['token'])
                    ret['token'] = cipher.encrypt(mtoken)
            except Exception:
                # Token failed to decrypt, send back the salty bacon to
                # support older minions
                pass

        aes = salt.master.SMaster.secrets['aes']['secret'].value
        if HAS_M2:
            ret['aes'] = pub.public_encrypt(aes, RSA.pkcs1_oaep_padding)
        else:
            ret['aes'] = cipher.encrypt(aes)
    # Be aggressive about the signature
    digest = salt.utils.stringutils.to_bytes(hashlib.sha256(aes).hexdigest())
    ret['sig'] = salt.crypt.private_encrypt(self.master_key.key, digest)
    eload = {'result': True,
             'act': 'accept',
             'id': load['id'],
             'pub': load['pub']}
    if self.opts.get('auth_events') is True:
        self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
    return ret
|
def _to_dsn ( hosts ) :
"""Convert a host URI into a dsn for aiopg .
> > > _ to _ dsn ( ' aiopg : / / myhostname : 4242 / mydb ' )
' postgres : / / crate @ myhostname : 4242 / mydb '
> > > _ to _ dsn ( ' aiopg : / / myhostname : 4242 ' )
' postgres : / / crate @ myhostname : 4242 / doc '
> > > _ to _ dsn ( ' aiopg : / / hoschi : pw @ myhostname : 4242 / doc ? sslmode = require ' )
' postgres : / / hoschi : pw @ myhostname : 4242 / doc ? sslmode = require '
> > > _ to _ dsn ( ' aiopg : / / myhostname ' )
' postgres : / / crate @ myhostname : 5432 / doc '"""
|
p = urlparse ( hosts )
try :
user_and_pw , netloc = p . netloc . split ( '@' , maxsplit = 1 )
except ValueError :
netloc = p . netloc
user_and_pw = 'crate'
try :
host , port = netloc . split ( ':' , maxsplit = 1 )
except ValueError :
host = netloc
port = 5432
dbname = p . path [ 1 : ] if p . path else 'doc'
dsn = f'postgres://{user_and_pw}@{host}:{port}/{dbname}'
if p . query :
dsn += '?' + '&' . join ( k + '=' + v [ 0 ] for k , v in parse_qs ( p . query ) . items ( ) )
return dsn
|
def remove_get_department_uids(portal):
    """Remove the department-UID indexes and metadata columns from catalogs."""
    logger.info("Removing filtering by department ...")
    # (catalog, index) pairs to drop; note the singular/plural naming of the
    # index differs between catalogs.
    indexes_to_remove = [
        ("bika_catalog", "getDepartmentUIDs"),
        ("bika_setup_catalog", "getDepartmentUID"),
        (CATALOG_ANALYSIS_REQUEST_LISTING, "getDepartmentUIDs"),
        (CATALOG_WORKSHEET_LISTING, "getDepartmentUIDs"),
        (CATALOG_ANALYSIS_LISTING, "getDepartmentUID"),
    ]
    for catalog, index in indexes_to_remove:
        del_index(portal, catalog, index)
    metadata_to_remove = [
        (CATALOG_ANALYSIS_REQUEST_LISTING, "getDepartmentUIDs"),
        (CATALOG_WORKSHEET_LISTING, "getDepartmentUIDs"),
        (CATALOG_ANALYSIS_LISTING, "getDepartmentUID"),
    ]
    for catalog, column in metadata_to_remove:
        del_metadata(portal, catalog, column)
|
def Create(event_type):
    """Factory method creating objects derived from :py:class:`.Event`.

    The concrete class is looked up by matching the :py:class:`.EventType`
    name against the class names registered in ``EventFactory.event_list``.

    :param event_type: number for type of event
    :returns: constructed event corresponding to ``event_type``
    :rtype: :py:class:`.Event`
    :raises TypeError: if ``event_type`` is not a known :py:class:`.EventType`
    """
    if event_type in EventType.Name:
        # The base event type maps straight to the Event base class.
        if EventType.Name[event_type] == Event.__name__:
            return Event()
        else:
            # Instantiate the Event subclass whose class name equals the
            # EventType name.  Assumes exactly one matching subclass is
            # registered in EventFactory.event_list -- TODO confirm.
            return [t for t in EventFactory.event_list if t.__name__ == EventType.Name[event_type]][0]()
    else:
        raise TypeError("EventFactory.Create: Invalid EventType")
|
def argmin(self, rows: List[Row], column: ComparableColumn) -> List[Row]:
    """Takes a list of rows and a column and returns a list containing a single row (dict from
    columns to cells) that has the minimum numerical value in the given column.  We return a list
    instead of a single dict to be consistent with the return type of ``select`` and
    ``all_rows``.
    """
    if not rows:
        return []
    value_row_pairs = [(row.values[column.name], row) for row in rows]
    if not value_row_pairs:
        return []
    # Return the row with the *minimum* cell value (the previous comment said
    # "max", which was wrong).  ``min`` is O(n) instead of sorting, and, like
    # ``sorted(...)[0]``, returns the first row in input order on ties.
    return [min(value_row_pairs, key=lambda pair: pair[0])[1]]
|
def read_and_redirect(request, notification_id):
    """Marks the supplied notification as read and then redirects
    to the supplied URL from the ``next`` URL parameter.

    **IMPORTANT**: This is CSRF-unsafe method.
    Only use it if its okay for you to mark notifications as read without a robust check.

    :param request: HTTP request context.
    :param notification_id: ID of the notification to be marked a read.
    :returns: Redirect response to a valid target url.
    """
    notification_page = reverse('notifications:all')
    next_page = request.GET.get('next', notification_page)
    # Fall back to the notifications index when ``next`` is not a safe
    # redirect target.
    # NOTE(review): is_safe_url is called without allowed_hosts -- confirm
    # this matches the semantics of the Django version in use.
    if is_safe_url(next_page):
        target = next_page
    else:
        target = notification_page
    try:
        # Only notifications belonging to the requesting user are looked up.
        user_nf = request.user.notifications.get(pk=notification_id)
        if not user_nf.read:
            user_nf.mark_as_read()
    except Notification.DoesNotExist:
        # Unknown/foreign notification ids are silently ignored; the
        # redirect still happens.
        pass
    return HttpResponseRedirect(target)
|
def dump_hex(ofd, start, len_, prefix=0):
    """Log ``start`` as a hex dump, 16 bytes per log statement.

    https://github.com/thom311/libnl/blob/libnl3_2_25/lib/msg.c#L760

    Positional arguments:
    ofd -- function to call with arguments similar to `logging.debug`.
    start -- bytearray() or bytearray_ptr() instance.
    len_ -- size of `start` (integer).

    Keyword arguments:
    prefix -- additional number of whitespace pairs to prefix each log statement with.
    """
    pad = ' ' * prefix
    per_line = 16 - (prefix * 2)
    payload = start[:len_]
    for offset in range(0, len(payload), per_line):
        chunk = payload[offset:offset + per_line]
        hex_cells = []
        ascii_cells = []
        for item in chunk:
            # Items may be ints (Python 3 bytearray) or 1-char strings.
            if hasattr(item, 'real'):
                hex_cells.append('{0:02x}'.format(item))
                char = chr(item)
            else:
                hex_cells.append('{0:02x}'.format(ord(item)))
                char = item
            # string.printable[:95] excludes whitespace control characters.
            ascii_cells.append(char if char in string.printable[:95] else '.')
        ofd(' %s%s%s', pad, ' '.join(hex_cells).ljust(per_line * 3), ''.join(ascii_cells))
|
def mfpt_sensitivity(T, target, i):
    r"""Sensitivity matrix of the mean first-passage time from specified state.

    Parameters
    ----------
    T : (M, M) ndarray
        Transition matrix
    target : int or list
        Target state or set for mfpt computation
    i : int
        Compute the sensitivity for state `i`

    Returns
    -------
    S : (M, M) ndarray
        Sensitivity matrix for specified state
    """
    # check input
    T = _types.ensure_ndarray_or_sparse(T, ndim=2, uniform=True, kind='numeric')
    target = _types.ensure_int_vector(target)
    # go
    if _issparse(T):
        _showSparseConversionWarning()
        # BUG FIX: the result of the recursive call was previously discarded,
        # so sparse input silently returned None.
        return mfpt_sensitivity(T.todense(), target, i)
    else:
        return dense.sensitivity.mfpt_sensitivity(T, target, i)
|
def setup_continuous_delivery(self, swap_with_slot, app_type_details, cd_project_url, create_account, vsts_app_auth_token, test, webapp_list):
    """Use this method to setup Continuous Delivery of an Azure web site from a source control repository.

    :param swap_with_slot: the slot to use for deployment
    :param app_type_details: the details of app that will be deployed. i.e. app_type = Python, python_framework = Django etc.
    :param cd_project_url: CD Project url in the format of https://<accountname>.visualstudio.com/<projectname>
    :param create_account: Boolean value to decide if account need to be created or not
    :param vsts_app_auth_token: Authentication token for vsts app
    :param test: Load test webapp name
    :param webapp_list: Existing webapp list
    :return: a message indicating final status and instructions for the user
    :raises RuntimeError: if provisioning does not report 'queued' status
    """
    branch = self._repo_info.branch or 'refs/heads/master'
    self._validate_cd_project_url(cd_project_url)
    vsts_account_name = self._get_vsts_account_name(cd_project_url)
    # Verify inputs before we start generating tokens
    source_repository, account_name, team_project_name = self._get_source_repository(self._repo_info.url, self._repo_info.git_token, branch, self._azure_info.credentials, self._repo_info._private_repo_username, self._repo_info._private_repo_password)
    self._verify_vsts_parameters(vsts_account_name, source_repository)
    # Fall back to names derived from the repository / website when the
    # project URL did not pin them down.
    vsts_account_name = vsts_account_name or account_name
    cd_project_name = team_project_name or self._azure_info.website_name
    account_url = 'https://{}.visualstudio.com'.format(quote(vsts_account_name))
    portalext_account_url = 'https://{}.portalext.visualstudio.com'.format(quote(vsts_account_name))
    # VSTS Account using AEX APIs
    account_created = False
    if create_account:
        self.create_vsts_account(self._azure_info.credentials, vsts_account_name)
        account_created = True
    # Create ContinuousDelivery client
    cd = ContinuousDelivery('3.2-preview.1', portalext_account_url, self._azure_info.credentials)
    # Construct the config body of the continuous delivery call
    build_configuration = self._get_build_configuration(app_type_details)
    source = ProvisioningConfigurationSource('codeRepository', source_repository, build_configuration)
    auth_info = AuthorizationInfo('Headers', AuthorizationInfoParameters('Bearer ' + vsts_app_auth_token))
    target = self.get_provisioning_configuration_target(auth_info, swap_with_slot, test, webapp_list)
    ci_config = CiConfiguration(CiArtifact(name=cd_project_name))
    config = ProvisioningConfiguration(None, source, target, ci_config)
    # Configure the continuous delivery using VSTS as a backend
    response = cd.provisioning_configuration(config)
    if response.ci_configuration.result.status == 'queued':
        # Block until VSTS reports the provisioning finished.
        final_status = self._wait_for_cd_completion(cd, response)
        return self._get_summary(final_status, account_url, vsts_account_name, account_created, self._azure_info.subscription_id, self._azure_info.resource_group_name, self._azure_info.website_name)
    else:
        raise RuntimeError('Unknown status returned from provisioning_configuration: ' + response.ci_configuration.result.status)
|
def selected(self):
    """Check whether every matched element is selected.

    Returns:
        bool
    """
    results = self.map(lambda el: el.is_selected(), 'selected').results
    # An empty result set means nothing matched, which counts as False.
    return bool(results) and all(results)
|
def normalize(location_name, preserve_commas=False):
    """Return *location_name* lower-cased, with punctuation collapsed to
    single spaces and surrounding whitespace stripped."""
    def _substitute(match):
        # Keep a single comma when requested and the matched run has one.
        if preserve_commas and ',' in match.group(0):
            return ','
        return ' '
    return NORMALIZATION_RE.sub(_substitute, location_name).strip().lower()
|
def compete(source_x, source_o, timeout=None, memlimit=None, cgroup='tictactoe', cgroup_path='/sys/fs/cgroup'):
    """Fights two source files.

    Returns either:
    * ('ok', 'x'|'draw'|'o', GAMEPLAY)
    * ('error', GUILTY, REASON, GAMEPLAY)

    REASON := utf8-encoded error string (can be up to 65k chars)
    GAMEPLAY := [NUM]
    GUILTY := 'x'|'o' (during whose turn the error occured)
    NUM := 1..81 | 0

    NUM = 0 means the move resulted in error (then ERROR_STRING is non-empty)
    GAMEPLAY is never more than 255 characters long:
    len(",".join(map(str, range(1, 81)))) == 230
    """
    gameplay = []
    for xo, moveresult, log in run_interactive(source_x, source_o, timeout, memlimit, cgroup, cgroup_path):
        if moveresult[0] == 'error':
            # A trailing 0 marks the erroneous move in the gameplay log.
            return 'error', xo, moveresult[1], gameplay + [0]
        elif moveresult[0] == 'state_coords':
            gameplay.append(coords_to_num(moveresult[1][1]))
            state = moveresult[1][0]
            if state == 'draw' or state == 'x' or state == 'o':
                return 'ok', state, gameplay
    # NOTE(review): if run_interactive is exhausted without a terminal state
    # this implicitly returns None -- confirm callers never hit that case.
|
def cli(env, ack_all):
    """Summary and acknowledgement of upcoming and ongoing maintenance events.

    :param env: CLI environment used for API access and table output.
    :param ack_all: when truthy, acknowledge every upcoming event.
    """
    manager = AccountManager(env.client)
    events = manager.get_upcoming_events()
    if ack_all:
        for event in events:
            # Store the acknowledgement result on the event so it shows up
            # in the rendered table.
            result = manager.ack_event(event['id'])
            event['acknowledgedFlag'] = result
    env.fout(event_table(events))
|
def set_key(self, section, key, value):
    """Stores given key in settings file.

    :param section: Current section to save the key into.
    :type section: unicode
    :param key: Current key to save.
    :type key: unicode
    :param value: Current key value to save.
    :type value: object
    """
    LOGGER.debug("> Saving '{0}' in '{1}' section with value: '{2}' in settings file.".format(key, section, foundations.strings.to_string(value)))
    # Scope the key under its section, mirroring QSettings group semantics.
    self.__settings.beginGroup(section)
    self.__settings.setValue(key, QVariant(value))
    self.__settings.endGroup()
|
def parse_args(args, kwargs):
    """Returns a kwargs dictionary by turning args into kwargs.

    Positional color/style names ('red', 'on_blue', 'bold', ...) are folded
    into the 'fg'/'bg'/style keyword arguments, and color names given as
    keyword values are resolved to their numeric codes.

    Raises ValueError on unknown, invalid or conflicting arguments.
    """
    if 'style' in kwargs:
        args += (kwargs['style'],)
        del kwargs['style']
    for arg in args:
        if not isinstance(arg, (bytes, unicode)):
            raise ValueError("args must be strings:" + repr(args))
        if arg.lower() in FG_COLORS:
            if 'fg' in kwargs:
                raise ValueError("fg specified twice")
            # BUG FIX: look up with the same lowered key that was tested,
            # otherwise mixed-case names raised KeyError.
            kwargs['fg'] = FG_COLORS[arg.lower()]
        elif arg.lower().startswith('on_') and arg[3:].lower() in BG_COLORS:
            if 'bg' in kwargs:
                # BUG FIX: previously reported "fg specified twice" here.
                raise ValueError("bg specified twice")
            kwargs['bg'] = BG_COLORS[arg[3:].lower()]
        elif arg.lower() in STYLES:
            kwargs[arg] = True
        else:
            raise ValueError("couldn't process arg: " + repr(arg))
    for k in kwargs:
        if k not in ['fg', 'bg'] + list(STYLES.keys()):
            raise ValueError("Can't apply that transformation")
    if 'fg' in kwargs:
        # Accept color names as keyword values too, then validate the code.
        if kwargs['fg'] in FG_COLORS:
            kwargs['fg'] = FG_COLORS[kwargs['fg']]
        if kwargs['fg'] not in list(FG_COLORS.values()):
            raise ValueError("Bad fg value: %r" % kwargs['fg'])
    if 'bg' in kwargs:
        if kwargs['bg'] in BG_COLORS:
            kwargs['bg'] = BG_COLORS[kwargs['bg']]
        if kwargs['bg'] not in list(BG_COLORS.values()):
            raise ValueError("Bad bg value: %r" % kwargs['bg'])
    return kwargs
|
def get_monitor(self, topics):
    """Attempt to find a Monitor in device cloud matching the provided topics.

    :param topics: a string list of topics (e.g. ``['DeviceCore[U]', 'FileDataCore']``)

    Returns a :class:`DeviceCloudMonitor` if found, otherwise None.
    """
    matches = self.get_monitors(MON_TOPIC_ATTR == ",".join(topics))
    # Return the first match even if there are multiple; None when empty.
    return next(iter(matches), None)
|
def beta(self, val: float) -> None:
    "Set beta (or alpha as makes sense for given optimizer)."
    if val is None:
        return
    # Adam-style optimizers store (beta1, beta2) under 'betas'...
    if 'betas' in self.opt_keys:
        self.set_val('betas', (self._mom, listify(val, self._beta)))
    elif 'alpha' in self.opt_keys:
        # ...while e.g. RMSprop-style optimizers expose a single 'alpha'.
        self.set_val('alpha', listify(val, self._beta))
    # Remember the value so later momentum/beta updates stay consistent.
    self._beta = listify(val, self._beta)
|
def set_domain_id(self, value=None, default=False, disable=False):
    """Configure the mlag domain-id value.

    Args:
        value (str): The value to configure the domain-id
        default (bool): Configures the domain-id using the default keyword
        disable (bool): Negates the domain-id using the no keyword

    Returns:
        bool: Returns True if the commands complete successfully
    """
    # Delegate to the generic mlag configuration helper.
    return self._configure_mlag('domain-id', value, default, disable)
|
def msgHasAcceptableInstId(self, msg, frm) -> bool:
    """Return true if the instance id of message corresponds to a correct
    replica.

    :param msg: the node message to validate
    :param frm: sender of the message
    :return: whether the message may be processed now
    """
    # TODO: refactor this! this should not do anything except checking!
    instId = getattr(msg, f.INST_ID.nm, None)
    if not (isinstance(instId, int) and instId >= 0):
        return False
    if instId >= self.requiredNumberOfInstances:
        # Queue messages addressed to protocol instances that do not exist
        # yet so they can be replayed once the replica is created.
        if instId not in self.msgsForFutureReplicas:
            self.msgsForFutureReplicas[instId] = deque()
        self.msgsForFutureReplicas[instId].append((msg, frm))
        logger.debug("{} queueing message {} for future protocol instance {}".format(self, msg, instId))
        return False
    return True
|
def angle(self):
    """Angle value."""
    # Fall back to the per-layer lighting angle unless the document-wide
    # global light is in effect.
    if not self.use_global_light:
        return self.value.get(Key.LocalLightingAngle).value
    return self._image_resources.get_data('global_angle', 30.0)
|
def _prepare_output ( partitions , verbose ) :
"""Returns dict with ' raw ' and ' message ' keys filled ."""
|
out = { }
partitions_count = len ( partitions )
out [ 'raw' ] = { 'offline_count' : partitions_count , }
if partitions_count == 0 :
out [ 'message' ] = 'No offline partitions.'
else :
out [ 'message' ] = "{count} offline partitions." . format ( count = partitions_count )
if verbose :
lines = ( '{}:{}' . format ( topic , partition ) for ( topic , partition ) in partitions )
out [ 'verbose' ] = "Partitions:\n" + "\n" . join ( lines )
else :
cmdline = sys . argv [ : ]
cmdline . insert ( 1 , '-v' )
out [ 'message' ] += '\nTo see all offline partitions run: ' + ' ' . join ( cmdline )
if verbose :
out [ 'raw' ] [ 'partitions' ] = [ { 'topic' : topic , 'partition' : partition } for ( topic , partition ) in partitions ]
return out
|
def setShowGridRows(self, state):
    """Sets whether or not the grid rows should be rendered when drawing the
    grid.

    :param state | <bool>
    """
    item_delegate = self.itemDelegate()
    # Only the custom XTreeWidgetDelegate knows how to toggle grid rows.
    if isinstance(item_delegate, XTreeWidgetDelegate):
        item_delegate.setShowGridRows(state)
|
def _calc_T_var ( self , X ) -> int :
"""Calculate the number of samples , T , from the shape of X"""
|
shape = X . shape
tensor_rank : int = len ( shape )
if tensor_rank == 0 :
return 1
if tensor_rank == 1 :
return shape [ 0 ]
if tensor_rank == 2 :
if shape [ 1 ] > 1 :
raise ValueError ( 'Initial value of a variable must have dimension T*1.' )
return shape [ 0 ]
|
def _inline(ins):
    """Inline assembler code.

    Splits the quad's assembler text into lines, drops comments and blank
    lines, puts labels on their own lines, indents instructions, and
    registers the chunk under a fresh ASM identifier wrapped in #line
    directives.
    """
    tmp = [x.strip(' \t\r\n') for x in ins.quad[1].split('\n')]  # Split lines
    i = 0
    while i < len(tmp):
        if not tmp[i] or tmp[i][0] == ';':  # a comment or empty string?
            tmp.pop(i)
            continue
        if tmp[i][0] == '#':  # A preprocessor directive
            i += 1
            continue
        match = RE_LABEL.match(tmp[i])
        if not match:
            tmp[i] = '\t' + tmp[i]
            i += 1
            continue
        # BUG FIX: was ``len(tmp[i][-1]) == ':'`` which compares an int to a
        # string and is never True, so label-only lines went through the
        # (wasteful) split-and-reinsert path below.
        if tmp[i][-1] == ':':
            # This is already a single label
            i += 1
            continue
        # Split the label onto its own line, keep the remainder after it.
        tmp[i] = tmp[i][match.end() + 1:].strip(' \n')
        tmp.insert(i, match.group())
        i += 1
    output = []
    if not tmp:
        return output
    ASMLABEL = new_ASMID()
    ASMS[ASMLABEL] = tmp
    output.append('#line %s' % ins.quad[2])
    output.append(ASMLABEL)
    output.append('#line %i' % (int(ins.quad[2]) + len(tmp)))
    return output
|
def safe_url(self, url, errors='strict'):
    """URL-encode ``url`` for a safe HTTP request.

    Args:
        url (string): The string to URL Encode (may be None).

    Returns:
        (string): The urlencoded string, or None when ``url`` is None.
    """
    if url is None:
        return None
    return quote(self.s(url, errors=errors), safe='~')
|
def get_bandstructure_by_material_id(self, material_id, line_mode=True):
    """Get a BandStructure corresponding to a material_id.

    REST Endpoint: https://www.materialsproject.org/rest/v2/materials/<mp-id>/vasp/bandstructure or
    https://www.materialsproject.org/rest/v2/materials/<mp-id>/vasp/bandstructure_uniform

    Args:
        material_id (str): Materials Project material_id.
        line_mode (bool): If True, fetch a BandStructureSymmLine object
            (default). If False, return the uniform band structure.

    Returns:
        A BandStructure object.
    """
    if line_mode:
        prop = "bandstructure"
    else:
        prop = "bandstructure_uniform"
    records = self.get_data(material_id, prop=prop)
    return records[0][prop]
|
def get_gam_splines(start=0, end=100, n_bases=10, spline_order=3, add_intercept=True):
    """Main function required by (TF) Concise class."""
    # make sure n_bases is an int
    assert type(n_bases) == int
    positions = np.arange(start, end + 1)
    # Same knot positions as with mgcv:
    # https://github.com/cran/mgcv/blob/master/R/smooth.r#L1560
    knots = get_knots(start, end, n_bases, spline_order)
    basis = get_X_spline(positions, knots, n_bases, spline_order, add_intercept)
    penalty = get_S(n_bases, spline_order, add_intercept)
    return basis, penalty, knots
|
def my_init(self):
    """Method automatically called from base class constructor.

    Records the start time and prepares the lock-protected stats mapping.
    """
    self._stats_lock = threading.Lock()
    self._stats = {}
    self._start_time = time.time()
|
def reverse_complement(self):
    '''str: Returns the reverse complement of ``Sequence.sequence``.'''
    # Compute lazily and cache the result on first access.
    cached = self._reverse_complement
    if cached is None:
        cached = self._get_reverse_complement()
        self._reverse_complement = cached
    return cached
|
def get_tensor_info(self):
    """See base class for details."""
    # Ask each feature for its own tensor info, keyed by feature name.
    info = {}
    for feature_key, feature in self._feature_dict.items():
        info[feature_key] = feature.get_tensor_info()
    return info
|
def _SanitizeField ( self , field ) :
"""Sanitizes a field for output .
This method replaces any field delimiters with a space .
Args :
field ( str ) : name of the field to sanitize .
Returns :
str : value of the field ."""
|
if self . _field_delimiter and isinstance ( field , py2to3 . STRING_TYPES ) :
return field . replace ( self . _field_delimiter , ' ' )
return field
|
def run_parallel(pipeline, input_gen, options=None, ncpu=4, chunksize=200):
    """Run a pipeline in parallel over an input generator, cutting it into
    small chunks.

    >>> # if we have a simple component
    >>> from reliure.pipeline import Composable
    >>> # that we want to run over a given input:
    >>> input = "abcde"
    >>> import string
    >>> pipeline = Composable(lambda letters: (l.upper() for l in letters))
    >>> res = run_parallel(pipeline, input, ncpu=2, chunksize=2)
    >>> # Note: res should be equals to [['C', 'D'], ['A', 'B'], ['E']]
    >>> # but it seems that there is a bug with py.test and mp...
    """
    import queue  # provides the Empty exception raised by get_nowait()
    # BUG FIX: avoid the shared mutable default argument ``options={}``.
    if options is None:
        options = {}
    t0 = time()
    # FIXME: there is a known issue when pipeline results are "big" objects,
    # the merge is blocking... to be investigated
    # TODO: add get_pipeline args to provide a fct to build the pipeline (in each worker)
    logger = logging.getLogger("reliure.run_parallel")
    jobs = []
    results = []
    Qdata = mp.JoinableQueue(ncpu * 2)    # input queue
    Qresult = mp.Queue()                  # result queue
    # ensure input_gen is really an iterator, not a list
    if hasattr(input_gen, "__len__"):
        input_gen = iter(input_gen)
    for wnum in range(ncpu):
        logger.debug("create worker #%s" % wnum)
        worker = mp.Process(target=_reliure_worker, args=(wnum, Qdata, Qresult, pipeline, options))
        worker.start()
        jobs.append(worker)
    while True:
        # consume chunksize elements from input_gen
        chunk = tuple(islice(input_gen, chunksize))
        if not len(chunk):
            break
        logger.info("send a chunk of %s elemets to a worker" % len(chunk))
        Qdata.put(chunk)
    logger.info("all data has beed send to workers")
    # wait until all tasks are done
    Qdata.join()
    logger.debug("wait for workers...")
    for worker in jobs:
        worker.terminate()
    logger.debug("merge results")
    try:
        while not Qresult.empty():
            logger.debug("result queue still have %d elements" % Qresult.qsize())
            res = Qresult.get_nowait()
            results.append(res)
    except queue.Empty:
        # BUG FIX: was ``except mp.Queue.Empty`` -- mp.Queue is a factory
        # function with no ``Empty`` attribute, so the handler itself
        # raised AttributeError whenever the race actually occurred.
        logger.debug("result queue is empty")
        pass
    logger.info("Pipeline executed in %1.3f sec" % (time() - t0))
    return results
|
def cancel(self, mark_completed_as_cancelled=False):
    """Cancel the future. If the future has not been started yet, it will never
    start running. If the future is already running, it will run until the
    worker function exists. The worker function can check if the future has
    been cancelled using the :meth:`cancelled` method.

    If the future has already been completed, it will not be marked as
    cancelled unless you set *mark_completed_as_cancelled* to :const:`True`.

    :param mark_completed_as_cancelled: If this is :const:`True` and the
        future has already completed, it will be marked as cancelled anyway.
    """
    with self._lock:
        # A completed future is left untouched unless explicitly requested.
        if self._completed and not mark_completed_as_cancelled:
            return
        self._cancelled = True
        callbacks = self._prepare_done_callbacks()
        callbacks()
|
def has_mixed_eol_chars(text):
    """Detect whether *text* mixes different end-of-line characters."""
    eol = get_eol_chars(text)
    if eol is None:
        return False
    # Re-join the lines with the dominant EOL; any difference from the
    # original means the EOLs were mixed.
    normalized = eol.join((text + eol).splitlines())
    return repr(normalized) != repr(text)
|
def conv_from_name(name):
    """Understand simulink syntax for fixed types and return the proper
    conversion structure.

    @param name: the type name as in simulink (i.e. UFix_8_7...)
    @raise ConversionError: When cannot decode the string
    """
    _match = re.match(r"^(?P<signed>u?fix)_(?P<bits>\d+)_(?P<binary>\d+)", name, flags=re.I)
    if not _match:
        raise ConversionError("Cannot interpret name: " + name)
    params = _match.groupdict()
    # BUG FIX: the match is case-insensitive (re.I), so normalize the
    # captured prefix before comparing -- previously "Fix_8_7" was treated
    # as unsigned because 'Fix' != 'fix'.
    signed = params['signed'].lower() == 'fix'
    bits = int(params['bits'])
    binary = int(params['binary'])
    return get_conv(bits, binary, signed)
|
def _check_status(self):
    """Check repo status and raise :class:`DirtyException` if dirty."""
    logger.info('Checking repo status')
    # ``--porcelain`` prints nothing for a clean work tree and one line per
    # modified/untracked file otherwise.
    status = self.log_call(['git', 'status', '--porcelain'], callwith=subprocess.check_output, cwd=self.cwd, )
    if status:
        raise DirtyException(status)
|
def bilinear_interp(data, x, y):
    """Interpolate input ``data`` at "pixel" coordinates ``x`` and ``y``.

    Parameters
    ----------
    data : 2-D ndarray
        The image/grid to sample.
    x, y : array_like
        Pixel coordinates (x indexes columns, y indexes rows); must have
        identical shapes.

    Returns
    -------
    ndarray of ``data``'s dtype with the same shape as ``x``.

    Raises
    ------
    ValueError
        If ``x`` and ``y`` have different shapes.
    """
    x = np.asarray(x)
    y = np.asarray(y)
    if x.shape != y.shape:
        raise ValueError("X- and Y-coordinates must have identical shapes.")
    out_shape = x.shape
    x = x.ravel()
    y = y.ravel()
    # Integer indices of the upper-left corner of each interpolation cell,
    # clipped so the lower-right neighbour stays inside the image.
    # BUG FIX: ``np.clip(x, ..., out=x0)`` with an integer out-array rejects
    # float input under modern NumPy casting rules, and ``np.int`` has been
    # removed; clip first, then truncate explicitly.
    x0 = np.clip(x, 0, data.shape[1] - 2).astype(np.intp)
    y0 = np.clip(y, 0, data.shape[0] - 2).astype(np.intp)
    x1 = x0 + 1
    y1 = y0 + 1
    # Cell corner values.
    f00 = data[(y0, x0)]
    f10 = data[(y1, x0)]
    f01 = data[(y0, x1)]
    f11 = data[(y1, x1)]
    # Bilinear weights.
    w00 = (x1 - x) * (y1 - y)
    w10 = (x1 - x) * (y - y0)
    w01 = (x - x0) * (y1 - y)
    w11 = (x - x0) * (y - y0)
    interp = w00 * f00 + w10 * f10 + w01 * f01 + w11 * f11
    return interp.reshape(out_shape).astype(data.dtype.type)
|
def changelist_view(self, request, extra_context=None):
    """Get object currently tracked and add a button to get back to it.

    The ``object`` GET parameter is expected in the form
    ``<content_type_id>:<object_id>``.
    """
    extra_context = extra_context or {}
    if 'object' in request.GET.keys():
        value = request.GET['object'].split(':')
        content_type = get_object_or_404(ContentType, id=value[0], )
        tracked_object = get_object_or_404(content_type.model_class(), id=value[1], )
        # Expose the tracked object (and its meta) to the template so it can
        # render a back-link.
        extra_context['tracked_object'] = tracked_object
        extra_context['tracked_object_opts'] = tracked_object._meta
    return super(TrackingEventAdmin, self).changelist_view(request, extra_context)
|
def show(self, ticket):
    """Exchange a QR-code ticket for its image.

    Details:
    https://mp.weixin.qq.com/wiki?t=resource/res_main&id=mp1443433542

    :param ticket: QR-code ticket, either the ticket string itself or the
                   full dict returned by :func:`create`
    :return: the ``requests`` Response object

    Usage::

        from wechatpy import WeChatClient

        client = WeChatClient('appid', 'secret')
        res = client.qrcode.show('ticket data')
    """
    ticket_value = ticket['ticket'] if isinstance(ticket, dict) else ticket
    return requests.get(
        url='https://mp.weixin.qq.com/cgi-bin/showqrcode',
        params={'ticket': ticket_value},
    )
|
def resolve_url(self, resource_name):
    """Return a URL to a local copy of a resource, suitable for get_generator().

    :param resource_name: name of the resource to resolve.
    """
    if self.target_format == 'csv' and self.target_file != DEFAULT_METATAB_FILE:
        # For CSV packages, need to get the package and open it to get the
        # resource URL, because they are always absolute web URLs and may
        # not be related to the location of the metadata.
        s = self.get_resource()
        rs = s.doc.resource(resource_name)
        return parse_app_url(rs.url)
    else:
        jt = self.join_target(resource_name)
        rs = jt.get_resource()
        t = rs.get_target()
        return t
|
def normalize_reference_name(name):
    """Search the dictionary of species-specific references for a reference
    name matching *name* up to capitalization and surrounding whitespace.

    Raises ValueError when no matching reference is found."""
    query = name.strip().lower()
    for reference_name in Species._reference_names_to_species.keys():
        if reference_name.lower() == query:
            return reference_name
    raise ValueError("Reference genome '%s' not found" % name)
|
def main():
    """Main entry point for iotile-sgcompile.

    Parses the sensor-graph file named on the command line and writes it
    out in the requested format ('ast', 'nodes', or one of KNOWN_FORMATS).
    """
    arg_parser = build_args()
    args = arg_parser.parse_args()
    model = DeviceModel()
    parser = SensorGraphFileParser()
    parser.parse_file(args.sensor_graph)
    # The 'ast' format only needs the parse tree, not a compiled graph.
    if args.format == u'ast':
        write_output(parser.dump_tree(), True, args.output)
        sys.exit(0)
    parser.compile(model)
    if not args.disable_optimizer:
        opt = SensorGraphOptimizer()
        opt.optimize(parser.sensor_graph, model=model)
    if args.format == u'nodes':
        output = u'\n'.join(parser.sensor_graph.dump_nodes()) + u'\n'
        write_output(output, True, args.output)
    else:
        if args.format not in KNOWN_FORMATS:
            print("Unknown output format: {}".format(args.format))
            sys.exit(1)
        output_format = KNOWN_FORMATS[args.format]
        output = output_format.format(parser.sensor_graph)
        write_output(output, output_format.text, args.output)
|
def create_pipeline(self, name, description, **kwargs):
    '''Creates a pipeline with the provided attributes.

    Args:
        name         required name string
        description  required description string
        kwargs       {name, description, orgWide, aclEntries} user
                     specifiable ones only

    return  (status code, pipeline_dict) (as created), or
            (requests.codes.bad_request, None) when name/description missing
    '''
    # req sanity check
    if not (name and description):
        return requests.codes.bad_request, None
    # The explicit name/description win over anything passed in kwargs.
    kwargs.update({'name': name, 'description': description})
    new_pl = StreakPipeline(**kwargs)
    uri = '/'.join([self.api_uri, self.pipelines_suffix])
    code, r_data = self._req('put', uri, new_pl.to_dict())
    return code, r_data
|
def inject_experiment():
    """Inject experiment and environment variables into the template context."""
    experiment = Experiment(session)
    return {'experiment': experiment, 'env': os.environ}
|
def session_rollback(self, session):
    """Send session_rollback signal in sqlalchemy ``after_rollback``.

    This marks the failure of session so the session may enter commit
    phase.
    """
    # this may happen when there's nothing to rollback
    if not hasattr(session, 'meepo_unique_id'):
        self.logger.debug("skipped - session_rollback")
        return
    # del session meepo id after rollback
    self.logger.debug("%s - after_rollback" % session.meepo_unique_id)
    signal("session_rollback").send(session)
    self._session_del(session)
|
def geostrophic_wind(heights, f, dx, dy):
    r"""Calculate the geostrophic wind given from the heights or geopotential.

    Parameters
    ----------
    heights : (M, N) ndarray
        The height field, with either leading dimensions of (x, y) or trailing dimensions
        of (y, x), depending on the value of ``dim_order``.
    f : array_like
        The coriolis parameter.  This can be a scalar to be applied
        everywhere or an array of values.
    dx : float or ndarray
        The grid spacing(s) in the x-direction.  If an array, there should be one item less than
        the size of `heights` along the applicable axis.
    dy : float or ndarray
        The grid spacing(s) in the y-direction.  If an array, there should be one item less than
        the size of `heights` along the applicable axis.

    Returns
    -------
    A 2-item tuple of arrays
        A tuple of the u-component and v-component of the geostrophic wind.

    Notes
    -----
    If inputs have more than two dimensions, they are assumed to have either leading dimensions
    of (x, y) or trailing dimensions of (y, x), depending on the value of ``dim_order``.
    """
    # A [length]**2 dimensionality indicates geopotential-like input, which
    # already carries the gravity factor; plain heights need g explicitly.
    if heights.dimensionality['[length]'] == 2.0:
        norm_factor = 1. / f
    else:
        norm_factor = mpconsts.g / f
    dhdy = first_derivative(heights, delta=dy, axis=-2)
    dhdx = first_derivative(heights, delta=dx, axis=-1)
    # u_g = -(g/f) dZ/dy,  v_g = (g/f) dZ/dx
    return -norm_factor * dhdy, norm_factor * dhdx
|
def _interpolate(im, x, y, name):
    """Perform bilinear sampling on im given x, y coordinates.

    Implements the differentiable sampling mechanism with bilinear kernel
    in https://arxiv.org/abs/1506.02025.
    Modified from https://github.com/tensorflow/models/tree/master/transformer

    x, y are tensors specifying normalized coordinates [-1, 1] to be sampled
    on im. (e.g.) (-1, -1) in x, y corresponds to pixel location (0, 0) in im,
    and (1, 1) in x, y corresponds to the bottom right pixel in im.

    Args:
        im: A tensor of size [batch_size, height, width, channels]
        x: A tensor of size [batch_size, height, width, 1] representing the
            sampling x coordinates normalized to range [-1, 1].
        y: A tensor of size [batch_size, height, width, 1] representing the
            sampling y coordinates normalized to range [-1, 1].
        name: The name for this operation is also used to create/find the
            parameter variables.

    Returns:
        A tensor of size [batch_size, height, width, channels]
    """
    with tf.variable_scope(name):
        # Flatten coordinates so each sample maps to one 1-D gather index.
        x = tf.reshape(x, [-1])
        y = tf.reshape(y, [-1])
        # constants
        num_batch = tf.shape(im)[0]
        _, height, width, channels = im.get_shape().as_list()
        x = tf.to_float(x)
        y = tf.to_float(y)
        height_f = tf.cast(height, 'float32')
        width_f = tf.cast(width, 'float32')
        zero = tf.constant(0, dtype=tf.int32)
        max_y = tf.cast(tf.shape(im)[1] - 1, 'int32')
        max_x = tf.cast(tf.shape(im)[2] - 1, 'int32')
        # scale indices from [-1, 1] to [0, width-1 / height-1]
        x = (x + 1.0) * (width_f - 1.0) / 2.0
        y = (y + 1.0) * (height_f - 1.0) / 2.0
        # do sampling: (x0, y0)/(x1, y1) are the integer corners surrounding
        # the continuous sample location.
        x0 = tf.cast(tf.floor(x), 'int32')
        x1 = x0 + 1
        y0 = tf.cast(tf.floor(y), 'int32')
        y1 = y0 + 1
        # Clamp to the image border so out-of-range samples reuse edge pixels.
        x0 = tf.clip_by_value(x0, zero, max_x)
        x1 = tf.clip_by_value(x1, zero, max_x)
        y0 = tf.clip_by_value(y0, zero, max_y)
        y1 = tf.clip_by_value(y1, zero, max_y)
        dim2 = width
        dim1 = width * height
        # Create base index: offset of each batch element within the image
        # flattened to [batch*height*width, channels].
        base = tf.range(num_batch) * dim1
        base = tf.reshape(base, [-1, 1])
        base = tf.tile(base, [1, height * width])
        base = tf.reshape(base, [-1])
        base_y0 = base + y0 * dim2
        base_y1 = base + y1 * dim2
        idx_a = base_y0 + x0  # (x0, y0) corner
        idx_b = base_y1 + x0  # (x0, y1) corner
        idx_c = base_y0 + x1  # (x1, y0) corner
        idx_d = base_y1 + x1  # (x1, y1) corner
        # use indices to lookup pixels in the flat image and restore channels dim
        im_flat = tf.reshape(im, tf.stack([-1, channels]))
        im_flat = tf.to_float(im_flat)
        pixel_a = tf.gather(im_flat, idx_a)
        pixel_b = tf.gather(im_flat, idx_b)
        pixel_c = tf.gather(im_flat, idx_c)
        pixel_d = tf.gather(im_flat, idx_d)
        # and finally calculate interpolated values: each corner pixel is
        # weighted by the area of the opposite sub-rectangle (bilinear kernel).
        x1_f = tf.to_float(x1)
        y1_f = tf.to_float(y1)
        wa = tf.expand_dims(((x1_f - x) * (y1_f - y)), 1)
        wb = tf.expand_dims((x1_f - x) * (1.0 - (y1_f - y)), 1)
        wc = tf.expand_dims(((1.0 - (x1_f - x)) * (y1_f - y)), 1)
        wd = tf.expand_dims(((1.0 - (x1_f - x)) * (1.0 - (y1_f - y))), 1)
        output = tf.add_n([wa * pixel_a, wb * pixel_b, wc * pixel_c, wd * pixel_d])
        output = tf.reshape(output, shape=tf.stack([num_batch, height, width, channels]))
        return output
|
def build_tree(X, y, criterion, max_depth, current_depth=1):
    """Recursively build the decision tree.

    Stops (returning a ``Leaf``) when the depth limit is reached
    (``max_depth < 0`` disables the limit) or when no question yields
    any information gain.
    """
    # Depth limit reached (a negative max_depth means "no limit").
    if 0 <= max_depth <= current_depth:
        return Leaf(y)
    gain, question = find_best_question(X, y, criterion)
    # No split improves purity: this node becomes a leaf.
    if gain == 0:
        return Leaf(y)
    true_X, false_X, true_y, false_y = split(X, y, question)
    next_depth = current_depth + 1
    # Recurse into both answers of the question and join them under a Node.
    return Node(
        question=question,
        true_branch=build_tree(true_X, true_y, criterion, max_depth, current_depth=next_depth),
        false_branch=build_tree(false_X, false_y, criterion, max_depth, current_depth=next_depth),
    )
|
def print(root):  # type: (Union[Nonterminal, Terminal, Rule]) -> str
    """Transform the parsed tree to the string. Expects tree like structure.

    NOTE: this function shadows the builtin ``print`` within this module.

    You can see example output below.

    (R)SplitRules26
    |--(N)Iterate
    |  `--(R)SplitRules30
    |     `--(N)Symb
    |        `--(R)SplitRules4
    |           `--(T)e
    `--(N)Concat
       `--(R)SplitRules27
          `--(N)Iterate
             `--(R)SplitRules30
                `--(N)Symb
                   `--(R)SplitRules5
                      `--(T)f

    :param root: Root node of the parsed tree.
    :return: String representing the parsed tree (ends with newline).
    """
    # print the part before the element: draws the tree "rails" for all
    # still-open ancestor columns, then the connector for this node.
    def print_before(previous=0, defined=None, is_last=False):
        defined = defined or {}
        ret = ''
        if previous != 0:
            for i in range(previous - 1):
                # if the column is still active write |
                if i in defined:
                    ret += '|  '
                # otherwise just print space
                else:
                    ret += '   '
            # if current element is the last child, print `-- instead of |--
            ret += '`--' if is_last else '|--'
        return ret

    # print the terminal (leaf): no children to recurse into
    def terminal_traverse(term, callback, previous=0, defined=None, is_last=False):
        before = print_before(previous, defined, is_last)
        yield before + '(T)' + str(term.s) + '\n'

    # print the nonterminal: always has exactly one child rule (to_rule)
    def nonterminal_traverse(nonterm, callback, previous=0, defined=None, is_last=False):
        before = print_before(previous, defined, is_last)
        yield before + '(N)' + nonterm.__class__.__name__ + '\n'
        yield callback(nonterm.to_rule, previous + 1, defined, True)

    # print the rule and recurse into all of its symbols
    def rule_traverse(rule, callback, previous=0, defined=None, is_last=False):
        # print the rule name
        before = print_before(previous, defined, is_last)
        yield before + '(R)' + rule.__class__.__name__ + '\n'
        # register new column so descendants keep drawing the | rail
        defined = defined or set()
        defined.add(previous)
        # print all childs except the last one
        for i in range(len(rule.to_symbols) - 1):
            yield callback(rule.to_symbols[i], previous + 1, defined, False)
        # unregister the column as last child print it automatically
        defined.remove(previous)
        yield callback(rule.to_symbols[-1], previous + 1, defined, True)

    res = Traversing.traverse_separated(root, rule_traverse, nonterminal_traverse, terminal_traverse)
    return str.join("", res)
|
def _add_inline_definition(item, statement):
    '''Adds an inline (unnamed) definition to *statement*.

    The module-level ``_current_statement`` is temporarily repointed at the
    new unnamed statement while its options are parsed, then restored.
    '''
    global _current_statement
    backup = _current_statement
    type_, options = _expand_one_key_dictionary(item)
    _current_statement = UnnamedStatement(type=type_)
    # Bug fix: restore the previous statement even if parsing raises,
    # otherwise _current_statement is left pointing at the failed inline
    # definition for all subsequent parsing.
    try:
        _parse_statement(options)
        statement.add_child(_current_statement)
    finally:
        _current_statement = backup
|
def center_cell_text(cell):
    """Horizontally center the text within a cell's grid.

    Like this::

        |foo    |  -->  |  foo  |

    Parameters
    ----------
    cell : dashtable.data2rst.Cell

    Returns
    -------
    cell : dashtable.data2rst.Cell
    """
    lines = cell.text.split('\n')
    # Interior width excludes the two border characters of the first line.
    cell_width = len(lines[0]) - 2
    # Strip the border/padding from every interior line.
    stripped = ['']
    for line in lines[1:-1]:
        stripped.append(line[2:len(line) - 2].rstrip())
    stripped.append('')
    longest = get_longest_line_length('\n'.join(stripped))
    pad_left = math.floor((cell_width - longest) / 2) * ' '
    centered = []
    for text in stripped:
        shifted = pad_left + text
        centered.append(shifted + (cell_width - len(shifted)) * ' ')
    # Re-attach the original border characters around each centered line.
    for i in range(1, len(lines) - 1):
        lines[i] = lines[i][0] + centered[i] + lines[i][-1]
    cell.text = '\n'.join(lines)
    return cell
|
def output(data, **kwargs):  # pylint: disable=unused-argument
    '''Print out via pretty print'''
    # Exceptions are rendered through their text representation.
    if isinstance(data, Exception):
        data = six.text_type(data)
    # A configured non-negative indent overrides pprint's default.
    indent = __opts__.get('output_indent', -1)
    if indent >= 0:
        return pprint.pformat(data, indent=indent)
    return pprint.pformat(data)
|
def count_de_novos_per_transcript(ensembl, gene_id, de_novos=None):
    """count de novos in transcripts for a gene.

    Args:
        ensembl: EnsemblRequest object to request data from ensembl
        gene_id: HGNC symbol for gene
        de_novos: list of de novo positions, so we can check they all fit in
            the gene transcript (defaults to an empty list)

    Returns:
        dictionary of lengths and de novo counts, indexed by transcript IDs.

    Raises:
        IndexError: if the gene has no coding transcripts.
    """
    # Bug fix: the default was a mutable list literal, which is shared
    # between calls; use the None-sentinel idiom instead.
    if de_novos is None:
        de_novos = []
    transcripts = get_transcript_ids(ensembl, gene_id)
    # TODO: allow for genes without any coding sequence.
    if len(transcripts) == 0:
        raise IndexError("{0} lacks coding transcripts".format(gene_id))
    # count the de novos observed in all transcripts
    counts = {}
    for key in transcripts:
        try:
            gene = construct_gene_object(ensembl, key)
            total = len(get_de_novos_in_transcript(gene, de_novos))
            if total > 0:
                counts[key] = {"n": total, "len": transcripts[key]}
        except ValueError:
            # Transcripts we cannot build gene objects for are skipped.
            pass
    return counts
|
def get_account(self, account, use_sis_id=False, **kwargs):
    """Retrieve information on an individual account.

    :calls: `GET /api/v1/accounts/:id <https://canvas.instructure.com/doc/api/accounts.html#method.accounts.show>`_

    :param account: The object or ID of the account to retrieve.
    :type account: int, str or :class:`canvasapi.account.Account`
    :param use_sis_id: Whether or not account_id is an sis ID.
        Defaults to `False`.
    :type use_sis_id: bool
    :rtype: :class:`canvasapi.account.Account`
    """
    # SIS ids are used verbatim; otherwise resolve an Account object/ID.
    if use_sis_id:
        uri = 'accounts/sis_account_id:{}'.format(account)
    else:
        uri = 'accounts/{}'.format(obj_or_id(account, "account", (Account,)))
    response = self.__requester.request('GET', uri, _kwargs=combine_kwargs(**kwargs))
    return Account(self.__requester, response.json())
|
def _get_leftMargin ( self ) :
"""This must return an int or float .
If the glyph has no outlines , this must return ` None ` .
Subclasses may override this method ."""
|
bounds = self . bounds
if bounds is None :
return None
xMin , yMin , xMax , yMax = bounds
return xMin
|
def parse_comparison_operation(operation: str) -> Tuple[Optional[str], str]:
    """Parse the comparison operator in an operation.

    Returns an ``(operator, operand)`` tuple where ``operator`` is one of
    ``'<='``, ``'>='``, ``'<'``, ``'>'`` or ``None`` when no comparison
    prefix is present.

    Raises:
        QueryParserException: if *operation* is empty or whitespace only.
    """
    text = operation.strip()
    if not text:
        raise QueryParserException('Operation is not valid: {}'.format(operation))
    # Inclusive two-character operators first; both spellings are accepted.
    for spellings, canonical in ((('<=', '=<'), '<='), (('>=', '=>'), '>=')):
        if text[:2] in spellings:
            return canonical, text[2:].strip()
    # Exclusive single-character operators.
    if text[0] in '<>':
        return text[0], text[1:].strip()
    # No comparison operator present.
    return None, text
|
def _ordered_categories ( df , categories ) :
"""Make the columns in df categorical
Parameters :
categories : dict
Of the form { str : list } ,
where the key the column name and the value is
the ordered category list"""
|
for col , cats in categories . items ( ) :
df [ col ] = df [ col ] . astype ( CategoricalDtype ( cats , ordered = True ) )
return df
|
def _filter_parameters(parameters):
    """Filter the ignored parameters out.

    Returns ``None`` for an empty/missing mapping, otherwise an
    ``OrderedDict`` without the entries listed in ``IGNORED_PARAMS``.
    """
    if not parameters:
        return None
    kept = (
        (name, value)
        for name, value in six.iteritems(parameters)
        if name not in IGNORED_PARAMS
    )
    return OrderedDict(kept)
|
def is_en_passant(self, move: Move) -> bool:
    """Checks if the given pseudo-legal move is an en passant capture."""
    # Must target the current en passant square.
    if self.ep_square != move.to_square:
        return False
    # Only a pawn can capture en passant.
    if not self.pawns & BB_SQUARES[move.from_square]:
        return False
    # The capture is diagonal (a 7- or 9-square shift) onto an empty square.
    diff = abs(move.to_square - move.from_square)
    return diff in (7, 9) and not self.occupied & BB_SQUARES[move.to_square]
|
def ref(self):
    """Get the reference number of the dataset.

    Args::
        no argument
    Returns::
        dataset reference number

    C library equivalent: SDidtoref
    """
    ref_no = _C.SDidtoref(self._id)
    # A negative/failure return raises through _checkErr.
    _checkErr('idtoref', ref_no, 'illegal SDS identifier')
    return ref_no
|
def calculateProbableRootOfGeneTree(speciesTree, geneTree, processID=lambda x: x):
    """Try every possible branch as the root and return the rooting that
    requires the minimum number of duplications.

    Returns:
        (tree, dupCount, lossCount) tuple for the best rooting.
    """
    # Trees too small to re-root are returned unchanged.
    if geneTree.traversalID.midEnd <= 3:
        # Bug fix: previously returned (0, 0, geneTree), which contradicted
        # the (tree, dupCount, lossCount) order of the normal return path.
        return geneTree, 0, 0
    checkGeneTreeMatchesSpeciesTree(speciesTree, geneTree, processID)
    candidates = []

    def visit(tree):
        # Skip the two branches adjacent to the current root: re-rooting
        # there would reproduce the existing rooting.
        if tree.traversalID.mid not in (geneTree.left.traversalID.mid,
                                        geneTree.right.traversalID.mid):
            rerooted = moveRoot(geneTree, tree.traversalID.mid)
            binaryTree_depthFirstNumbers(rerooted)
            dups, losses = calculateDupsAndLossesByReconcilingTrees(speciesTree, rerooted, processID)
            candidates.append((dups, losses, rerooted))
        if tree.internal:
            visit(tree.left)
            visit(tree.right)

    visit(geneTree)
    # Sort only on (dupCount, lossCount); comparing tree objects on ties
    # is undefined (and a TypeError on Python 3).
    candidates.sort(key=lambda record: record[:2])
    dups, losses, best = candidates[0]
    return best, dups, losses
|
def tune(runner, kernel_options, device_options, tuning_options):
    """Find the best performing kernel configuration in the parameter space
    using particle swarm optimization (PSO).

    :params runner: A runner from kernel_tuner.runners
    :type runner: kernel_tuner.runner
    :param kernel_options: A dictionary with all options for the kernel.
    :type kernel_options: dict
    :param device_options: A dictionary with all options for the device
        on which the kernel should be tuned.
    :type device_options: dict
    :param tuning_options: A dictionary with all options regarding the tuning
        process.
    :type tuning_options: dict
    :returns: A list of dictionaries for executed kernel configurations and
        their execution times. And a dictionary that contains information
        about the hardware/software environment on which the tuning took place.
    :rtype: list(dict()), dict()
    """
    results = []
    cache = {}
    # scale variables in x because PSO works with velocities to visit different configurations
    tuning_options["scaling"] = True
    # using this instead of get_bounds because scaling is used
    bounds, _, _ = get_bounds_x0_eps(tuning_options)
    args = (kernel_options, tuning_options, runner, results, cache)
    # Fixed swarm hyper-parameters: 20 particles, at most 100 iterations.
    num_particles = 20
    maxiter = 100
    # Sentinel "infinity": any measured kernel time will be smaller.
    best_time_global = 1e20
    best_position_global = []
    # init particle swarm
    swarm = []
    for i in range(0, num_particles):
        swarm.append(Particle(bounds, args))
    for i in range(maxiter):
        if tuning_options.verbose:
            print("start iteration ", i, "best time global", best_time_global)
        # evaluate particle positions
        for j in range(num_particles):
            swarm[j].evaluate(_cost_func)
            # update global best if needed
            if swarm[j].time <= best_time_global:
                best_position_global = swarm[j].position
                best_time_global = swarm[j].time
        # update particle velocities and positions
        for j in range(0, num_particles):
            swarm[j].update_velocity(best_position_global)
            swarm[j].update_position(bounds)
    if tuning_options.verbose:
        print('Final result:')
        print(best_position_global)
        print(best_time_global)
    # results was filled in by _cost_func via the shared args tuple.
    return results, runner.dev.get_environment()
|
def LearnToExecute(  # pylint: disable=invalid-name
        batch_size, max_length=1, max_nesting=1, token_by_char=True,
        mode=Mode.TRAIN_COMBINE, loss_threshold=0.1,
        min_tries=DEFAULT_MIN_CURRICULUM_EVAL_TRIES, task_type=TaskType.ALG_CTRL):
    """Factory method for LearnToExecute Dataset module.

    Args:
        batch_size: (int). The number of elements in a mini-batch.
        max_length: (int). Maximum character length.
        max_nesting: (int). Maximum level of statement nesting.
        token_by_char: (bool). Tokenize by character or words?
        mode: (string). Either 'train', 'test'.
        loss_threshold: (int) curriculum threshold for error below which
            increase the task difficulty.
        min_tries: (int) minimum update tries for curriculum difficulty level.
        task_type: (string) defines the task by allowable ops
            (see TASK_TYPE_OPS).

    Returns:
        tf.Data.Dataset for LearnToExecute sample generator with the
        LearnToExecuteState monkey patched into the `state` attribute.

    Raises:
        ValueError: in case of bad `mode`.
    """
    # Dispatch table for the curriculum class; TEST forces min_tries to 0.
    curriculum_classes = {
        Mode.TRAIN_COMBINE: CombineCurriculum,
        Mode.TRAIN_MIX: MixCurriculum,
        Mode.TRAIN_NAIVE: NaiveCurriculum,
        Mode.TEST: BaselineCurriculum,
    }
    if mode not in curriculum_classes:
        raise ValueError("Invalid mode.")
    tries = 0 if mode == Mode.TEST else min_tries
    curriculum = curriculum_classes[mode](max_length, max_nesting, loss_threshold, min_tries=tries)

    lte = LearnToExecuteState(batch_size, max_length, max_nesting, curriculum,
                              token_by_char, task_type=task_type)
    types_ = (tf.float32, tf.float32, tf.float32, tf.int64, tf.int64)
    shapes_ = (tf.TensorShape([lte.num_steps, batch_size, lte.vocab_size]),
               tf.TensorShape([lte.num_steps_out, batch_size, lte.vocab_size]),
               tf.TensorShape([lte.num_steps_out, batch_size, lte.vocab_size]),
               tf.TensorShape([batch_size,]),
               tf.TensorShape([batch_size,]))
    dataset = tf.data.Dataset.from_generator(lte.make_batch, types_, shapes_)
    # Expose the generator state so callers can drive the curriculum.
    dataset.state = lte
    return dataset
|
def apparent_temp(temp, rh, wind):
    """Compute apparent temperature (real feel), using formula from
    http://www.bom.gov.au/info/thermal_stress/
    """
    # Missing observations propagate as None.
    if temp is None or rh is None or wind is None:
        return None
    # Water vapour pressure (hPa) from relative humidity and air temperature.
    saturation = 6.105 * math.exp(17.27 * temp / (237.7 + temp))
    vapour_pressure = (float(rh) / 100.0) * saturation
    return temp + 0.33 * vapour_pressure - 0.70 * wind - 4.00
|
def find_srv_by_name_and_hostname(self, host_name, sdescr):
    """Get a specific service based on a host_name and service_description.

    :param host_name: host name linked to needed service
    :type host_name: str
    :param sdescr: service name we need
    :type sdescr: str
    :return: the service found or None
    :rtype: alignak.objects.service.Service
    """
    # Services are indexed by the (host_name, service_description) pair.
    return self.name_to_item.get((host_name, sdescr))
|
def _update_status(self, sub_job_num=None):
    """Gets the job status.

    :param sub_job_num: optional sub-job index; when given, only that
        sub-job of the cluster is queried.
    Return:
        dict: count of (sub-)jobs per human-readable status label.
    """
    job_id = '%s.%s' % (self.cluster_id, sub_job_num) if sub_job_num else str(self.cluster_id)
    # Emit one quoted status digit per job, with no separators.
    format = ['-format', '"%d"', 'JobStatus']
    # Query both the active queue and the history, since finished jobs
    # drop out of condor_q.
    cmd = 'condor_q {0} {1} && condor_history {0} {1}'.format(job_id, ' '.join(format))
    args = [cmd]
    out, err = self._execute(args, shell=True, run_in_job_dir=False)
    if err:
        log.error('Error while updating status for job %s: %s', job_id, err)
        raise HTCondorError(err)
    if not out:
        log.error('Error while updating status for job %s: Job not found.', job_id)
        raise HTCondorError('Job not found.')
    # Strip the quotes so the output is one status digit per (sub-)job.
    out = out.replace('\"', '')
    log.info('Job %s status: %s', job_id, out)
    if not sub_job_num:
        if len(out) >= self.num_jobs:
            # Keep only the first num_jobs digits.
            # NOTE(review): presumably condor_q and condor_history can both
            # report the same job, producing extra digits — confirm.
            out = out[:self.num_jobs]
        else:
            msg = 'There are {0} sub-jobs, but {1} status(es).'.format(self.num_jobs, len(out))
            log.error(msg)
            raise HTCondorError(msg)
    # initialize status dictionary with a zero count per status label
    status_dict = dict()
    for val in CONDOR_JOB_STATUSES.values():
        status_dict[val] = 0
    for status_code_str in out:
        status_code = 0
        try:
            status_code = int(status_code_str)
        except ValueError:
            # Non-numeric characters fall back to status code 0.
            pass
        key = CONDOR_JOB_STATUSES[status_code]
        status_dict[key] += 1
    return status_dict
|
def config_string(self):
    """See the class documentation."""
    # Note: _write_to_conf is determined when the value is calculated. This
    # is a hidden function call due to property magic.
    val = self.str_value
    if not self._write_to_conf:
        return ""
    prefix = self.kconfig.config_prefix
    if self.orig_type in _BOOL_TRISTATE:
        # Disabled bool/tristate symbols are written as a comment line.
        if val == "n":
            return "# {}{} is not set\n".format(prefix, self.name)
        return "{}{}={}\n".format(prefix, self.name, val)
    if self.orig_type in _INT_HEX:
        return "{}{}={}\n".format(prefix, self.name, val)
    # sym.orig_type is STRING: quote and escape the value.
    return '{}{}="{}"\n'.format(prefix, self.name, escape(val))
|
def create_hook(self, name, config, events=github.GithubObject.NotSet, active=github.GithubObject.NotSet):
    """:calls: `POST /orgs/:owner/hooks <http://developer.github.com/v3/orgs/hooks>`_

    :param name: string
    :param config: dict
    :param events: list of string
    :param active: bool
    :rtype: :class:`github.Hook.Hook`
    """
    assert isinstance(name, (str, unicode)), name
    assert isinstance(config, dict), config
    assert events is github.GithubObject.NotSet or all(isinstance(element, (str, unicode)) for element in events), events
    assert active is github.GithubObject.NotSet or isinstance(active, bool), active
    post_parameters = {"name": name, "config": config}
    # Optional fields are only sent when explicitly provided.
    for field, value in (("events", events), ("active", active)):
        if value is not github.GithubObject.NotSet:
            post_parameters[field] = value
    headers, data = self._requester.requestJsonAndCheck("POST", self.url + "/hooks", input=post_parameters)
    return github.Hook.Hook(self._requester, headers, data, completed=True)
|
def getVariantAnnotations(self, referenceName, startPosition, endPosition):
    """Generator for iterating through variant annotations in this
    variant annotation set.

    :param referenceName:
    :param startPosition:
    :param endPosition:
    :return: generator of protocol.VariantAnnotation
    """
    # Convert each underlying pysam record lazily as it is consumed.
    for record in self._variantSet.getPysamVariants(referenceName, startPosition, endPosition):
        yield self.convertVariantAnnotation(record)
|
def _get_usage(self, account_number, number):
    """Get Fido usage.

    Get the following data
    - talk
    - text
    - data
    Roaming data is not supported yet.

    This is a generator-based coroutine (``yield from``); on success it
    returns a flat dict with ``<name>_used`` / ``<name>_remaining`` /
    ``<name>_limit`` keys for each entry of DATA_MAP.
    """
    # Prepare data
    data = {"ctn": number, "language": "en-US", "accountNumber": account_number}
    # Http request
    try:
        raw_res = yield from self._session.post(USAGE_URL, data=data, headers=self._headers, timeout=self._timeout)
    except OSError:
        raise PyFidoError("Can not get usage")
    # Load answer as json
    try:
        output = yield from raw_res.json()
    except (OSError, ValueError):
        raise PyFidoError("Can not get usage as json")
    # Format data
    ret_data = {}
    for data_name, keys in DATA_MAP.items():
        # DATA_MAP values are (top-level key, usageSummaryType) pairs.
        key, subkey = keys
        for data in output.get(key)[0].get('wirelessUsageSummaryInfoList'):
            if data.get('usageSummaryType') == subkey:
                # Prepare keys:
                used_key = "{}_used".format(data_name)
                remaining_key = "{}_remaining".format(data_name)
                limit_key = "{}_limit".format(data_name)
                # Get values. NOTE(review): negative remaining/total appears
                # to mean "unlimited" and is reported as None — confirm
                # against the Fido API.
                ret_data[used_key] = data.get('used', 0.0)
                if data.get('remaining') >= 0:
                    ret_data[remaining_key] = data.get('remaining')
                else:
                    ret_data[remaining_key] = None
                if data.get('total') >= 0:
                    ret_data[limit_key] = data.get('total')
                else:
                    ret_data[limit_key] = None
    return ret_data
|
def sort_timeseries(self, ascending=True):
    """Sorts the data points within the TimeSeries according to their
    occurrence, inline.

    :param boolean ascending: Determines if the TimeSeries will be ordered
        ascending or descending. If this is set to descending once, the
        ordered parameter defined in :py:meth:`TimeSeries.__init__` will be
        set to False FOREVER.
    :return: Returns :py:obj:`self` for convenience.
    :rtype: TimeSeries
    """
    # Already sorted ascending: nothing to do.
    if ascending and self._sorted:
        # Bug fix: this path previously returned None, breaking the
        # documented "returns self for convenience" contract.
        return self
    # Negating the key sorts descending without reverse=True.
    direction = 1 if ascending else -1
    self._predefinedSorted = False
    self._timeseriesData.sort(key=lambda entry: direction * entry[0])
    self._sorted = ascending
    return self
|
def params(self):
    """Returns a list where each element is a nicely formatted
    parameter of this function. This includes argument lists,
    keyword arguments and default values.

    NOTE(review): this code relies on Python-2-only names (``unicode``,
    ``types.TypeType``, ``types.ObjectType``) in some branches.
    """
    # Format one parameter; nested tuples (Python 2 tuple parameters) are
    # rendered recursively as "(a, b)".
    def fmt_param(el):
        if isinstance(el, str) or isinstance(el, unicode):
            return el
        else:
            return '(%s)' % (', '.join(map(fmt_param, el)))
    try:
        # Prefer getfullargspec (Python 3), fall back to getargspec.
        getspec = getattr(inspect, 'getfullargspec', inspect.getargspec)
        s = getspec(self.func)
    except TypeError:
        # I guess this is for C builtin functions?
        return ['...']
    params = []
    for i, param in enumerate(s.args):
        if param.lower() == 'self':
            continue
        # The trailing len(s.defaults) positional args carry default values.
        if s.defaults is not None and len(s.args) - i <= len(s.defaults):
            defind = len(s.defaults) - (len(s.args) - i)
            default_value = s.defaults[defind]
            value = repr(default_value).strip()
            if value[0] == '<' and value[-1] == '>':
                # repr like "<... at 0x...>": substitute a friendlier name.
                if type(default_value) == types.TypeType:
                    value = default_value.__name__
                elif type(default_value) == types.ObjectType:
                    value = '%s()' % (default_value.__class__.__name__)
            params.append('%s=%s' % (param, value))
        else:
            params.append(fmt_param(param))
    if s.varargs is not None:
        params.append('*%s' % s.varargs)
    # TODO: This needs to be adjusted in Python 3. There's more stuff
    # returned from getfullargspec than what we're looking at here.
    keywords = getattr(s, 'varkw', getattr(s, 'keywords', None))
    if keywords is not None:
        params.append('**%s' % keywords)
    return params
|
def generate_PVdelV_nt_pos_vecs(self, generative_model, genomic_data):
    """Process P(delV|V) into Pi arrays.

    Set the attributes PVdelV_nt_pos_vec and PVdelV_2nd_nt_pos_per_aa_vec.

    Parameters
    ----------
    generative_model : GenerativeModelVJ
        VJ generative model class containing the model parameters.
    genomic_data : GenomicDataVJ
        VJ genomic data class containing the V and J germline
        sequences and info.
    """
    cutV_genomic_CDR3_segs = genomic_data.cutV_genomic_CDR3_segs
    nt2num = {'A': 0, 'C': 1, 'G': 2, 'T': 3}
    num_del_pos = generative_model.PdelV_given_V.shape[0]
    num_V_genes = generative_model.PdelV_given_V.shape[1]
    # NOTE: [[ ]] * n makes n references to the same list, but each slot is
    # only ever reassigned (never mutated) below, so this is safe here.
    PVdelV_nt_pos_vec = [[]] * num_V_genes
    PVdelV_2nd_nt_pos_per_aa_vec = [[]] * num_V_genes
    for V_in in range(num_V_genes):
        # Rows are nucleotides (A,C,G,T); columns are positions in the seg.
        current_PVdelV_nt_pos_vec = np.zeros((4, len(cutV_genomic_CDR3_segs[V_in])))
        current_PVdelV_2nd_nt_pos_per_aa_vec = {}
        for aa in self.codons_dict.keys():
            current_PVdelV_2nd_nt_pos_per_aa_vec[aa] = np.zeros((4, len(cutV_genomic_CDR3_segs[V_in])))
        for pos, nt in enumerate(cutV_genomic_CDR3_segs[V_in]):
            # Positions that would require more deletions than the model
            # supports carry zero probability and are skipped.
            if len(cutV_genomic_CDR3_segs[V_in]) - pos > num_del_pos:
                continue
            if pos % 3 == 0:
                # Start of a codon
                current_PVdelV_nt_pos_vec[nt2num[nt], pos] = generative_model.PdelV_given_V[len(cutV_genomic_CDR3_segs[V_in]) - pos - 1, V_in]
            elif pos % 3 == 1:
                # Mid codon position
                for ins_nt in 'ACGT':
                    # We need to find what possible codons are allowed for
                    # any aa (or motif): complete the codon with ins_nt.
                    for aa in self.codons_dict.keys():
                        if cutV_genomic_CDR3_segs[V_in][pos - 1:pos + 1] + ins_nt in self.codons_dict[aa]:
                            current_PVdelV_2nd_nt_pos_per_aa_vec[aa][nt2num[ins_nt], pos] = generative_model.PdelV_given_V[len(cutV_genomic_CDR3_segs[V_in]) - pos - 1, V_in]
            elif pos % 3 == 2:
                # End of codon: codon fully determined, store in row 0.
                current_PVdelV_nt_pos_vec[0, pos] = generative_model.PdelV_given_V[len(cutV_genomic_CDR3_segs[V_in]) - pos - 1, V_in]
        PVdelV_nt_pos_vec[V_in] = current_PVdelV_nt_pos_vec
        PVdelV_2nd_nt_pos_per_aa_vec[V_in] = current_PVdelV_2nd_nt_pos_per_aa_vec
    self.PVdelV_nt_pos_vec = PVdelV_nt_pos_vec
    self.PVdelV_2nd_nt_pos_per_aa_vec = PVdelV_2nd_nt_pos_per_aa_vec
|
def add_material(self, material):
    """Add a material to the mesh, IF it's not already present."""
    # Skip duplicates; only unseen materials are appended.
    if not self.has_material(material):
        self.materials.append(material)
|
def acquaint_insides(swap_gate: ops.Gate, acquaintance_gate: ops.Operation, qubits: Sequence[ops.Qid], before: bool, layers: Layers, mapping: Dict[ops.Qid, int]) -> None:
    """Acquaints each of the qubits with another set specified by an
    acquaintance gate.

    Args:
        qubits: The list of qubits of which half are individually acquainted
            with another list of qubits.
        layers: The layers to put gates into.
        acquaintance_gate: The acquaintance gate that acquaints the end qubit
            with another list of qubits.
        before: Whether the acquainting is done before the shift.
        swap_gate: The gate used to swap logical indices.
        mapping: The mapping from qubits to logical indices. Used to keep
            track of the effect of inside-acquainting swaps.
    """
    max_reach = _get_max_reach(len(qubits), round_up=before)
    # The reach ramps up (1..max_reach) then back down (max_reach..0),
    # paired with alternating 0/1 offsets for the swap layers.
    reaches = itertools.chain(range(1, max_reach + 1), range(max_reach, -1, -1))
    offsets = (0, 1) * max_reach
    swap_gate = SwapPermutationGate(swap_gate)
    ops = []
    for offset, reach in zip(offsets, reaches):
        if offset == before:
            ops.append(acquaintance_gate)
        for dr in range(offset, reach, 2):
            ops.append(swap_gate(*qubits[dr:dr + 2]))
    intrastitial_layer = getattr(layers, 'pre' if before else 'post')
    intrastitial_layer += ops
    # add interstitial gate
    interstitial_layer = getattr(layers, ('prior' if before else 'posterior') + '_interstitial')
    interstitial_layer.append(acquaintance_gate)
    # update mapping: the swaps above reversed the order of the first
    # max_reach + 1 qubits, so reverse their logical indices to match.
    reached_qubits = qubits[:max_reach + 1]
    positions = list(mapping[q] for q in reached_qubits)
    mapping.update(zip(reached_qubits, reversed(positions)))
|
def get_att_mats(translate_model):
    """Get's the tensors representing the attentions from a built model.

    The attentions are stored in a dict on the Transformer object while
    building the graph.

    Args:
        translate_model: Transformer object to fetch the attention weights
            from.

    Returns:
        Tuple of attention matrices; (
            enc_atts: Encoder self attention weights.
                A list of `num_layers` numpy arrays of size
                (batch_size, num_heads, inp_len, inp_len)
            dec_atts: Decoder self attention weights.
                A list of `num_layers` numpy arrays of size
                (batch_size, num_heads, out_len, out_len)
            encdec_atts: Encoder-Decoder attention weights.
                A list of `num_layers` numpy arrays of size
                (batch_size, num_heads, out_len, inp_len)
        )
    """
    prefix = "transformer/body/"
    # Relative-position attention is stored under a different op name.
    self_att_postfix = "/multihead_attention/dot_product_attention"
    if translate_model.hparams.self_attention_type == "dot_product_relative":
        self_att_postfix = "/multihead_attention/dot_product_attention_relative"
    encdec_postfix = "/multihead_attention/dot_product_attention"
    weights = translate_model.attention_weights
    enc_atts, dec_atts, encdec_atts = [], [], []
    for layer in range(translate_model.hparams.num_hidden_layers):
        enc_atts.append(weights["%sencoder/layer_%i/self_attention%s" % (prefix, layer, self_att_postfix)])
        dec_atts.append(weights["%sdecoder/layer_%i/self_attention%s" % (prefix, layer, self_att_postfix)])
        encdec_atts.append(weights["%sdecoder/layer_%i/encdec_attention%s" % (prefix, layer, encdec_postfix)])
    return enc_atts, dec_atts, encdec_atts
|
def setConf(self, conf, type='simu'):
    """Set configuration information for the given type dict.

    :param conf: configuration information, str or dict
    :param type: simu, ctrl, misc
    """
    # Nothing to do for a missing configuration.
    if conf is None:
        return
    # String configurations are parsed into a dict first.
    if isinstance(conf, str):
        conf = MagBlock.str2dict(conf)
    self.setConfDict[type](conf)
|
def updateQueue(destinationRoot, queueDict, debug=False):
    """With a dictionary that represents a queue entry, update the queue
    entry with the values.

    Raises:
        Exception: if the final PUT does not return HTTP 200.
    """
    attrDict = bagatom.AttrDict(queueDict)
    url = urlparse.urljoin(destinationRoot, "APP/queue/" + attrDict.ark + "/")
    queueXML = bagatom.queueEntryToXML(attrDict)
    urlID = os.path.join(destinationRoot, attrDict.ark)
    uploadXML = bagatom.wrapAtom(queueXML, id=urlID, title=attrDict.ark)
    uploadXMLText = '<?xml version="1.0"?>\n' + etree.tostring(uploadXML, pretty_print=True)
    if debug:
        # Bug fix: the original Python-2-only print statements are a syntax
        # error on Python 3; the parenthesized form works on both.
        print("Sending XML to %s" % url)
        print(uploadXMLText)
    try:
        response, content = doWebRequest(url, "PUT", data=uploadXMLText)
    except Exception:
        # Narrowed from a bare except, which also swallowed SystemExit and
        # KeyboardInterrupt. Treat as transient: sleep a few minutes, then
        # give it a second shot before dying.
        time.sleep(300)
        response, content = doWebRequest(url, "PUT", data=uploadXMLText)
    if response.getcode() != 200:
        raise Exception("Error updating queue %s to url %s. Response code is %s\n%s" % (attrDict.ark, url, response.getcode(), content))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.