| signature (string, lengths 29–44.1k) | implementation (string, lengths 0–85.2k) |
|---|---|
def _parse_proc_pid_cgroup ( content ) :
"""Parse a / proc / * / cgroup file into tuples of ( subsystem , cgroup ) .
@ param content : An iterable over the lines of the file .
@ return : a generator of tuples"""
|
for ownCgroup in content : # each line is " id : subsystem , subsystem : path "
ownCgroup = ownCgroup . strip ( ) . split ( ':' )
try :
path = ownCgroup [ 2 ] [ 1 : ]
# remove leading /
except IndexError :
raise IndexError ( "index out of range for " + str ( ownCgroup ) )
for subsystem in ownCgroup [ 1 ] . split ( ',' ) :
yield ( subsystem , path )
|
def joint(self, table, fields, join_table, join_fields, condition_field, condition_join_field, join_method='left_join'):
    """Build a two-table join query and execute it.

    Usage::

        >>> joint('user', 'name,id_number', 'medical_card', 'number', 'id', 'user_id', 'inner_join')
        select u.name, u.id_number, v.number from user as u inner join medical_card as v on u.id=v.user_id;

    :param table: name of the primary table (aliased ``u``).
    :param fields: comma-separated columns selected from ``table``.
    :param join_table: name of the joined table (aliased ``v``).
    :param join_fields: comma-separated columns selected from ``join_table``.
    :param condition_field: join column on the primary table.
    :param condition_join_field: join column on the joined table.
    :param join_method: SQL join keyword, defaults to ``'left_join'``.
    :return: query results from the underlying ``execute`` call.
    """
    # BUG FIX: the function form ``string.strip`` only exists on Python 2;
    # use the ``str.strip`` method so this also works on Python 3.
    field_names = [field.strip() for field in fields.split(',')]
    select = ', '.join('u.{}'.format(field) for field in field_names)
    join_field_names = [field.strip() for field in join_fields.split(',')]
    join_select = ', '.join('v.{}'.format(field) for field in join_field_names)
    sql = ("select {select}, {join_select} from {table} as u {join_method}"
           " {join_table} as v on u.{condition_field}="
           "v.{condition_join_field};").format(
        select=select, join_select=join_select, table=table,
        join_method=join_method, join_table=join_table,
        condition_field=condition_field,
        condition_join_field=condition_join_field)
    return super(PGWrapper, self).execute(sql, result=True).results
|
def _rlimit_min ( one_val , nother_val ) :
"""Returns the more stringent rlimit value . - 1 means no limit ."""
|
if one_val < 0 or nother_val < 0 :
return max ( one_val , nother_val )
else :
return min ( one_val , nother_val )
|
def Glob2Regex(glob_pattern):
    """Converts a glob pattern to a regular expression.

    This function supports basic glob patterns that consist of:
      *       matches everything
      ?       matches any single character
      [seq]   matches any character in sequence
      [!seq]  matches any character not in sequence

    Args:
        glob_pattern (str): glob pattern.

    Returns:
        str: regular expression pattern.

    Raises:
        ValueError: if the glob pattern cannot be converted.
    """
    if not glob_pattern:
        raise ValueError('Missing glob pattern.')
    regex_pattern = []
    glob_pattern_index = 0
    glob_pattern_length = len(glob_pattern)
    while glob_pattern_index < glob_pattern_length:
        character = glob_pattern[glob_pattern_index]
        glob_pattern_index += 1
        if character == '*':
            regex_pattern.append('.*')
        elif character == '?':
            regex_pattern.append('.')
        elif character != '[':
            # Literal character: escape any regex metacharacter.
            regex_character = re.escape(character)
            regex_pattern.append(regex_character)
        else:
            # Start of a "[...]" group: scan ahead for the closing bracket.
            glob_group_index = glob_pattern_index
            # A leading "!" (negation marker) ...
            if (glob_group_index < glob_pattern_length and
                    glob_pattern[glob_group_index] == '!'):
                glob_group_index += 1
            # ... and a "]" immediately after it belong to the group body,
            # they do not terminate it.
            if (glob_group_index < glob_pattern_length and
                    glob_pattern[glob_group_index] == ']'):
                glob_group_index += 1
            while (glob_group_index < glob_pattern_length and
                   glob_pattern[glob_group_index] != ']'):
                glob_group_index += 1
            if glob_group_index >= glob_pattern_length:
                # Unterminated group: treat the "[" as a literal character.
                regex_pattern.append('\\[')
                continue
            glob_group = glob_pattern[glob_pattern_index:glob_group_index]
            # Advance past the closing "]".
            glob_pattern_index = glob_group_index + 1
            glob_group = glob_group.replace('\\', '\\\\')
            if py2to3.PY_3_7_AND_LATER:
                # NOTE(review): "|" escaping is applied only on 3.7+ here,
                # presumably to match a change in re semantics — confirm.
                glob_group = glob_group.replace('|', '\\|')
            regex_pattern.append('[')
            if glob_group[0] == '!':
                # Glob negation "[!...]" becomes regex negation "[^...]".
                regex_pattern.append('^')
                glob_group = glob_group[1:]
            elif glob_group[0] == '^':
                # A literal leading "^" must be escaped inside a regex group.
                regex_pattern.append('\\')
            regex_pattern.append(glob_group)
            regex_pattern.append(']')
    return ''.join(regex_pattern)
|
def links(self, r_server=None, mask=None):
    """Get LINKS information.

    Optional arguments:
    * r_server=None - Forward the query to this server.
    * mask=None - Match mask servers.
    """
    with self.lock:
        # Issue the LINKS command with whichever optional args were given.
        if r_server and mask:
            self.send('LINKS %s %s' % (r_server, mask))
        elif r_server:
            self.send('LINKS %s' % r_server)
        else:
            self.send('LINKS')
        server_links = {}
        while self.readable():
            reply = self._recv(expected_replies=('364', '365'))
            parts = reply[2].split()
            if reply[0] == '364':
                # 364 payload: server name first, description from the 4th
                # field on (middle fields presumably hub/hopcount — confirm).
                server_links[parts[0]] = ' '.join(parts[3:])
            elif reply[0] == '365':
                # 365 marks the end of the LINKS listing.
                break
        return server_links
|
def graph_to_svg(graph):
    """Turn a networkx graph into an SVG string, using graphviz dot.

    Parameters
    ----------
    graph : networkx graph

    Returns
    -------
    svg : string, pictoral layout in SVG format
    """
    import subprocess
    import tempfile

    with tempfile.NamedTemporaryFile() as dot_file:
        # Dump the graph in graphviz "dot" format, then render it by
        # shelling out to the dot executable.
        nx.drawing.nx_agraph.write_dot(graph, dot_file.name)
        return subprocess.check_output(['dot', dot_file.name, '-Tsvg'])
|
def pts_change_axis(pts=[], flip=[False, False], offset=[0.0, 0.0]):
    '''Return given points with axes flipped and offset, converting points
    between cartesian axis layouts. For example, SVG Y-axis increases top to
    bottom but DXF is bottom to top.'''
    assert isinstance(pts, list) and len(pts) > 0
    # Validate every point: tuple of >=2 floats, all with the same arity.
    dims = None
    for pt in pts:
        assert isinstance(pt, tuple)
        assert len(pt) > 1
        for coord in pt:
            assert isinstance(coord, float)
        if dims is not None:
            assert len(pt) == dims
        dims = len(pt)
    # flip and offset must match the points' dimensionality.
    assert isinstance(flip, list)
    assert len(flip) == dims
    for flag in flip:
        assert isinstance(flag, bool)
    assert isinstance(offset, list)
    assert len(offset) == dims
    for delta in offset:
        assert isinstance(delta, float)
    return [pt_change_axis(pt, flip, offset) for pt in pts]
|
def period_max_neighborhood_probability(self, threshold, radius, sigmas=None):
    """Calculates the neighborhood probability of exceeding a threshold at
    any time over the period loaded.

    Args:
        threshold (float): splitting threshold for probability calculations
        radius (int): distance from point in number of grid points to include
            in neighborhood calculation.
        sigmas (array of ints): Radii for Gaussian filter used to smooth
            neighborhood probabilities.

    Returns:
        list of EnsembleConsensus objects
    """
    if sigmas is None:
        sigmas = [0]
    weights = disk(radius)
    # self.data appears to be indexed (member, time, y, x) — TODO confirm.
    neighborhood_prob = np.zeros(self.data.shape[2:], dtype=np.float32)
    thresh_data = np.zeros(self.data.shape[2:], dtype=np.uint8)
    for m in range(self.data.shape[0]):
        # BUG FIX: reset the exceedance mask for each member; previously the
        # mask kept hits from earlier members, inflating the probabilities.
        thresh_data.fill(0)
        thresh_data[self.data[m].max(axis=0) >= threshold] = 1
        maximized = fftconvolve(thresh_data, weights, mode="same")
        maximized[maximized > 1] = 1
        neighborhood_prob += fftconvolve(maximized, weights, mode="same")
    # Suppress sub-threshold convolution noise, then normalize to [0, 1] by
    # member count and footprint size.
    neighborhood_prob[neighborhood_prob < 1] = 0
    neighborhood_prob /= (self.data.shape[0] * float(weights.sum()))
    consensus_probs = []
    for sigma in sigmas:
        if sigma > 0:
            filtered_prob = gaussian_filter(neighborhood_prob, sigma=sigma)
        else:
            filtered_prob = neighborhood_prob
        ec = EnsembleConsensus(
            filtered_prob,
            "neighbor_prob_{0:02d}-hour_r_{1:d}_s_{2:d}".format(
                self.data.shape[1], radius, sigma),
            self.ensemble_name,
            self.run_date,
            self.variable + "_{0:0.2f}".format(float(threshold)),
            self.start_date,
            self.end_date,
            "")
        consensus_probs.append(ec)
    return consensus_probs
|
def horn_sat(formula):
    """Solving a HORN Sat formula.

    :param formula: list of couples (posvar, negvars).
        negvars is a list of the negative variables and can be empty.
        posvar is the positive variable and can be None.
        Variables can be any hashable objects, as integers or strings
        for example.
    :returns: None if formula is not satisfiable, else a minimal set of
        variables that have to be set to true in order to satisfy the formula.
    :complexity: linear
    """
    # --- construct data structures
    if not formula:
        # BUG FIX: an empty formula previously crashed on max([]);
        # it is trivially satisfied by the empty assignment.
        return set()
    CLAUSES = range(len(formula))
    # score[c] = number of distinct negative vars of c not yet in the solution
    score = [0 for c in CLAUSES]
    # posvar_in_clause[c] = the unique positive variable of clause c (if any)
    posvar_in_clause = [None for c in CLAUSES]
    # clauses_with_negvar[v] = all clauses where v appears negatively
    clauses_with_negvar = defaultdict(set)
    for c in CLAUSES:
        posvar, negvars = formula[c]
        score[c] = len(set(negvars))  # do not count repeated negvars twice
        posvar_in_clause[c] = posvar
        for v in negvars:
            clauses_with_negvar[v].add(c)
    # pool[s] = set of clauses with score s
    pool = [set() for s in range(max(score) + 1)]
    for c in CLAUSES:
        pool[score[c]].add(c)
    # --- solve Horn SAT formula
    solution = set()  # contains all variables set to True
    while pool[0]:
        curr = pool[0].pop()  # arbitrary zero-score clause
        v = posvar_in_clause[curr]
        if v is None:  # formula is not satisfiable
            return None
        if v in solution or curr in clauses_with_negvar[v]:
            continue  # clause is already satisfied
        solution.add(v)
        for c in clauses_with_negvar[v]:  # update scores
            pool[score[c]].remove(c)
            score[c] -= 1
            pool[score[c]].add(c)  # move c to a lower score in the pool
    return solution
|
def open_output(self, fname):
    """Open the output file FNAME.

    Returns tuple (FD, NEED_CLOSE), where FD is a file (or file-like)
    object, and NEED_CLOSE is a boolean flag that tells whether FD.close()
    should be called after finishing writing to the file.

    FNAME can be one of three things:
    (1) None, in which case (sys.stdout, False) is returned.
    (2) A file-like object, in which case (fname, False) is returned.
    (3) A string, in which case this procedure opens the file and returns
        (fd, True).
    """
    if not fname:
        return (sys.stdout, False)
    elif isinstance(fname, str):
        # BUG FIX: the Python 2-only builtin ``file()`` was replaced with
        # ``open()`` so this works on Python 3 as well.
        return (open(fname, "wb"), True)
    else:
        if not hasattr(fname, "write"):
            raise Exception("Expecting either a filename or a file-like object, but got %s" % fname)
        return (fname, False)
|
def read_registry(self):
    """Extract resolver configuration from the Windows registry."""
    lm = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
    want_scan = False
    try:
        try:
            # Windows XP / 2000 keep the TCP/IP settings here.
            tcp_params = _winreg.OpenKey(lm,
                                         r'SYSTEM\CurrentControlSet'
                                         r'\Services\Tcpip\Parameters')
            want_scan = True
        except EnvironmentError:
            # Windows ME stores them under a different key.
            tcp_params = _winreg.OpenKey(lm,
                                         r'SYSTEM\CurrentControlSet'
                                         r'\Services\VxD\MSTCP')
        try:
            self._config_win32_fromkey(tcp_params)
        finally:
            tcp_params.Close()
        if want_scan:
            # On NT-based systems also scan the per-interface settings.
            interfaces = _winreg.OpenKey(lm,
                                         r'SYSTEM\CurrentControlSet'
                                         r'\Services\Tcpip\Parameters'
                                         r'\Interfaces')
            try:
                i = 0
                while True:
                    try:
                        guid = _winreg.EnumKey(interfaces, i)
                        i += 1
                        key = _winreg.OpenKey(interfaces, guid)
                        # Skip interfaces that are not enabled NICs.
                        # NOTE(review): `key` is not closed on this path —
                        # looks like a handle leak; confirm before changing.
                        if not self._win32_is_nic_enabled(lm, guid, key):
                            continue
                        try:
                            self._config_win32_fromkey(key)
                        finally:
                            key.Close()
                    except EnvironmentError:
                        # EnumKey raises when there are no more subkeys.
                        break
            finally:
                interfaces.Close()
    finally:
        lm.Close()
|
def get_plugins(self, plugin_type=None):
    """Retrieve a list of plugins in the PluginManager.

    All plugins if no arguments are provided, or of the specified type.

    :param plugin_type: Type of plugins to retrieve from the plugin manager;
        either a class or an example instance whose type is matched.
    :return: Plugins being managed by the Manager (optionally of the desired
        plugin_type).
    :rtype: list
    """
    if plugin_type is None:
        # BUG FIX: return a real list (as documented by :rtype:) instead of
        # a dict-values view, keeping both branches' return types consistent.
        return list(self.plugins.values())
    # plugin_type may be a class or an instance; normalize to a class once.
    target_cls = plugin_type if inspect.isclass(plugin_type) else type(plugin_type)
    return [plugin for plugin in self.plugins.values()
            if isinstance(plugin, target_cls)]
|
def create(cls, name, billing_group=None, description=None, tags=None, settings=None, api=None):
    """Create a project.

    :param name: Project name.
    :param billing_group: Project billing group.
    :param description: Project description.
    :param tags: Project tags.
    :param settings: Project settings.
    :param api: Api instance.
    :return:
    """
    api = api or cls._API
    if name is None:
        raise SbgError('Project name is required!')
    data = {'name': name}
    if billing_group:
        data['billing_group'] = Transform.to_billing_group(billing_group)
    # Optional payload fields are only sent when truthy.
    for key, value in (('description', description), ('tags', tags),
                       ('settings', settings)):
        if value:
            data[key] = value
    extra = {'resource': cls.__name__, 'query': data}
    logger.info('Creating project', extra=extra)
    project_data = api.post(url=cls._URL['create'], data=data).json()
    return Project(api=api, **project_data)
|
def is_same_host(self, url):
    """Check if the given ``url`` is a member of the same host as this
    conncetion pool."""
    # TODO: Add optional support for socket.gethostbyname checking.
    if url.startswith('/'):
        # Relative URLs are always on the same host.
        return True
    return get_host(url) == (self.scheme, self.host, self.port)
|
async def _send_command ( self , command ) :
"""This is a private utility method .
The method sends a non - sysex command to Firmata .
: param command : command data
: returns : length of data sent"""
|
send_message = ""
for i in command :
send_message += chr ( i )
result = None
for data in send_message :
try :
result = await self . write ( data )
except ( ) :
if self . log_output :
logging . exception ( 'cannot send command' )
else :
print ( 'cannot send command' )
return result
|
def findUniques(mapF):
    """Finds the unique markers in a MAP.

    :param mapF: representation of a ``map`` file.
    :type mapF: list
    :returns: a :py:class:`dict` containing unique markers (according to
        their genomic localisation).
    """
    uSNPs = {}
    dSNPs = defaultdict(list)
    for i, row in enumerate(mapF):
        chromosome = row[0]
        position = row[3]
        snpID = (chromosome, position)
        if snpID not in uSNPs:
            # First time we see this marker position.
            uSNPs[snpID] = i
        elif snpID not in dSNPs:
            # Second time: record both the first and the current index.
            dSNPs[snpID].extend([uSNPs[snpID], i])
        else:
            # Seen multiple times already.
            dSNPs[snpID].append(i)
    # Removing the duplicates from the unique samples.
    # BUG FIX: dict.iterkeys() is Python 2 only; plain iteration works on
    # both Python 2 and Python 3.
    for snpID in dSNPs:
        if snpID in uSNPs:
            del uSNPs[snpID]
    return uSNPs
|
def listen(self):
    """Listen/Connect to message service loop to start receiving messages.

    Do not include in constructor, in this way it can be included in tasks.
    """
    self.listening = True
    try:
        self.service_backend.listen()
    except AuthenticationError:
        # Credentials rejected: flag that we stopped listening, then
        # propagate the failure to the caller.
        self.listening = False
        raise
    self.listening = False
|
def create(self, name, search, **kwargs):
    """Creates a saved search.

    :param name: The name for the saved search.
    :type name: ``string``
    :param search: The search query.
    :type search: ``string``
    :param kwargs: Additional arguments (optional). For a list of available
        parameters, see `Saved search parameters
        <http://dev.splunk.com/view/SP-CAAAEE5#savedsearchparams>`_
        on Splunk Developer Portal.
    :type kwargs: ``dict``
    :return: The :class:`SavedSearches` collection.
    """
    # Delegate to the generic collection create, pinning the search query.
    return Collection.create(self, name, search=search, **kwargs)
|
def _extract_error ( self , resp ) :
"""Extract the actual error message from a solr response ."""
|
reason = resp . headers . get ( 'reason' , None )
full_response = None
if reason is None :
try : # if response is in json format
reason = resp . json ( ) [ 'error' ] [ 'msg' ]
except KeyError : # if json response has unexpected structure
full_response = resp . content
except ValueError : # otherwise we assume it ' s html
reason , full_html = self . _scrape_response ( resp . headers , resp . content )
full_response = unescape_html ( full_html )
msg = "[Reason: %s]" % reason
if reason is None :
msg += "\n%s" % full_response
return msg
|
def assert_results_contain(check_results, expected_status, expected_msgcode=None):
    """This helper function is useful when we want to make sure that a
    certain log message is emited by a check but it can be in any order
    among other log messages."""
    # Fail unless some (status, message) pair matches both expectations.
    assert any(status == expected_status and message.code == expected_msgcode
               for status, message in check_results)
|
def apply_numpy_specials(self, copy=True):
    """Convert isis special pixel values to numpy special pixel values.

        Isis    Numpy
        ----    -----
        Null    nan
        Lrs     -inf
        Lis     -inf
        His     inf
        Hrs     inf

    Parameters
    ----------
    copy : bool [True]
        Whether to apply the new special values to a copy of the pixel data
        and leave the original unaffected.

    Returns
    -------
    Numpy Array
        A numpy array with special values converted to numpy's nan, inf,
        and -inf.
    """
    if copy:
        data = self.data.astype(numpy.float64)
    elif self.data.dtype != numpy.float64:
        # In-place requested: rebind self.data as float64 so the special
        # values (nan/inf) can be represented.
        data = self.data = self.data.astype(numpy.float64)
    else:
        data = self.data
    data[data == self.specials['Null']] = numpy.nan
    # COMPAT FIX: numpy.NINF was removed in NumPy 2.0; -numpy.inf is the
    # portable spelling of negative infinity.
    data[data < self.specials['Min']] = -numpy.inf
    data[data > self.specials['Max']] = numpy.inf
    return data
|
def setenv(name, value, false_unsets=False, clear_all=False, update_minion=False, permanent=False):
    '''Set the salt process environment variables.

    name
        The environment key to set. Must be a string.

    value
        Either a string or dict. When string, it will be the value set for
        the environment key of 'name' above. When a dict, each key/value
        pair represents an environment variable to set.

    false_unsets
        If a key's value is False and false_unsets is True, then the key
        will be removed from the salt processes environment dict entirely.
        If a key's value is False and false_unsets is not True, then the
        key's value will be set to an empty string.
        Default: False

    clear_all
        USE WITH CAUTION! This option can unset environment variables
        needed for salt to function properly. If clear_all is True, then
        any environment variables not defined in the environ dict will be
        deleted.
        Default: False

    update_minion
        If True, apply these environ changes to the main salt-minion
        process. If False, the environ changes will only affect the
        current salt subprocess.
        Default: False

    permanent
        On Windows minions this will set the environment variable in the
        registry so that it is always added as an environment variable when
        applications open. If you want to set the variable to HKLM instead
        of HKCU just pass in "HKLM" for this parameter. On all other minion
        types this will be ignored. Note: This will only take affect on
        applications opened after this has been set.

    Example:

    .. code-block:: yaml

        a_string_env:
          environ.setenv:
            - name: foo
            - value: bar
            - update_minion: True

        a_dict_env:
          environ.setenv:
            - name: does_not_matter
            - value:
                foo: bar
                baz: quux
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    environ = {}
    if isinstance(value, six.string_types) or value is False:
        environ[name] = value
    elif isinstance(value, dict):
        environ = value
    else:
        ret['result'] = False
        ret['comment'] = 'Environ value must be string, dict or False'
        return ret
    if clear_all is True:
        # Any keys not in the 'environ' dict supplied by the user will be unset.
        to_unset = [key for key in os.environ if key not in environ]
        for key in to_unset:
            if false_unsets is not True:
                # This key's value will change to ''.
                ret['changes'].update({key: ''})
            else:
                # We're going to delete the key.
                ret['changes'].update({key: None})
    current_environ = dict(os.environ)
    already_set = []
    for key, val in six.iteritems(environ):
        if val is False:
            # We unset this key from the environment if false_unsets is
            # True. Otherwise we want to set the value to ''.
            def key_exists():
                # On Windows, also check for a permanently-set (registry)
                # value, which may not appear in the process environment.
                if salt.utils.platform.is_windows():
                    permanent_hive = 'HKCU'
                    permanent_key = 'Environment'
                    if permanent == 'HKLM':
                        permanent_hive = 'HKLM'
                        permanent_key = r'SYSTEM\CurrentControlSet\Control\Session Manager\Environment'
                    out = __utils__['reg.read_value'](permanent_hive, permanent_key, _norm_key(key))
                    return out['success'] is True
                else:
                    return False
            if current_environ.get(_norm_key(key), None) is None and not key_exists():
                # The key does not exist in the environment.
                if false_unsets is not True:
                    # This key will be added with value ''.
                    ret['changes'].update({key: ''})
            else:
                # The key exists.
                if false_unsets is not True:
                    # Check to see if the value will change.
                    if current_environ.get(_norm_key(key), None) != '':
                        # This key's value will change to ''.
                        ret['changes'].update({key: ''})
                else:
                    # We're going to delete the key.
                    ret['changes'].update({key: None})
        elif current_environ.get(_norm_key(key), '') == val:
            already_set.append(key)
        else:
            ret['changes'].update({key: val})
    if __opts__['test']:
        # Dry-run mode: report what would change without applying anything.
        if ret['changes']:
            ret['comment'] = 'Environ values will be changed'
        else:
            ret['comment'] = 'Environ values are already set with the correct values'
        return ret
    if ret['changes']:
        environ_ret = __salt__['environ.setenv'](environ, false_unsets, clear_all, update_minion, permanent)
        if not environ_ret:
            ret['result'] = False
            ret['comment'] = 'Failed to set environ variables'
            return ret
        ret['result'] = True
        ret['changes'] = environ_ret
        ret['comment'] = 'Environ values were set'
    else:
        ret['comment'] = 'Environ values were already set with the correct values'
    return ret
|
def close(self, *args, **kwds):
    """Close database.

    Closes the active cursor first, commits pending changes, then closes
    the underlying database handle.
    """
    self.cur.close()
    self.commit()
    self.DB.close()
|
def bitswap_wantlist(self, peer=None, **kwargs):
    """Returns blocks currently on the bitswap wantlist.

    .. code-block:: python

        >>> c.bitswap_wantlist()
        {'Keys': [
            'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb',
            'QmdCWFLDXqgdWQY9kVubbEHBbkieKd3uo7MtCm7nTZZE9K',
            'QmVQ1XvYGF19X4eJqz1s7FJYJqAxFC4oqh3vWJJEXn66cp'

    Parameters
    ----------
    peer : str
        Peer to show wantlist for.

    Returns
    -------
    dict : List of wanted blocks
    """
    return self._client.request('/bitswap/wantlist', (peer,),
                                decoder='json', **kwargs)
|
def bypass(*inputs, copy=False):
    """Returns the same arguments.

    :param inputs: Inputs values.
    :type inputs: T
    :param copy: If True, it returns a deepcopy of input values.
    :type copy: bool, optional
    :return: Same input values.
    :rtype: (T, ...), T

    Example::

        >>> bypass('a', 'b', 'c')
        ('a', 'b', 'c')
        >>> bypass('a')
        'a'
    """
    # A single argument is returned bare rather than as a 1-tuple.
    result = inputs[0] if len(inputs) == 1 else inputs
    return _copy.deepcopy(result) if copy else result
|
def verify_state(self):
    """Verify if session was not yet opened. If it is, open it and call
    connection's `on_open`."""
    if self.state != CONNECTING:
        return
    self.state = OPEN
    self.conn.on_open(self.conn_info)
|
def write(self, name, **data):
    """Write the metric to elasticsearch.

    Args:
        name (str): The name of the metric to write
        data (dict): Additional data to store with the metric
    """
    data["name"] = name
    if "timestamp" not in data:
        # Stamp the metric at write time when the caller did not supply one.
        data["timestamp"] = datetime.utcnow()
    try:
        self.client.index(index=self.get_index(), doc_type=self.doc_type,
                          id=None, body=data)
    except TransportError as exc:
        logger.warning('writing metric %r failure %r', data, exc)
|
def get_form_data(chart_id, dashboard=None):
    """Build `form_data` for chart GET request from dashboard's
    `default_filters`.

    When a dashboard has `default_filters` they need to be added as extra
    filters in the GET request for charts.
    """
    form_data = {'slice_id': chart_id}
    if dashboard is None or not dashboard.json_metadata:
        return form_data
    json_metadata = json.loads(dashboard.json_metadata)
    if chart_id in json_metadata.get('filter_immune_slices', []):
        # The chart ignores dashboard filters entirely.
        return form_data
    default_filters = json.loads(json_metadata.get('default_filters', 'null'))
    if not default_filters:
        return form_data
    # Individual chart fields may be immune to filters.
    immune_fields = json_metadata.get('filter_immune_slice_fields', {}).get(
        str(chart_id), [])
    extra_filters = [
        {'col': col, 'op': 'in', 'val': val}
        for filters in default_filters.values()
        for col, val in filters.items()
        if col not in immune_fields
    ]
    if extra_filters:
        form_data['extra_filters'] = extra_filters
    return form_data
|
def p_enumerated_subtype_field(self, p):
    # The docstring below is the PLY/yacc grammar production for this parser
    # rule — it is consumed by the parser generator and must not be reworded.
    'subtype_field : ID type_ref NL'
    # Build the AST node from the ID token (name) and the parsed type_ref.
    p[0] = AstSubtypeField(self.path, p.lineno(1), p.lexpos(1), p[1], p[2])
|
def contains_regex(path, regex, lchar=''):
    '''.. deprecated:: 0.17.0
        Use :func:`search` instead.

    Return True if the given regular expression matches on any line in the
    text of a given file.

    If the lchar argument (leading char) is specified, it will strip `lchar`
    from the left side of each line before trying to match.

    CLI Example:

    .. code-block:: bash

        salt '*' file.contains_regex /etc/crontab
    '''
    path = os.path.expanduser(path)
    if not os.path.exists(path):
        return False
    try:
        with salt.utils.files.fopen(path, 'r') as target:
            for raw_line in target:
                candidate = salt.utils.stringutils.to_unicode(raw_line)
                if lchar:
                    candidate = candidate.lstrip(lchar)
                if re.search(regex, candidate):
                    return True
        return False
    except (IOError, OSError):
        # Unreadable file counts as "no match".
        return False
|
def enforce_cf_variable(var, mask_and_scale=True):
    """Given a Variable constructed from GEOS-Chem output, enforce
    CF-compliant metadata and formatting.

    Until a bug with lazily-loaded data and masking/scaling is resolved in
    xarray, you have the option to manually mask and scale the data here.

    Parameters
    ----------
    var : xarray.Variable
        A variable holding information decoded from GEOS-Chem output.
    mask_and_scale : bool
        Flag to scale and mask the data given the unit conversions provided.

    Returns
    -------
    out : xarray.Variable
        The original variable processed to conform to CF standards.

    .. note::
        This method borrows heavily from the ideas in
        ``xarray.decode_cf_variable``.
    """
    var = as_variable(var)
    data = var._data  # access _data instead of data to avoid eager loading
    dims = var.dims
    attrs = var.attrs.copy()
    encoding = var.encoding.copy()
    orig_dtype = data.dtype
    # Promote a "scale" attribute (unit-conversion factor) to the
    # CF-standard "scale_factor" in both attrs and encoding.
    if 'scale' in attrs:
        scale_factor = attrs.pop('scale')
        attrs['scale_factor'] = scale_factor
        encoding['scale_factor'] = scale_factor
        # TODO: Once the xr.decode_cf bug is fixed, we won't need to
        # manually handle masking/scaling.
        if mask_and_scale:
            data = scale_factor * data
    # Normalize a "unit" attribute to a CF-compliant "units" attribute.
    # TODO: How do we want to handle parts-per-* units? These are not part
    # of the udunits standard, and the CF conventions suggest using units
    # like 1e-6 for parts-per-million. But we potentially mix mass and
    # volume/molar mixing ratios in GEOS-Chem output, so we need a way to
    # handle that edge case.
    if 'unit' in attrs:
        attrs['units'] = get_cfcompliant_units(attrs.pop('unit'))
    return Variable(dims, data, attrs, encoding=encoding)
|
def api(server, command, *args, **kwargs):
    '''Call the Spacewalk xmlrpc api.

    CLI Example:

    .. code-block:: bash

        salt-run spacewalk.api spacewalk01.domain.com systemgroup.create MyGroup Description
        salt-run spacewalk.api spacewalk01.domain.com systemgroup.create arguments='["MyGroup", "Description"]'

    State Example:

    .. code-block:: yaml

        create_group:
          salt.runner:
            - name: spacewalk.api
            - server: spacewalk01.domain.com
            - command: systemgroup.create
            - arguments:
              - MyGroup
              - Description
    '''
    # Positional args are the fallback when no explicit 'arguments' kwarg.
    arguments = kwargs.get('arguments', args)
    call = '{0} {1}'.format(command, arguments)
    try:
        client, key = _get_session(server)
    except Exception as exc:
        err_msg = 'Exception raised when connecting to spacewalk server ({0}): {1}'.format(server, exc)
        log.error(err_msg)
        return {call: err_msg}
    # Resolve e.g. "systemgroup.create" to client.systemgroup.create.
    namespace, method = command.split('.')
    endpoint = getattr(getattr(client, namespace), method)
    try:
        output = endpoint(key, *arguments)
    except Exception as e:
        output = 'API call failed: {0}'.format(e)
    return {call: output}
|
def _send ( self , message ) :
"""Send an email .
Helper method that does the actual sending ."""
|
if not message . recipients ( ) :
return False
try :
self . connection . sendmail ( message . sender , message . recipients ( ) , message . message ( ) . as_string ( ) , )
except Exception as e :
logger . error ( "Error sending a message to server %s:%s: %s" , self . host , self . port , e , )
if not self . fail_silently :
raise
return False
return True
|
def cmd_xor(k, i, o):
    """XOR cipher.

    Note: XOR is not a 'secure cipher'. If you need strong crypto you must
    use algorithms like AES. You can use habu.fernet for that.

    Example:

    $ habu.xor -k mysecretkey -i /bin/ls > xored
    $ habu.xor -k mysecretkey -i xored > uxored
    $ sha1sum /bin/ls uxored
    $ 6fcf930fcee1395a1c95f87dd38413e02deff4bb /bin/ls
    $ 6fcf930fcee1395a1c95f87dd38413e02deff4bb uxored
    """
    payload = i.read()
    o.write(xor(payload, k.encode()))
|
def validate_password_reset(cls, code, new_password):
    """Validates an unhashed code against a hashed code.

    Once the code has been validated and confirmed, new_password will
    replace the old user's password.
    """
    password_reset_model = PasswordResetModel.where_code(code)
    if password_reset_model is None:
        return None
    jwt = JWT()
    if jwt.verify_token(password_reset_model.token):
        user = cls.where_id(jwt.data['data']['user_id'])
        if user is not None:
            user.set_password(new_password)
            PasswordResetModel.delete_where_user_id(user.id)
            return user
    # Token invalid (or user gone): drop the expired/invalid reset entry.
    password_reset_model.delete()
    return None
|
def filter_queryset(self, request, queryset, view):
    """This method overrides the standard filter_queryset method.

    This method will check to see if the view calling this is from a list
    type action. This function will also route the filter by action type
    if action_routing is set to True.
    """
    if view.lookup_field in view.kwargs:
        # Detail-type request: leave the queryset untouched.
        return queryset
    if not self.action_routing:
        return self.filter_list_queryset(request, queryset, view)
    # Route to a per-action filter method, e.g. filter_list_queryset.
    action_filter = getattr(self, "filter_{action}_queryset".format(action=view.action))
    return action_filter(request, queryset, view)
|
def save(self, *args, **kwargs):
    """Clean text and save formatted version."""
    cleaned = clean_text(self.text)
    self.text = cleaned
    self.text_formatted = format_text(cleaned)
    super(BaseUserContentModel, self).save(*args, **kwargs)
|
def guest_resize_cpus(self, userid, cpu_cnt):
    """Resize virtual cpus of guests.

    :param userid: (str) the userid of the guest to be resized
    :param cpu_cnt: (int) The number of virtual cpus that the guest should
        have defined in user directory after resize. The value should be
        an integer between 1 and 64.
    """
    description = "resize guest '%s' to have '%i' virtual cpus" % (userid, cpu_cnt)
    LOG.info("Begin to %s" % description)
    # Re-raise SDK base errors with the action context attached.
    with zvmutils.log_and_reraise_sdkbase_error(description):
        self._vmops.resize_cpus(userid, cpu_cnt)
    LOG.info("%s successfully." % description)
|
def get_subscriptions(self, publication_id=None, owner_id=None, since_when=None, limit_to=200, max_calls=None, start_record=0, verbose=False):
    """Fetch subscriptions from MemberSuite, optionally filtered by
    owner, publication, and last-modified date (``since_when`` days ago).
    """
    clauses = []
    if owner_id:
        clauses.append("owner = '%s'" % owner_id)
    if publication_id:
        clauses.append("publication = '%s'" % publication_id)
    if since_when:
        cutoff = datetime.date.today() - datetime.timedelta(days=since_when)
        clauses.append("LastModifiedDate > '%s 00:00:00'" % cutoff)
    query = "SELECT Objects() FROM Subscription"
    if clauses:
        query = "%s WHERE %s" % (query, " AND ".join(clauses))
    return self.get_long_query(query, limit_to=limit_to, max_calls=max_calls,
                               start_record=start_record, verbose=verbose)
|
def insert_many(self, rows, chunk_size=1000):
    """Add many rows at a time, which is significantly faster than adding
    them one by one.

    Per default the rows are processed in chunks of 1000 per commit,
    unless you specify a different ``chunk_size``.  See
    :py:meth:`insert() <dataset.Table.insert>` for details on the other
    parameters. ::

        rows = [dict(name='Dolly')] * 10000
        table.insert_many(rows)
    """
    def _flush(chunk):
        # One multi-row INSERT per chunk; much faster than row-by-row.
        self.table.insert().execute(chunk)
        self._check_dropped()
        # NOTE: the previous version rebound a local ``chunk = []`` here,
        # which had no effect on the caller's list; removed.

    chunk = []
    for row in rows:
        chunk.append(row)
        if len(chunk) == chunk_size:
            _flush(chunk)
            chunk = []
    if chunk:
        _flush(chunk)
|
def make_install_requirement(name, version, extras, markers, constraint=False):
    """Generates an :class:`~pip._internal.req.req_install.InstallRequirement`.

    Create an InstallRequirement from the supplied metadata.

    :param name: The requirement's name.
    :type name: str
    :param version: The requirement version (must be pinned).
    :type version: str
    :param extras: The desired extras.
    :type extras: list[str]
    :param markers: The desired markers, without a preceding semicolon.
    :type markers: str
    :param constraint: Whether to flag the requirement as a constraint,
        defaults to False.
    :type constraint: bool, optional
    :return: A generated InstallRequirement
    :rtype: :class:`~pip._internal.req.req_install.InstallRequirement`
    """
    from pip_shims.shims import install_req_from_line

    # Extras are sorted for stability; no extras yields an empty string.
    extras_string = "[{}]".format(",".join(sorted(extras))) if extras else ""
    line = "{}{}=={}".format(name, extras_string, version)
    if markers:
        line = "{}; {}".format(line, str(markers))
    return install_req_from_line(str(line), constraint=constraint)
|
def _safe_sparse_mask ( tensor : torch . Tensor , mask : torch . Tensor ) -> torch . Tensor :
"""In PyTorch 1.0 , Tensor . _ sparse _ mask was changed to Tensor . sparse _ mask .
This wrapper allows AllenNLP to ( temporarily ) work with both 1.0 and 0.4.1."""
|
# pylint : disable = protected - access
try :
return tensor . sparse_mask ( mask )
except AttributeError : # TODO ( joelgrus ) : remove this and / or warn at some point
return tensor . _sparse_mask ( mask )
|
def initialize(self, stormconf, context):
    """Initialization: prepare the endless sequence of terms configured
    under ``TermCycleSpout/terms``.
    """
    spout_conf = get_config()['TermCycleSpout']
    self.terms = spout_conf['terms']
    self.term_seq = itertools.cycle(self.terms)
|
def get_iss_serial_no(self):
    """Request and store the serial number of the USB-ISS module."""
    command = [self.ISS_CMD, self.ISS_SER_NUM]
    self.write_data(command)
    # The module answers with an 8-byte serial number.
    self.iss_sn = self.read_data(8)
|
def _ProcessHistogram(self, tag, wall_time, step, histo):
    """Convert a histogram proto and accumulate it under ``tag``."""
    event = HistogramEvent(wall_time, step, self._ConvertHistogramProtoToTuple(histo))
    self.histograms.AddItem(tag, event)
    # The compressed reservoir applies its own compression on insert.
    self.compressed_histograms.AddItem(tag, event, self._CompressHistogram)
|
def privateKeyToAccount(self, private_key):
    '''Return a convenient object for working with the given private key.

    :param private_key: The raw private key
    :type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
    :return: object with methods for signing and encrypting
    :rtype: LocalAccount

    The returned account exposes ``signHash()``, ``signTransaction()``
    and ``encrypt()``, mirroring the same-named ``Account.*`` methods
    but without the private key argument.
    '''
    return LocalAccount(self._parsePrivateKey(private_key), self)
|
def rate_matrix(C, dt=1.0, method='KL', sparsity=None, t_agg=None, pi=None, tol=1.0E7, K0=None, maxiter=100000, on_error='raise'):
    r"""Estimate a reversible rate matrix from a count matrix.

    Parameters
    ----------
    C : (N, N) ndarray
        Count matrix at lag time ``dt``.
    dt : float, optional, default=1.0
        Lag time that was used to estimate ``C``.
    method : str, one of {'KL', 'CVE', 'pseudo', 'truncated_log'}
        Estimation algorithm: 'pseudo' returns the pseudo-generator
        :math:`(T - Id)/dt`; 'truncated_log' the truncated matrix
        logarithm of Davies [3]_; 'CVE' the convex-optimization approach
        of Crommelin and Vanden-Eijnden [1]_; 'KL' the maximum-likelihood
        algorithm of Kalbfleisch and Lawless [2]_.  In this implementation
        CVE is initialized with the pseudo-generator estimate and KL with
        the CVE result.
    sparsity : (N, N) ndarray or None, optional, default=None
        With 'CVE'/'KL', entries where ``sparsity[i, j] == 0`` and
        ``sparsity[j, i] == 0`` constrain :math:`K_{ij}` and
        :math:`K_{ji}` to zero.  ``None`` estimates a fully occupied
        rate matrix.
    t_agg : float, optional
        Aggregated simulation time; by default the total number of
        transition counts times the lag time (no sliding window
        counting).  Used for the lower bound on nonzero transition
        rates; ignored when ``sparsity`` is None.
    pi : (N) ndarray, optional
        Stationary vector of the desired rate matrix K.  Defaults to the
        stationary vector of the reversible MLE transition matrix
        computed from C.
    tol : float, optional, default=1.0E7
        Passed as ``factr`` to ``scipy.optimize.fmin_l_bfgs_b``; 1e12
        for low accuracy, 1e7 for moderate, 10.0 for extremely high.
    K0 : (N, N) ndarray, optional
        Initial guess for the rate matrix.
    maxiter : int, optional, default=100000
        Maximum number of minimization steps.
    on_error : str, optional, default='raise'
        'raise' to raise an exception on error, 'warn' to produce a
        (Python) warning instead.

    Returns
    -------
    K : (N, N) ndarray
        The optimal rate matrix.

    Example
    -------
    >>> import numpy as np
    >>> from msmtools.estimation import rate_matrix
    >>> C = np.array([[100, 1], [50, 50]])
    >>> rate_matrix(C)
    array([[-0.01384753,  0.01384753],
           [ 0.69930032, -0.69930032]])

    References
    ----------
    .. [1] D. Crommelin and E. Vanden-Eijnden. Data-based inference of
       generators for markov jump processes using convex optimization.
       Multiscale. Model. Sim., 7(4):1751-1778, 2009.
    .. [2] J. D. Kalbfleisch and J. F. Lawless. The analysis of panel
       data under a markov assumption. J. Am. Stat. Assoc.,
       80(392):863-871, 1985.
    .. [3] E. B. Davies. Embeddable Markov Matrices. Electron. J.
       Probab. 15:1474, 2010.
    """
    # All heavy lifting happens in the dense implementation module.
    from .dense.ratematrix import estimate_rate_matrix as _estimate
    return _estimate(C, dt=dt, method=method, sparsity=sparsity, t_agg=t_agg,
                     pi=pi, tol=tol, K0=K0, maxiter=maxiter, on_error=on_error)
|
def surface_state(num_lat=90, num_lon=None, water_depth=10., T0=12., T2=-40.):
    """Set up the state variable dictionary for a surface model
    (e.g. :class:`~climlab.model.ebm.EBM`) with a uniform slab ocean depth.

    The domain is 1D (latitude) unless ``num_lon`` is supplied, in which
    case it is 2D (latitude, longitude).  The single state variable
    ``Ts`` (temperature of the surface mixed layer / slab ocean) is
    initialized to the smooth equator-to-pole profile

    .. math:: T(\\phi) = T_0 + T_2 P_2(\\sin\\phi)

    where :math:`\\phi` is latitude and :math:`P_2` is the second
    Legendre polynomial :class:`~climlab.utils.legendre.P2`.

    :param int num_lat: number of latitude points [default: 90]
    :param int num_lon: (optional) number of longitude points [default: None]
    :param float water_depth: depth of the slab ocean in meters [default: 10.]
    :param float T0: global-mean initial temperature in
        :math:`^{\\circ}\\textrm{C}` [default: 12.]
    :param float T2: 2nd Legendre coefficient for the equator-to-pole
        gradient in initial temperature, in
        :math:`^{\\circ}\\textrm{C}` [default: -40.]
    :returns: dictionary with temperature
        :class:`~climlab.domain.field.Field` for surface mixed layer ``Ts``
    :rtype: dict
    """
    if num_lon is None:
        sfc = domain.zonal_mean_surface(num_lat=num_lat, water_depth=water_depth)
    else:
        sfc = domain.surface_2D(num_lat=num_lat, num_lon=num_lon, water_depth=water_depth)
    if 'lon' in sfc.axes:
        # 2D domain: broadcast latitude across all longitudes.
        _, lat = np.meshgrid(sfc.axes['lon'].points, sfc.axes['lat'].points)
    else:
        lat = sfc.axes['lat'].points
    profile = T0 + T2 * legendre.P2(np.sin(np.deg2rad(lat)))
    state = AttrDict()
    state['Ts'] = Field(profile, domain=sfc)
    return state
|
def rmdir(path):
    """Recursively delete a directory.

    On Windows an error handler retries with different permissions;
    otherwise removing directories (e.g. cloned via git) can make
    ``shutil.rmtree`` raise a PermissionError.
    """
    logger.debug("DEBUG** Window rmdir sys.platform: {}".format(sys.platform))
    handler = _windows_rmdir_readonly if sys.platform == 'win32' else None
    return shutil.rmtree(path, onerror=handler)
|
def salt_syndic():
    '''Start the salt syndic.'''
    import salt.utils.process
    salt.utils.process.notify_systemd()

    import salt.cli.daemons
    try:
        salt.cli.daemons.Syndic().start()
    except KeyboardInterrupt:
        # Terminate ourselves cleanly on Ctrl-C.
        os.kill(os.getpid(), 15)
|
def max_electronegativity(self):
    '''Return the maximum pairwise electronegativity difference
    between the elements of this composition.'''
    # max() over a generator avoids computing each |X1 - X2| twice as the
    # old manual loop did; default=0 preserves the previous behavior for
    # compositions with fewer than two elements.
    return max(
        (abs(Element(e1).X - Element(e2).X)
         for e1, e2 in combinations(self.elements, 2)),
        default=0,
    )
|
def _check_and_assign_normalization_members(self, normalization_ctor, normalization_kwargs):
    """Resolve and validate the normalization constructor, then store it
    together with its keyword arguments."""
    ctor = normalization_ctor
    if isinstance(ctor, six.string_types):
        # A string names a constructor; resolve it to a callable first.
        ctor = util.parse_string_to_constructor(ctor)
    if ctor is not None and not callable(ctor):
        raise ValueError("normalization_ctor must be a callable or a string that specifies "
                         "a callable, got {}.".format(ctor))
    self._normalization_ctor = ctor
    self._normalization_kwargs = normalization_kwargs
|
def parse_references(cls, filename):
    """Read ``filename`` line by line searching for the patterns

        -r file.in
        --requirement file.in

    and return the set of matched file names without extension,
    e.g. ``{'file'}``.
    """
    references = set()
    # BUG FIX: the file handle from ``for line in open(filename)`` was
    # never closed; ``with`` guarantees it is.
    with open(filename) as handle:
        for line in handle:
            matched = cls.RE_REF.match(line)
            if matched:
                reference = matched.group('path')
                references.add(os.path.splitext(reference)[0])
    return references
|
def update_info(user_id, newemail, extinfo=None):
    '''Update the user info by user_id.

    Result codes:
      '21' stands for invalid E-mail.
      '91' stands for unknown reason (database update failed).
    '''
    if extinfo is None:
        extinfo = {}
    out_dic = {'success': False, 'code': '00'}
    if not tools.check_email_valid(newemail):
        out_dic['code'] = '21'
        return out_dic
    cur_info = MUser.get_by_uid(user_id)
    cur_extinfo = cur_info.extinfo
    # Merge the requested extinfo keys over the stored extinfo.
    cur_extinfo.update(extinfo)
    try:
        entry = TabMember.update(user_email=newemail, extinfo=cur_extinfo).where(TabMember.uid == user_id)
        entry.execute()
        out_dic['success'] = True
    except Exception:
        # BUG FIX: a bare ``except:`` previously swallowed even
        # SystemExit/KeyboardInterrupt; only catch real errors here.
        out_dic['code'] = '91'
    return out_dic
|
def get_urls(session, name, data, find_changelogs_fn, **kwargs):
    """Gets URLs to changelogs.

    :param session: requests Session instance
    :param name: str, package name
    :param data: dict, meta data
    :param find_changelogs_fn: function, find_changelogs
    :return: tuple, (set(changelog URLs), set(repo URLs))
    """
    if "versions" not in data:
        return set(), set()
    # Build up candidate URLs we can possibly search for changelogs on,
    # from every version's homepage/repository metadata.
    candidates = set()
    for version, item in data["versions"].items():
        homepage = item.get("homepage")
        if homepage is not None:
            if isinstance(homepage, list):
                # BUG FIX: ``candidates.add(*homepage)`` raised TypeError
                # for lists with more than one entry; ``update`` adds all.
                candidates.update(homepage)
            else:
                candidates.add(homepage)
        repository = item.get("repository")
        if repository is not None:
            if "url" in repository:
                repo = repository["url"]
            elif "path" in repository:
                repo = repository["path"]
            else:
                continue
            repo = repo.replace("git://", "https://").replace(".git", "")
            candidates.add(repo)
    return find_changelogs_fn(session=session, name=name, candidates=candidates)
|
def addldapgrouplink(self, group_id, cn, group_access, provider):
    """Add LDAP group link.

    :param group_id: The ID of a group
    :param cn: The CN of a LDAP group
    :param group_access: Minimum access level for members of the LDAP group
    :param provider: LDAP provider for the LDAP group (when using several providers)
    :return: True if success
    """
    payload = {
        'id': group_id,
        'cn': cn,
        'group_access': group_access,
        'provider': provider,
    }
    url = '{0}/{1}/ldap_group_links'.format(self.groups_url, group_id)
    response = requests.post(url, headers=self.headers, data=payload, verify=self.verify_ssl)
    return response.status_code == 201
|
def parse_task_declaration(self, declaration_subAST):
    '''Parse one declaration from the WDL task AST subtree.

    Declarations look like::

        String my_name
        Int two_chains_i_mean_names = 0

    Example: a subAST representing ``String file_name`` yields
    ``('file_name', 'String', None)``.

    :param declaration_subAST: subAST representing a task declaration
    :return: (var_name, var_type, var_value) tuple
    '''
    name = self.parse_declaration_name(declaration_subAST.attr("name"))
    wdl_type = self.parse_declaration_type(declaration_subAST.attr("type"))
    value = self.parse_declaration_expressn(declaration_subAST.attr("expression"), es='')
    return (name, wdl_type, value)
|
def make_constrained_cfg_and_lbl_list(varied_dict, constraint_func=None, slice_dict=None, defaultslice=slice(0, 1)):
    r"""Enumerate constrained parameter combinations and their labels.

    Args:
        varied_dict (dict): parameters to vary with possible variations
        constraint_func (func): function to restrict parameter variations
        slice_dict (dict): dict of slices for each param of valid possible values
        defaultslice (slice): default slice used if a param is missing
            from slice_dict

    Returns:
        tuple: (cfgdict_list, cfglbl_list)

    CommandLine:
        python -m utool.util_gridsearch --test-make_constrained_cfg_and_lbl_list
    """
    # Optionally narrow each parameter's candidate values via slices.
    if slice_dict is None:
        restricted = varied_dict
    else:
        restricted = {
            key: val[slice_dict.get(key, defaultslice)]
            for key, val in six.iteritems(varied_dict)
        }
    # Cartesian product of all remaining parameter values.
    combos = util_dict.all_dict_combinations(restricted)
    # Drop combinations rejected by the constraint, if any.
    if constraint_func is not None:
        combos = constrain_cfgdict_list(combos, constraint_func)
    return combos, make_cfglbls(combos, varied_dict)
|
def datetimes(self):
    """Return datetimes for this collection as a tuple (lazily cached)."""
    cached = self._datetimes
    if cached is None:
        # Computed once from the header's analysis period, then memoized.
        cached = tuple(self.header.analysis_period.datetimes)
        self._datetimes = cached
    return cached
|
def register_json():
    """Register an encoder/decoder pair for JSON serialization."""
    from anyjson import deserialize as json_deserialize
    from anyjson import serialize as json_serialize

    registry.register(
        'json',
        json_serialize,
        json_deserialize,
        content_type='application/json',
        content_encoding='utf-8',
    )
|
async def zmq_ipc_pipe_end(ctx, side, endpoint, *, serializer: Optional[Serializer] = None, initialize=True):
    """Return a `ZmqPipeEnd` backed by an `ipc` connection; the endpoint
    must contain the scheme part.

    Side `'a'` binds a `ROUTER` socket on the given endpoint; side `'b'`
    connects a `DEALER` socket to it.  If both ends of the connection are
    created on the same thread/task, avoid a deadlock by creating both
    ends first, then initializing `'b'` before `'a'`::

        a = await zmq_ipc_pipe_end(ctx, 'a', endpoint, initialize=False)
        b = await zmq_ipc_pipe_end(ctx, 'b', endpoint)
        await a.initialize()

    In that case, prefer `zmq_ipc_pipe` for creating both ends.
    """
    if side == 'a':
        pipe = ZmqPipeEnd(ctx, zmq.ROUTER, endpoint, port=None, bind=True, serializer=serializer)
    elif side == 'b':
        pipe = ZmqPipeEnd(ctx, zmq.DEALER, endpoint, port=None, serializer=serializer)
    else:
        raise ValueError("side must be 'a' or 'b'")
    if initialize:
        await pipe.initialize()
    return pipe
|
def rebase(self, text, char='X'):
    """Replace every stop word in ``text`` with a same-length run of
    ``char``."""
    pattern = re.compile(
        r'\b(%s)\b' % '|'.join(self.collection),
        re.IGNORECASE | re.UNICODE,
    )
    return pattern.sub(lambda m: char * len(m.group(1)), text)
|
def _jzerostr(ins):
    """Jump if the top of the stack contains a NULL string pointer
    or a string whose length is zero.

    Emits Z80 assembly; a temporary string popped from the stack is
    freed once its length has been taken.
    """
    output = []
    disposable = False  # True if the string must be freed from memory
    if ins.quad[1][0] == '_':  # Variable?
        # BUG FIX: format with the full variable name, not just its first
        # character (the old code emitted 'ld hl, (_)' for every variable).
        output.append('ld hl, (%s)' % ins.quad[1])
    else:
        output.append('pop hl')
        output.append('push hl')  # Saves it for later
        disposable = True
    output.append('call __STRLEN')
    if disposable:
        # Swap the saved pointer back on top and release the temporary.
        output.append('ex (sp), hl')
        output.append('call __MEM_FREE')
        output.append('pop hl')
        REQUIRES.add('alloc.asm')
    # Jump when the 16-bit length in HL is zero.
    output.append('ld a, h')
    output.append('or l')
    output.append('jp z, %s' % str(ins.quad[2]))
    REQUIRES.add('strlen.asm')
    return output
|
def dump_string_to_file(string, filepath):
    """Append ``string`` as a single line to ``filepath``, creating the
    parent directory if necessary."""
    create_dirs(os.path.dirname(filepath))
    with open(filepath, 'a') as outfile:
        outfile.write(string + '\n')
|
def xcode(text, encoding='utf8', mode='ignore'):
    '''Convert a unicode string to ``bytes``; bytes pass through unchanged.

    >>> xcode(b'hello')
    b'hello'
    >>> xcode('hello')
    b'hello'
    '''
    if isinstance(text, str):
        return text.encode(encoding, mode)
    return text
|
def get_profile_configs(profile=None, use_cache=True):
    """Return the upload config for ``profile``, merged over the default
    profile/variant configs and cached unless ``use_cache`` is False."""
    if use_cache and profile in _profile_configs_cache:
        return _profile_configs_cache[profile]
    profile_conf = None
    if profile is not None:
        try:
            profile_conf = dju_settings.DJU_IMG_UPLOAD_PROFILES[profile]
        except KeyError:
            # Unknown profiles are an error, except the implicit 'default'.
            if profile != 'default':
                raise ValueError(unicode(ERROR_MESSAGES['unknown_profile']) % {'profile': profile})
    conf = copy.deepcopy(dju_settings.DJU_IMG_UPLOAD_PROFILE_DEFAULT)
    if profile_conf:
        conf.update(copy.deepcopy(profile_conf))
    # Each variant inherits from the default variant config.
    for index, variant in enumerate(conf['VARIANTS']):
        merged = copy.deepcopy(dju_settings.DJU_IMG_UPLOAD_PROFILE_VARIANT_DEFAULT)
        merged.update(variant)
        conf['VARIANTS'][index] = merged
    if use_cache:
        _profile_configs_cache[profile] = conf
    return conf
|
def pretty_print_config_to_json(self, services, hostname=None):
    """Return the API descriptor document as a pretty-printed JSON string.

    Args:
        services: Either a single protorpc.remote.Service or a list of them
            that implements an api/version.
        hostname: string, Hostname of the API, to override the value set on
            the current service.  Defaults to None.

    Returns:
        string, The API descriptor document as a JSON string.
    """
    return json.dumps(
        self.get_config_dict(services, hostname),
        sort_keys=True,
        indent=2,
        separators=(',', ': '),
    )
|
def create(self, **kwargs):
    """Create a notification and return the response body."""
    return self.client.create(url=self.base_url, json=kwargs)
|
def get_compound_bodies(node):
    """Returns a list of bodies of a compound statement node.

    Args:
        node: AST node.

    Returns:
        A list of bodies of the node. If the given node does not represent
        a compound statement, an empty list is returned.
    """
    if isinstance(node, (ast.Module, ast.FunctionDef, ast.ClassDef, ast.With)):
        return [node.body]
    elif isinstance(node, (ast.If, ast.While, ast.For)):
        return [node.body, node.orelse]
    elif PY2 and isinstance(node, ast.TryFinally):
        return [node.body, node.finalbody]
    elif PY2 and isinstance(node, ast.TryExcept):
        return [node.body, node.orelse] + [h.body for h in node.handlers]
    elif PY3 and isinstance(node, ast.Try):
        return ([node.body, node.orelse, node.finalbody]
                + [h.body for h in node.handlers])
    # BUG FIX: a stray ``end`` token here was a Python syntax error; removed.
    return []
|
def match ( self , cond , node ) :
"""See Also : : py : meth : ` IMatcher . match < poco . sdk . DefaultMatcher . IMatcher . match > `"""
|
op , args = cond
# 条件匹配
if op == 'and' :
for arg in args :
if not self . match ( arg , node ) :
return False
return True
if op == 'or' :
for arg in args :
if self . match ( arg , node ) :
return True
return False
# 属性匹配
comparator = self . comparators . get ( op )
if comparator :
attribute , value = args
targetValue = node . getAttr ( attribute )
return comparator . compare ( targetValue , value )
raise NoSuchComparatorException ( op , 'poco.sdk.DefaultMatcher' )
|
def p_delays(self, p):
    'delays : DELAY LPAREN expression RPAREN'
    # NOTE: the docstring above is the PLY grammar production -- it is read
    # by the parser generator at runtime and must not be reworded.
    # Wrap the parenthesized expression in a DelayStatement AST node.
    p[0] = DelayStatement(p[3], lineno=p.lineno(1))
    # Propagate the DELAY token's line number to the production result.
    p.set_lineno(0, p.lineno(1))
|
def save_fileAs(self):
    """Saves current **Script_Editor_tabWidget** Widget tab Model editor
    file as user chosen file.

    :return: Method success.
    :rtype: bool
    """
    editor = self.get_current_editor()
    if not editor:
        return False
    file = umbra.ui.common.store_last_browsed_path(
        QFileDialog.getSaveFileName(self, "Save As:", editor.file))
    if not file:
        return False
    candidate_editor = self.get_editor(file)
    if candidate_editor is editor:
        # Saving over the file already opened in this very editor.
        return self.save_file(file)
    if candidate_editor:
        raise foundations.exceptions.UserError(
            "{0} | '{1}' file is already opened!".format(self.__class__.__name__, file))
    LOGGER.info("{0} | Saving '{1}' file!".format(self.__class__.__name__, file))
    self.__lock_editor(editor)
    self.unregister_node_path(editor)
    if editor.save_fileAs(file):
        self.__model.update_authoring_nodes(editor)
        language = (self.__languages_model.get_file_language(file) or
                    self.__languages_model.get_language(self.__default_language))
        if editor.language.name != language.name:
            self.set_language(editor, language)
        return True
|
def _update_parent_attachments(self):
    """Best-effort update of the parent's ``has_attachments`` flag."""
    try:
        self._parent.has_attachments = len(self.__attachments) > 0
    except AttributeError:
        # The parent (or the flag) may not exist; that's fine.
        pass
|
def _run(self, url, auth):
    '''Depth-first crawl of the catalog references, yielding the URL of
    each leaf dataset found.

    :param str url: URL for the current catalog
    :param requests.auth.AuthBase auth: requests auth object to use
    '''
    if url in self.visited:
        logger.debug("Skipping %s (already crawled)" % url)
        return
    self.visited.append(url)
    logger.info("Crawling: %s" % url)
    catalog_url = self._get_catalog_url(url)
    # Fetch the catalog XML and walk its datasets.
    xml_content = request_xml(catalog_url, auth)
    for dataset in self._build_catalog(catalog_url, xml_content):
        yield dataset
|
def format_exp_floats(decimals):
    """Return a formatter that switches to exponential notation for
    values too large for the fixed-width column."""
    big = 10 ** 5

    def _fmt(value):
        if value > big:
            return "{:.{prec}e}".format(value, prec=decimals)
        return "{:4.{prec}f}".format(value, prec=decimals)

    return _fmt
|
def intern(self, text):
    """Interns the given Unicode sequence into the symbol table.

    Note:
        This operation is only valid on local symbol tables.

    Args:
        text (unicode): The target to intern.

    Returns:
        SymbolToken: The mapped symbol token which may already exist
        in the table.
    """
    if self.table_type.is_shared:
        raise TypeError('Cannot intern on shared symbol table')
    if not isinstance(text, six.text_type):
        raise TypeError('Cannot intern non-Unicode sequence into symbol table: %r' % text)
    # Reuse an existing token when present; otherwise add a new one.
    existing = self.get(text)
    return existing if existing is not None else self.__add_text(text)
|
def immediateAssignmentReject():
    """IMMEDIATE ASSIGNMENT REJECT Section 9.1.20"""
    layers = [
        L2PseudoLength(l2pLength=0x13),
        TpPd(pd=0x6),
        MessageType(mesType=0x3a),  # 00111010
        PageModeAndSpareHalfOctets(),
    ]
    # Four (request reference, wait indication) pairs, then the rest octets.
    for _ in range(4):
        layers.append(RequestReference())
        layers.append(WaitIndication())
    layers.append(IraRestOctets())
    packet = layers[0]
    for layer in layers[1:]:
        packet = packet / layer
    return packet
|
def _loadSCPD(self, serviceType, timeout):
    """Internal method to load the action definitions.

    Downloads and parses the SCPD document for ``serviceType``, replacing
    any previously cached actions for that service.

    :param str serviceType: the service type to load
    :param int timeout: the timeout for downloading
    """
    if serviceType not in self.__deviceServiceDefinitions:
        raise ValueError("Can not load SCPD, no service type defined for: " + serviceType)
    if "scpdURL" not in self.__deviceServiceDefinitions[serviceType]:
        raise ValueError("No SCPD URL defined for: " + serviceType)
    # remove previously cached actions for the given service type
    self.__deviceSCPD.pop(serviceType, None)
    uri = self.__deviceServiceDefinitions[serviceType]["scpdURL"]
    # setup proxies
    # NOTE(review): if both proxies are configured only the http one is
    # used -- confirm whether they should be merged instead.
    proxies = {}
    if self.__httpsProxy:
        proxies = {"https": self.__httpsProxy}
    if self.__httpProxy:
        proxies = {"http": self.__httpProxy}
    # setup authentication
    auth = None
    if self.__password:
        auth = HTTPDigestAuth(self.__username, self.__password)
    # build the URL
    location = self.__protocol + "://" + self.__hostname + ":" + str(self.port) + uri
    # some devices respond differently without a User-Agent
    headers = {"User-Agent": "Mozilla/5.0; SimpleTR64-2"}
    # http request
    request = requests.get(location, auth=auth, proxies=proxies, headers=headers,
                           timeout=timeout, verify=self.__verify)
    if request.status_code != 200:
        errorStr = DeviceTR64._extractErrorString(request)
        raise ValueError('Could not load SCPD for "' + serviceType + '" from ' + location + ': ' +
                         str(request.status_code) + ' - ' + request.reason + " -- " + errorStr)
    data = request.text.encode('utf-8')
    if len(data) == 0:
        return
    # parse XML return
    try:
        root = ET.fromstring(data)
    except Exception as e:
        raise ValueError("Can not parse SCPD content for '" + serviceType + "' from '" +
                         location + "': " + str(e))
    actions = {}
    variableTypes = {}
    variableParameterDict = {}
    # iterate through the full XML tree
    # BUG FIX: Element.getchildren() was removed in Python 3.9; iterate directly.
    for element in list(root):
        tagName = element.tag.lower()
        if tagName.endswith("actionlist"):
            # remember the actions and where a specific variable gets referenced
            self._parseSCPDActions(element, actions, variableParameterDict)
        elif tagName.endswith("servicestatetable"):
            # go deeper for the variable declarations
            self._parseSCPDVariableTypes(element, variableTypes)
    # everything has been parsed, now merge the variable declarations into
    # the action parameters
    for name in variableParameterDict:
        if name not in variableTypes:
            raise ValueError("Variable reference in action can not be resolved: " + name)
        # iterate through all arguments where this variable has been referenced
        for argument in variableParameterDict[name]:
            # fill in the type of this variable/argument
            argument["dataType"] = variableTypes[name]["dataType"]
            # if the variable declaration includes a default value, add it
            # to the action parameter as well
            if "defaultValue" in variableTypes[name]:
                argument["defaultValue"] = variableTypes[name]["defaultValue"]
    self.__deviceSCPD[serviceType] = actions
|
def list_nodes ( kwargs = None , call = None ) :
    """Return the nodes available on this cloud provider.

    Every node carries exactly these fields: ``id`` (str), ``image`` (str),
    ``size`` (str), ``state`` (str), ``private_ips`` (list) and
    ``public_ips`` (list).  Normally invoked via the ``-Q`` option:

    .. code-block:: bash

        salt-cloud -Q

    :param kwargs: unused; kept for the salt-cloud calling convention
    :param call: how this function was invoked; must not be ``'action'``
    :return: mapping of node name to the selected attributes
    """
    if call == 'action' :
        raise SaltCloudSystemExit ( 'The list_nodes function must be called ' 'with -f or --function.' )
    wanted_fields = [ 'id' , 'image' , 'size' , 'state' , 'private_ips' , 'public_ips' ]
    full_listing = list_nodes_full ( 'function' )
    return __utils__ [ 'cloud.list_nodes_select' ] ( full_listing , wanted_fields , call )
|
def _check_dn ( self , dn , attr_value ) :
    """Validate a ``dn:`` attribute line of the current record.

    Reports an error when a DN was already seen in this record, and when
    *attr_value* is not a valid string representation of a DN.
    """
    if dn is not None :
        self . _error ( 'Two lines starting with dn: in one record.' )
    valid = is_dn ( attr_value )
    if not valid :
        self . _error ( 'No valid string-representation of ' 'distinguished name %s.' % attr_value )
|
def inherit_docstring_from ( cls ) :
    """Decorator that fills in a docstring from the same-named method of `cls`.

    Occurrences of ``%(super)s`` in the decorated function's docstring are
    replaced with the docstring of ``cls``'s method of the same name.  If
    the decorated function has no docstring at all, it simply inherits the
    one from `cls`.

    Extracted from scipy.misc.doccer.
    """
    def decorator ( func ) :
        parent_doc = getattr ( cls , func . __name__ ) . __doc__
        own_doc = func . __doc__
        if own_doc is None :
            func . __doc__ = parent_doc
        else :
            func . __doc__ = own_doc % { 'super' : parent_doc }
        return func
    return decorator
|
def brent_kung_add ( A , B , cin = 0 ) :
    """Return symbolic logic for an N-bit Brent-Kung adder.

    Parameters
    ----------
    A, B : sequences of expressions
        The two N-bit operands; must have equal length.
    cin : expression or constant, optional
        Carry input (default 0).

    Returns
    -------
    (sums, carries) : tuple of farray
        The bitwise sum outputs and the generate (carry) signals.

    Raises
    ------
    ValueError
        If ``A`` and ``B`` differ in length.
    """
    if len ( A ) != len ( B ) :
        raise ValueError ( "expected A and B to be equal length" )
    N = len ( A )
    # generate / propagate logic
    gs = [ A [ i ] & B [ i ] for i in range ( N ) ]
    ps = [ A [ i ] ^ B [ i ] for i in range ( N ) ]
    # floor(log2(N)) computed exactly in integer arithmetic;
    # the original floor(log(N, 2)) can be off by one because of
    # floating-point rounding (e.g. near exact powers of two).
    levels = N . bit_length ( ) - 1
    # carry tree
    for i in range ( levels ) :
        step = 2 ** i
        for start in range ( 2 ** ( i + 1 ) - 1 , N , 2 ** ( i + 1 ) ) :
            gs [ start ] = gs [ start ] | ps [ start ] & gs [ start - step ]
            ps [ start ] = ps [ start ] & ps [ start - step ]
    # inverse carry tree
    for i in range ( levels - 2 , - 1 , - 1 ) :
        start = 2 ** ( i + 1 ) - 1
        step = 2 ** i
        while start + step < N :
            gs [ start + step ] = gs [ start + step ] | ps [ start + step ] & gs [ start ]
            ps [ start + step ] = ps [ start + step ] & ps [ start ]
            start += step
    # sum logic
    ss = [ A [ 0 ] ^ B [ 0 ] ^ cin ]
    ss += [ A [ i ] ^ B [ i ] ^ gs [ i - 1 ] for i in range ( 1 , N ) ]
    return farray ( ss ) , farray ( gs )
|
def pick ( self , starting_node = None ) :
    """Choose the next node by following the links of a starting node.

    The chosen node is stored in ``self.current_node`` and returned.

    * when ``starting_node`` is given, its links drive the choice
    * when it is ``None``, ``self.current_node`` is used instead
    * when both are ``None``, a uniformly random node from
      ``self.node_list`` becomes the current node

    Args:
        starting_node (Node): ``Node`` whose links to pick from.

    Returns: Node

    Example:
        >>> from blur.markov.node import Node
        >>> node_1 = Node('One')
        >>> node_2 = Node('Two')
        >>> node_1.add_link(node_1, 5)
        >>> node_1.add_link(node_2, 2)
        >>> node_2.add_link(node_1, 1)
        >>> graph = Graph([node_1, node_2])
        >>> [graph.pick().get_value() for i in range(5)]  # doctest: +SKIP
        ['One', 'One', 'Two', 'One', 'One']
    """
    if starting_node is None :
        if self . current_node is not None :
            starting_node = self . current_node
        else :
            # No context at all: fall back to a uniform choice.
            chosen = random . choice ( self . node_list )
            self . current_node = chosen
            return chosen
    weighted_links = [ ( link . target , link . weight ) for link in starting_node . link_list ]
    self . current_node = weighted_choice ( weighted_links )
    return self . current_node
|
def copy_from_csv_sql ( qualified_name : str , delimiter = ',' , encoding = 'utf8' , null_str = '' , header = True , escape_str = '\\' , quote_char = '"' , force_not_null = None , force_null = None ) :
    """Build a PostgreSQL ``COPY ... FROM`` statement for CSV input.

    :param qualified_name: (schema-)qualified table name
    :param delimiter: CSV field separator
    :param encoding: source encoding, translated to the PostgreSQL
        spelling via ``get_postgres_encoding``
    :param null_str: string that represents NULL in the input
    :param header: when True, emit the HEADER option (skip first row)
    :param escape_str: CSV escape character
    :param quote_char: CSV quote character
    :param force_not_null: columns for the FORCE_NOT_NULL option
    :param force_null: columns for the FORCE_NULL option
    :return: the COPY statement as a string
    """
    options = [ "DELIMITER '%s'" % delimiter , "NULL '%s'" % null_str ]
    if header :
        options . append ( 'HEADER' )
    options . append ( "QUOTE '%s'" % quote_char )
    options . append ( "ESCAPE '%s'" % escape_str )
    if force_not_null :
        options . append ( _format_force_not_null ( column_names = force_not_null ) )
    if force_null :
        options . append ( _format_force_null ( column_names = force_null ) )
    options . append ( "ENCODING '%s'" % get_postgres_encoding ( encoding ) )
    return _format_copy_csv_sql ( qualified_name , copy_options = options )
|
def entity ( ctx , debug , uncolorize , ** kwargs ) :
    """CLI for tonomi.com using contrib-python-qubell-client

    To enable completion:

        eval "$(_NOMI_COMPLETE=source nomi)"
    """
    global PROVIDER_CONFIG
    if debug :
        log . basicConfig ( level = log . DEBUG )
        log . getLogger ( "requests.packages.urllib3.connectionpool" ) . setLevel ( log . DEBUG )
    # Copy any explicitly-set CLI options into the global QUBELL config.
    # BUG FIX: dict.iteritems() is Python 2 only and raises
    # AttributeError on Python 3; items() works on both.
    for ( k , v ) in kwargs . items ( ) :
        if v :
            QUBELL [ k ] = v
    PROVIDER_CONFIG = {
        'configuration.provider' : PROVIDER [ 'provider_type' ] ,
        'configuration.legacy-regions' : PROVIDER [ 'provider_region' ] ,
        'configuration.endpoint-url' : '' ,
        'configuration.legacy-security-group' : '' ,
        'configuration.identity' : PROVIDER [ 'provider_identity' ] ,
        'configuration.credential' : PROVIDER [ 'provider_credential' ] ,
    }

    class UserContext ( object ) :
        # Lazily-connected platform handles shared by the subcommands.
        def __init__ ( self ) :
            self . platform = None
            self . unauthenticated_platform = None
            self . colorize = not ( uncolorize )

        def get_platform ( self ) :
            if not self . platform :
                assert QUBELL [ "tenant" ] , "No platform URL provided. Set QUBELL_TENANT or use --tenant option."
                if not QUBELL [ "token" ] :
                    assert QUBELL [ "user" ] , "No username. Set QUBELL_USER or use --user option."
                    assert QUBELL [ "password" ] , "No password provided. Set QUBELL_PASSWORD or use --password option."
                self . platform = QubellPlatform . connect ( tenant = QUBELL [ "tenant" ] , user = QUBELL [ "user" ] , password = QUBELL [ "password" ] , token = QUBELL [ "token" ] )
            return self . platform

        def get_unauthenticated_platform ( self ) :
            if not self . unauthenticated_platform :
                assert QUBELL [ "tenant" ] , "No platform URL provided. Set QUBELL_TENANT or use --tenant option."
                self . unauthenticated_platform = QubellPlatform . connect ( tenant = QUBELL [ "tenant" ] )
            return self . unauthenticated_platform

    # Attach the context object to the real click context (the `ctx`
    # parameter is intentionally shadowed here, as in the original).
    ctx = click . get_current_context ( )
    ctx . obj = UserContext ( )
|
def _check_intemediate ( self , myntr , maxstate ) :
    """Propagate a newly-resolved terminal through related PDA rules.

    For each state pair Apq which is a known terminal, this function
    searches for rules Apr -> Apq Aqr and Arq -> Arp Apq where Aqr
    (respectively Arp) is also a known terminal.  It is mainly used as
    an optimization in order to avoid the O(n^3) cost of generating all
    the Apq -> Apr Arq rules during the PDA to CFG procedure.

    Args:
        myntr (str): The examined non terminal that was popped out
            of the queue.
        maxstate (int): Used for generating in a dynamic way the CNF
            rules that were not included due to the optimization; the
            algorithm generates these rules only if required.

    Returns:
        0 if nothing was resolved, 1 if at least one new pair was
        resolved, or the resolved string itself when a final non
        terminal was reached (via ``_checkfinal``).
    """
    # print 'BFS Dictionary Update - Intermediate'
    # Locate the last '@' or 'A' marker; the comma-separated state ids
    # of the pair (i, j) follow it.
    x_term = myntr . rfind ( '@' )
    y_term = myntr . rfind ( 'A' )
    if y_term > x_term :
        x_term = y_term
    ids = myntr [ x_term + 1 : ] . split ( ',' )
    if len ( ids ) < 2 :
        return 0
    # NOTE(review): i and j remain strings while r is an int, so the
    # guard `r != i and r != j` below is always true -- presumably
    # harmless because repr(r) is used in the dictionary keys; confirm.
    i = ids [ 0 ]
    j = ids [ 1 ]
    r = 0
    find = 0
    while r < maxstate :
        if r != i and r != j :
            # Rule Air -> Aij Ajr: resolve A<i,r> from myntr + A<j,r>.
            if 'A' + i + ',' + repr ( r ) not in self . resolved and 'A' + j + ',' + repr ( r ) in self . resolved :
                self . resolved [ 'A' + i + ',' + repr ( r ) ] = self . resolved [ myntr ] + self . resolved [ 'A' + j + ',' + repr ( r ) ]
                if self . _checkfinal ( 'A' + i + ',' + repr ( r ) ) :
                    return self . resolved [ 'A' + i + ',' + repr ( r ) ]
                if 'A' + i + ',' + repr ( r ) not in self . bfs_queue :
                    self . bfs_queue . append ( 'A' + i + ',' + repr ( r ) )
                find = 1
            # Rule Arj -> Ari Aij: resolve A<r,j> from A<r,i> + myntr.
            if 'A' + repr ( r ) + ',' + j not in self . resolved and 'A' + repr ( r ) + ',' + i in self . resolved :
                self . resolved [ 'A' + repr ( r ) + ',' + j ] = self . resolved [ 'A' + repr ( r ) + ',' + i ] + self . resolved [ myntr ]
                if self . _checkfinal ( 'A' + repr ( r ) + ',' + j ) :
                    return self . resolved [ 'A' + repr ( r ) + ',' + j ]
                if 'A' + repr ( r ) + ',' + j not in self . bfs_queue :
                    self . bfs_queue . append ( 'A' + repr ( r ) + ',' + j )
                find = 1
        r = r + 1
    if find == 1 :
        return 1
    return 0
|
def flexifunction_command_send ( self , target_system , target_component , command_type , force_mavlink1 = False ) :
    '''Acknowledge success or failure of a flexifunction command.

    target_system    : System ID (uint8_t)
    target_component : Component ID (uint8_t)
    command_type     : Flexifunction command type (uint8_t)
    '''
    message = self . flexifunction_command_encode ( target_system , target_component , command_type )
    return self . send ( message , force_mavlink1 = force_mavlink1 )
|
def setitem_without_overwrite ( d , key , value ) :
    """Insert ``key: value`` into *d*, refusing to replace an existing entry.

    @param d: An instance of dict, that is: isinstance(d, dict)
    @param key: a key
    @param value: a value to associate with the key
    @return: None
    @raise: OverwriteError if the key is already present in d.
    """
    if key in d :
        raise OverwriteError ( key , value , d [ key ] )
    # Bypass any __setitem__ override on dict subclasses.
    dict . __setitem__ ( d , key , value )
|
def get_states ( self , devices ) :
    """Fetch the current states of *devices* from the server.

    Posts a ``getStates`` request with the session cookie.  On a
    non-200 response the session is assumed expired: the client logs in
    again and retries once.  The parsed JSON result is handed to
    ``self._get_states``.

    :param devices: devices to query, encoded by
        ``self._create_get_state_request``
    :raises Exception: when the response body is not valid JSON
    """
    header = BASE_HEADERS . copy ( )
    header [ 'Cookie' ] = self . __cookie
    json_data = self . _create_get_state_request ( devices )
    request = requests . post ( BASE_URL + 'getStates' , headers = header , data = json_data , timeout = 10 )
    if request . status_code != 200 :
        # Session expired: re-authenticate and retry once.
        self . __logged_in = False
        self . login ( )
        self . get_states ( devices )
        return
    try :
        result = request . json ( )
    except ValueError as error :
        # BUG FIX: the original concatenated a str with the exception
        # object ("..." + error), which raises TypeError instead of the
        # intended protocol-error message.
        raise Exception ( "Not a valid result for " "getStates, protocol error: " + str ( error ) )
    self . _get_states ( result )
|
async def container_dump ( self , container , container_type , params = None ) :
    """Dump a container of elements to the writer.

    :param container: iterable of elements to serialize
    :param container_type: declared container type; its element type is
        obtained via ``x.container_elem_type``
    :param params: optional type parameters; ``params[1:]`` is forwarded
        to each element dump
    :return: None
    :raises helpers.ArchiveException: wrapping any error raised while
        dumping an element, together with the tracker state
    """
    # The container length is written first.
    await self . container_size ( len ( container ) , container_type , params )
    elem_type = x . container_elem_type ( container_type , params )
    for idx , elem in enumerate ( container ) :
        try :
            self . tracker . push_index ( idx )
            await self . _dump_field ( elem , elem_type , params [ 1 : ] if params else None )
            self . tracker . pop ( )
        except Exception as e :
            # NOTE(review): pop() is deliberately inside the try rather
            # than a finally -- presumably so the failing index is still
            # on the tracker when captured in ArchiveException; confirm.
            raise helpers . ArchiveException ( e , tracker = self . tracker ) from e
|
def oauth2callback ( self , view_func ) :
    """Decorator for the OAuth2 callback view.

    Validates the signed ``state`` parameter, completes either the web
    server flow (exchanging ``code`` for a token and fetching userinfo)
    or the browser flow (reading parameters from the URL fragment), and
    finally calls ``view_func`` with the collected parameters.
    """
    @ wraps ( view_func )
    def decorated ( * args , ** kwargs ) :
        params = { }
        # Check sig: the state carries its own signature; reject the
        # request if it does not verify.
        if 'state' in request . args :
            params . update ( ** self . parse_state ( request . args . get ( 'state' ) ) )
            if params . pop ( 'sig' , None ) != make_secure_token ( ** params ) :
                return self . login_manager . unauthorized ( )
        code = request . args . get ( 'code' )
        # Web server flow: exchange the authorization code for a token.
        if code :
            token = self . exchange_code ( code , url_for ( request . endpoint , _external = True , _scheme = self . redirect_scheme , ) , )
            userinfo = self . get_userinfo ( token [ 'access_token' ] )
            params . update ( token = token , userinfo = userinfo )
        # Browser flow: parameters arrive in the URL fragment, which the
        # server never sees -- the returned script re-requests the page
        # with the fragment converted to a query string.
        else :
            if params :
                params . update ( dict ( request . args . items ( ) ) )
            else :
                return '''
<script>
window.onload = function() {
location.href = '?' + window.location.hash.substr(1);
};
</script>
'''
        return view_func ( ** params )
    return decorated
|
def parse_event_xml ( xml_event ) :
    """Parse the body of a UPnP event.

    Args:
        xml_event (bytes): bytes containing the body of the event,
            encoded with utf-8.

    Returns:
        dict: A dict with keys representing the evented variables. The
        relevant value will usually be a string representation of the
        variable's value, but may on occasion be:

        * a dict (eg when the volume changes, the value will itself be a
          dict containing the volume for each channel:
          :code:`{'Volume': {'LF': '100', 'RF': '100', 'Master': '36'}}`)
        * an instance of a `DidlObject` subclass (eg if it represents
          track metadata).
        * a `SoCoFault` (if a variable contains illegal metadata)

    Example:
        Run this code, and change your volume, tracks etc::

            from __future__ import print_function
            try:
                from queue import Empty
            except:  # Py2.7
                from Queue import Empty
            import soco
            from pprint import pprint
            from soco.events import event_listener
            # pick a device at random
            device = soco.discover().pop()
            print(device.player_name)
            sub = device.renderingControl.subscribe()
            sub2 = device.avTransport.subscribe()
            while True:
                try:
                    event = sub.events.get(timeout=0.5)
                    pprint(event.variables)
                except Empty:
                    pass
                try:
                    event = sub2.events.get(timeout=0.5)
                    pprint(event.variables)
                except Empty:
                    pass
                except KeyboardInterrupt:
                    sub.unsubscribe()
                    sub2.unsubscribe()
                    event_listener.stop()
                    break
    """
    result = { }
    tree = XML . fromstring ( xml_event )
    # property values are just under the propertyset, which
    # uses this namespace
    properties = tree . findall ( '{urn:schemas-upnp-org:event-1-0}property' )
    for prop in properties :  # pylint: disable=too-many-nested-blocks
        for variable in prop :
            # Special handling for a LastChange event specially. For details on
            # LastChange events, see
            # http://upnp.org/specs/av/UPnP-av-RenderingControl-v1-Service.pdf
            # and http://upnp.org/specs/av/UPnP-av-AVTransport-v1-Service.pdf
            if variable . tag == "LastChange" :
                last_change_tree = XML . fromstring ( variable . text . encode ( 'utf-8' ) )
                # We assume there is only one InstanceID tag. This is true for
                # Sonos, as far as we know.
                # InstanceID can be in one of two namespaces, depending on
                # whether we are looking at an avTransport event, a
                # renderingControl event, or a Queue event
                # (there, it is named QueueID)
                instance = last_change_tree . find ( "{urn:schemas-upnp-org:metadata-1-0/AVT/}InstanceID" )
                if instance is None :
                    instance = last_change_tree . find ( "{urn:schemas-upnp-org:metadata-1-0/RCS/}InstanceID" )
                if instance is None :
                    instance = last_change_tree . find ( "{urn:schemas-sonos-com:metadata-1-0/Queue/}QueueID" )
                # Look at each variable within the LastChange event
                for last_change_var in instance :
                    tag = last_change_var . tag
                    # Remove any namespaces from the tags
                    if tag . startswith ( '{' ) :
                        tag = tag . split ( '}' , 1 ) [ 1 ]
                    # Un-camel case it
                    tag = camel_to_underscore ( tag )
                    # Now extract the relevant value for the variable.
                    # The UPnP specs suggest that the value of any variable
                    # evented via a LastChange Event will be in the 'val'
                    # attribute, but audio related variables may also have a
                    # 'channel' attribute. In addition, it seems that Sonos
                    # sometimes uses a text value instead: see
                    # http://forums.sonos.com/showthread.php?t=34663
                    value = last_change_var . get ( 'val' )
                    if value is None :
                        value = last_change_var . text
                    # NOTE(review): if neither 'val' nor text is present,
                    # value is None here and startswith() would raise
                    # AttributeError -- presumably Sonos always sends one
                    # of the two; confirm.
                    # If DIDL metadata is returned, convert it to a music
                    # library data structure
                    if value . startswith ( '<DIDL-Lite' ) :
                        # Wrap any parsing exception in a SoCoFault, so the
                        # user can handle it
                        try :
                            didl = from_didl_string ( value )
                            if not didl :
                                continue
                            value = didl [ 0 ]
                        except SoCoException as original_exception :
                            log . debug ( "Event contains illegal metadata" "for '%s'.\n" "Error message: '%s'\n" "The result will be a SoCoFault." , tag , str ( original_exception ) )
                            event_parse_exception = EventParseException ( tag , value , original_exception )
                            value = SoCoFault ( event_parse_exception )
                    channel = last_change_var . get ( 'channel' )
                    if channel is not None :
                        # Audio variables are grouped per channel.
                        if result . get ( tag ) is None :
                            result [ tag ] = { }
                        result [ tag ] [ channel ] = value
                    else :
                        result [ tag ] = value
            else :
                # Plain (non-LastChange) variables map tag -> text.
                result [ camel_to_underscore ( variable . tag ) ] = variable . text
    return result
|
def loads ( s , ** kwargs ) :
    """Deserialize a JSON document from the string *s*.

    Decoding errors raised by the active engine are re-raised as
    ``JSONError``.
    """
    decode = _engine [ 0 ]
    error_cls = _engine [ 2 ]
    try :
        return decode ( s )
    except error_cls :
        # Grab the exception instance via exc_info() so this code parses
        # under both the Python 2 and Python 3 except-clause grammars.
        reason = sys . exc_info ( ) [ 1 ]
        raise JSONError ( reason )
|
def cut_segments ( x2d , tr , start , stop ) :
    """Cut a continuous signal into equally sized segments.

    Parameters
    ----------
    x2d : array, shape (m, n)
        Input data with m signals and n samples.
    tr : list of int
        Trigger positions.
    start : int
        Window start (offset relative to trigger).
    stop : int
        Window end (offset relative to trigger).

    Returns
    -------
    x3d : array, shape (len(tr), m, stop - start)
        Segments cut from the data, stacked along the first dimension.

    See also
    --------
    cat_trials : Concatenate segments.

    Examples
    --------
    >>> data = np.random.randn(5, 1000)  # 5 channels, 1000 samples
    >>> tr = [750, 500, 250]             # three segments
    >>> x3d = cut_segments(data, tr, 50, 100)  # each segment is 50 samples
    >>> x3d.shape
    (3, 5, 50)
    """
    for name , offset in ( ( "start" , start ) , ( "stop" , stop ) ) :
        if offset != int ( offset ) :
            raise ValueError ( "%s index must be an integer" % name )
    x2d = np . atleast_2d ( x2d )
    triggers = np . asarray ( tr , dtype = int ) . ravel ( )
    window = np . arange ( start , stop , dtype = int )
    segments = [ x2d [ np . newaxis , : , t + window ] for t in triggers ]
    return np . concatenate ( segments )
|
def get_metrics ( self ) :
    """Retrieve the current metric values for this :term:`Metrics Context`
    resource from the HMC.

    The values are returned as a string in the `MetricsResponse` format
    described with the 'Get Metrics' operation in the :term:`HMC API`
    book.  The :class:`~zhmcclient.MetricsResponse` class can be used to
    process the returned string and provides structured access to the
    metric values.

    Returns:

      :term:`string`:
        The current metric values, in the `MetricsResponse` string format.

    Raises:

      :exc:`~zhmcclient.HTTPError`
      :exc:`~zhmcclient.ParseError`
      :exc:`~zhmcclient.AuthError`
      :exc:`~zhmcclient.ConnectionError`
    """
    response = self . manager . session . get ( self . uri )
    return response
|
def get_aliasing ( * extra ) :
    """Assemble the dict mapping strings and functions to the list of
    supported function names, e.g. alias['add'] = 'sum' and
    alias[sorted] = 'sort'.

    This function should only be called during import.
    """
    alias = { name : name for name in funcs_common }
    alias . update ( _alias_str )
    alias . update ( ( fn , fn ) for fn in _alias_builtin . values ( ) )
    alias . update ( _alias_builtin )
    for extra_mapping in extra :
        alias . update ( extra_mapping )
    alias . update ( ( name , name ) for name in set ( alias . values ( ) ) )
    # Treat nan-functions as first-class members and add them directly.
    for name in set ( alias . values ( ) ) :
        if name not in funcs_no_separate_nan :
            name = 'nan' + name
        alias [ name ] = name
    return alias
|
def rstyle ( self , name ) :
    """Remove the style entry *name* from the chart styles.

    Logs a warning when the style is not set, and an error on any other
    failure (e.g. an unhashable name).
    """
    try :
        del self . chart_style [ name ]
    except KeyError :
        self . warning ( "Style " + name + " is not set" )
    except Exception :
        # BUG FIX: was a bare `except:`, which also swallowed
        # SystemExit and KeyboardInterrupt.
        self . err ( "Can not remove style " + name )
|
def apply ( self , query , data ) :
    """Filter *query* using the value supplied for this filter in *data*.

    Returns the query unchanged when the field cannot be resolved, no
    value was supplied, or the supplied value equals the default.
    """
    field = self . model_field or query . model_class . _meta . fields . get ( self . name )
    if not field or self . name not in data :
        return query
    raw_value = self . value ( data )
    if raw_value is self . default :
        return query
    db_value = field . db_value ( raw_value )
    return self . filter_query ( query , field , db_value )
|
def version ( context = None ) :
    '''Attempts to run systemctl --version. Returns None if unable to determine
    version.'''
    contextkey = 'salt.utils.systemd.version'
    if isinstance ( context , dict ) :
        # Return the cached value when present.
        if contextkey in context :
            return context [ contextkey ]
    elif context is not None :
        raise SaltInvocationError ( 'context must be a dictionary if passed' )
    stdout = subprocess . Popen (
        [ 'systemctl' , '--version' ] ,
        close_fds = True ,
        stdout = subprocess . PIPE ,
        stderr = subprocess . STDOUT ) . communicate ( ) [ 0 ]
    outstr = salt . utils . stringutils . to_str ( stdout )
    try :
        # splitlines()[0] stays inside the try: empty output raises
        # IndexError and is handled like an unparsable version string.
        first_line = outstr . splitlines ( ) [ 0 ]
        ret = int ( re . search ( r'\w+ ([0-9]+)' , first_line ) . group ( 1 ) )
    except ( AttributeError , IndexError , ValueError ) :
        log . error ( 'Unable to determine systemd version from systemctl ' '--version, output follows:\n%s' , outstr )
        return None
    else :
        try :
            context [ contextkey ] = ret
        except TypeError :
            # context is None; nothing to cache.
            pass
        return ret
|
def prox_dca ( x , f , g , niter , gamma , callback = None ) :
    r"""Proximal DCA of Sun, Sampaio and Candido.

    This algorithm solves a problem of the form::

        min_x f(x) - g(x)

    where ``f`` and ``g`` are two proper, convex and lower semicontinuous
    functions.

    Parameters
    ----------
    x : `LinearSpaceElement`
        Initial point, updated in-place.
    f : `Functional`
        Convex functional. Needs to implement ``f.proximal``.
    g : `Functional`
        Convex functional. Needs to implement ``g.gradient``.
    niter : int
        Number of iterations.
    gamma : positive float
        Stepsize in the primal updates.
    callback : callable, optional
        Function called with the current iterate after each iteration.

    Notes
    -----
    The algorithm was proposed as Algorithm 2.3 in `[SSC2003]
    <http://www.global-sci.org/jcm/readabs.php?vol=21&no=4&page=451&year=2003&ppage=462>`_.
    It solves the problem

    .. math::
        \min f(x) - g(x)

    by using subgradients of :math:`g` and proximal points of :math:`f`.
    The iteration is given by

    .. math::
        y_n \in \partial g(x_n), \qquad x_{n+1}
        = \mathrm{Prox}_{\gamma f}(x_n + \gamma y_n).

    In contrast to `dca`, `prox_dca` uses proximal steps with respect to
    the convex part ``f``. Both algorithms use subgradients of the
    concave part.

    References
    ----------
    [SSC2003] Sun, W, Sampaio R J B, and Candido M A B. *Proximal point
    algorithm for minimization of DC function*. Journal of Computational
    Mathematics, 21.4 (2003), pp 451--462.

    See also
    --------
    dca :
        Solver with subgradient steps for all the functionals.
    doubleprox_dc :
        Solver with proximal steps for all the nonsmooth convex
        functionals and a gradient step for a smooth functional.
    """
    space = f . domain
    if g . domain != space :
        raise ValueError ( '`f.domain` and `g.domain` need to be equal, but ' '{} != {}' . format ( space , g . domain ) )
    for _ in range ( niter ) :
        # x <- Prox_{gamma f}(x + gamma * grad g(x)), written via ODL's
        # in-place lincomb/proximal calls so no intermediates are kept.
        # NOTE(review): relies on `x.lincomb` returning the element to
        # be fed into the proximal operator -- confirm against the
        # installed ODL version.
        f . proximal ( gamma ) ( x . lincomb ( 1 , x , gamma , g . gradient ( x ) ) , out = x )
        if callback is not None :
            callback ( x )
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.