signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def get_settings(config_file):
    """Build the configuration, overlaying an optional user config file.

    Defaults are loaded first; if *config_file* exists it is read on top of
    them, otherwise the module-level ``CONFIG_FILE`` location is tried.

    :param config_file: path to a configuration file, or None.
    :return: a populated ``configparser.ConfigParser`` instance.
    """
    default_settings = {
        'general': {
            'endpoint': 'http://guacamole.antojitos.io/files/',
            'shortener': 'http://t.antojitos.io/api/v1/urls',
        }
    }
    settings = configparser.ConfigParser()
    try:
        settings.read_dict(default_settings)
    except AttributeError:
        # Python 2.7: ConfigParser has no read_dict(); populate manually.
        for section, options in default_settings.items():
            settings.add_section(section)
            for option, value in options.items():
                settings.set(section, option, value)
    # An explicitly supplied file wins over the default location.
    if config_file is not None and os.path.exists(config_file):
        settings.read(config_file)
    elif os.path.exists(CONFIG_FILE):
        settings.read(CONFIG_FILE)
    return settings
|
def release(self):
    """Release the lock by running the 'lock_release' database script.

    :returns: ``True`` if the lock was actually released.
    """
    script_result = self.database.run_script(
        'lock_release', keys=[self.key, self.event], args=[self._lock_id])
    return script_result != 0
|
def infer(self, expl_dims, inf_dims, x):
    """Use the sensorimotor model to compute the expected value on inf_dims
    given that the value on expl_dims is x.

    .. note:: This corresponds to a prediction if expl_dims == self.conf.m_dims
        and inf_dims == self.conf.s_dims, and to inverse prediction if
        expl_dims == self.conf.s_dims and inf_dims == self.conf.m_dims.
    """
    try:
        if self.n_bootstrap > 0:
            # Still in the bootstrap phase: consume one bootstrap step and
            # force the random fallback below.
            self.n_bootstrap -= 1
            raise ExplautoBootstrapError
        y = self.sensorimotor_model.infer(expl_dims, inf_dims, x.flatten())
    except ExplautoBootstrapError:
        # Model not usable yet: draw a uniformly random value within bounds.
        logger.warning('Sensorimotor model not bootstrapped yet')
        y = rand_bounds(self.conf.bounds[:, inf_dims]).flatten()
    return y
|
def is_path_hidden(filepath):
    """Determine whether a given file or directory is hidden.

    A path counts as hidden when its base name starts with a dot, or when
    the platform-specific hidden attribute is set on it.

    Parameters
    ----------
    filepath : str
        The path to a file or directory.

    Returns
    -------
    hidden : bool
        ``True`` if the file is hidden.
    """
    name = os.path.basename(os.path.abspath(filepath))
    dot = b'.' if isinstance(name, bytes) else '.'
    # Short-circuits so the attribute check only runs for non-dotted names.
    return name.startswith(dot) or _has_hidden_attribute(filepath)
|
def is_topology(self, layers=None):
    """Validate the layer graph topology by iterative topological removal.

    Repeatedly resolves layers whose inputs are all already resolved,
    propagating their sizes to their output layers.  A '|' marker is
    appended to the result after every resolution round.

    :param layers: list of layer objects; defaults to ``self.layers``.
    :return: the resolution order (with '|' round separators) when the graph
        is a valid topology, or ``False`` on a size mismatch, a cycle, or
        unreachable layers.
    """
    if layers is None:
        layers = self.layers
    layers_nodle = []  # indices of non-deleted layers still unresolved
    result = []
    for i, layer in enumerate(layers):
        if layer.is_delete is False:
            layers_nodle.append(i)
    while True:
        flag_break = True  # stays True when no layer was resolved this round
        layers_toremove = []
        for layer1 in layers_nodle:
            # A layer is reachable once none of its inputs is still pending.
            flag_arrive = True
            for layer2 in layers[layer1].input:
                if layer2 in layers_nodle:
                    flag_arrive = False
            if flag_arrive is True:
                for layer2 in layers[layer1].output:
                    # Size is error
                    if layers[layer2].set_size(layer1, layers[layer1].size) is False:
                        return False
                layers_toremove.append(layer1)
                result.append(layer1)
                flag_break = False
        for layer in layers_toremove:
            layers_nodle.remove(layer)
        result.append('|')
        if flag_break:
            break
    # There is loop in graph || some layers can't be arrived
    if layers_nodle:
        return False
    return result
|
def normalize(score, alpha=15):
    """Normalize *score* into the interval [-1, 1] using an alpha that
    approximates the maximum expected value.

    :param score: raw score value.
    :param alpha: damping constant approximating the max expected value.
    :return: the normalized score, clamped to [-1.0, 1.0].
    """
    norm_score = score / math.sqrt(score * score + alpha)
    # Guard against numerical overshoot at either end of the range.
    if norm_score < -1.0:
        return -1.0
    if norm_score > 1.0:
        return 1.0
    return norm_score
|
def update(cls, draft_share_invite_api_key_id, status=None, sub_status=None,
           expiration=None, custom_headers=None):
    """Update a draft share invite. When sending status CANCELLED it is
    possible to cancel the draft share invite.

    :type user_id: int
    :type draft_share_invite_api_key_id: int
    :param status: The status of the draft share invite. Can be CANCELLED
        (the user cancels the draft share before it's used).
    :type status: str
    :param sub_status: The sub-status of the draft share invite. Can be
        NONE, ACCEPTED or REJECTED.
    :type sub_status: str
    :param expiration: The moment when this draft share invite expires.
    :type expiration: str
    :type custom_headers: dict[str, str]|None
    :rtype: BunqResponseDraftShareInviteApiKey
    """
    if custom_headers is None:
        custom_headers = {}
    api_client = client.ApiClient(cls._get_api_context())
    # Only the writable fields are serialized into the request body.
    request_map = {
        cls.FIELD_STATUS: status,
        cls.FIELD_SUB_STATUS: sub_status,
        cls.FIELD_EXPIRATION: expiration
    }
    request_map_string = converter.class_to_json(request_map)
    request_map_string = cls._remove_field_for_request(request_map_string)
    request_bytes = request_map_string.encode()
    endpoint_url = cls._ENDPOINT_URL_UPDATE.format(cls._determine_user_id(),
                                                   draft_share_invite_api_key_id)
    response_raw = api_client.put(endpoint_url, request_bytes, custom_headers)
    return BunqResponseDraftShareInviteApiKey.cast_from_bunq_response(
        cls._from_json(response_raw, cls._OBJECT_TYPE_PUT))
|
def _get_file_alignment_for_new_binary_file(self, file: File) -> int:
    """Detect alignment requirements for binary files with the new
    nn::util::BinaryFileHeader layout.

    :param file: file entry whose raw bytes are inspected.
    :return: the required alignment (a power of two), or 0 when the data
        does not look like such a header.
    """
    if len(file.data) <= 0x20:
        # Too small to contain the full header.
        return 0
    bom = file.data[0xc:0xc + 2]
    if bom != b'\xff\xfe' and bom != b'\xfe\xff':
        # No byte-order mark where the header expects one.
        return 0
    be = bom == b'\xfe\xff'
    # The file-size field at offset 0x1c must match the payload length,
    # otherwise this is probably not a BinaryFileHeader file.
    file_size: int = struct.unpack_from(_get_unpack_endian_character(be) + 'I', file.data, 0x1c)[0]
    if len(file.data) != file_size:
        return 0
    # Alignment is stored as a power-of-two exponent at offset 0xe.
    return 1 << file.data[0xe]
|
def ekcii(table, cindex, lenout=_default_len_out):
    """Return attribute information about a column belonging to a loaded
    EK table, specifying the column by table and index.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekcii_c.html

    :param table: Name of table containing column.
    :type table: str
    :param cindex: Index of column whose attributes are to be found.
    :type cindex: int
    :param lenout: Maximum allowed length of column name.
    :return: Name of column, Column attribute descriptor.
    :rtype: tuple
    """
    # Marshal the Python arguments into the ctypes forms CSPICE expects.
    table = stypes.stringToCharP(table)
    cindex = ctypes.c_int(cindex)
    lenout = ctypes.c_int(lenout)
    column = stypes.stringToCharP(lenout)  # output buffer for the column name
    attdsc = stypes.SpiceEKAttDsc()        # output attribute descriptor
    libspice.ekcii_c(table, cindex, lenout, column, ctypes.byref(attdsc))
    return stypes.toPythonString(column), attdsc
|
def create_with(cls, event: str = None,
                observable: T.Union[str, Observable] = None
                ) -> T.Callable[..., "ObservableProperty"]:
    """Create a partial application of ObservableProperty with *event*
    and *observable* preset.

    :return: a callable that forwards all remaining arguments to ``cls``
        with the two presets bound.
    """
    return functools.partial(cls, event=event, observable=observable)
|
def revise_buffer_size(info, settings):
    """Revise buffer sizes to use bytes as the unit instead of data items.

    This is only used for nnb, not for csrc.  When *settings* contains a
    user-customized data type (not pure FLOAT32), it affects the memory
    consumption.

    :param info: conversion info object; its size/buffer bookkeeping fields
        are rebuilt in place.
    :param settings: dict whose 'variables' entry maps variable names to
        type strings (e.g. 'FLOAT32', 'FIXED16_...', 'FIXED8_...').
    """
    # Bytes per element for each supported base data type.
    size_mapping = {'FLOAT32': 4, 'FIXED16': 2, 'FIXED8': 1}
    var_dict = settings['variables']
    buffer_index = 0
    info._variable_sizes = []
    info._variable_buffer_index = collections.OrderedDict()
    info._variable_buffer_size = collections.OrderedDict()
    info._buffer_ids = {}
    for n, v in enumerate(info._network.variable):
        # The type string may carry a suffix (e.g. 'FIXED16_...'); only the
        # base name before '_' selects the per-item width (default: 4 bytes).
        byte_per_item = size_mapping.get(
            var_dict.get(v.name, 'FLOAT32').split('_')[0], 4)
        size = nnabla.utils.converter.calc_shape_size(
            v.shape, info._batch_size) * byte_per_item
        info._variable_sizes.append(size)
        if v.type == 'Buffer':
            # Each 'Buffer' variable gets its own buffer slot here.
            info._variable_buffer_index[buffer_index] = [n]
            for vid in info._variable_buffer_index[buffer_index]:
                info._buffer_ids[vid] = buffer_index
            info._variable_buffer_size[buffer_index] = size
            buffer_index += 1
|
def run(self):
    """Run until there are no events to be processed.

    Seeds the queue with an INITIATE event for this director, then drains
    the global event queue in FIFO order.
    """
    # We left-append rather than emit (right-append) because some message
    # may have been already queued for execution before the director runs.
    global_event_queue.appendleft((INITIATE, self, (), {}))
    while global_event_queue:
        self.process_event(global_event_queue.popleft())
|
def _create_more_application():
    """Create an `Application` instance that displays the "--MORE--" prompt.

    Space/y/Y/Ctrl-J/Tab answer yes (continue paging); n/N/q/Q/Ctrl-C answer
    no (stop paging).
    """
    from prompt_toolkit.shortcuts import create_prompt_application
    registry = Registry()

    # Affirmative bindings: return True from the prompt.
    @registry.add_binding(' ')
    @registry.add_binding('y')
    @registry.add_binding('Y')
    @registry.add_binding(Keys.ControlJ)  # presumably Enter -- confirm.
    @registry.add_binding(Keys.ControlI)  # Tab.
    def _(event):
        event.cli.set_return_value(True)

    # Negative bindings: return False from the prompt.
    @registry.add_binding('n')
    @registry.add_binding('N')
    @registry.add_binding('q')
    @registry.add_binding('Q')
    @registry.add_binding(Keys.ControlC)
    def _(event):
        event.cli.set_return_value(False)

    return create_prompt_application(
        '--MORE--', key_bindings_registry=registry, erase_when_done=True)
|
def add_settings_parser(subparsers, parent_parser):
    """Create the argument parser for the ``settings`` command and its
    subcommands.

    :param subparsers: subparsers object of the top-level argument parser.
    :param parent_parser: the top-level parser (kept for the conventional
        add_*_parser signature).
    """
    # These commands display information about the currently applied
    # on-chain settings.
    settings_parser = subparsers.add_parser(
        'settings',
        help='Displays on-chain settings',
        description='Displays the values of currently active on-chain '
                    'settings.')

    settings_parsers = settings_parser.add_subparsers(
        title='settings',
        dest='settings_cmd')
    settings_parsers.required = True

    list_parser = settings_parsers.add_parser(
        'list',
        help='Lists the current keys and values of on-chain settings',
        description='List the current keys and values of on-chain '
                    'settings. The content can be exported to various '
                    'formats for external consumption.')
    list_parser.add_argument(
        '--url',
        type=str,
        default='http://localhost:8008',
        help="identify the URL of a validator's REST API")
    list_parser.add_argument(
        '--filter',
        type=str,
        default='',
        help='filters keys that begin with this value')
    list_parser.add_argument(
        '--format',
        default='default',
        choices=['default', 'csv', 'json', 'yaml'],
        help='choose the output format')
|
def _make_interpolation(self):
    """Create an interpolation grid in H_0 and omega_m and compute the
    corresponding quantities in Dd and Ds_Dds.

    Builds the inverse interpolators ``self._f_H0`` and ``self._f_omega_m``
    mapping (Dd, Ds_Dds) back to H_0 and omega_m; out-of-range queries
    return the fill value -1.

    :return: None (interpolators are stored on the instance).
    """
    H0_range = np.linspace(10, 100, 90)
    omega_m_range = np.linspace(0.05, 1, 95)
    # Cartesian product of the two 1-D ranges, flattened to an (N, 2) array.
    grid2d = np.dstack(np.meshgrid(H0_range, omega_m_range)).reshape(-1, 2)
    H0_grid = grid2d[:, 0]
    omega_m_grid = grid2d[:, 1]
    Dd_grid = np.zeros_like(H0_grid)
    Ds_Dds_grid = np.zeros_like(H0_grid)
    for i in range(len(H0_grid)):
        Dd, Ds_Dds = self.cosmo2Dd_Ds_Dds(H0_grid[i], omega_m_grid[i])
        Dd_grid[i] = Dd
        Ds_Dds_grid[i] = Ds_Dds
    # NOTE(review): scipy.interpolate.interp2d is deprecated in recent SciPy
    # releases; consider RegularGridInterpolator/griddata when upgrading.
    self._f_H0 = interpolate.interp2d(
        Dd_grid, Ds_Dds_grid, H0_grid, kind='linear', copy=False,
        bounds_error=False, fill_value=-1)
    print("H0 interpolation done")
    self._f_omega_m = interpolate.interp2d(
        Dd_grid, Ds_Dds_grid, omega_m_grid, kind='linear', copy=False,
        bounds_error=False, fill_value=-1)
    print("omega_m interpolation done")
|
def moveToPoint(self, xxx_todo_changeme1):
    """Translate the circle by the offset (x, y).

    NOTE(review): despite the name, this adds (x, y) to the current center
    rather than moving the center to the absolute point (x, y).

    :param xxx_todo_changeme1: an (x, y) tuple (legacy 2to3 packed argument).
    """
    (dx, dy) = xxx_todo_changeme1
    self.set_cx(float(self.get_cx()) + float(dx))
    self.set_cy(float(self.get_cy()) + float(dy))
|
def make_driver(self, driver_name='generic'):
    """Driver factory: import and instantiate the platform driver class.

    :param driver_name: name of the ``condoor.drivers`` submodule to load.
    :return: a ``Driver`` instance bound to this device; falls back to the
        'generic' driver when the named module cannot be imported.
    """
    module_str = 'condoor.drivers.%s' % driver_name
    try:
        __import__(module_str)
        module = sys.modules[module_str]
        driver_class = getattr(module, 'Driver')
    except ImportError as e:  # pylint: disable=invalid-name
        print("driver name: {}".format(driver_name))
        self.chain.connection.log("Import error: {}: '{}'".format(driver_name, str(e)))
        # no driver - call again with default 'generic'
        return self.make_driver()
    self.chain.connection.log("Make Device: {} with Driver: {}".format(self, driver_class.platform))
    return driver_class(self)
|
def retino_colors(vcolorfn, *args, **kwargs):
    """See eccen_colors, angle_colors, sigma_colors, and varea_colors.

    With no positional argument, returns a curried function with *kwargs*
    preset; with one argument (a VertexSet/ITable or other mesh object),
    computes and returns the colors via *vcolorfn*.
    """
    if len(args) == 0:
        # Curried form: remember kwargs and merge them on the later call.
        def _retino_color_pass(*args, **new_kwargs):
            # Fix: Python 3 dict views do not support '+'; merge the preset
            # and newly supplied keyword names with a set union instead.
            merged = {k: (new_kwargs[k] if k in new_kwargs else kwargs[k])
                      for k in set(kwargs) | set(new_kwargs)}
            return retino_colors(vcolorfn, *args, **merged)
        return _retino_color_pass
    elif len(args) > 1:
        raise ValueError('retinotopy color functions accepts at most one argument')
    m = args[0]
    # we need to handle the arguments
    if isinstance(m, (geo.VertexSet, pimms.ITable)):
        tbl = m.properties if isinstance(m, geo.VertexSet) else m
        n = tbl.row_count
        # if the weight or property arguments are lists, we need to thread
        # these along (one entry per table row).
        if 'property' in kwargs:
            props = kwargs['property']
            del kwargs['property']
            if not (pimms.is_vector(props) or pimms.is_matrix(props)):
                props = [props for _ in range(n)]
        else:
            props = None
        if 'weight' in kwargs:
            ws = kwargs['weight']
            del kwargs['weight']
            if not pimms.is_vector(ws) and not pimms.is_matrix(ws):
                ws = [ws for _ in range(n)]
        else:
            ws = None
        vcolorfn0 = vcolorfn(Ellipsis, **kwargs) if len(kwargs) > 0 else vcolorfn
        if props is None and ws is None:
            vcfn = lambda m, k: vcolorfn0(m)
        elif props is None:
            vcfn = lambda m, k: vcolorfn0(m, weight=ws[k])
        elif ws is None:
            vcfn = lambda m, k: vcolorfn0(m, property=props[k])
        else:
            vcfn = lambda m, k: vcolorfn0(m, property=props[k], weight=ws[k])
        return np.asarray([vcfn(r, kk) for (kk, r) in enumerate(tbl.rows)])
    else:
        return vcolorfn(m, **kwargs)
|
def workspace_clone(ctx, clobber_mets, download, mets_url, workspace_dir):
    """Create a workspace from METS_URL and print its directory.

    METS_URL can be a URL, an absolute path or a path relative to $PWD.
    If WORKSPACE_DIR is not provided, creates a temporary directory.
    """
    workspace = ctx.resolver.workspace_from_url(
        mets_url,
        # Fall back to a fresh temporary directory when no target is given.
        dst_dir=os.path.abspath(workspace_dir if workspace_dir else mkdtemp(prefix=TMP_PREFIX)),
        mets_basename=ctx.mets_basename,
        clobber_mets=clobber_mets,
        download=download,
    )
    workspace.save_mets()
    print(workspace.directory)
|
async def Offer(self, offers):
    """offers : typing.Sequence[~AddApplicationOffer]

    Returns -> typing.Sequence[~ErrorResult]
    """
    # Build the RPC message expected by the ApplicationOffers facade (v2).
    params = {'Offers': offers}
    msg = {
        'type': 'ApplicationOffers',
        'request': 'Offer',
        'version': 2,
        'params': params,
    }
    return await self.rpc(msg)
|
def load_from_dict(self, data: dict, overwrite: bool = True):
    """Load key/values from dicts or lists into the ConfigKey.

    :param data: The data object to load. This can be a dict, or a list of
        key/value tuples.
    :param overwrite: Should the ConfigKey overwrite data already in it?
    :return: ``False`` when *data* is None or empty; otherwise ``None``
        after loading (``self.parsed`` is set to True).
    """
    if data is None or data == {}:
        return False
    # Loop over items
    if isinstance(data, list) or isinstance(data, tuple):
        # Pick a random item from the tuple.
        if len(data[0]) != 2:
            raise exc.LoaderException("Cannot load data with length {}".format(len(data[0])))
        items = data
    elif isinstance(data, dict) or isinstance(data, self.__class__):
        items = data.items()
    else:
        raise exc.LoaderException("Cannot load data of type {}".format(type(data)))
    for key, item in items:
        assert isinstance(key, str)
        if hasattr(self, key) and not overwrite:
            # Refuse to overwrite existing data.
            continue
        # Check name to verify it's safe.
        if self.safe_load:
            # Rename keys that would shadow our own attributes/methods.
            if key.startswith("__") or key in ['dump', 'items', 'keys', 'values', 'iter_list', 'load_from_dict', 'iter_list_dump', 'parsed', 'safe_load']:
                # It's evil!
                key = "unsafe_" + key
            if '.' in key:
                # Doubly evil! Dots would break attribute access.
                key = key.replace('.', '_')
        if isinstance(item, dict):
            # Create a new ConfigKey object with the dict.
            ncfg = ConfigKey()
            # Parse the data.
            ncfg.load_from_dict(item)
            # Set our new ConfigKey as an attribute of ourselves.
            setattr(self, key, ncfg)
        elif isinstance(item, list):
            # Iterate over the list, creating ConfigKey items as appropriate.
            nlst = self.iter_list(item)
            # Set our new list as an attribute of ourselves.
            setattr(self, key, nlst)
        else:
            # Set the item as an attribute of ourselves.
            setattr(self, key, item)
    # Flip the parsed flag.
    self.parsed = True
|
def qsub(command, queue=None, cwd=True, name=None, deps=None, stdout='',
         stderr='', env=None, array=None, context='grid', hostname=None,
         memfree=None, hvmem=None, gpumem=None, pe_opt=None, io_big=False):
    """Submits a shell job to a given grid queue

    Keyword parameters:

    command
      The command to be submitted to the grid
    queue
      A valid queue name or None, to use the default queue
    cwd
      If the job should change to the current working directory before starting
    name
      An optional name to set for the job. If not given, defaults to the script
      name being launched.
    deps
      Job ids to which this job will be dependent on
    stdout
      The standard output directory. If not given, defaults to what qsub has as
      a default.
    stderr
      The standard error directory (if not given, defaults to the stdout
      directory).
    env
      This is a list of extra variables that will be set on the environment
      running the command of your choice.
    array
      If set should be either:
      1. a string in the form m[-n[:s]] which indicates the starting range 'm',
         the closing range 'n' and the step 's'.
      2. an integer value indicating the total number of jobs to be submitted.
         This is equivalent to setting the parameter to a string "1-k:1" where
         "k" is the passed integer value
      3. a tuple that contains either 1, 2 or 3 elements indicating the start,
         end and step arguments ("m", "n", "s").
      The minimum value for "m" is 1. Giving "0" is an error.
      If submitted with this option, the job to be created will be an SGE
      parametric job. In this mode SGE does not allow individual control of
      each job. The environment variable SGE_TASK_ID will be set on the
      executing process automatically by SGE and indicates the unique
      identifier in the range for which the current job instance is for.
    context
      The setshell context in which we should try a 'qsub'. Normally you don't
      need to change the default. This variable can also be set to a context
      dictionary in which case we just setup using that context instead of
      probing for a new one, what can be fast.
    memfree
      If set, it asks the queue for a node with a minimum amount of memory
      Used only if mem is not set (cf. qsub -l mem_free=<...>)
    hvmem
      If set, it asks the queue for a node with a minimum amount of memory
      Used only if mem is not set (cf. qsub -l h_vmem=<...>)
    gpumem
      Applicable only for GPU-based queues. If set, it asks for the GPU queue
      with a minimum amount of memory. The amount should not be more than 24.
      (cf. qsub -l gpumem=<...>)
    hostname
      If set, it asks the queue to use only a subset of the available nodes
      Symbols: | for OR, & for AND, ! for NOT, etc.
      (cf. qsub -l hostname=<...>)
    pe_opt
      If set, add a -pe option when launching a job (for instance
      pe_exclusive* 1-)
    io_big
      If set to true, the io_big flag will be set.
      Use this flag if your process will need a lot of Input/Output
      operations.

    Returns the job id assigned to this job (integer)
    """
    # Fix: mutable default arguments ([] for deps/env) are shared across all
    # calls of the function; use None sentinels instead (same behavior).
    if deps is None:
        deps = []
    if env is None:
        env = []

    scmd = ['qsub']

    import six
    if isinstance(queue, six.string_types) and queue not in ('all.q', 'default'):
        scmd += ['-l', queue]

    # Resource requests.
    if memfree:
        scmd += ['-l', 'mem_free=%s' % memfree]
    if hvmem:
        scmd += ['-l', 'h_vmem=%s' % hvmem]
    if gpumem:
        scmd += ['-l', 'gpumem=%s' % gpumem]
    if io_big:
        scmd += ['-l', 'io_big']
    if hostname:
        scmd += ['-l', 'hostname=%s' % hostname]
    if pe_opt:
        scmd += ['-pe'] + pe_opt.split()

    if cwd:
        scmd += ['-cwd']
    if name:
        scmd += ['-N', name]
    if deps:
        scmd += ['-hold_jid', ','.join(['%d' % k for k in deps])]

    if stdout:
        if not cwd:
            # pivot, temporarily, to home directory
            curdir = os.path.realpath(os.curdir)
            os.chdir(os.environ['HOME'])
        if not os.path.exists(stdout):
            makedirs_safe(stdout)
        if not cwd:
            # go back
            os.chdir(os.path.realpath(curdir))
        scmd += ['-o', stdout]

    if stderr:
        if not os.path.exists(stderr):
            makedirs_safe(stderr)
        scmd += ['-e', stderr]
    elif stdout:
        # just re-use the stdout settings
        scmd += ['-e', stdout]

    # simplified job identifiers returned by the command line
    scmd += ['-terse']

    for k in env:
        scmd += ['-v', k]

    if array is not None:
        scmd.append('-t')
        if isinstance(array, six.string_types):
            try:
                i = int(array)
                scmd.append('1-%d:1' % i)
            except ValueError:
                # must be complete...
                scmd.append('%s' % array)
        if isinstance(array, six.integer_types):
            scmd.append('1-%d:1' % array)
        if isinstance(array, (tuple, list)):
            if len(array) < 1 or len(array) > 3:
                raise RuntimeError("Array tuple should have length between 1 and 3")
            elif len(array) == 1:
                scmd.append('%s' % array[0])
            elif len(array) == 2:
                scmd.append('%s-%s' % (array[0], array[1]))
            elif len(array) == 3:
                scmd.append('%s-%s:%s' % (array[0], array[1], array[2]))

    if not isinstance(command, (list, tuple)):
        command = [command]
    scmd += command

    logger.debug("Qsub command '%s'", ' '.join(scmd))

    from .setshell import sexec
    jobid = str_(sexec(context, scmd))
    return int(jobid.split('.', 1)[0])
|
def object_exists_in_project(obj_id, proj_id):
    """Return True if the specified data object can be found in the
    specified project.

    :param obj_id: object ID
    :type obj_id: str
    :param proj_id: project ID
    :type proj_id: str
    :raises ValueError: if either ID is None, or proj_id is not a
        container ID.
    """
    if obj_id is None:
        raise ValueError("Expected obj_id to be a string")
    if proj_id is None:
        raise ValueError("Expected proj_id to be a string")
    if not is_container_id(proj_id):
        raise ValueError('Expected %r to be a container ID' % (proj_id,))
    described = try_call(dxpy.DXHTTPRequest,
                         '/' + obj_id + '/describe',
                         {'project': proj_id})
    return described['project'] == proj_id
|
def get_voms_proxy_user():
    """Return the owner (the CN entry) of the voms proxy.

    :raises Exception: when no CN entry can be parsed from the proxy
        identity string.
    """
    out = _voms_proxy_info(["--identity"])[1].strip()
    # The identity is a DN such as "/DC=.../CN=username/..."; extract the CN.
    # Fix: replaced the bare `except:` (which silently swallowed every error)
    # with an explicit None check on the match object.
    match = re.match(r".*\/CN\=([^\/]+).*", out)
    if match is None:
        raise Exception("no valid identity found in voms proxy: {}".format(out))
    return match.group(1)
|
def fix_config(self, options):
    """Fixes the options, if necessary, i.e., it adds all required
    elements to the dictionary.

    :param options: the options to fix
    :type options: dict
    :return: the (potentially) fixed options
    :rtype: dict
    """
    options = super(PRC, self).fix_config(options)
    # (option name, default value, help text) for every required element.
    defaults = (
        ("class_index", [0],
         "The list of 0-based class-label indices to display (list)."),
        ("key_loc", "lower center",
         "The location of the key in the plot (str)."),
        ("title", None,
         "The title for the plot (str)."),
        ("outfile", None,
         "The file to store the plot in (str)."),
        ("wait", True,
         "Whether to wait for user to close the plot window (bool)."),
    )
    for opt, default, help_text in defaults:
        if opt not in options:
            options[opt] = default
        if opt not in self.help:
            self.help[opt] = help_text
    return options
|
def spline_matrix2d(x, y, px, py, mask=None):
    """Build the 2-D spline design matrix as the Kronecker product of the
    1-D spline matrices for each axis.

    For boundary constraints, the first two and last two spline pieces are
    constrained to be part of the same cubic curve.

    :param mask: optional mask; when given, only the columns corresponding
        to its non-zero (transposed, flattened) entries are kept.
    :return: the (possibly column-filtered) design matrix.
    """
    # Fix: removed the unused local `lenV` that the original computed.
    V = np.kron(spline_matrix(x, px), spline_matrix(y, py))
    if mask is not None:
        indices = np.nonzero(mask.T.flatten())
        if len(indices) > 1:
            # NOTE(review): np.nonzero on a 1-D array returns a 1-tuple, so
            # this branch only triggers for multi-dim input -- confirm intent.
            indices = np.nonzero(mask.T.flatten())[1][0]
        newV = V.T[indices]
        V = newV.T
        V = V.reshape((V.shape[0], V.shape[1]))
    return V
|
def flatten_dict(root, parents=None, sep='.'):
    """Flatten a nested dictionary into ``(joined_key, value)`` tuples.

    Args:
        root (dict): Nested dictionary (e.g., JSON object).
        parents (list): List of ancestor keys.
        sep (str): Separator used to join ancestor keys (default ``'.'``).

    Returns:
        list: List of ``(key, value)`` tuples, where ``key`` corresponds to
        the ancestor keys of the respective value joined by *sep*. For
        example, for the item in the dictionary ``{'a': {'b': {'c': 'foo'}}}``
        the joined key would be ``'a.b.c'``.

    See also :func:`expand_items`.
    """
    if parents is None:
        parents = []
    result = []
    # Fix: dict.iteritems() is Python 2 only (AttributeError on Python 3);
    # items() works on both.  Also dropped the unused enumerate() index.
    for key, value in root.items():
        path = parents + [key]
        joined_key = sep.join(path)
        if isinstance(value, dict):
            # Recurse into nested dictionaries, accumulating ancestor keys.
            result.extend(flatten_dict(value, parents=path, sep=sep))
        else:
            result.append((joined_key, value))
    return result
|
def broadcastPush(self, pushMessage):
    """Broadcast-push method (a push with null fromuserid and message is a
    non-persisted push).

    :param pushMessage: JSON payload describing the push.
    :return code: result code; 200 means success.
    :return errorMessage: error message, if any.
    """
    # Response descriptor for the generic success result.
    desc = {
        "name": "CodeSuccessReslut",
        "desc": " http 成功返回结果",
        "fields": [
            {"name": "code", "type": "Integer", "desc": "返回码,200 为正常。"},
            {"name": "errorMessage", "type": "String", "desc": "错误信息。"},
        ],
    }
    r = self.call_api(
        method=('API', 'POST', 'application/json'),
        action='/push.json',
        params=pushMessage)
    return Response(r, desc)
|
def aot_blpop(self):  # pylint: disable=R1710
    """Subscribe to the AOT action channel and block for the next message.

    Blocks on the Redis action channel until a message arrives or the
    terminate timeout elapses.  'execute' messages yield their params;
    'terminate' messages (and timeouts) exit the app; unknown types are
    logged and the subscription is retried recursively.

    :return: the 'params' dict of an 'execute' message, or None.
    """
    if self.tcex.default_args.tc_playbook_db_type == 'Redis':
        res = None
        try:
            self.tcex.log.info('Blocking for AOT message.')
            msg_data = self.db.blpop(
                self.tcex.default_args.tc_action_channel,
                timeout=self.tcex.default_args.tc_terminate_seconds,
            )
            if msg_data is None:
                # blpop timed out without receiving any message.
                self.tcex.exit(0, 'AOT subscription timeout reached.')
            msg_data = json.loads(msg_data[1])
            # Missing 'type' is treated as a terminate request.
            msg_type = msg_data.get('type', 'terminate')
            if msg_type == 'execute':
                res = msg_data.get('params', {})
            elif msg_type == 'terminate':
                self.tcex.exit(0, 'Received AOT terminate message.')
            else:
                # Unknown message type: warn and wait for the next message.
                self.tcex.log.warn('Unsupported AOT message type: ({}).'.format(msg_type))
                res = self.aot_blpop()
        except Exception as e:
            self.tcex.exit(1, 'Exception during AOT subscription ({}).'.format(e))
        return res
|
def return_data(self, data, format=None):
    """Format and return data appropriate to the requested API format.

    :param data: the data returned by the API request.
    :param format: output format override; falls back to ``self.format``.
    :return: parsed JSON object when the format is ``"json"``; otherwise
        *data* unchanged.
    """
    requested = self.format if format is None else format
    if requested == "json":
        return json.loads(data)
    return data
|
def extract_datetime(cls, datetime_str):
    """Tries to extract a `datetime` object from the given string,
    including time information.

    :param datetime_str: the string to parse with ``cls.DATETIME_FORMAT``.
    :raises DateTimeFormatterException: if the extraction fails.
    """
    if not datetime_str:
        # Reject None/empty input before attempting to parse.
        raise DateTimeFormatterException('datetime_str must a valid string')
    try:
        return cls._extract_timestamp(datetime_str, cls.DATETIME_FORMAT)
    except (TypeError, ValueError):
        # Wrap parsing failures in the module's own exception type.
        raise DateTimeFormatterException('Invalid datetime string {}.'.format(datetime_str))
|
def send_group_text(self, sender, receiver, content):
    """Send a group-chat text message.

    :param sender: the sending user.
    :param receiver: the conversation (session) ID.
    :param content: the message text.
    :return: the returned JSON response packet.
    """
    return self.send_text(sender, 'group', receiver, content)
|
def _process_intersects_filter_directive(filter_operation_info, location, context, parameters):
    """Return a Filter basic block that checks if the directive arg and the field intersect.

    Args:
        filter_operation_info: FilterOperationInfo object, containing the directive and field info
                               of the field where the filter is to be applied.
        location: Location where this filter is used.
        context: dict, various per-compilation data (e.g. declared tags, whether the current block
                 is optional, etc.). May be mutated in-place in this function!
        parameters: list of 1 element, specifying the collection in which the value must exist;
                    if the collection is optional and missing, the check will return True

    Returns:
        a Filter basic block that performs the intersects check
    """
    filtered_field_type = filter_operation_info.field_type
    filtered_field_name = filter_operation_info.field_name
    # "intersects" only makes sense between two collections, so the field
    # (after stripping non-null wrappers) must be a GraphQL list type.
    argument_inferred_type = strip_non_null_from_type(filtered_field_type)
    if not isinstance(argument_inferred_type, GraphQLList):
        raise GraphQLCompilationError(u'Cannot apply "intersects" to non-list '
                                      u'type {}'.format(filtered_field_type))
    argument_expression, non_existence_expression = _represent_argument(
        location, context, parameters[0], argument_inferred_type)
    filter_predicate = expressions.BinaryComposition(
        u'intersects', expressions.LocalField(filtered_field_name), argument_expression)
    if non_existence_expression is not None:
        # The argument comes from an optional block and might not exist,
        # in which case the filter expression should evaluate to True.
        filter_predicate = expressions.BinaryComposition(
            u'||', non_existence_expression, filter_predicate)
    return blocks.Filter(filter_predicate)
|
def _set_description(self, v, load=False):
    """Setter method for description, mapped from YANG variable
    /interface/ethernet/description (string).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_description is considered as a private method. Backends
    looking to populate this variable should do so via calling
    thisObj._set_description() directly.

    NOTE: auto-generated pyangbind code (Python 2 era -- note `unicode`);
    do not hand-edit the YANGDynClass arguments below.
    """
    if hasattr(v, "_utype"):
        # Unwrap a typed value to its underlying native representation.
        v = v._utype(v)
    try:
        t = YANGDynClass(v, base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1 .. 63']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface specific description', u'cli-multi-value': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_BASIC_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='string', is_config=True)
    except (TypeError, ValueError):
        # Surface a structured error describing the expected YANG type.
        raise ValueError({'error-string': """description must be of a type compatible with string""", 'defined-type': "string", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1 .. 63']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface specific description', u'cli-multi-value': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_BASIC_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='string', is_config=True)""", })
    self.__description = t
    if hasattr(self, '_set'):
        self._set()
|
def verifyExpanded(self, samplerate):
    """Checks the expanded parameters for invalidating conditions.

    :param samplerate: generation samplerate (Hz), passed on to component verification
    :type samplerate: int
    :returns: str -- error message, if any, 0 otherwise
    """
    # Run component verification across every expansion point and keep
    # only the truthy results (i.e. the error messages).
    failures = [outcome for outcome in self.expandFunction(self.verifyComponents, args=(samplerate,)) if outcome]
    # Report the first failure found, or 0 on success.
    return failures[0] if failures else 0
|
def validate_type_registry(option, value):
    """Validate the type_registry option."""
    # Only None or a TypeRegistry instance is acceptable.
    if value is None or isinstance(value, TypeRegistry):
        return value
    raise TypeError("%s must be an instance of %s" % (option, TypeRegistry))
|
def get_netconf_client_capabilities_output_session_host_ip ( self , ** kwargs ) :
"""Auto Generated Code"""
|
config = ET . Element ( "config" )
get_netconf_client_capabilities = ET . Element ( "get_netconf_client_capabilities" )
config = get_netconf_client_capabilities
output = ET . SubElement ( get_netconf_client_capabilities , "output" )
session = ET . SubElement ( output , "session" )
host_ip = ET . SubElement ( session , "host-ip" )
host_ip . text = kwargs . pop ( 'host_ip' )
callback = kwargs . pop ( 'callback' , self . _callback )
return callback ( config )
|
def _init_minimizer(self, minimizer, **minimizer_options):
    """Takes a :class:`~symfit.core.minimizers.BaseMinimizer` and instantiates
    it, passing the jacobian and constraints as appropriate for the
    minimizer.

    :param minimizer: :class:`~symfit.core.minimizers.BaseMinimizer` to
        instantiate.
    :param **minimizer_options: Further options to be passed to the
        minimizer on instantiation.
    :returns: instance of :class:`~symfit.core.minimizers.BaseMinimizer`.
    """
    # Already-instantiated minimizers are passed through untouched.
    if isinstance(minimizer, BaseMinimizer):
        return minimizer
    # BasinHopping wraps an inner minimizer: build the default local
    # minimizer recursively before instantiating the wrapper.
    if issubclass(minimizer, BasinHopping):
        minimizer_options['local_minimizer'] = self._init_minimizer(self._determine_minimizer())
    if issubclass(minimizer, GradientMinimizer):
        # If an analytical version of the Jacobian exists we should use
        # that, otherwise we let the minimizer estimate it itself.
        # Hence the check of jacobian_model, as this is the
        # py function version of the analytical jacobian.
        if hasattr(self.model, 'jacobian_model') and hasattr(self.objective, 'eval_jacobian'):
            minimizer_options['jacobian'] = self.objective.eval_jacobian
    if issubclass(minimizer, HessianMinimizer):
        # If an analytical version of the Hessian exists we should use
        # that, otherwise we let the minimizer estimate it itself.
        # Hence the check of hessian_model, as this is the
        # py function version of the analytical hessian.
        if hasattr(self.model, 'hessian_model') and hasattr(self.objective, 'eval_hessian'):
            minimizer_options['hessian'] = self.objective.eval_hessian
    if issubclass(minimizer, ConstrainedMinimizer):
        # set the constraints as MinimizeModel. The dependent vars of the
        # constraint are set to None since their value is irrelevant.
        constraint_objectives = []
        for constraint in self.constraints:
            data = self.data
            # No copy, share state
            data.update({var: None for var in constraint.dependent_vars})
            constraint_objectives.append(MinimizeModel(constraint, data))
        minimizer_options['constraints'] = constraint_objectives
    return minimizer(self.objective, self.model.params, **minimizer_options)
|
def execute(self):
    """Migrate storage: upgrade pickled cluster files in place."""
    # Open the legacy pickle repository; each "<name>.pickle" file in
    # the storage path holds one cluster ([:-7] strips ".pickle").
    repo = repository.PickleRepository(self.params.storage_path)
    clusters = [i[:-7] for i in os.listdir(self.params.storage_path) if i.endswith('.pickle')]
    # Optionally restrict migration to clusters named on the CLI.
    if self.params.cluster:
        clusters = [x for x in clusters if x in self.params.cluster]
    if not clusters:
        print("No clusters")
        sys.exit(0)
    # Install migration patches before loading, so unpickled objects
    # record their attribute upgrades in `_patches`.
    patch_cluster()
    for cluster in clusters:
        print("Cluster `%s`" % cluster)
        print("path: %s" % repo.storage_path + '/%s.pickle' % cluster)
        cl = repo.get(cluster)
        if cl._patches:
            print("Attributes changed: ")
            for attr, val in cl._patches.items():
                # val holds the (old, new) value pair for the attribute.
                print("  %s: %s -> %s" % (attr, val[0], val[1]))
        else:
            print("No upgrade needed")
        print("")
        # Persist the migrated object unless this is a dry run; the
        # bookkeeping `_patches` attribute is dropped before saving.
        if not self.params.dry_run:
            if cl._patches:
                del cl._patches
            print("Changes saved to disk")
            cl.repository.save_or_update(cl)
|
def download(self, output="", outputFile="", silent=True):
    """Downloads the image of the comic onto your computer.

    Arguments:
        output: the output directory where comics will be downloaded to. The
            default argument for 'output' is the empty string; if the empty
            string is passed, it defaults to a "Downloads" directory in your
            home folder (this directory will be created if it does not exist).
        outputFile: the filename that will be written. If the empty string
            is passed, outputFile will default to a string of the form
            xkcd-(comic number)-(image filename), so for example,
            xkcd-1691-optimization.png.
        silent: boolean, defaults to True. If set to False, an error will be
            printed to standard output should the file not be writable.

    Returns the path to the downloaded file, or an empty string in the event
    of failure.
    """
    image = urllib.urlopen(self.imageLink).read()
    # Process optional input to work out where the download will go and
    # what it'll be called.
    if output != "":
        output = os.path.abspath(os.path.expanduser(output))
    if output == "" or not os.path.exists(output):
        output = os.path.expanduser(os.path.join("~", "Downloads"))
        # Create ~/Downloads if it doesn't exist, since this is the
        # default path.
        if not os.path.exists(output):
            os.mkdir(output)
    if outputFile == "":
        outputFile = "xkcd-" + str(self.number) + "-" + self.imageName
    output = os.path.join(output, outputFile)
    # Use a context manager so the handle is closed even if the write
    # fails; the previous version leaked the handle on write errors and
    # used a bare `except:` that swallowed every exception type.
    try:
        with open(output, 'wb') as handle:
            handle.write(image)
    except (IOError, OSError):
        if not silent:
            print("Unable to make file " + output)
        return ""
    return output
|
def init_app(self, app):
    """Configures the specified Flask app to enforce SSL."""
    # Seed any SSLIFY_* settings the app hasn't configured yet with our
    # defaults.
    for option in ('age', 'subdomains', 'permanent', 'skips'):
        app.config.setdefault('SSLIFY_%s' % option.upper(), self.defaults[option])
    # Redirect to HTTPS before each request; add the HSTS header after.
    app.before_request(self.redirect_to_ssl)
    app.after_request(self.set_hsts_header)
|
def complex_data(data, edata=None, draw=True, **kwargs):
    """Plots the imaginary vs real for complex data.

    Parameters
    ----------
    data
        Array of complex data
    edata=None
        Array of complex error bars
    draw=True
        Draw the plot after it's assembled?

    See spinmob.plot.xy.data() for additional optional keyword arguments.
    """
    _pylab.ioff()
    # Fast path: let numpy split the arrays into real/imaginary parts
    # wholesale.
    try:
        rdata = _n.real(data)
        idata = _n.imag(data)
        if edata is None:
            erdata = None
            eidata = None
        else:
            erdata = _n.real(edata)
            eidata = _n.imag(edata)
    # Fallback: element-by-element decomposition for inputs numpy can't
    # handle in one go.
    except:
        rdata = [_n.real(v) for v in data]
        idata = [_n.imag(v) for v in data]
        if edata is None:
            erdata = None
            eidata = None
        else:
            erdata = [_n.real(e) for e in edata]
            eidata = [_n.imag(e) for e in edata]
    # Default axis labels, unless the caller overrode them.
    kwargs.setdefault('xlabel', 'Real')
    kwargs.setdefault('ylabel', 'Imaginary')
    # NOTE: the eidata/erdata argument order matches the original call.
    xy_data(rdata, idata, eidata, erdata, draw=False, **kwargs)
    if draw:
        _pylab.ion()
        _pylab.draw()
        _pylab.show()
|
def ledger(self, start=None, end=None):
    """Returns a list of entries for this account.

    Ledger returns a sequence of LedgerEntry's matching the criteria
    in chronological order. The returned sequence can be boolean-tested
    (ie. test that nothing was returned).

    If 'start' is given, only entries on or after that datetime are
    returned. 'start' must be given with a timezone.
    If 'end' is given, only entries before that datetime are
    returned. 'end' must be given with a timezone.
    """
    # Sign convention: entry amounts are stored relative to the DB's
    # debit orientation; `flip` inverts the running balance for
    # credit-positive accounts.
    DEBIT_IN_DB = self._DEBIT_IN_DB()
    flip = 1
    if self._positive_credit():
        flip *= -1
    qs = self._entries_range(start=start, end=end)
    qs = qs.order_by("transaction__t_stamp", "transaction__tid")
    # Opening balance: zero from the beginning of time, or the account
    # balance as of `start` when a window was requested.
    balance = Decimal("0.00")
    if start:
        balance = self.balance(start)
    if not qs:
        return []
    # helper is a hack so the caller can test for no entries.
    def helper(balance_in):
        balance = balance_in
        for e in qs.all():
            amount = e.amount * DEBIT_IN_DB
            # Yield each entry with the balance before and after it.
            o_balance = balance
            balance += flip * amount
            yield LedgerEntry(amount, e, o_balance, balance)
    return helper(balance)
|
def paginate_stream_index(self, bucket, index, startkey, endkey=None, max_results=1000, return_terms=None, continuation=None, timeout=None, term_regex=None):
    """Iterates over a streaming paginated index query, yielding one
    :class:`~riak.client.index_page.IndexPage` per page until the
    results are exhausted. Equivalent to calling :meth:`stream_index`
    and then successively calling
    :meth:`~riak.client.index_page.IndexPage.next_page`.

    Because limiting the result set is necessary to invoke pagination,
    the ``max_results`` option has a default of ``1000``. The caller
    should explicitly close each yielded page (``contextlib.closing``
    or ``close()``); consuming the entire page also closes the stream.

    :param bucket: the bucket whose index will be queried
    :type bucket: RiakBucket
    :param index: the index to query
    :type index: string
    :param startkey: the sole key to query, or beginning of the query range
    :type startkey: string, integer
    :param endkey: the end of the query range (optional if equality)
    :type endkey: string, integer
    :param return_terms: whether to include the secondary index value
    :type return_terms: boolean
    :param max_results: the maximum number of results to return (page
       size), defaults to 1000
    :type max_results: integer
    :param continuation: the opaque continuation returned from a
       previous paginated request
    :type continuation: string
    :param timeout: a timeout value in milliseconds, or 'infinity'
    :type timeout: int
    :param term_regex: a regular expression used to filter index terms
    :type term_regex: string
    :rtype: generator over instances of
       :class:`~riak.client.index_page.IndexPage`
    """
    # TODO FUTURE: implement "retry on connection closed"
    # as in stream_mapred
    current = self.stream_index(bucket, index, startkey, endkey=endkey, max_results=max_results, return_terms=return_terms, continuation=continuation, timeout=timeout, term_regex=term_regex)
    while True:
        yield current
        if not current.has_next_page():
            break
        current = current.next_page()
|
def render_text(path, cp):
    """Render a file as text.

    :param path: path of the file to render.
    :param cp: configuration object, passed through to the template context.
    :returns: the rendered HTML string for embedding the file's contents.
    """
    # define filename and slug from path (slug doubles as an HTML-safe id)
    filename = os.path.basename(path)
    slug = filename.replace('.', '_')
    # initializations
    content = None
    # read file as a string; undecodable bytes are replaced, not fatal
    with codecs.open(path, 'r', encoding='utf-8', errors='replace') as fp:
        content = fp.read()
    # replace all the escaped characters
    content = unescape(content, unescape_table)
    # render template from the package's bundled template directory
    template_dir = pycbc.results.__path__[0] + '/templates/files'
    env = Environment(loader=FileSystemLoader(template_dir))
    # expose helpers needed by the template
    env.globals.update(abs=abs)
    env.globals.update(path_exists=os.path.exists)
    template = env.get_template('file_pre.html')
    context = {'filename': filename, 'slug': slug, 'cp': cp, 'content': content}
    output = template.render(context)
    return output
|
def _rebuild_groups ( self ) :
"""Recreates the groups master list based on the groups hierarchy ( order matters here ,
since the parser uses order to determine lineage ) ."""
|
self . groups = [ ]
def collapse_group ( group ) :
for subgroup in group . children :
self . groups . append ( subgroup )
collapse_group ( subgroup )
collapse_group ( self . root )
|
def set_cpuid_leaf(self, idx, idx_sub, val_eax, val_ebx, val_ecx, val_edx):
    """Sets the virtual CPU cpuid information for the specified leaf. Note
    that these values are not passed unmodified. VirtualBox clears features
    that it doesn't support.

    Currently supported index values for cpuid:
        Standard CPUID leaves: 0 - 0x1f
        Extended CPUID leaves: 0x800000 - 0x8000001f
        VIA CPUID leaves: 0xc00000 - 0xc00000f

    The subleaf index is only applicable to certain leaves (see manuals as
    this is subject to change).

    See the Intel, AMD and VIA programmer's manuals for detailed information
    about the cpuid instruction and its leaves.

    Do not use this method unless you know exactly what you're doing. Misuse
    can lead to random crashes inside VMs.

    in idx of type int
        CPUID leaf index.
    in idx_sub of type int
        CPUID leaf sub-index (ECX). Set to 0xfffff (or 0) if not applicable.
        The 0xfffff causes it to remove all other subleaves before adding one
        with sub-index 0.
    in val_eax of type int
        CPUID leaf value for register eax.
    in val_ebx of type int
        CPUID leaf value for register ebx.
    in val_ecx of type int
        CPUID leaf value for register ecx.
    in val_edx of type int
        CPUID leaf value for register edx.

    raises :class:`OleErrorInvalidarg`
        Invalid index.
    """
    # Validate every argument up front: the managed API only accepts
    # integer-typed leaf indexes and register values.
    arguments = (('idx', idx), ('idx_sub', idx_sub), ('val_eax', val_eax),
                 ('val_ebx', val_ebx), ('val_ecx', val_ecx), ('val_edx', val_edx))
    for name, value in arguments:
        if not isinstance(value, baseinteger):
            raise TypeError("%s can only be an instance of type baseinteger" % name)
    self._call("setCPUIDLeaf", in_p=[idx, idx_sub, val_eax, val_ebx, val_ecx, val_edx])
|
def disable_wrapper(cls, disable, new_class):
    """Wrap the disable method to call pre and post disable signals and
    update module status."""
    def _wrapped(self, *args, **kwargs):
        # Guard against a double-disable.
        if not self.enabled:
            raise AssertionError('Module %s is already disabled' % self.verbose_name)
        logger.info("Disabling %s module" % self.verbose_name)
        # Fire the pre-disable signal, run the wrapped disable, then
        # fire the post-disable signal.
        pre_disable.send(sender=self)
        res = disable(self, *args, **kwargs)
        # Unregister interfaces (if present)
        if isinstance(self, DropletInterface):
            self.unregister()
        post_disable.send(sender=self)
        # Persist the DISABLED status on the module's info record.
        info = self._info
        info.status = ModuleInfo.DISABLED
        info.save()
        logger.info("Disabled %s module" % self.verbose_name)
        return res
    return _wrapped
|
def sweep(crypto, private_key, to_address, fee=None, password=None, **modes):
    """Move all funds by private key to another address."""
    from moneywagon.tx import Transaction
    # Gather every spendable input controlled by the key, direct the
    # whole balance (as change) to the destination, then broadcast.
    transaction = Transaction(crypto, verbose=modes.get('verbose', False))
    transaction.add_inputs(private_key=private_key, password=password, **modes)
    transaction.change_address = to_address
    transaction.fee(fee)
    return transaction.push()
|
def run_fba(model, rxn_id, direction="max", single_value=True):
    """Return the solution of an FBA to a set objective function.

    Parameters
    ----------
    model : cobra.Model
        The metabolic model under investigation.
    rxn_id : string
        A string containing the reaction ID of the desired FBA objective.
    direction : string
        A string containing either "max" or "min" to specify the direction
        of the desired FBA objective function.
    single_value : boolean
        Indicates whether the results for all reactions are gathered from the
        solver, or only the result for the objective value.

    Returns
    -------
    cobra.solution
        The cobra solution object for the corresponding FBA problem.
    """
    # Point the model at the requested reaction and optimization sense.
    model.objective = model.reactions.get_by_id(rxn_id)
    model.objective_direction = direction
    # Either just the objective value (fast path) or the full solution
    # object; infeasible problems are reported as NaN in both cases.
    optimize = model.slim_optimize if single_value else model.optimize
    try:
        return optimize()
    except Infeasible:
        return np.nan
|
def create_namespaced_persistent_volume_claim(self, namespace, body, **kwargs):  # noqa: E501
    """create_namespaced_persistent_volume_claim  # noqa: E501

    create a PersistentVolumeClaim  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_namespaced_persistent_volume_claim(namespace, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1PersistentVolumeClaim body: (required)
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
    :return: V1PersistentVolumeClaim
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always ask the low-level call for only the deserialized data; for
    # async_req the underlying call returns the request thread, which is
    # handed back to the caller unchanged (same as the sync result path).
    kwargs['_return_http_data_only'] = True
    return self.create_namespaced_persistent_volume_claim_with_http_info(namespace, body, **kwargs)  # noqa: E501
|
def find_agent(self, desc):
    '''Gives medium class of the agent if the agency hosts it.'''
    # Accept either a descriptor document or a bare agent id.
    if IDocument.providedBy(desc):
        agent_id = desc.doc_id
    else:
        agent_id = desc
    self.log("I'm trying to find the agent with id: %s", agent_id)
    # Pick the first hosted agent whose descriptor matches the id
    # (None when the agency does not host it).
    match = first(x for x in self._agents if x._descriptor.doc_id == agent_id)
    return defer.succeed(match)
|
def prepare_delete_monarchy(self, node, position=None, save=True):
    """Prepares a given :class:`CTENode` `node` for deletion, by executing
    the :const:`DELETE_METHOD_MONARCHY` semantics. Descendant nodes,
    if present, will be moved; in this case the optional `position` can
    be a ``callable`` which is invoked prior to each move operation (see
    :meth:`move` for details).

    By default, after each move operation, sub-tree nodes which were
    moved will be saved through a call to :meth:`Model.save` unless
    `save` is ``False``.

    This method delegates move operations to :meth:`move`.

    :param node: the :class:`CTENode` to prepare for deletion.
    :param position: optionally, a ``callable`` to invoke prior to each
        move operation.
    :param save: flag indicating whether to save after each move
        operation, ``True`` by default.
    """
    # The first child becomes the new "monarch": it is attached to the
    # deleted node's parent, and every subsequent child is re-parented
    # under it. We iterate the children query directly (no slicing into
    # children[0] and children[1:]) because the iterator may be custom.
    monarch = None
    for child in node.children.all():
        if monarch is None:
            monarch = child
            monarch.move(node.parent, position, save)
        else:
            child.move(monarch, position, save)
|
def sample(cls, dataset, samples=[]):
    """Samples the gridded data into dataset of samples."""
    # NOTE(review): the mutable default `samples=[]` is safe here only
    # because it is iterated, never mutated; `None` would be cleaner.
    ndims = dataset.ndims
    dimensions = dataset.dimensions(label='name')
    arrays = [dataset.data[vdim.name] for vdim in dataset.vdims]
    data = defaultdict(list)
    for sample in samples:
        # Promote scalar samples to 1-element lists and pad short
        # samples with None so each sample addresses all key dimensions.
        if np.isscalar(sample):
            sample = [sample]
        if len(sample) != ndims:
            sample = [sample[i] if i < len(sample) else None for i in range(ndims)]
        sampled, int_inds = [], []
        for d, ind in zip(dimensions, sample):
            cdata = dataset.data[d]
            # A None mask selects the whole axis.
            mask = cls.key_select_mask(dataset, cdata, ind)
            inds = np.arange(len(cdata)) if mask is None else np.argwhere(mask)
            int_inds.append(inds)
            sampled.append(cdata[mask])
        for d, arr in zip(dimensions, np.meshgrid(*sampled)):
            data[d].append(arr)
        for vdim, array in zip(dataset.vdims, arrays):
            da = dask_array_module()
            # Fold the per-dimension integer indices into flat indices
            # into the value array (reversed to match axis ordering).
            flat_index = np.ravel_multi_index(tuple(int_inds)[::-1], array.shape)
            if da and isinstance(array, da.Array):
                # Dask arrays need vindex for pointwise fancy indexing.
                data[vdim.name].append(array.flatten().vindex[tuple(flat_index)])
            else:
                data[vdim.name].append(array.flat[flat_index])
    # Concatenate the per-sample pieces into one flat array per column.
    concatenated = {d: np.concatenate(arrays).flatten() for d, arrays in data.items()}
    return concatenated
|
def _generate_soma(self):
    '''Build the square segment used to draw the soma.'''
    soma_radius = self._obj.soma.radius
    # Segment is centred horizontally and placed one radius below the
    # origin.
    return _square_segment(soma_radius, (0., -soma_radius))
|
def _update(dashboard, profile):
    '''Update a specific dashboard.'''
    # Overwrite any existing dashboard with the same identifier.
    payload = {'dashboard': dashboard, 'overwrite': True}
    url = "{0}/api/dashboards/db".format(profile.get('grafana_url'))
    headers = {"Authorization": "Bearer {0}".format(profile.get('grafana_token'))}
    return requests.post(url, headers=headers, json=payload).json()
|
def ensure_index(self, key_or_list, cache_for=300, **kwargs):
    """**DEPRECATED** - Ensures that an index exists on this collection.

    .. versionchanged:: 3.0
        **DEPRECATED**
    """
    warnings.warn("ensure_index is deprecated. Use create_index instead.", DeprecationWarning, stacklevel=2)
    # The types supported by datetime.timedelta.
    if not (isinstance(cache_for, integer_types) or isinstance(cache_for, float)):
        raise TypeError("cache_for must be an integer or float.")
    # Translate snake_case option names to the server's camelCase forms.
    if "drop_dups" in kwargs:
        kwargs["dropDups"] = kwargs.pop("drop_dups")
    if "bucket_size" in kwargs:
        kwargs["bucketSize"] = kwargs.pop("bucket_size")
    keys = helpers._index_list(key_or_list)
    # Fall back to a generated name when the caller did not supply one.
    name = kwargs.setdefault("name", helpers._gen_index_name(keys))
    # Note that there is a race condition here. One thread could
    # check if the index is cached and be preempted before creating
    # and caching the index. This means multiple threads attempting
    # to create the same index concurrently could send the index
    # to the server two or more times. This has no practical impact
    # other than wasted round trips.
    if not self.__database.client._cached(self.__database.name, self.__name, name):
        self.__create_index(keys, kwargs)
        self.__database.client._cache_index(self.__database.name, self.__name, name, cache_for)
        # Return the index name only when the index was actually sent.
        return name
    return None
|
def replace_strings_in_list(array_of_strigs, replace_with_strings):
    "A value in replace_with_strings can be either single string or list of strings"
    # Substitute each string that has a (truthy) mapping; falsy mapped
    # values fall back to the original string. List-valued replacements
    # are then flattened into the result.
    substituted = [replace_with_strings.get(item) or item for item in array_of_strigs]
    return list(flatten(substituted))
|
def plot(histogram: HistogramBase, kind: Optional[str] = None, backend: Optional[str] = None, **kwargs):
    """Universal plotting function.

    All keyword arguments are passed to the plotting methods.

    Parameters
    ----------
    kind : Type of the plot (like "scatter", "line", ...), similar to pandas
    """
    backend_name, backend = _get_backend(backend)
    # When no kind is requested, auto-select the first plot type the
    # backend supports for this histogram's dimensionality.
    if kind is None:
        supported = [t for t in backend.types if histogram.ndim in backend.dims[t]]
        if not supported:
            raise RuntimeError("No plot type is supported for {0}".format(histogram.__class__.__name__))
        kind = supported[0]
    if kind not in backend.types:
        raise RuntimeError("Histogram type error: {0} missing in backend {1}".format(kind, backend_name))
    return getattr(backend, kind)(histogram, **kwargs)
|
def set_volume(self, percent, update_group=True):
    """Set client volume percent."""
    # Only integer percentages 0..100 are accepted.
    if percent not in range(101):
        raise ValueError('Volume percent out of range')
    # Mutate the client's volume config in place and send it to the
    # server (new_volume aliases the stored config dict).
    new_volume = self._client['config']['volume']
    new_volume['percent'] = percent
    self._client['config']['volume']['percent'] = percent
    yield from self._server.client_volume(self.identifier, new_volume)
    # Propagate the change to the client's group, if requested.
    if update_group:
        self._server.group(self.group.identifier).callback()
    _LOGGER.info('set volume to %s on %s', percent, self.friendly_name)
|
def __send_smtp_email(self, recipients, subject, html_body, text_body):
    """Send an email using SMTP

    Args:
        recipients (`list` of `str`): List of recipient email addresses
        subject (str): Subject of the email
        html_body (str): HTML body of the email
        text_body (str): Text body of the email

    Returns:
        `None`
    """
    # Connect using configured server/port, defaulting to localhost:25.
    smtp = smtplib.SMTP(dbconfig.get('smtp_server', NS_EMAIL, 'localhost'), dbconfig.get('smtp_port', NS_EMAIL, 25))
    source_arn = dbconfig.get('source_arn', NS_EMAIL)
    return_arn = dbconfig.get('return_path_arn', NS_EMAIL)
    from_arn = dbconfig.get('from_arn', NS_EMAIL)
    msg = MIMEMultipart('alternative')
    # Set SES options if needed -- only when all three ARNs are configured.
    if source_arn and from_arn and return_arn:
        msg['X-SES-SOURCE-ARN'] = source_arn
        msg['X-SES-FROM-ARN'] = from_arn
        msg['X-SES-RETURN-PATH-ARN'] = return_arn
    msg['Subject'] = subject
    msg['To'] = ','.join(recipients)
    msg['From'] = self.sender
    # Check body types to avoid exceptions; attach whichever parts were
    # actually provided.
    if html_body:
        html_part = MIMEText(html_body, 'html')
        msg.attach(html_part)
    if text_body:
        text_part = MIMEText(text_body, 'plain')
        msg.attach(text_part)
    # TLS if needed
    if dbconfig.get('smtp_tls', NS_EMAIL, False):
        smtp.starttls()
    # Login if needed (only when both credentials are configured)
    username = dbconfig.get('smtp_username', NS_EMAIL)
    password = dbconfig.get('smtp_password', NS_EMAIL)
    if username and password:
        smtp.login(username, password)
    smtp.sendmail(self.sender, recipients, msg.as_string())
    smtp.quit()
|
def create_followup(self, post, content, anonymous=False):
    """Create a follow-up on a post `post`.

    It seems like if the post has `<p>` tags, then it's treated as HTML,
    but is treated as text otherwise. You'll want to provide `content`
    accordingly.

    :type post: dict|str|int
    :param post: Either the post dict returned by another API method, or
        the `cid` field of that post.
    :type content: str
    :param content: The content of the followup.
    :type anonymous: bool
    :param anonymous: Whether or not to post anonymously.
    :rtype: dict
    :returns: Dictionary with information about the created follow-up.
    """
    # `post` may be the full post dict or just its cid. Subscripting a
    # str/int cid raises TypeError (not KeyError), so both exceptions
    # must be treated as "post is already a cid" -- the original
    # `except KeyError` broke the documented str/int case.
    try:
        cid = post["id"]
    except (KeyError, TypeError):
        cid = post
    params = {
        "cid": cid,
        "type": "followup",
        # For followups, the content is actually put into the subject.
        "subject": content,
        "content": "",
        "anonymous": "yes" if anonymous else "no",
    }
    return self._rpc.content_create(params)
|
def unget_service(self, reference):
    # type: (ServiceReference) -> bool
    """Disables a reference to the service

    :return: True if the bundle was using this reference, else False
    """
    # Drop this bundle's dependency on the referenced service via the
    # framework's service registry.
    registry = self.__framework._registry
    return registry.unget_service(self.__bundle, reference)
|
def get_raw(tree):
    """Get the exact words in lowercase in the tree object.

    Args:
        tree (Tree): Parsed tree structure
    Returns:
        Resulting string of tree ``(Ex: "The red car")``
    """
    # Leaves are plain strings; internal nodes join their children's
    # words with single spaces.
    if not isinstance(tree, Tree):
        return tree
    return ' '.join(get_raw(child) for child in tree)
|
def _cleanRecursive ( self , subSelf ) :
"""Delete all NestedOrderedDict that haven ' t any entries ."""
|
for key , item in list ( subSelf . items ( ) ) :
if self . isNestedDict ( item ) :
if not item :
subSelf . pop ( key )
else :
self . _cleanRecursive ( item )
|
def connectRelay(self):
    """Builds the target protocol and connects it to the relay transport."""
    # Ask the connector's factory for a fresh protocol instance (the
    # address argument is unused here, hence None).
    relay_protocol = self.connector.buildProtocol(None)
    self.protocol = relay_protocol
    self.connected = True
    # Hand ourselves to the protocol as its transport.
    relay_protocol.makeConnection(self)
|
def app_create(self, data, **kwargs):
    "https://developer.zendesk.com/rest_api/docs/core/apps#create-app"
    # POST the new app payload to the apps collection endpoint.
    return self.call("/api/v2/apps.json", method="POST", data=data, **kwargs)
|
def GetFileSystemTypeIndicators(cls, path_spec, resolver_context=None):
    """Determines if a file contains a supported file system types.

    Args:
        path_spec (PathSpec): path specification.
        resolver_context (Optional[Context]): resolver context, where None
            represents the built-in context which is not multi process safe.

    Returns:
        list[str]: supported format type indicators.
    """
    # Lazily build and cache (at class level) the file-system
    # specification store and its remainder list -- presumably the
    # helpers that cannot be matched by signature; confirm in
    # _GetSpecificationStore.
    if (cls._file_system_remainder_list is None or cls._file_system_store is None):
        specification_store, remainder_list = cls._GetSpecificationStore(definitions.FORMAT_CATEGORY_FILE_SYSTEM)
        cls._file_system_remainder_list = remainder_list
        cls._file_system_store = specification_store
    # Lazily build the signature scanner for the cached store.
    if cls._file_system_scanner is None:
        cls._file_system_scanner = cls._GetSignatureScanner(cls._file_system_store)
    return cls._GetTypeIndicators(cls._file_system_scanner, cls._file_system_store, cls._file_system_remainder_list, path_spec, resolver_context=resolver_context)
|
def logn2(n, p):
    """Best p-bit lower and upper bounds for log(2)/log(n), as Fractions."""
    with precision(p):
        extra = 10
        while True:
            # NOTE(review): the comment below is on the inner `with`;
            # assumes only the log2 calls run at elevated precision and
            # the divisions round at p bits -- confirm against upstream.
            with precision(p + extra):  # use extra precision for intermediate step
                log2upper = log2(n, RoundTowardPositive)
                log2lower = log2(n, RoundTowardNegative)
            # Invert with outward rounding so [lower, upper] brackets
            # log(2)/log(n) = 1/log2(n).
            lower = div(1, log2upper, RoundTowardNegative)
            upper = div(1, log2lower, RoundTowardPositive)
            # if lower and upper are adjacent (or equal) we're done
            if next_up(lower) == upper:
                return (Fraction(*lower.as_integer_ratio()), Fraction(*upper.as_integer_ratio()))
            # otherwise, increase the precision and try again
            extra += 10
|
def make_symlink(source, link_path):
    """Create a symlink at `link_path` referring to `source`."""
    # Report a dbt system error on platforms that cannot create
    # symbolic links.
    if not supports_symlinks():
        dbt.exceptions.system_error('create a symbolic link')
    return os.symlink(source, link_path)
|
def load_sequences_to_strains(self, joblib=False, cores=1, force_rerun=False):
    """Wrapper function for _load_sequences_to_strain"""
    log.info('Loading sequences to strain GEM-PROs...')
    # Either fan the per-strain work out in parallel with joblib, or run
    # serially with a tqdm progress bar.
    if joblib:
        result = DictList(Parallel(n_jobs=cores)(delayed(self._load_sequences_to_strain)(s, force_rerun) for s in self.strain_ids))
    else:
        result = []
        for s in tqdm(self.strain_ids):
            result.append(self._load_sequences_to_strain(s, force_rerun))
    # Each worker returns a (strain_id, path) pair; record the sequence
    # path on the per-strain info dict.
    for strain_id, gp_seqs_path in result:
        self.strain_infodict[strain_id]['gp_seqs_path'] = gp_seqs_path
|
def __execute_ret(command, host=None, admin_username=None, admin_password=None, module=None):
    '''Execute rac commands

    :param command: racadm subcommand string to run.
    :param host: if set, run remotely via ``racadm -r`` with the given
        credentials; otherwise run locally.
    :param admin_username: remote username (only used with ``host``).
    :param admin_password: remote password (only used with ``host``).
    :param module: CMC module to target; ``'ALL'`` maps to ``-a``,
        any other value to ``-m <module>``.
    :return: the ``cmd.run_all`` result dict; on success its ``stdout``
        has racadm boilerplate lines stripped.
    '''
    if module:
        # 'ALL' addresses every module; anything else targets one module.
        modswitch = '-a ' if module == 'ALL' else '-m {0}'.format(module)
    else:
        modswitch = ''
    if not host:
        # This is a local call
        cmd = __salt__['cmd.run_all']('racadm {0} {1}'.format(command, modswitch))
    else:
        # Remote call; quiet log level keeps credentials out of the logs.
        cmd = __salt__['cmd.run_all']('racadm -r {0} -u {1} -p {2} {3} {4}'.format(host, admin_username, admin_password, command, modswitch), output_loglevel='quiet')
    if cmd['retcode'] != 0:
        log.warning('racadm returned an exit code of %s', cmd['retcode'])
    else:
        # Filter out racadm boilerplate: security alerts, continuation
        # notices, blank lines, and everything from the RAC1168/RAC1169
        # trailer onward.
        fmtlines = []
        for line in cmd['stdout'].splitlines():
            if line.startswith(('RAC1168:', 'RAC1169:')):
                break
            if line.startswith(('Security Alert', 'Continuing execution')):
                continue
            if not line.strip():
                continue
            # NOTE: the original had a dead `if '=' in l: continue` after
            # this append (a no-op at the end of the loop body); removed.
            fmtlines.append(line)
        cmd['stdout'] = '\n'.join(fmtlines)
    return cmd
|
def floor_func(self, addr):
    """Return the function who has the greatest address that is less than or equal to `addr`.

    :param int addr: The address to query.
    :return: A Function instance, or None if no function starts at or
        before `addr`.
    :rtype: Function or None
    """
    try:
        # floor_addr raises KeyError when nothing lies at or below addr;
        # the map lookup itself may also miss, which we treat the same way.
        return self._function_map[self._function_map.floor_addr(addr)]
    except KeyError:
        return None
|
def _copy_module(self, conn, tmp, module_name, module_args, inject):
    '''transfer a module over SFTP, does not run it

    Locates the named module on the local module search path, inlines the
    shared "new style" boilerplate and the serialized arguments/language
    into the module source, rewrites the shebang when the inventory
    overrides the interpreter, then uploads the result to the remote tmp
    directory.

    Returns a tuple of (remote_out_path, is_new_style, shebang).
    '''
    # Absolute paths are rejected: module names must be bare names
    # resolved via the module search path.
    if module_name.startswith("/"):
        raise errors.AnsibleFileNotFound("%s is not a module" % module_name)
    # Search module path(s) for named module.
    in_path = utils.plugins.module_finder.find_plugin(module_name)
    if in_path is None:
        raise errors.AnsibleFileNotFound("module %s not found in %s" % (module_name, utils.plugins.module_finder.print_paths()))
    out_path = os.path.join(tmp, module_name)
    module_data = ""
    is_new_style = False
    with open(in_path) as f:
        module_data = f.read()
        # "New style" modules carry a REPLACER marker that is swapped for
        # the shared common-module code.
        if module_common.REPLACER in module_data:
            is_new_style = True
        module_data = module_data.replace(module_common.REPLACER, module_common.MODULE_COMMON)
        # Inline the module arguments as a triple-quoted string literal;
        # embedded double quotes are escaped first so the literal stays valid.
        encoded_args = "\"\"\"%s\"\"\"" % module_args.replace("\"", "\\\"")
        module_data = module_data.replace(module_common.REPLACER_ARGS, encoded_args)
        # Same treatment for the configured module language.
        encoded_lang = "\"\"\"%s\"\"\"" % C.DEFAULT_MODULE_LANG
        module_data = module_data.replace(module_common.REPLACER_LANG, encoded_lang)
        if is_new_style:
            # Allow the inventory to override the syslog facility used by
            # the common module code.
            facility = C.DEFAULT_SYSLOG_FACILITY
            if 'ansible_syslog_facility' in inject:
                facility = inject['ansible_syslog_facility']
            module_data = module_data.replace('syslog.LOG_USER', "syslog.%s" % facility)
    lines = module_data.split("\n")
    shebang = None
    if lines[0].startswith("#!"):
        # Rewrite the shebang when the inventory defines a per-interpreter
        # override (e.g. ansible_python_interpreter), preserving any
        # interpreter arguments.
        shebang = lines[0]
        args = shlex.split(str(shebang[2:]))
        interpreter = args[0]
        interpreter_config = 'ansible_%s_interpreter' % os.path.basename(interpreter)
        if interpreter_config in inject:
            lines[0] = shebang = "#!%s %s" % (inject[interpreter_config], " ".join(args[1:]))
        module_data = "\n".join(lines)
    self._transfer_str(conn, tmp, module_name, module_data)
    return (out_path, is_new_style, shebang)
|
def decode_preflist(self, item):
    """Decodes a preflist response

    :param item: one bucket/key preflist entry
    :type item: riak.pb.riak_kv_pb2.RpbBucketKeyPreflistItem
    :rtype: dict with 'partition', 'node' and 'primary' keys
    """
    # Node names arrive as bytes from protobuf; convert for the caller.
    return {
        'partition': item.partition,
        'node': bytes_to_str(item.node),
        'primary': item.primary,
    }
|
def user_describe(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /user-xxxx/describe API method.

    :param object_id: the user ID ("user-xxxx")
    :param input_params: dict of API input parameters (defaults to empty)
    :param always_retry: whether the request may always be retried

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Users#API-method%3A-%2Fuser-xxxx%2Fdescribe
    """
    # Avoid the shared mutable default argument ({}) of the original;
    # None is treated as "no input parameters".
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)
|
def fix_bad_headers(self):
    """Fix up bad headers that cause problems for the wrapped WCS
    module.

    Subclass can override this method to fix up issues with the
    header for problem FITS files.
    """
    # WCSLIB rejects the nonstandard unit spelling "DEGREE"; rewrite it
    # to the standard "deg" for both celestial axes.
    for key in ('CUNIT1', 'CUNIT2'):
        if self.header.get(key, 'deg').upper() == 'DEGREE':
            self.header[key] = 'deg'
|
def pop_all(self):
    """NON-BLOCKING POP ALL IN QUEUE, IF ANY

    Atomically drain the queue under the lock and return its former
    contents as a list (empty when there was nothing queued).
    """
    with self.lock:
        drained = list(self.queue)
        self.queue.clear()
    return drained
|
def blksize(path):
    """Get optimal file system buffer size (in bytes) for I/O calls."""
    dirname = os.path.dirname(fullpath(path))
    try:
        # GetDiskFreeSpace reports (sectors per cluster, bytes per sector,
        # ...); the cluster size is the optimal I/O granularity.
        sectors_per_cluster, bytes_per_sector = win32file.GetDiskFreeSpace(dirname)[:2]
        return sectors_per_cluster * bytes_per_sector
    except win32file.error as e:
        if e.winerror != winerror.ERROR_NOT_READY:
            raise
        # Device not ready (e.g. drive spinning up): wait and retry.
        sleep(3)
        return blksize(dirname)
|
def map_uniprot_resnum_to_pdb(uniprot_resnum, chain_id, sifts_file):
    """Map a UniProt residue number to its corresponding PDB residue number.

    This function requires that the SIFTS file be downloaded,
    and also a chain ID (as different chains may have different mappings).

    Args:
        uniprot_resnum (int): integer of the residue number you'd like to map
        chain_id (str): string of the PDB chain to map to
        sifts_file (str): Path to the SIFTS XML file

    Returns:
        (tuple): tuple containing:

            mapped_resnum (int): Mapped residue number
            is_observed (bool): Indicates if the 3D structure actually shows the residue
    """
    # Load the xml with lxml
    parser = etree.XMLParser(ns_clean=True)
    tree = etree.parse(sifts_file, parser)
    root = tree.getroot()
    my_pdb_resnum = None
    # TODO: "Engineered_Mutation is also a possible annotation, need to figure out what to do with that
    my_pdb_annotation = False
    # Find the right chain (entities in the xml doc)
    ent = './/{http://www.ebi.ac.uk/pdbe/docs/sifts/eFamily.xsd}entity'
    for chain in root.findall(ent):
        # TODO: IMPORTANT - entityId is not the chain ID!!! it is just in alphabetical order!
        if chain.attrib['entityId'] == chain_id:
            # Find the "crossRefDb" tag that has the attributes dbSource="UniProt" and dbResNum="your_resnum_here"
            # Then match it to the crossRefDb dbResNum that has the attribute dbSource="PDBresnum"
            # Check if uniprot + resnum even exists in the sifts file (it won't if the pdb doesn't contain the residue)
            ures = './/{http://www.ebi.ac.uk/pdbe/docs/sifts/eFamily.xsd}crossRefDb[@dbSource="UniProt"][@dbResNum="%s"]' % uniprot_resnum
            my_uniprot_residue = chain.findall(ures)
            if len(my_uniprot_residue) == 1:
                # Get crossRefDb dbSource="PDB"
                # NOTE(review): getparent() is lxml-specific; this function
                # requires lxml.etree, not the stdlib ElementTree.
                parent = my_uniprot_residue[0].getparent()
                pres = './/{http://www.ebi.ac.uk/pdbe/docs/sifts/eFamily.xsd}crossRefDb[@dbSource="PDB"]'
                my_pdb_residue = parent.findall(pres)
                my_pdb_resnum = int(my_pdb_residue[0].attrib['dbResNum'])
                # Get <residueDetail dbSource="PDBe" property="Annotation">
                # Will be Not_Observed if it is not seen in the PDB
                anno = './/{http://www.ebi.ac.uk/pdbe/docs/sifts/eFamily.xsd}residueDetail[@dbSource="PDBe"][@property="Annotation"]'
                my_pdb_annotation = parent.findall(anno)
                if len(my_pdb_annotation) == 1:
                    my_pdb_annotation = my_pdb_annotation[0].text
                    if my_pdb_annotation == 'Not_Observed':
                        my_pdb_annotation = False
                else:
                    # No annotation tag: presumably the residue is observed
                    # in the 3D structure — TODO confirm against SIFTS docs.
                    my_pdb_annotation = True
            else:
                # The UniProt residue is absent from the SIFTS mapping.
                return None, False
    return my_pdb_resnum, my_pdb_annotation
|
def download_from_files(files, output_path, width):
    """Download files from a given file list.

    Resolves the requested files from the arguments, then fetches only
    those not already recorded in the manifest at `output_path`.
    """
    requested = get_files_from_arguments(files, width)
    download_files_if_not_in_manifest(requested, output_path)
|
def create_authorization(self, scopes=github.GithubObject.NotSet, note=github.GithubObject.NotSet, note_url=github.GithubObject.NotSet, client_id=github.GithubObject.NotSet, client_secret=github.GithubObject.NotSet, onetime_password=None):
    """:calls: `POST /authorizations <http://developer.github.com/v3/oauth>`_

    :param scopes: list of string
    :param note: string
    :param note_url: string
    :param client_id: string
    :param client_secret: string
    :param onetime_password: string (one-time password for 2FA accounts)
    :rtype: :class:`github.Authorization.Authorization`
    """
    assert scopes is github.GithubObject.NotSet or all(isinstance(element, (str, unicode)) for element in scopes), scopes
    assert note is github.GithubObject.NotSet or isinstance(note, (str, unicode)), note
    assert note_url is github.GithubObject.NotSet or isinstance(note_url, (str, unicode)), note_url
    assert client_id is github.GithubObject.NotSet or isinstance(client_id, (str, unicode)), client_id
    assert client_secret is github.GithubObject.NotSet or isinstance(client_secret, (str, unicode)), client_secret
    assert onetime_password is None or isinstance(onetime_password, (str, unicode)), onetime_password
    # Only explicitly-provided values are included in the POST body.
    post_parameters = {
        name: value
        for name, value in (
            ("scopes", scopes),
            ("note", note),
            ("note_url", note_url),
            ("client_id", client_id),
            ("client_secret", client_secret),
        )
        if value is not github.GithubObject.NotSet
    }
    if onetime_password is not None:
        # Two-factor auth: the OTP travels in a dedicated header.
        request_header = {Consts.headerOTP: onetime_password}  # pragma no cover (Should be covered)
    else:
        request_header = None
    headers, data = self._requester.requestJsonAndCheck("POST", "/authorizations", input=post_parameters, headers=request_header, )
    return github.Authorization.Authorization(self._requester, headers, data, completed=True)
|
def validate_unwrap(self, value):
    '''Check that ``value`` is valid for unwrapping with ``ComputedField.computed_type``'''
    try:
        self.computed_type.validate_unwrap(value)
    except BadValueException as cause:
        # Re-report the failure against this computed field, preserving
        # the underlying validation error as the cause.
        self._fail_validation(value, 'Bad value for computed field', cause=cause)
|
def string(self, units: typing.Optional[str] = None) -> str:
    """Return a string representation of the pressure, using the given units."""
    if units:
        # Validate and normalize an explicitly requested unit.
        if units.upper() not in CustomPressure.legal_units:
            raise UnitsError("unrecognized pressure unit: '" + units + "'")
        _units = units.upper()
    else:
        # Fall back to this object's configured units.
        _units = self._units
    val = self.value(units)
    # Format template per unit; raise for anything unrecognized.
    templates = {
        "MB": "%.0f mb",
        "HPA": "%.0f hPa",
        "IN": "%.2f inches",
        "MM": "%.0f mmHg",
    }
    if _units in templates:
        return templates[_units] % val
    raise ValueError(_units)
|
def from_http_response(response):
    """Create a :class:`GoogleAPICallError` from a :class:`requests.Response`.

    Args:
        response (requests.Response): The HTTP response.

    Returns:
        GoogleAPICallError: An instance of the appropriate subclass of
            :class:`GoogleAPICallError`, with the message and errors populated
            from the response.
    """
    # Fall back to a synthetic payload when the body is not valid JSON.
    try:
        payload = response.json()
    except ValueError:
        payload = {"error": {"message": response.text or "unknown error"}}
    error = payload.get("error", {})
    error_message = error.get("message", "unknown error")
    errors = error.get("errors", ())
    message = "{method} {url}: {error}".format(
        method=response.request.method,
        url=response.request.url,
        error=error_message,
    )
    return from_http_status(response.status_code, message, errors=errors, response=response)
|
def find_packages(path):
    """Find all java files matching the "*Package.java" pattern within
    the given enaml package directory relative to the java source path.

    Only files whose source mentions "implements EnamlPackage" are kept.
    Returned paths are relative to <path>/src/main/java.
    """
    matches = []
    root = join(path, 'src', 'main', 'java')
    for folder, _dirnames, filenames in os.walk(root):
        for name in fnmatch.filter(filenames, '*Package.java'):
            #: Open and make sure it's an EnamlPackage somewhere
            with open(join(folder, name)) as f:
                source = f.read()
            if "implements EnamlPackage" not in source:
                continue
            package = os.path.relpath(folder, root)
            matches.append(os.path.join(package, name))
    return matches
|
def recover(self, key, value):
    """Get the deserialized value for a given key, and the serialized version."""
    # On a miss, lazily (re)load the type table before giving up.
    if key not in self._dtypes:
        self.read_types()
        if key not in self._dtypes:
            raise ValueError("Unknown datatype for {} and {}".format(key, value))
    # Entry layout: index 2 holds the deserializer callable.
    deserialize = self._dtypes[key][2]
    return deserialize(value)
|
def read_config(cls, configparser):
    """Read configuration file options.

    Returns a dict mapping the class's filename-regex option name to its
    value from the section named after the class, or to None when the
    option is absent.
    """
    section = cls.__name__
    option = cls._filename_re_key
    value = None
    if configparser.has_option(section, option):
        value = configparser.get(section, option)
    return {option: value}
|
def write_file(self, *args, **kwargs):
    """Write a file into this directory.

    This method takes the same arguments as :meth:`.FileDataAPI.write_file`
    with the exception of the ``path`` argument, which is supplied
    automatically from this directory's path.
    """
    target = self.get_path()
    return self._fdapi.write_file(target, *args, **kwargs)
|
def _windows_get_window_size():
    """Return (width, height) of available window area on Windows.
    (0, 0) if no console is allocated."""
    info = CONSOLE_SCREEN_BUFFER_INFO()
    # GetConsoleScreenBufferInfo returns 0 on failure (no console attached).
    if windll.kernel32.GetConsoleScreenBufferInfo(console_handle, byref(info)) == 0:
        return (0, 0)
    win = info.srWindow
    return (win.Right - win.Left + 1, win.Bottom - win.Top + 1)
|
def brancher(  # noqa: E302
    self, branches=None, all_branches=False, tags=None, all_tags=False
):
    """Generator that iterates over specified revisions.

    For each yielded revision, ``self.tree`` is switched to that
    revision's tree; it is restored to the original tree after the loop
    completes normally.

    Args:
        branches (list): a list of branches to iterate over.
        all_branches (bool): iterate over all available branches.
        tags (list): a list of tags to iterate over.
        all_tags (bool): iterate over all available tags.

    Yields:
        str: the display name for the currently selected tree, it could be:
            - a git revision identifier
            - empty string it there is no branches to iterate over
            - "Working Tree" if there are uncommited changes in the SCM repo
    """
    # Nothing requested: yield only the current tree's placeholder name.
    if not any([branches, all_branches, tags, all_tags]):
        yield ""
        return
    saved_tree = self.tree
    revs = []
    scm = self.scm
    # Uncommitted changes are represented by the working tree.
    if self.scm.is_dirty():
        from dvc.scm.tree import WorkingTree
        self.tree = WorkingTree(self.root_dir)
        yield "Working Tree"
    if all_branches:
        branches = scm.list_branches()
    if all_tags:
        tags = scm.list_tags()
    # Default to the active branch when no explicit branch list was given.
    if branches is None:
        revs.extend([scm.active_branch()])
    else:
        revs.extend(branches)
    if tags is not None:
        revs.extend(tags)
    # NOTE: it might be a good idea to wrap this loop in try/finally block
    # to don't leave the tree on some unexpected branch after the
    # `brancher()`, but this could cause problems on exception handling
    # code which might expect the tree on which exception was raised to
    # stay in place. This behavior is a subject to change.
    for rev in revs:
        self.tree = scm.get_tree(rev)
        yield rev
    # Restore the tree that was active before iteration began.
    self.tree = saved_tree
|
def get_vnetwork_portgroups_output_vnetwork_pgs_datacenter(self, **kwargs):
    """Auto Generated Code

    Builds the get_vnetwork_portgroups XML payload with the
    output/vnetwork-pgs/datacenter leaf populated from kwargs, then
    passes it to the callback (kwargs['callback'] or self._callback).
    """
    # The original also created an unused ET.Element("config") that was
    # immediately rebound; only the element below is ever used.
    config = ET.Element("get_vnetwork_portgroups")
    output = ET.SubElement(config, "output")
    vnetwork_pgs = ET.SubElement(output, "vnetwork-pgs")
    datacenter = ET.SubElement(vnetwork_pgs, "datacenter")
    datacenter.text = kwargs.pop('datacenter')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
def subtract_ranges(r1s, r2s, already_sorted=False):
    """Subtract multiple ranges from a list of ranges

    :param r1s: range list 1
    :param r2s: range list 2
    :param already_sorted: skip the initial merge/sort of both inputs (default False)
    :type r1s: GenomicRange []
    :type r2s: GenomicRange []
    :return: new range r1s minus r2s
    :rtype: GenomicRange []
    """
    from seqtools.stream import MultiLocusStream
    if not already_sorted:
        # Normalize both inputs into sorted, merged range lists.
        r1s = merge_ranges(r1s)
        r2s = merge_ranges(r2s)
    outputs = []
    mls = MultiLocusStream([BedArrayStream(r1s), BedArrayStream(r2s)])
    for loc in mls:
        # Each locus payload is a pair of co-located slices of r1s and r2s.
        [beds1, beds2] = loc.payload
        beds1 = beds1[:]
        beds2 = beds2[:]
        if len(beds1) == 0:
            continue
        if len(beds2) == 0:
            # Nothing to subtract in this window: keep all of beds1.
            outputs += beds1
            continue
        # mapping: beds1 index -> indices of overlapping beds2 entries.
        # beds2min only advances because both lists are sorted.
        mapping = {}
        for i in range(0, len(beds1)):
            mapping[i] = []
        beds2min = 0
        beds2max = len(beds2)
        for i in range(0, len(beds1)):
            for j in range(beds2min, beds2max):
                cmpval = beds1[i].cmp(beds2[j])
                if cmpval == -1:
                    beds2min = j + 1
                elif cmpval == 0:
                    mapping[i].append(j)
                else:
                    break
        for i in range(0, len(beds1)):
            if len(mapping[i]) == 0:
                # BUG FIX: the original did `outputs += beds1` here,
                # appending the ENTIRE beds1 list once per non-overlapped
                # index and duplicating ranges. Only beds1[i] itself is
                # untouched by the subtraction and should be kept.
                outputs.append(beds1[i])
            else:
                outputs += subtract_range_array(beds1[i], [beds2[j] for j in mapping[i]], is_sorted=True)
    return merge_ranges(outputs)
|
def findnode(obj, path=''):
    """Returns a Node pointing to obj.

    If obj is a ctypes-derived class, an UnboundNode is returned. If obj is
    an instance of such a class, then a BoundNode will be returned.

    If the optional path is provided, it is a string to look up searching
    down the original source node, such as '.overhead.window[2].page'
    """
    node = _createunbound(obj) if isclass(obj) else _createbound(obj)
    # Walk the node down the dotted/indexed path. The regex splits on '.'
    # and '[' (consuming a preceding ']'), so '.a.b[2].c' yields the parts
    # '', 'a', 'b', '2', 'c'; empty parts are skipped.
    for part in re.split(r'\]?(?:[[.]|$)', path):
        if not part:
            continue
        try:
            # Numeric parts index the node; non-numeric ones (or numeric
            # lookups that raise ValueError) fall back to name lookup.
            node = node[int(part)]
        except ValueError:
            node = node[part]
    return node
|
def produce_pca_explorer(corpus, category, word2vec_model=None, projection_model=None, embeddings=None, projection=None, term_acceptance_re=re.compile('[a-z]{3,}'), x_dim=0, y_dim=1, scaler=scale, show_axes=False, show_dimensions_on_tooltip=True, **kwargs):
    """Produce an interactive scatterplot of terms projected into 2D.

    Parameters
    ----------
    corpus : ParsedCorpus
        It is highly recommended to use a stoplisted, unigram corpus -- `corpus.get_stoplisted_unigram_corpus()`
    category : str
    word2vec_model : Word2Vec
        A gensim word2vec model. A default model will be used instead. See Word2VecFromParsedCorpus for the default
        model.
    projection_model : sklearn-style dimensionality reduction model. Ignored if 'projection' is present.
        By default: umap.UMAP(min_dist=0.5, metric='cosine') unless projection is present. If so,
        you could also use, e.g., sklearn.manifold.TSNE(perplexity=10, n_components=2, init='pca', n_iter=2500, random_state=23)
    embeddings : array[len(corpus.get_terms()), X]
        Word embeddings. If None (default), and no value is passed into projection, use word2vec_model
    projection : DataFrame('x': array[len(corpus.get_terms())], 'y': array[len(corpus.get_terms())])
        If None (default), produced using projection_model
    term_acceptance_re : SRE_Pattern
        Regular expression to identify valid terms
    x_dim : int, default 0
        Dimension of transformation matrix for x-axis
    y_dim : int, default 1
        Dimension of transformation matrix for y-axis
    scaler : function, default scattertext.Scalers.scale
        Function used to scale projection
    show_axes : bool, default False
        Show the ticked axes on the plot. If false, show inner axes as a crosshair.
    show_dimensions_on_tooltip : bool, False by default
        If true, shows dimension positions on tooltip, along with term name. Otherwise, default to the
        get_tooltip_content parameter.
    kwargs : dict
        Remaining produce_scattertext_explorer keywords get_tooltip_content

    Returns
    -------
    str
        HTML of visualization
    """
    if projection is None:
        # No precomputed projection: derive one from the embeddings (either
        # user-supplied or built from the word2vec model), keeping only
        # terms matching term_acceptance_re.
        embeddings_resolover = EmbeddingsResolver(corpus)
        if embeddings is not None:
            embeddings_resolover.set_embeddings(embeddings)
        else:
            embeddings_resolover.set_embeddings_model(word2vec_model, term_acceptance_re)
        corpus, projection = embeddings_resolover.project_embeddings(projection_model, x_dim=x_dim, y_dim=y_dim)
    else:
        # Validate a user-supplied projection: a DataFrame with x/y columns
        # indexed by exactly the corpus terms (or metadata features).
        assert type(projection) == pd.DataFrame
        assert 'x' in projection and 'y' in projection
        if kwargs.get('use_non_text_features', False):
            assert set(projection.index) == set(corpus.get_metadata())
        else:
            assert set(projection.index) == set(corpus.get_terms())
    if show_dimensions_on_tooltip:
        # Tooltip shows the raw (unscaled) projection coordinates per term.
        kwargs['get_tooltip_content'] = '''(function(d) {
return d.term + "<br/>Dim %s: " + Math.round(d.ox*1000)/1000 + "<br/>Dim %s: " + Math.round(d.oy*1000)/1000
})''' % (x_dim, y_dim)
    html = produce_scattertext_explorer(corpus=corpus, category=category, minimum_term_frequency=0, sort_by_dist=False, original_x=projection['x'], original_y=projection['y'], x_coords=scaler(projection['x']), y_coords=scaler(projection['y']), y_label='', x_label='', show_axes=show_axes, horizontal_line_y_position=0, vertical_line_x_position=0, **kwargs)
    return html
|
def upix_to_pix(upix):
    """Convert a unique ("uniq") pixel number into (pix, nside).

    A unique pixel encodes resolution and position together as
    ``upix = pix + 4 * nside**2``, with ``0 <= pix < 12 * nside**2`` and
    ``nside`` a power of two, hence ``4*nside**2 <= upix < 16*nside**2``.

    Args:
        upix: scalar or numpy array of unique pixel numbers.

    Returns:
        tuple: ``(pix, nside)`` recovered from ``upix``.
    """
    # nside = 2**floor(log2(upix/4) / 2). BUG FIX: the previous version
    # floored log2(upix/4) *before* halving, which produced a
    # non-power-of-two nside (e.g. 5 instead of 4, after the int
    # truncation of 2**2.5) whenever pix >= 4 * nside**2.
    nside = np.power(2, np.floor(np.log2(upix / 4) / 2)).astype(int)
    pix = upix - 4 * np.power(nside, 2)
    return pix, nside
|
def restore(self, res_id, backup_snap=None):
    """Restores a snapshot.

    :param res_id: the LUN number of primary LUN or snapshot mount point to
        be restored.
    :param backup_snap: the name of a backup snapshot to be created before
        restoring.
    """
    snap_name = self._get_name()
    out = self._cli.restore_snap(snap_name, res_id, backup_snap)
    # Translate a CLI failure into a VNXSnapError.
    ex.raise_if_err(out, 'failed to restore snap {}.'.format(snap_name), default=ex.VNXSnapError)
|
def delete_many(cls, documents):
    """Delete multiple documents"""
    # Ensure all documents have been converted to frames
    frames = cls._ensure_frames(documents)
    assert len([f for f in frames if '_id' in f._document]) == len(documents), "Can't delete documents without `_id`s"
    # Notify listeners before the delete takes place
    signal('delete').send(cls, frames=frames)
    # Collect the ids and issue a single bulk delete
    ids = [f._id for f in frames]
    cls.get_collection().delete_many({'_id': {'$in': ids}})
    # Notify listeners that the delete completed
    signal('deleted').send(cls, frames=frames)
|
def settings_view_for_block(block_wrapper, settings_view_factory):
    """Returns the settings view for an arbitrary block.

    Args:
        block_wrapper (BlockWrapper): The block for which a settings
            view is to be returned
        settings_view_factory (SettingsViewFactory): The settings
            view factory used to create the SettingsView object

    Returns:
        SettingsView object associated with the block
    """
    if block_wrapper is None:
        # No block available: build a view against a null state root.
        state_root_hash = None
    else:
        state_root_hash = block_wrapper.state_root_hash
    return settings_view_factory.create_settings_view(state_root_hash)
|
def create_v4_signature(self, request_params):
    '''Create URI and signature headers based on AWS V4 signing process.

    Refer to https://docs.aws.amazon.com/AlexaWebInfoService/latest/ApiReferenceArticle.html for request params.

    :param request_params: dictionary of request parameters
    :return: URL and header to be passed to requests.get
    '''
    method = 'GET'
    service = 'awis'
    host = 'awis.us-west-1.amazonaws.com'
    region = 'us-west-1'
    endpoint = 'https://awis.amazonaws.com/api'
    # The canonical query string must list parameters in sorted-key order.
    request_parameters = urlencode([(key, request_params[key]) for key in sorted(request_params.keys())])
    # Key derivation functions. See:
    # http://docs.aws.amazon.com/general/latest/gr/signature-v4-examples.html#signature-v4-examples-python
    def sign(key, msg):
        return hmac.new(key, msg.encode('utf-8'), hashlib.sha256).digest()
    def getSignatureKey(key, dateStamp, regionName, serviceName):
        # Chain HMACs over date, region, service and the terminal string.
        kDate = sign(('AWS4' + key).encode('utf-8'), dateStamp)
        kRegion = sign(kDate, regionName)
        kService = sign(kRegion, serviceName)
        kSigning = sign(kService, 'aws4_request')
        return kSigning
    # Create a date for headers and the credential string
    t = datetime.datetime.utcnow()
    amzdate = t.strftime('%Y%m%dT%H%M%SZ')
    datestamp = t.strftime('%Y%m%d')
    # Date w/o time, used in credential scope
    # Create canonical request
    canonical_uri = '/api'
    canonical_querystring = request_parameters
    canonical_headers = 'host:' + host + '\n' + 'x-amz-date:' + amzdate + '\n'
    signed_headers = 'host;x-amz-date'
    # Hash of an empty body, since this is a GET request.
    payload_hash = hashlib.sha256(''.encode('utf8')).hexdigest()
    canonical_request = method + '\n' + canonical_uri + '\n' + canonical_querystring + '\n' + canonical_headers + '\n' + signed_headers + '\n' + payload_hash
    # Create string to sign
    algorithm = 'AWS4-HMAC-SHA256'
    credential_scope = datestamp + '/' + region + '/' + service + '/' + 'aws4_request'
    string_to_sign = algorithm + '\n' + amzdate + '\n' + credential_scope + '\n' + hashlib.sha256(canonical_request.encode('utf8')).hexdigest()
    # Calculate signature
    signing_key = getSignatureKey(self.secret_access_key, datestamp, region, service)
    # Sign the string_to_sign using the signing_key
    signature = hmac.new(signing_key, (string_to_sign).encode('utf-8'), hashlib.sha256).hexdigest()
    # Add signing information to the request
    authorization_header = algorithm + ' ' + 'Credential=' + self.access_id + '/' + credential_scope + ', ' + 'SignedHeaders=' + signed_headers + ', ' + 'Signature=' + signature
    headers = {'X-Amz-Date': amzdate, 'Authorization': authorization_header, 'Content-Type': 'application/xml', 'Accept': 'application/xml'}
    # Create request url
    request_url = endpoint + '?' + canonical_querystring
    return request_url, headers
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.