signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def get_url_authcode_flow_user(client_id, redirect_uri, display="page", scope=None, state=None):
    """Build the Authorization Code Flow URL for a user access token.

    Use the Authorization Code Flow to run VK API methods from the server
    side of an application. An access token received this way is not bound
    to an IP address, but the set of permissions that can be granted is
    limited for security reasons.

    Args:
        client_id (int): Application id.
        redirect_uri (str): Address to redirect user after authorization.
        display (str): Sets authorization page appearance.
            One of {'page', 'popup', 'mobile'}. Defaults to 'page'.
        scope (:obj:`str`, optional): Permissions bit mask, to check on
            authorization and request if necessary.
            More scope: https://vk.com/dev/permissions
        state (:obj:`str`, optional): An arbitrary string that will be
            returned together with the authorization result.

    Returns:
        str: The authorization URL (query values are percent-encoded by
        ``urlencode``).

    .. _Docs:
        https://vk.com/dev/authcode_flow_user
    """
    url = "https://oauth.vk.com/authorize"
    params = {
        "client_id": client_id,
        "redirect_uri": redirect_uri,
        "display": display,
        "response_type": "code",
    }
    # Optional parameters are only added when provided, keeping the query
    # string minimal.
    if scope:
        params['scope'] = scope
    if state:
        params['state'] = state
    return "{url}?{params}".format(url=url, params=urlencode(params))
|
def convert_list_to_tuple(lst):
    """Transform a list into a tuple.

    Args:
        lst: The list to be converted.

    Returns:
        A tuple with the same elements as the input list.

    Example:
        convert_list_to_tuple([58, 44, 56]) -> (58, 44, 56)
    """
    return (*lst,)
|
def isLocked(self):
    '''Checks if the device screen is locked.

    @return True if the device screen is locked
    '''
    self.__checkTransport()
    window_policy = self.shell('dumpsys window policy')
    # Newer dumps report mShowingLockscreen; fall back to
    # mDreamingLockscreen when the first key is absent.
    for pattern in ('mShowingLockscreen=(true|false)',
                    'mDreamingLockscreen=(true|false)'):
        match = re.search(pattern, window_policy)
        if match:
            return match.group(1) == 'true'
    raise RuntimeError("Couldn't determine screen lock state")
|
def forward_request(self, method, path=None, json=None, params=None, headers=None):
    """Makes HTTP requests to the configured nodes.

    Retries connection errors (e.g. DNS failures, refused connection,
    etc). A user may choose to retry other errors by catching the
    corresponding exceptions and retrying ``forward_request``.

    Exponential backoff is implemented individually for each node.
    Backoff delays are expressed as timestamps stored on the object and
    they are not reset in between multiple function calls.

    Times out when ``self.timeout`` is expired, if not ``None``.

    Args:
        method (str): HTTP method name (e.g.: ``'GET'``).
        path (str): Path to be appended to the base url of a node. E.g.:
            ``'/transactions'``.
        json (dict): Payload to be sent with the HTTP request.
        params (dict): Dictionary of URL (query) parameters.
        headers (dict): Optional headers to pass to the request.

    Returns:
        dict: Result of :meth:`requests.models.Response.json`

    Raises:
        TimeoutError: When the overall time budget is exhausted; carries
            the list of connection errors collected along the way.
    """
    error_trace = []
    timeout = self.timeout
    # Without an overall timeout, cap per-node backoff at a constant;
    # otherwise cap it at half the total budget.
    backoff_cap = NO_TIMEOUT_BACKOFF_CAP if timeout is None else timeout / 2
    while timeout is None or timeout > 0:
        connection = self.connection_pool.get_connection()
        start = time()
        try:
            response = connection.request(method=method, path=path, params=params, json=json, headers=headers, timeout=timeout, backoff_cap=backoff_cap, )
        except ConnectionError as err:
            # Record the failure and retry with another connection from
            # the pool.
            error_trace.append(err)
            continue
        else:
            return response.data
        finally:
            # Deduct the time spent on this attempt from the remaining
            # budget; runs on both the success and the retry path.
            elapsed = time() - start
            if timeout is not None:
                timeout -= elapsed
    raise TimeoutError(error_trace)
|
def from_command_line():
    """Run CGI var to gVCF conversion from the command line.

    Reads a Complete Genomics var file (from ``--input`` or stdin) and
    writes gVCF either to ``--output`` or to stdout.
    """
    # Parse options
    parser = argparse.ArgumentParser(
        description='Convert Complete Genomics var files to gVCF format.')
    parser.add_argument(
        '-d', '--refseqdir', metavar='REFSEQDIR', required=True,
        dest='refseqdir',
        help='Directory twobit reference genomes files are stored.')
    parser.add_argument(
        '-i', '--input', metavar='INPUTVARFILE', dest='cgivarfile',
        # Fixed broken help text: the two adjacent string literals were
        # missing the word "may" and produced a double space.
        help='Path to Complete Genomics var file to convert. If omitted, '
             'data may also be piped in as standard input.')
    parser.add_argument(
        '-o', '--output', metavar='OUTPUTVCFFILE', dest='vcfoutfile',
        help='Path to where to save output VCF file.')
    parser.add_argument(
        '-D', '--download', action='store_true', dest='downloadrefseq',
        help='Download the 2bit file from UCSC to REFSEQDIR, if needed.')
    parser.add_argument(
        '-v', '--var-only', action='store_true', dest='varonly',
        help='Only report variant lines (i.e. VCF, but not gVCF)')
    args = parser.parse_args()
    # Get local twobit file from its directory. Download and store if needed.
    twobit_path, twobit_name = get_reference_genome_file(args.refseqdir, build='b37')
    # Handle input: isatty() is False when data is piped in, in which case
    # stdin takes precedence over --input.
    if sys.stdin.isatty():
        var_input = args.cgivarfile
    else:
        var_input = sys.stdin
    # Handle output: write to a file when requested, otherwise stream the
    # converted lines to stdout.
    if args.vcfoutfile:
        convert_to_file(var_input, args.vcfoutfile, twobit_path, twobit_name, args.varonly)
    else:
        for line in convert(cgi_input=var_input, twobit_ref=twobit_path,
                            twobit_name=twobit_name, var_only=args.varonly):
            print(line)
|
def database_caller_creator(self, number_of_rows, username, password, host, port, name=None, custom=None):
    '''Creates a postgresql db and returns the related connection object,
    which will be later used to spawn the cursor.

    Args:
        number_of_rows: Row count forwarded to ``custom_db_creator`` when
            ``custom`` is given.
        username, password, host, port: PostgreSQL server credentials.
        name: Optional database name; a random ``postgresql_*`` name is
            generated when omitted.
        custom: When truthy, custom rows are created and the process exits
            (see NOTE below).

    Returns:
        (cursor, conn) for the newly created database.
    '''
    cursor = None
    conn = None
    if name:
        dbname = name
    else:
        # Random lower-cased name, e.g. 'postgresql_ab12cd'.
        dbname = 'postgresql_' + str_generator(self).lower()
    try:  # createdb
        conn = psycopg2.connect(user=username, password=password, host=host, port=port)
        # CREATE DATABASE cannot run inside a transaction block.
        conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        cur = conn.cursor()
        # NOTE(review): dbname is interpolated straight into the SQL
        # statement; this assumes generated/trusted names only — confirm
        # callers never pass untrusted input.
        cur.execute('CREATE DATABASE %s;' % dbname)
        cur.close()
        conn.close()
        # reconnect to the new database
        conn = psycopg2.connect(user=username, password=password, host=host, port=port, database=dbname)
        cursor = conn.cursor()
        logger.warning('Database created and opened succesfully: %s' % dbname, extra=d)
    except Exception as err:
        # Log and re-raise; partial connections are left to the caller.
        logger.error(err, extra=d)
        raise
    if custom:
        self.custom_db_creator(number_of_rows, cursor, conn, custom)
        cursor.close()
        conn.close()
        # NOTE(review): exits the whole process after custom creation, so
        # the return below is unreachable on this path — confirm intended.
        sys.exit(0)
    return cursor, conn
|
def request_absolute_cursor_position(self):
    """Get current cursor position.

    For vt100: issue a CPR request (the answer arrives later).
    For win32: use an API call (the answer comes immediately).
    """
    # Only do this request when the cursor is at the top row (after a
    # clear or reset); `report_absolute_cursor_row` relies on that.
    assert self._cursor_pos.y == 0
    if is_windows():
        # Win32 has an API call for the number of rows below the cursor.
        self._min_available_height = self.output.get_rows_below_cursor_position()
    elif self.use_alternate_screen:
        # The alternate screen always spans the full terminal height.
        self._min_available_height = self.output.get_size().rows
    else:
        # Ask the terminal for a cursor position report (CPR).
        self.waiting_for_cpr = True
        self.output.ask_for_cpr()
|
def get_header(changelog):
    """Return line number of the first version-like header.

    We check for patterns like '2.10 (unreleased)', so with either
    'unreleased' or a date between parenthesis, as that's the format we're
    using. As an alternative, we support a format used by some zope/plone
    paster templates: '2.10 - unreleased' or '2.10 ~ unreleased'. Note
    that new headers are in our preferred form ('version (date)').
    Returns None when no header-like line is found.
    """
    header_patterns = (
        re.compile(r"""
            (?P<version>.+) # Version string
            \( # Opening (
            (?P<date>.+) # Date
            \) # Closing )
            \W*$ # Possible whitespace at end of line.
            """, re.VERBOSE),
        re.compile(r"""
            ^ # Start of line
            (?P<version>.+) # Version string
            \ [-~]\ # space dash/twiggle space
            (?P<date>.+) # Date
            \W*$ # Possible whitespace at end of line.
            """, re.VERBOSE),
    )
    with changelog.open('rt') as handle:
        for line_number, line in enumerate(handle):
            if any(pattern.search(line) for pattern in header_patterns):
                return line_number
|
def _credit_card_type(self, card_type=None):
    """Returns a random credit card type instance."""
    # No type requested: pick one at random from the known types.
    if card_type is None:
        return self.credit_card_types[self.random_element(self.credit_card_types.keys())]
    # Already a CreditCard instance: pass it through unchanged.
    if isinstance(card_type, CreditCard):
        return card_type
    # Otherwise treat the argument as a key into the known types.
    return self.credit_card_types[card_type]
|
def _learn_init_params(self, n_calib_beats=8):
    """Find a number of consecutive beats and use them to initialize:
    - recent qrs amplitude
    - recent noise amplitude
    - recent rr interval
    - qrs detection threshold

    The learning works as follows:
    - Find all local maxima (largest sample within `qrs_radius` samples)
      of the filtered signal.
    - Inspect the local maxima until `n_calib_beats` beats are found:
      - Calculate the cross-correlation between a ricker wavelet of
        length `qrs_width`, and the filtered signal segment centered
        around the local maximum.
      - If the cross-correlation exceeds 0.6, classify it as a beat.
    - Use the beats to initialize the previously described parameters.
    - If the system fails to find enough beats, the default parameters
      will be used instead. See the docstring of
      `XQRS._set_default_init_params` for details.

    Parameters
    ----------
    n_calib_beats : int, optional
        Number of calibration beats to detect for learning

    """
    if self.verbose:
        print('Learning initial signal parameters...')
    # Start far enough in the past that the very first peak can satisfy
    # the minimum rr distance check below.
    last_qrs_ind = -self.rr_max
    qrs_inds = []
    qrs_amps = []
    noise_amps = []
    ricker_wavelet = signal.ricker(self.qrs_radius * 2, 4).reshape(-1, 1)
    # Find the local peaks of the signal.
    peak_inds_f = find_local_peaks(self.sig_f, self.qrs_radius)
    # Peak numbers at least qrs_width away from signal boundaries
    peak_nums_r = np.where(peak_inds_f > self.qrs_width)[0]
    peak_nums_l = np.where(peak_inds_f <= self.sig_len - self.qrs_width)[0]
    # Skip if no peaks in range
    if (not peak_inds_f.size or not peak_nums_r.size or not peak_nums_l.size):
        if self.verbose:
            print('Failed to find %d beats during learning.' % n_calib_beats)
        self._set_default_init_params()
        return
    # Go through the peaks and find qrs peaks and noise peaks.
    # only inspect peaks with at least qrs_radius around either side
    for peak_num in range(peak_nums_r[0], peak_nums_l[-1]):
        i = peak_inds_f[peak_num]
        # Calculate cross-correlation between the filtered signal
        # segment and a ricker wavelet
        # Question: should the signal be squared? Case for inverse qrs
        # complexes
        sig_segment = normalize((self.sig_f[i - self.qrs_radius:i + self.qrs_radius]).reshape(-1, 1), axis=0)
        xcorr = np.correlate(sig_segment[:, 0], ricker_wavelet[:, 0])
        # Classify as qrs if xcorr is large enough
        if xcorr > 0.6 and i - last_qrs_ind > self.rr_min:
            last_qrs_ind = i
            qrs_inds.append(i)
            qrs_amps.append(self.sig_i[i])
        else:
            noise_amps.append(self.sig_i[i])
        if len(qrs_inds) == n_calib_beats:
            break
    # Found enough calibration beats to initialize parameters
    if len(qrs_inds) == n_calib_beats:
        if self.verbose:
            print('Found %d beats during learning.' % n_calib_beats + ' Initializing using learned parameters')
        # QRS amplitude is most important.
        qrs_amp = np.mean(qrs_amps)
        # Set noise amplitude if found
        if noise_amps:
            noise_amp = np.mean(noise_amps)
        else:
            # Set default of 1/10 of qrs amplitude
            noise_amp = qrs_amp / 10
        # Get rr intervals of consecutive beats, if any.
        rr_intervals = np.diff(qrs_inds)
        rr_intervals = rr_intervals[rr_intervals < self.rr_max]
        if rr_intervals.any():
            rr_recent = np.mean(rr_intervals)
        else:
            rr_recent = self.rr_init
        # If an early qrs was detected, set last_qrs_ind so that it can be
        # picked up.
        last_qrs_ind = min(0, qrs_inds[0] - self.rr_min - 1)
        self._set_init_params(qrs_amp_recent=qrs_amp, noise_amp_recent=noise_amp, rr_recent=rr_recent, last_qrs_ind=last_qrs_ind)
        self.learned_init_params = True
    # Failed to find enough calibration beats. Use default values.
    else:
        if self.verbose:
            print('Failed to find %d beats during learning.' % n_calib_beats)
        self._set_default_init_params()
|
def make_label(loss, key):
    '''Create a legend label for an optimization run.

    The label always contains the loss, the algorithm name and the
    momentum; learning rate and RMS parameters are appended only for the
    algorithms that use them.
    '''
    algo, rate, mu, half, reg = key
    pieces = ['{:.3f}'.format(loss), '{}'.format(algo), 'm={:.3f}'.format(mu)]
    if algo in ('SGD', 'NAG', 'RMSProp', 'Adam', 'ESGD'):
        pieces.append('lr={:.2e}'.format(rate))
    if algo in ('RMSProp', 'ADADELTA', 'ESGD'):
        pieces.append('rmsh={}'.format(half))
        pieces.append('rmsr={:.2e}'.format(reg))
    return ' '.join(pieces)
|
def KL(self, other):
    """Compute the KL divergence to another NormalPosterior Object.

    This only holds if the two NormalPosterior objects have the same
    shape, as we do computational tricks for the multivariate normal KL
    divergence.
    """
    # Accumulate the closed-form terms in the same order as the original
    # single-expression formula.
    total = np.sum(self.variance / other.variance)
    total = total + ((other.mean - self.mean) ** 2 / other.variance).sum()
    total = total - self.num_data * self.input_dim
    total = total + np.sum(np.log(other.variance))
    total = total - np.sum(np.log(self.variance))
    return .5 * total
|
def load(self, filething, **kwargs):
    """Load stream and tag information from a file.

    Args:
        filething: A file wrapper exposing ``fileobj`` (the underlying
            binary stream).
        **kwargs: Forwarded to the ``_IFFID3`` tag reader.
    """
    fileobj = filething.fileobj
    try:
        self.tags = _IFFID3(fileobj, **kwargs)
    except ID3NoHeaderError:
        # No ID3 chunk present: the file simply carries no tags.
        self.tags = None
    except ID3Error as e:
        # Wrap parser failures in this module's error type.
        raise error(e)
    else:
        self.tags.filename = self.filename
    # Rewind before reading stream info, since the tag reader advanced
    # the file position.
    fileobj.seek(0, 0)
    self.info = AIFFInfo(fileobj)
|
def get_img(self, url, headers=None, cookies=None, timeout=60, verify=False, proxies=None, allow_redirects=True, params=None):
    """GET the binary content of an image.

    :param url: URL to request
    :param headers: request headers
    :param cookies: request cookies
    :param timeout: timeout in seconds
    :param verify: whether to verify SSL certificates
    :param proxies: proxy configuration
    :param allow_redirects: whether redirects are followed
    :param params: query (request) parameters
    :return: binary image data
    """
    # Prefer the shared session when one exists; plain requests otherwise.
    requester = self.session if self.session else requests
    response = requester.get(url, headers=odict(headers), cookies=cookies,
                             timeout=timeout, verify=verify, proxies=proxies,
                             allow_redirects=allow_redirects, params=params)
    response.raise_for_status()
    return response.content
|
def get_throttled_by_consumed_read_percent(table_name, lookback_window_start=15, lookback_period=5):
    """Returns the number of throttled read events in percent of consumption.

    :type table_name: str
    :param table_name: Name of the DynamoDB table
    :type lookback_window_start: int
    :param lookback_window_start: Relative start time for the CloudWatch metric
    :type lookback_period: int
    :param lookback_period: Number of minutes to look at
    :returns: float -- Percent of throttled read events by consumption
    """
    try:
        metrics1 = __get_aws_metric(table_name, lookback_window_start, lookback_period, 'ConsumedReadCapacityUnits')
        metrics2 = __get_aws_metric(table_name, lookback_window_start, lookback_period, 'ReadThrottleEvents')
    except BotoServerError:
        # Propagate AWS API errors to the caller unchanged.
        raise
    if metrics1 and metrics2:
        # Normalize both sums to per-second rates over the lookback window
        # before taking the ratio (the window length cancels out).
        lookback_seconds = lookback_period * 60
        throttled_by_consumed_read_percent = (((float(metrics2[0]['Sum']) / float(lookback_seconds)) / (float(metrics1[0]['Sum']) / float(lookback_seconds))) * 100)
    else:
        # Missing datapoints are treated as "no throttling".
        throttled_by_consumed_read_percent = 0
    logger.info('{0} - Throttled read percent by consumption: {1:.2f}%'.format(table_name, throttled_by_consumed_read_percent))
    return throttled_by_consumed_read_percent
|
def type_last(self, obj: JsonObj) -> JsonObj:
    """Move the type identifiers to the end of the object for print purposes."""
    def _tl_list(v: List) -> List:
        # Recurse into nested objects/lists. Note: None entries are
        # dropped from lists — presumably intentional for print output;
        # confirm against callers.
        return [self.type_last(e) if isinstance(e, JsonObj) else _tl_list(e) if isinstance(e, list) else e for e in v if e is not None]
    rval = JsonObj()
    # Copy every non-None value except 'type' and '_context' in the
    # original key order ('_context' is dropped entirely).
    for k in as_dict(obj).keys():
        v = obj[k]
        if v is not None and k not in ('type', '_context'):
            rval[k] = _tl_list(v) if isinstance(v, list) else self.type_last(v) if isinstance(v, JsonObj) else v
    # Re-append 'type' last so it prints at the end.
    if 'type' in obj and obj.type:
        rval.type = obj.type
    return rval
|
def make_middleware(cls, app, **options):
    """Creates the application WSGI middleware in charge of serving local files.

    A Depot middleware is required if your application wants to serve files
    from storages that don't directly provide an HTTP interface, like
    :class:`depot.io.local.LocalFileStorage` and
    :class:`depot.io.gridfs.GridFSStorage`.
    """
    from depot.middleware import DepotMiddleware
    middleware = DepotMiddleware(app, **options)
    # Register the middleware so the Depot machinery can find it later.
    cls.set_middleware(middleware)
    return middleware
|
def _deref(self) -> List["InstanceNode"]:
    """XPath: return the list of nodes that the receiver refers to."""
    # Internal nodes have nothing to dereference.
    if self.is_internal():
        return []
    return self.schema_node.type._deref(self)
|
def approximating_model_reg(self, beta, T, Z, R, Q, h_approx, data, X, state_no):
    """Creates approximating Gaussian model for Poisson measurement density
    - dynamic regression model.

    Parameters
    ----------
    beta : np.array
        Contains untransformed starting values for latent variables
        (unused in this body — kept for interface parity; TODO confirm)
    T, Z, R, Q : np.array
        State space matrices used in KFS algorithm
    h_approx : unused in this body — kept for interface parity with the
        other approximating-model methods (TODO confirm)
    data : np.array
        The univariate time series data
    X : np.array
        The regressors
    state_no : int
        Number of states

    Returns
    -------
    H : np.array
        Approximating measurement variance matrix
    mu : np.array
        Approximating measurement constants
    """
    H = np.ones(data.shape[0])
    mu = np.zeros(data.shape[0])
    alpha = np.zeros([state_no, data.shape[0]])
    tol = 100.0
    it = 0
    # Iterate the linear Gaussian approximation until the smoothed signal
    # stabilizes (tolerance 1e-7) or 5 iterations have run.
    while tol > 10 ** -7 and it < 5:
        old_alpha = np.sum(X * alpha.T, axis=1)
        # Kalman filter/smoother pass on the current approximating model.
        alpha, V = nld_univariate_KFS(data, Z, H, T, Q, R, mu)
        # Update the approximating variance and constants from the new
        # smoothed signal.
        H = np.exp(-np.sum(X * alpha.T, axis=1))
        mu = data - np.sum(X * alpha.T, axis=1) - np.exp(-np.sum(X * alpha.T, axis=1)) * (data - np.exp(np.sum(X * alpha.T, axis=1)))
        tol = np.mean(np.abs(np.sum(X * alpha.T, axis=1) - old_alpha))
        it += 1
    return H, mu
|
def _prepare_method ( self , pandas_func , ** kwargs ) :
"""Prepares methods given various metadata .
Args :
pandas _ func : The function to prepare .
Returns
Helper function which handles potential transpose ."""
|
if self . _is_transposed :
def helper ( df , internal_indices = [ ] ) :
if len ( internal_indices ) > 0 :
return pandas_func ( df . T , internal_indices = internal_indices , ** kwargs )
return pandas_func ( df . T , ** kwargs )
else :
def helper ( df , internal_indices = [ ] ) :
if len ( internal_indices ) > 0 :
return pandas_func ( df , internal_indices = internal_indices , ** kwargs )
return pandas_func ( df , ** kwargs )
return helper
|
def _play(self):
    """Send play command to receiver command via HTTP post."""
    # The play (NETAUDIO) command only exists for sources that support
    # network audio; do nothing for all other inputs.
    if self._input_func not in self._netaudio_func_list:
        return
    body = {"cmd0": "PutNetAudioCommand/CurEnter",
            "cmd1": "aspMainZone_WebUpdateStatus/",
            "ZoneName": "MAIN ZONE"}
    try:
        success = self.send_post_command(self._urls.command_netaudio_post, body)
    except requests.exceptions.RequestException:
        _LOGGER.error("Connection error: play command not sent.")
        return False
    if success:
        self._state = STATE_PLAYING
        return True
    return False
|
def to_ipv6(self, ip_type='6-to-4'):
    """Convert (an IPv4) IP address to an IPv6 address.

    >>> ip = IP('192.0.2.42')
    >>> print(ip.to_ipv6())
    2002:c000:022a:0000:0000:0000:0000:0000
    >>> print(ip.to_ipv6('compat'))
    0000:0000:0000:0000:0000:0000:c000:022a
    >>> print(ip.to_ipv6('mapped'))
    0000:0000:0000:0000:0000:ffff:c000:022a
    """
    assert ip_type in ['6-to-4', 'compat', 'mapped'], 'Conversion ip_type not supported'
    # Anything that is not IPv4 is returned unchanged.
    if self.v != 4:
        return self
    address = int(self)
    if ip_type == '6-to-4':
        return IP(BASE_6TO4 | address << 80, version=6)
    if ip_type == 'compat':
        return IP(address, version=6)
    # Remaining case after the assert: 'mapped'.
    return IP(0xffff << 32 | address, version=6)
|
def get_file(self, hash_list):
    """Return the path of the file, verifying that the hash is actually present."""
    assert len(hash_list) == 1
    self._check_hashes(hash_list)
    (single_hash,) = hash_list
    return self.object_path(single_hash)
|
def gen(name, data):
    """Generate dataentry *name* from *data*."""
    attributes = '\n'.join('%s:%s' % pair for pair in data.items())
    return '---- dataentry %s ----\n%s\n----' % (name, attributes)
|
def checked_emit(self, value: Any) -> asyncio.Future:
    """Casting and checking in one call."""
    if not isinstance(self._subject, Subscriber):
        raise TypeError('Topic %r has to be a subscriber' % self._path)
    # Cast first, then validate the cast value before emitting it.
    cast_value = self.cast(value)
    self.check(cast_value)
    return self._subject.emit(cast_value, who=self)
|
def make_pymol(pdb_file, cutoff=7.0, min_kihs=2, outfile=None):
    """Pymol script for viewing classic coiled-coil Socket output.

    Notes
    -----
    For examples of these views, browse the CC+ database here:
    http://coiledcoils.chm.bris.ac.uk/ccplus/search/.

    Parameters
    ----------
    pdb_file : str
        Path to a pdb_file.
    cutoff : float
        Socket cutoff in Angstroms.
    min_kihs : int
        Minimum number of KnobIntoHole interactions between pairs of
        helices needed to define a coiled coil.
    outfile : None or str
        Path to a output file to save the pml script.

    Returns
    -------
    script_string : str
        Pymol commands for classic coiled-coil view.
    """
    a = convert_pdb_to_ampal(pdb=pdb_file, path=True)
    kg = KnobGroup.from_helices(a, cutoff=cutoff)
    g = kg.filter_graph(kg.graph, cutoff=cutoff, min_kihs=min_kihs)
    ccs = sorted_connected_components(g)
    # Opens pymol script, initial set up of screen
    script_lines = ['load {0}'.format(pdb_file)]
    script_lines.append("hide all")
    script_lines.append("bg_color white")
    script_lines.append("set antialias, 1")
    script_lines.append("set cartoon_dumbbell_length, 0.35")
    script_lines.append("set_color lightgrey, [240,240,240]")
    script_lines.append("set depth_cue, 0")
    script_lines.append("color lightgrey, all")
    script_lines.append("cartoon dumbbell")
    script_lines.append("show cartoon")
    # One pass per connected component of the filtered graph — each
    # component corresponds to one coiled coil.
    for cc_number, cc in enumerate(ccs):
        helices = [x for x in g.nodes() if x.number in cc.nodes()]
        # helices = cc.nodes()
        cc_region = kg.get_coiledcoil_region(cc_number=cc_number, cutoff=cutoff, min_kihs=min_kihs)
        tag_residues_with_heptad_register(cc_region)
        assigned_regions = kg.get_assigned_regions(include_alt_states=False, complementary_only=False, helices=helices)
        helix_starts = [int(h[0].id) for h in helices]
        helix_ends = [int(h[-1].id) for h in helices]
        chains = [h.ampal_parent.id for h in helices]
        assigned_starts = [assigned_regions[h.number][0] for h in helices]
        assigned_ends = [assigned_regions[h.number][1] for h in helices]
        # Pymol selections covering the assigned (register-tagged) regions.
        assigned_selections = ['{0}/{1}-{2}/'.format(chain, assigned_start, assigned_end) for chain, assigned_start, assigned_end in zip(chains, assigned_starts, assigned_ends)]
        script_lines.append("select cc{0}, {1}".format(cc_number, ' '.join(assigned_selections)))
        script_lines.append("cartoon automatic, cc{0}".format(cc_number))
        for h_number, h in enumerate(helices):
            chain = chains[h_number]
            helix_start = helix_starts[h_number]
            helix_end = helix_ends[h_number]
            assigned_start = assigned_starts[h_number]
            assigned_end = assigned_ends[h_number]
            # Named selections for the entire helix (eh) and for the
            # assigned part of the helix (ah).
            selection = '{0}/{1}-{2}/'.format(chain, helix_start, helix_end)
            script_lines.append("select cc{0}eh{1}, {2}".format(cc_number, h_number, selection))
            selection = '{0}/{1}-{2}/'.format(chain, assigned_start, assigned_end)
            script_lines.append("select cc{0}ah{1}, {2}".format(cc_number, h_number, selection))
            # Knob-into-hole interactions where this helix carries the knob.
            kihs = [x for x in kg if x.knob_helix == h]
            for x in kihs:
                knob_selection_name = 'cc{0}ah{1}k{2}'.format(cc_number, h_number, x.knob_residue.id)
                hole_selection_name = knob_selection_name + 'hole'
                knob_selection = '{0}/{1}/'.format(chain, x.knob_residue.id)
                script_lines.append('select {0}, {1}'.format(knob_selection_name, knob_selection))
                hole_selection = ' '.join(['{0}/{1}/'.format(x.hole_chain, y.id) for y in x.hole_residues])
                script_lines.append('select {0}, {1}'.format(hole_selection_name, hole_selection))
                script_lines.append('show sticks, {0}'.format(knob_selection_name))
                script_lines.append('show sticks, {0}'.format(hole_selection_name))
            # Colour residues by their tagged heptad register position.
            for r in h.get_monomers():
                if 'register' in r.tags:
                    color = _heptad_colours[r.tags['register']]
                    script_lines.append('color {0}, {1}/{2}/'.format(color, chain, r.id))
    script_lines.append('deselect')
    script_lines.append('orient')
    script_lines.append('rotate z, 90')
    script_lines.append('zoom complete=1')
    script_string = '\n'.join(script_lines)
    if outfile is not None:
        # Only write when the target name ends in 'pml'.
        if isinstance(outfile, str) and outfile[-3:] == 'pml':
            with open(outfile, 'w') as foo:
                foo.write(script_string)
    return script_string
|
def _filter_version_specific_options(self, tmos_ver, **kwargs):
    '''Filter version-specific optional parameters.

    Some optional parameters only exist in v12.1.0 and greater; filter
    these out for earlier versions to allow backward compatibility.
    '''
    # Nothing to strip on 12.1.0 or newer.
    if LooseVersion(tmos_ver) >= LooseVersion('12.1.0'):
        return
    for key, parameters in self._meta_data['optional_parameters'].items():
        for rule in kwargs.get(key, []):
            for parameter in parameters:
                # Remove the unsupported parameter in place and note it.
                if rule.pop(parameter, None) is not None:
                    logger.info("Policy parameter %s:%s is invalid for v%s", key, parameter, tmos_ver)
|
def _lock_renewer ( lockref , interval , stop ) :
"""Renew the lock key in redis every ` interval ` seconds for as long
as ` self . _ lock _ renewal _ thread . should _ exit ` is False ."""
|
log = getLogger ( "%s.lock_refresher" % __name__ )
while not stop . wait ( timeout = interval ) :
log . debug ( "Refreshing lock" )
lock = lockref ( )
if lock is None :
log . debug ( "The lock no longer exists, " "stopping lock refreshing" )
break
lock . extend ( expire = lock . _expire )
del lock
log . debug ( "Exit requested, stopping lock refreshing" )
|
def all(self, page=1, per_page=10, order_by="latest"):
    """Get a single page from the list of all photos.

    :param page [integer]: Page number to retrieve. (Optional; default: 1)
    :param per_page [integer]: Number of items per page. (Optional; default: 10)
    :param order_by [string]: How to sort the photos. Optional.
        (Valid values: latest, oldest, popular; default: latest)
    :return: [Array]: A single page of the Photo list.
    """
    query = {"page": page, "per_page": per_page, "order_by": order_by}
    return self._all("/photos", **query)
|
def post_mark_translated(self, post_id, check_translation, partially_translated):
    """Mark post as translated (Requires login) (UNTESTED).

    If you set check_translation and partially_translated to 1, the post
    will be tagged as 'translated_request'.

    Parameters:
        post_id (int):
        check_translation (int): Can be 0, 1.
        partially_translated (int): Can be 0, 1.
    """
    payload = {
        'post[check_translation]': check_translation,
        'post[partially_translated]': partially_translated,
    }
    url = 'posts/{0}/mark_as_translated.json'.format(post_id)
    return self._get(url, payload, method='PUT', auth=True)
|
def Profiler_setSamplingInterval(self, interval):
    """Function path: Profiler.setSamplingInterval

    Domain: Profiler
    Method name: setSamplingInterval

    Parameters:
        Required arguments:
            'interval' (type: integer) -> New sampling interval in microseconds.

    No return value.

    Description: Changes CPU profiler sampling interval. Must be called
    before CPU profiles recording started.
    """
    assert isinstance(interval, (int,)), (
        "Argument 'interval' must be of type '['int']'. Received type: '%s'" % type(interval))
    return self.synchronous_command('Profiler.setSamplingInterval', interval=interval)
|
def _isClauseFinal(wordID, clauseTokens):
    '''Determine whether the word with the given ID is clause-final:
    -- no other word follows it, or
    -- it is followed only by punctuation marks and/or the conjunctions
       JA/NING/EGA/VOI (Estonian for 'and'/'nor'/'or');
    Returns True if these conditions hold, otherwise False.
    '''
    # Template matching the conjunctions ja/ning/ega/või (POS tag D or J).
    jaNingEgaVoi = WordTemplate({ROOT: '^(ja|ning|ega|v[\u014D\u00F5]i)$', POSTAG: '[DJ]'})
    # Template matching punctuation tokens (POS tag Z).
    punktuatsioon = WordTemplate({POSTAG: 'Z'})
    for i in range(len(clauseTokens)):
        token = clauseTokens[i]
        if token[WORD_ID] == wordID:
            if i + 1 == len(clauseTokens):
                # The word is the last token of the clause.
                return True
            else:
                # Every following token must be punctuation or one of the
                # allowed conjunctions.
                for j in range(i + 1, len(clauseTokens)):
                    token2 = clauseTokens[j]
                    if not (jaNingEgaVoi.matches(token2) or punktuatsioon.matches(token2)):
                        return False
                return True
    # wordID does not occur in this clause at all.
    return False
|
def StopHuntIfCPUOrNetworkLimitsExceeded(hunt_id):
    """Stops the hunt if average limits are exceeded.

    Args:
        hunt_id: Id of the hunt to check.

    Returns:
        The up-to-date hunt object; it is the stopped hunt when one of the
        limits was exceeded.
    """
    hunt_obj = data_store.REL_DB.ReadHuntObject(hunt_id)
    # Do nothing if the hunt is already stopped.
    if hunt_obj.hunt_state == rdf_hunt_objects.Hunt.HuntState.STOPPED:
        return hunt_obj
    hunt_counters = data_store.REL_DB.ReadHuntCounters(hunt_id)
    # Check global hunt network bytes limit first.
    if (hunt_obj.total_network_bytes_limit and hunt_counters.total_network_bytes_sent > hunt_obj.total_network_bytes_limit):
        reason = ("Hunt %s reached the total network bytes sent limit of %d and " "was stopped.") % (hunt_obj.hunt_id, hunt_obj.total_network_bytes_limit)
        return StopHunt(hunt_obj.hunt_id, reason=reason)
    # Check that we have enough clients to apply average limits.
    if hunt_counters.num_clients < MIN_CLIENTS_FOR_AVERAGE_THRESHOLDS:
        return hunt_obj
    # Check average per-client results count limit.
    if hunt_obj.avg_results_per_client_limit:
        avg_results_per_client = (hunt_counters.num_results / hunt_counters.num_clients)
        if avg_results_per_client > hunt_obj.avg_results_per_client_limit:
            # Stop the hunt since we get too many results per client.
            reason = ("Hunt %s reached the average results per client " "limit of %d and was stopped.") % (hunt_obj.hunt_id, hunt_obj.avg_results_per_client_limit)
            return StopHunt(hunt_obj.hunt_id, reason=reason)
    # Check average per-client CPU seconds limit.
    if hunt_obj.avg_cpu_seconds_per_client_limit:
        avg_cpu_seconds_per_client = (hunt_counters.total_cpu_seconds / hunt_counters.num_clients)
        if avg_cpu_seconds_per_client > hunt_obj.avg_cpu_seconds_per_client_limit:
            # Stop the hunt since we use too many CPUs per client.
            reason = ("Hunt %s reached the average CPU seconds per client " "limit of %d and was stopped.") % (hunt_obj.hunt_id, hunt_obj.avg_cpu_seconds_per_client_limit)
            return StopHunt(hunt_obj.hunt_id, reason=reason)
    # Check average per-client network bytes limit.
    if hunt_obj.avg_network_bytes_per_client_limit:
        avg_network_bytes_per_client = (hunt_counters.total_network_bytes_sent / hunt_counters.num_clients)
        if (avg_network_bytes_per_client > hunt_obj.avg_network_bytes_per_client_limit):
            # Stop the hunt since we use too many network bytes sent
            # per client.
            reason = ("Hunt %s reached the average network bytes per client " "limit of %d and was stopped.") % (hunt_obj.hunt_id, hunt_obj.avg_network_bytes_per_client_limit)
            return StopHunt(hunt_obj.hunt_id, reason=reason)
    return hunt_obj
|
def largest_compartment_id_met ( model ) :
    """Return the ID of the compartment that contains the most metabolites.

    Parameters
    ----------
    model : cobra.Model
        The metabolic model under investigation.

    Returns
    -------
    string
        Compartment ID of the compartment with the most metabolites.

    Raises
    ------
    RuntimeError
        If two compartments are tied for the largest metabolite count.
    """
    # Pair every compartment with its metabolite count, then order the
    # pairs from largest to smallest count.
    sizes = [
        (compartment, len(metabolites_per_compartment(model, compartment)))
        for compartment in model.compartments
    ]
    sizes.sort(key=lambda pair: pair[1], reverse=True)
    biggest, runner_up = sizes[0], sizes[1]
    # A tie means "the largest" is ambiguous, which callers must know about.
    if biggest[1] == runner_up[1]:
        raise RuntimeError(
            "There is a tie for the largest compartment. "
            "Compartment {} and {} have equal amounts of "
            "metabolites.".format(biggest[0], runner_up[0]))
    return biggest[0]
|
def get_queryset ( self ) :
    """Return the base queryset with related objects prefetched.

    Joins the publication's series and prefetches the role/creator chain so
    that rendering the list does not trigger one query per row.
    """
    queryset = super().get_queryset()
    queryset = (
        queryset
        .select_related('publication__series')
        .prefetch_related('publication__roles__creator')
    )
    return queryset
|
def _preprocess_individuals ( self , individuals ) :
    """Preprocess DEAP individuals before pipeline evaluation.

    Deduplicates the population, screens out pipelines that are known to be
    invalid or already scored, and compiles the remainder into scikit-learn
    pipelines ready for (possibly parallel) evaluation.

    Parameters
    ----------
    individuals : list of DEAP individual
        One individual is a list of pipeline operators and model parameters
        that can be compiled by DEAP into a callable function.

    Returns
    -------
    operator_counts : dictionary
        A dictionary of operator counts in individuals for evaluation.
    eval_individuals_str : list
        A list of string representations of individuals for evaluation.
    sklearn_pipeline_list : list
        A list of scikit-learn pipelines converted from DEAP individuals.
    stats_dicts : dictionary
        A dict where 'key' is the string representation of an individual and
        'value' is a dict containing statistics about the individual.
    """
    # When a wall-clock limit (max_time_mins) governs the run rather than a
    # fixed generation count, grow the progress bar total once it is used up.
    if not (self.max_time_mins is None) and not self._pbar.disable and self._pbar.total <= self._pbar.n:
        self._pbar.total += self._lambda
    # Check we do not evaluate twice the same individual in one pass:
    # np.unique on the string forms yields the indices of first occurrences.
    _, unique_individual_indices = np.unique([str(ind) for ind in individuals], return_index=True)
    unique_individuals = [ind for i, ind in enumerate(individuals) if i in unique_individual_indices]
    # Advance the progress bar by the number of duplicate pipelines dropped.
    self._update_pbar(pbar_num=len(individuals) - len(unique_individuals))
    # a dictionary for storing operator counts
    operator_counts = {}
    stats_dicts = {}
    # 2 lists of DEAP individuals' strings and their sklearn pipelines,
    # kept index-aligned for parallel computing.
    eval_individuals_str = []
    sklearn_pipeline_list = []
    for individual in unique_individuals:
        # Disallow certain combinations of operators because they will take
        # too long or take up too much RAM. This is a fairly hacky way to
        # prevent TPOT from getting stuck on bad pipelines and should be
        # improved in a future release.
        individual_str = str(individual)
        if not len(individual):
            # An empty individual cannot be compiled; record a penalty score
            # (5000., -inf) so selection never favours it.
            self.evaluated_individuals_[individual_str] = self._combine_individual_stats(5000., -float('inf'), individual.statistics)
            self._update_pbar(pbar_msg='Invalid pipeline encountered. Skipping its evaluation.')
            continue
        sklearn_pipeline_str = generate_pipeline_code(expr_to_tree(individual, self._pset), self.operators)
        # Stacked PolynomialFeatures explodes feature counts; penalize it.
        if sklearn_pipeline_str.count('PolynomialFeatures') > 1:
            self.evaluated_individuals_[individual_str] = self._combine_individual_stats(5000., -float('inf'), individual.statistics)
            self._update_pbar(pbar_msg='Invalid pipeline encountered. Skipping its evaluation.')
        # Check if the individual was evaluated before
        elif individual_str in self.evaluated_individuals_:
            self._update_pbar(pbar_msg=('Pipeline encountered that has previously been evaluated during the ' 'optimization process. Using the score from the previous evaluation.'))
        else:
            try:
                # Transform the tree expression into an sklearn pipeline
                sklearn_pipeline = self._toolbox.compile(expr=individual)
                # Fix random state when the operator allows
                self._set_param_recursive(sklearn_pipeline.steps, 'random_state', 42)
                # Setting the seed is needed for XGBoost support because XGBoost currently stores
                # both a seed and random_state, and they're not synced correctly.
                # XGBoost will raise an exception if random_state != seed.
                if 'XGB' in sklearn_pipeline_str:
                    self._set_param_recursive(sklearn_pipeline.steps, 'seed', 42)
                # Count the number of pipeline operators as a measure of pipeline complexity
                operator_count = self._operator_count(individual)
                operator_counts[individual_str] = max(1, operator_count)
                stats_dicts[individual_str] = individual.statistics
            except Exception:
                # Compilation/parameter errors: penalize and move on rather
                # than abort the whole generation.
                self.evaluated_individuals_[individual_str] = self._combine_individual_stats(5000., -float('inf'), individual.statistics)
                self._update_pbar()
                continue
            eval_individuals_str.append(individual_str)
            sklearn_pipeline_list.append(sklearn_pipeline)
    return operator_counts, eval_individuals_str, sklearn_pipeline_list, stats_dicts
|
def calculate_totals ( self , children , local_children = None ) :
    """Calculate our cumulative totals from children and/or local children.

    Sums per-child statistics onto ``self``:
    ``recursive``/``cumulative`` are aggregated from ``children`` (falling
    back to the per-row ``calls``/``local`` fields when LOCAL_ONLY is set),
    and ``local``/``calls`` are aggregated when ``local_children`` is given.
    Also derives the per-call averages ``cumulativePer`` and ``localPer``.
    """
    # Map each aggregate field to the per-row field used in LOCAL_ONLY mode.
    for field, local_field in (('recursive', 'calls'), ('cumulative', 'local')):
        values = []
        for child in children:
            # Groups always contribute their aggregate field; plain rows do
            # too unless LOCAL_ONLY restricts them to their local field.
            if isinstance(child, PStatGroup) or not self.LOCAL_ONLY:
                values.append(getattr(child, field, 0))
            elif isinstance(child, PStatRow) and self.LOCAL_ONLY:
                values.append(getattr(child, local_field, 0))
        value = sum(values)
        setattr(self, field, value)
        # NOTE(review): this runs on every pass of the loop, including the
        # first one where only 'recursive' has been set — it appears to rely
        # on self.cumulative already existing at that point; confirm against
        # the class initializer before restructuring.
        if self.recursive:
            self.cumulativePer = self.cumulative / float(self.recursive)
        else:
            self.recursive = 0
    if local_children:
        # Aggregate the strictly-local counters from the same children.
        for field in ('local', 'calls'):
            value = sum([getattr(child, field, 0) for child in children])
            setattr(self, field, value)
        if self.calls:
            self.localPer = self.local / self.calls
    else:
        # No local children: zero out the local statistics explicitly.
        self.local = 0
        self.calls = 0
        self.localPer = 0
|
def hide ( input_image : Union [ str , IO [ bytes ] ] , message : str , encoding : str = "UTF-8" , auto_convert_rgb : bool = False , ) :
    """Hide a message (string) in an image with the
    LSB (Least Significant Bit) technique.

    :param input_image: path or binary file object of the carrier image.
    :param message: text to embed; it is prefixed with its length and a
        colon so the extractor knows where to stop.
    :param encoding: character encoding used to turn the message into bits.
    :param auto_convert_rgb: convert non-RGB images without prompting.
    :return: a new PIL image with the message embedded.
    :raises Exception: if the image is not RGB and conversion is declined,
        or if the message does not fit in the image.
    """
    message_length = len(message)
    assert message_length != 0, "message length is zero"
    img = tools.open_image(input_image)
    if img.mode not in ["RGB", "RGBA"]:
        if not auto_convert_rgb:
            print("The mode of the image is not RGB. Mode is {}".format(img.mode))
            answer = input("Convert the image to RGB ? [Y / n]\n") or "Y"
            if answer.lower() == "n":
                raise Exception("Not a RGB image.")
        img = img.convert("RGB")
    encoded = img.copy()
    width, height = img.size
    index = 0
    # Length prefix lets the reveal side know how many characters to decode.
    message = str(message_length) + ":" + str(message)
    message_bits = "".join(tools.a2bits_list(message, encoding))
    # Pad to a multiple of 3 so bits map evenly onto (R, G, B) triples.
    message_bits += "0" * ((3 - (len(message_bits) % 3)) % 3)
    npixels = width * height
    len_message_bits = len(message_bits)
    if len_message_bits > npixels * 3:
        raise Exception("The message you want to hide is too long: {}".format(message_length))
    for row in range(height):
        if index + 3 > len_message_bits:
            break
        for col in range(width):
            if index + 3 > len_message_bits:
                break
            # Get the colour components of this pixel.
            pixel = img.getpixel((col, row))
            r = pixel[0]
            g = pixel[1]
            b = pixel[2]
            # Change the Least Significant Bit of each colour component.
            r = tools.setlsb(r, message_bits[index])
            g = tools.setlsb(g, message_bits[index + 1])
            b = tools.setlsb(b, message_bits[index + 2])
            # Save the new pixel, preserving alpha when present.
            if img.mode == "RGBA":
                encoded.putpixel((col, row), (r, g, b, pixel[3]))
            else:
                encoded.putpixel((col, row), (r, g, b))
            index += 3
    # BUG FIX: the original returned only from inside the pixel loop, on the
    # first pixel *after* the message. When the message filled the image
    # exactly (len_message_bits == npixels * 3) it fell off both loops and
    # implicitly returned None without closing the source image.
    img.close()
    return encoded
|
def add_agent_pool ( self , pool ) :
    """AddAgentPool.

    [Preview API] Create an agent pool on the server.

    :param :class:`<TaskAgentPool> <azure.devops.v5_1.task_agent.models.TaskAgentPool>` pool: Details about the new agent pool
    :rtype: :class:`<TaskAgentPool> <azure.devops.v5_1.task-agent.models.TaskAgentPool>`
    """
    # Serialize the model, POST it to the pools endpoint, then rebuild the
    # created pool from the server's response.
    payload = self._serialize.body(pool, 'TaskAgentPool')
    response = self._send(
        http_method='POST',
        location_id='a8c47e17-4d56-4a56-92bb-de7ea7dc65be',
        version='5.1-preview.1',
        content=payload,
    )
    return self._deserialize('TaskAgentPool', response)
|
def dispatch ( self , state_change : StateChange ) -> List [ Event ] :
    """Apply the `state_change` in the current machine and return the
    resulting events.

    Args:
        state_change: An object representation of a state change.

    Return:
        A list of events produced by the state transition.
        It's the upper layer's responsibility to decided how to handle
        these events.
    """
    assert isinstance(state_change, StateChange)
    # State objects are treated as immutable: hand the transition function a
    # deep copy of the current state so it can mutate freely.
    working_state = deepcopy(self.current_state)
    iteration = self.state_transition(working_state, state_change)
    assert isinstance(iteration, TransitionResult)
    # Adopt the transition's resulting state and collect its events.
    self.current_state = iteration.new_state
    produced_events = iteration.events
    assert isinstance(self.current_state, (State, type(None)))
    assert all(isinstance(event, Event) for event in produced_events)
    return produced_events
|
def _compute_dk_dtau_on_partition ( self , tau , p ) :
    """Evaluate the term inside the sum of Faa di Bruno's formula for the given partition.

    Parameters
    ----------
    tau : :py:class:`Matrix`, (`M`, `D`)
        `M` inputs with dimension `D`.
    p : list of :py:class:`Array`
        Each element is a block of the partition representing the
        derivative orders to use.

    Returns
    -------
    dk_dtau : :py:class:`Array`, (`M`,)
        The specified derivatives over the given partition at the specified
        locations.
    """
    y, r2l2 = self._compute_y(tau, return_r2l2=True)
    # Start from the outer-derivative factor d^(|p|) f / dy ...
    result = self._compute_dk_dy(y, len(p))
    # ... then fold in one dy/dtau factor per block of the partition.
    for block in p:
        result *= self._compute_dy_dtau(tau, block, r2l2)
    return result
|
def pause_with_reason ( self , reason ) :
    """Internal method for triggering a VM pause with a specified reason code.

    The reason code can be interpreted by device/drivers and thus it might
    behave slightly differently than a normal VM pause.

    :py:func:`IConsole.pause`

    in reason of type :class:`Reason`
        Specify the best matching reason code please.

    raises :class:`VBoxErrorInvalidVmState`
        Virtual machine not in Running state.

    raises :class:`VBoxErrorVmError`
        Virtual machine error in suspend operation.
    """
    # Reject anything that is not a Reason enum value up front.
    if isinstance(reason, Reason):
        self._call("pauseWithReason", in_p=[reason])
    else:
        raise TypeError("reason can only be an instance of type Reason")
|
def patched ( attrs , updates ) :
    """A context in which some attributes temporarily have a modified value."""
    # Apply the updates, remembering the values they displaced.
    previous = patch(attrs, updates.items())
    try:
        yield previous
    finally:
        # Whatever happened in the body, restore the original values.
        patch(attrs, previous.items())
|
def parse_instancepath ( self , tup_tree ) :
    """Parse an INSTANCEPATH element and return the instance path it
    represents as a CIMInstanceName object.

    ::

        <!ELEMENT INSTANCEPATH (NAMESPACEPATH, INSTANCENAME)>
    """
    self.check_node(tup_tree, 'INSTANCEPATH')
    children = kids(tup_tree)
    # The DTD requires exactly a NAMESPACEPATH followed by an INSTANCENAME.
    if len(children) != 2:
        raise CIMXMLParseError(
            _format("Element {0!A} has invalid number of child elements "
                    "{1!A} (expecting two child elements "
                    "(NAMESPACEPATH, INSTANCENAME))",
                    name(tup_tree), children),
            conn_id=self.conn_id)
    host, namespace = self.parse_namespacepath(children[0])
    instance_path = self.parse_instancename(children[1])
    # Attach the namespace path components to the parsed instance name.
    instance_path.host = host
    instance_path.namespace = namespace
    return instance_path
|
def schoice ( self , seq : str , end : int = 10 ) -> str :
    """Build a string of ``end`` characters picked from ``seq``.

    :param seq: Sequence of letters or digits to pick from.
    :type seq: tuple or list
    :param end: Number of picks (length of the result).
    :return: Single string.
    """
    picks = [self.choice(list(seq)) for _ in range(end)]
    return ''.join(picks)
|
def restore_default_settings ( ) :
    """Restore settings to default values."""
    global __DEFAULTS
    # Copy each tracked setting back from the defaults module.
    for setting in ('CACHE_DIR', 'SET_SEED', 'SEED'):
        setattr(__DEFAULTS, setting, getattr(defaults, setting))
    logging.info('Settings reverted to their default values.')
|
def invoke ( self , ctx ) :
    """Given a context, this invokes the attached callback (if it exists)
    in the right way.
    """
    _maybe_show_deprecated_notice(self)
    # Nothing to do when no callback was attached.
    if self.callback is None:
        return None
    return ctx.invoke(self.callback, **ctx.params)
|
def echo ( text , fg = None , bg = None , style = None , file = None , err = False , color = None ) :
    """Write the given text to the provided stream or **sys.stdout** by default.

    Provides optional foreground and background colors from the ansi defaults:
    **grey**, **red**, **green**, **yellow**, **blue**, **magenta**, **cyan**
    or **white**.

    Available styles include **bold**, **dark**, **underline**, **blink**,
    **reverse**, **concealed**.

    :param str text: Text to write
    :param str fg: Foreground color to use (default: None)
    :param str bg: Background color to use (default: None)
    :param str style: Style to use (default: None)
    :param stream file: File to write to (default: None)
    :param bool err: Write to stderr instead of stdout when no file is given
    :param bool color: Whether to force color (i.e. ANSI codes are in the text)
    """
    # A caller-supplied stream must at least be writable.
    if file and not hasattr(file, "write"):
        raise TypeError("Expected a writable stream, received {0!r}".format(file))
    # Default the target stream to stderr or stdout as requested.
    if not file:
        if err:
            file = _text_stderr()
        else:
            file = _text_stdout()
    # Coerce arbitrary objects to text; leave str/bytes/bytearray alone.
    if text and not isinstance(text, (six.string_types, bytes, bytearray)):
        text = six.text_type(text)
    text = "" if not text else text
    # Append the trailing newline in the matching type (text vs bytes).
    if isinstance(text, six.text_type):
        text += "\n"
    else:
        text += b"\n"
    # Bytes on Python 3 must bypass the text layer and go to the raw buffer.
    if text and six.PY3 and is_bytes(text):
        buffer = _get_binary_buffer(file)
        if buffer is not None:
            file.flush()
            buffer.write(text)
            buffer.flush()
            return
    # Text path: colorize if asked, then strip ANSI codes when the stream
    # cannot render them (or wrap the stream for Windows color support).
    if text and not is_bytes(text):
        can_use_color = _can_use_color(file, color=color)
        if any([fg, bg, style]):
            text = colorize(text, fg=fg, bg=bg, attrs=style)
        if not can_use_color or (os.name == "nt" and not _wrap_for_color):
            text = ANSI_REMOVAL_RE.sub("", text)
        elif os.name == "nt" and _wrap_for_color:
            file = _wrap_for_color(file, color=color)
    if text:
        file.write(text)
    file.flush()
|
def ceiling ( value , mod = 1 ) :
    """Return the smallest multiple of ``mod`` strictly greater than ``value``.

    With the default ``mod=1`` this is the smallest integer strictly greater
    than ``value`` (e.g. ceiling(3) == 4, ceiling(3.2) == 4).

    :param value: number to round up, or None.
    :param mod: positive step; the result is a multiple of int(mod).
    :return: the rounded value as an int, or None when ``value`` is None.
    """
    # Identity comparison is the correct None test (was `value == None`).
    if value is None:
        return None
    mod = int(mod)
    # Shift up by one full step, floor, then snap down to the step grid;
    # this yields a strictly-greater multiple even when value is already
    # on the grid.
    v = int(math_floor(value + mod))
    return v - (v % mod)
|
def AddSourceRestriction ( self , cidr ) :
    """Add and commit a single source IP restriction policy.

    >>> clc.v2.Server("WA1BTDIX01").PublicIPs().public_ips[0]
                .AddSourceRestriction(cidr="132.200.20.1/32").WaitUntilComplete()
    """
    # Register the new restriction locally, then push the change upstream.
    restriction = SourceRestriction(self, cidr)
    self.source_restrictions.append(restriction)
    return self.Update()
|
def pmllpmbb_to_pmrapmdec ( pmll , pmbb , l , b , degree = False , epoch = 2000.0 ) :
    """NAME:

       pmllpmbb_to_pmrapmdec

    PURPOSE:

       rotate proper motions in (l,b) into proper motions in (ra,dec)

    INPUT:

       pmll - proper motion in l (multiplied with cos(b)) [mas/yr]

       pmbb - proper motion in b [mas/yr]

       l - Galactic longitude

       b - Galactic latitude

       degree - if True, l and b are given in degrees (default=False)

       epoch - epoch of ra,dec (right now only 2000.0 and 1950.0 are
               supported when not using astropy's transformations internally;
               when internally using astropy's coordinate transformations,
               epoch can be None for ICRS, 'JXXXX' for FK5, and 'BXXXX' for
               FK4)

    OUTPUT:

       (pmra x cos(dec), pmdec), for vector inputs [:,2]

    HISTORY:

       2010-04-07 - Written - Bovy (NYU)

       2014-06-14 - Re-written w/ numpy functions for speed and w/ decorators
                    for beauty - Bovy (IAS)
    """
    # Orientation angles of the North Galactic Pole for the requested epoch.
    theta, dec_ngp, ra_ngp = get_epoch_angles(epoch)
    # Whether to use degrees and scalar input is handled by decorators
    radec = lb_to_radec(l, b, degree=False, epoch=epoch)
    ra = radec[:, 0]
    dec = radec[:, 1]
    # Nudge points exactly at the pole declination to avoid a degenerate
    # rotation there.
    dec[dec == dec_ngp] += 10.**-16
    # deal w/ pole.
    sindec_ngp = nu.sin(dec_ngp)
    cosdec_ngp = nu.cos(dec_ngp)
    sindec = nu.sin(dec)
    cosdec = nu.cos(dec)
    sinrarangp = nu.sin(ra - ra_ngp)
    cosrarangp = nu.cos(ra - ra_ngp)
    # These were replaced by Poleski (2013)'s equivalent form that is better
    # at the poles:
    # cosphi = (sindec_ngp - sindec * sinb) / cosdec / cosb
    # sinphi = sinrarangp * cosdec_ngp / cosb
    cosphi = sindec_ngp * cosdec - cosdec_ngp * sindec * cosrarangp
    sinphi = sinrarangp * cosdec_ngp
    # Normalize so (cosphi, sinphi) lies on the unit circle.
    norm = nu.sqrt(cosphi**2. + sinphi**2.)
    cosphi /= norm
    sinphi /= norm
    # Apply the 2x2 rotation per point: batched matrix-vector product via
    # elementwise multiply and a sum over the last axis.
    return (nu.array([[cosphi, sinphi], [-sinphi, cosphi]]).T * nu.array([[pmll, pmll], [pmbb, pmbb]]).T).sum(-1)
|
def format ( self , indent_level , indent_size = 4 ) :
    """Format this verifier.

    Renders the verifier's name header (as a Boolean verifier) and, when a
    required value is set, a "must be true/false" line beneath it.

    Returns:
        string: A formatted string
    """
    name = self.format_name('Boolean', indent_size)
    if self._require_value is not None:
        # A long description already occupies the first line; separate the
        # requirement note from it with a blank line.
        if self.long_desc is not None:
            name += '\n'
        name += self.wrap_lines('must be %s\n' % str(self._require_value).lower(), 1, indent_size)
    return self.wrap_lines(name, indent_level, indent_size)
|
def compute_result_enum ( self ) -> RobotScanResultEnum :
    """Look at the server's response to each ROBOT payload and return the
    conclusion of the analysis.
    """
    # Each payload was sent twice; any disagreement between the two runs
    # makes the whole measurement untrustworthy.
    for responses in self._payload_responses.values():
        if responses[0] != responses[1]:
            return RobotScanResultEnum.UNKNOWN_INCONSISTENT_RESULTS
    # If every payload drew the same reply, the server leaks nothing and
    # cannot be used as a padding oracle.
    distinct_responses = set(responses[0] for responses in self._payload_responses.values())
    if len(distinct_responses) == 1:
        return RobotScanResultEnum.NOT_VULNERABLE_NO_ORACLE
    # Replies differed, so the server is an oracle; classify its strength.
    bad_pkcs1 = self._payload_responses[RobotPmsPaddingPayloadEnum.WRONG_FIRST_TWO_BYTES][0]
    wrong_position = self._payload_responses[RobotPmsPaddingPayloadEnum.WRONG_POSITION_00][0]
    missing_null = self._payload_responses[RobotPmsPaddingPayloadEnum.NO_00_IN_THE_MIDDLE][0]
    # From the original script: if the response to the invalid PKCS#1
    # request equals both requests starting with 0002, we can only tell
    # valid from invalid when the 0x00 sits at the correct position, which
    # makes the oracle weak.
    if bad_pkcs1 == wrong_position == missing_null:
        return RobotScanResultEnum.VULNERABLE_WEAK_ORACLE
    return RobotScanResultEnum.VULNERABLE_STRONG_ORACLE
|
def pivot ( self , binned = False ) :
    """Calculate :ref:`pysynphot-formula-pivwv`.

    Parameters
    ----------
    binned : bool
        This is reserved for use by `~pysynphot.observation.Observation`.
        If `True`, binned wavelength set is used. Default is `False`.

    Returns
    -------
    ans : float
        Pivot wavelength.

    Raises
    ------
    AttributeError
        Binned wavelength set requested but not found.
    """
    # Select the wavelength grid, translating a missing binned set into a
    # clearer error message.
    if binned:
        try:
            wave = self.binwave
        except AttributeError:
            raise AttributeError('Class ' + str(type(self)) + ' does not support binning.')
    else:
        wave = self.wave
    # Pivot wavelength: sqrt( integral(f * w) / integral(f / w) ).
    numerator = self.trapezoidIntegration(wave, self(wave) * wave)
    denominator = self.trapezoidIntegration(wave, self(wave) / wave)
    if numerator == 0.0 or denominator == 0.0:
        return 0.0
    return math.sqrt(numerator / denominator)
|
def _clone ( self ) :
"""Make a ( shallow ) copy of the set .
There is a ' clone protocol ' that subclasses of this class
should use . To make a copy , first call your super ' s _ clone ( )
method , and use the object returned as the new instance . Then
make shallow copies of the attributes defined in the subclass .
This protocol allows us to write the set algorithms that
return new instances ( e . g . union ) once , and keep using them in
subclasses ."""
|
cls = self . __class__
obj = cls . __new__ ( cls )
obj . items = list ( self . items )
return obj
|
def GetScriptHashesForVerifying ( self ) :
    """Get a list of script hashes for verifying transactions.

    Extends the base transaction's hashes with the script hash of every
    output being claimed, resolved through the blockchain.

    Raises:
        Exception: if there are no valid transactions to claim from.

    Returns:
        list: of UInt160 type script hashes.
    """
    hashes = super(ClaimTransaction, self).GetScriptHashesForVerifying()
    # NOTE(review): itertools.groupby only groups *consecutive* items, so
    # this appears to assume self.Claims is already ordered by PrevHash —
    # confirm with the caller before relying on it.
    for hash, group in groupby(self.Claims, lambda x: x.PrevHash):
        # Resolve the referenced transaction once per distinct PrevHash.
        tx, height = Blockchain.Default().GetTransaction(hash)
        if tx is None:
            raise Exception("Invalid Claim Operation")
        for claim in group:
            # The claimed output index must exist on the source transaction.
            if len(tx.outputs) <= claim.PrevIndex:
                raise Exception("Invalid Claim Operation")
            script_hash = tx.outputs[claim.PrevIndex].ScriptHash
            if script_hash not in hashes:
                hashes.append(script_hash)
    hashes.sort()
    return hashes
|
def pop_event ( self ) :
    """Pop an event from event_list.

    Returns the oldest queued event, or None when the queue is empty.
    """
    if not self.event_list:
        return None
    # FIFO order: take from the front of the list.
    return self.event_list.pop(0)
|
def makeParser ( ) :
    """Create the SCOOP module arguments parser.

    Builds an argparse parser covering host selection, worker/broker counts,
    connection options (ssh/rsh/tunnel), interpreter settings, debugging and
    profiling flags, plus the target executable and its arguments.
    """
    # TODO: Add environment variable (all + selection)
    parser = argparse.ArgumentParser(description="Starts a parallel program using SCOOP.", prog="{0} -m scoop".format(sys.executable), )
    # Hosts may come from the command line or a hostfile, but not both.
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--hosts', '--host', help="The list of hosts. The first host will execute " "the origin. (default is 127.0.0.1)", metavar="Address", nargs='*')
    group.add_argument('--hostfile', help="The hostfile name", metavar="FileName")
    # Remote-execution environment options.
    parser.add_argument('--path', '-p', help="The path to the executable on remote hosts " "(default is local directory)", default=os.getcwd())
    parser.add_argument('--nice', type=int, metavar="NiceLevel", help="*nix niceness level (-20 to 19) to run the " "executable")
    parser.add_argument('--verbose', '-v', action='count', help="Verbosity level of this launch script (-vv for " "more)", default=1)
    parser.add_argument('--quiet', '-q', action='store_true')
    # Worker/broker topology.
    parser.add_argument('-n', help="Total number of workers to launch on the hosts. " "Workers are spawned sequentially over the hosts. " "(ie. -n 3 with 2 hosts will spawn 2 workers on " "the first host and 1 on the second.) (default: " "Number of CPUs on current machine)", type=int, metavar="NumberOfWorkers")
    parser.add_argument('-b', help="Total number of brokers to launch on the hosts. " "Brokers are spawned sequentially over the hosts. " "(ie. -b 3 with 2 hosts will spawn 2 brokers on " "the first host and 1 on the second.) (default: " "1)", type=int, default=1, metavar="NumberOfBrokers")
    # Remote connection transport options.
    parser.add_argument('--rsh', help="Use RSH instead of SSH for the launching process. " "Not compatible with --tunnel flag.", action='store_true')
    parser.add_argument('--ssh-executable', help="Name of the ssh executable. (default: 'ssh')", default="ssh", metavar="SSHExecutable")
    parser.add_argument('--tunnel', help="Activate ssh tunnels to route toward the broker " "sockets over remote connections (may eliminate " "routing problems and activate encryption but " "slows down communications)", action='store_true')
    parser.add_argument('--external-hostname', nargs=1, help="The externally routable hostname / ip of this " "machine. (defaults to the local hostname)", metavar="Address")
    # Python environment passed to the workers.
    parser.add_argument('--python-interpreter', nargs=1, help="The python interpreter executable with which to " "execute the script", default=[sys.executable], metavar="Path")
    parser.add_argument('--pythonpath', nargs=1, help="The PYTHONPATH environment variable (default is " "current PYTHONPATH)", default=[os.environ.get('PYTHONPATH', '')])
    parser.add_argument('--prolog', nargs=1, help="Absolute Path to a shell script or executable " "that will be executed at the launch of every " "worker", default=[None])
    # Diagnostics.
    parser.add_argument('--debug', help=argparse.SUPPRESS, action='store_true')
    parser.add_argument('--profile', help=("Turn on the profiling. SCOOP will call " "cProfile.run on the executable for every worker and" " will produce files in directory profile/ named " "workerX where X is the number of the worker."), action='store_true')
    parser.add_argument('--backend', help="Choice of communication backend", choices=['ZMQ', 'TCP'], default='ZMQ')
    # Target program and its arguments.
    parser.add_argument('executable', nargs='?', help='The executable to start with SCOOP')
    parser.add_argument('args', nargs=argparse.REMAINDER, help='The arguments to pass to the executable', default=[], metavar="args")
    return parser
|
def as_nonlinear ( self , params = None ) :
    """Return a `Model` equivalent to this object.

    The nonlinear solver is less efficient, but lets you freeze parameters,
    compute uncertainties, etc.

    If the `params` argument is provided, solve() will be called on the
    returned object with those parameters. If it is `None` and this object
    has parameters in `self.params`, those will be used. Otherwise, solve()
    will not be called on the returned object.
    """
    solve_params = self.params if params is None else params
    # Wrap the polynomial evaluation as a generic nonlinear model over the
    # same data and weights.
    nonlinear_model = Model(None, self.data, self.invsigma)
    nonlinear_model.set_func(lambda p, x: npoly.polyval(x, p), self.pnames, args=(self.x,))
    if solve_params is not None:
        nonlinear_model.solve(solve_params)
    return nonlinear_model
|
def _parse_document ( self ) :
    """Parse system.profile doc, copy all values to member variables.

    Extracts timing, operation, namespace, query/sort patterns, counters and
    lock statistics from the profile document, then synthesizes a log-style
    line string equivalent to what mongod would have printed.
    """
    self._reset()
    doc = self._profile_doc
    # Profile docs carry structured fields, so no token splitting is needed.
    self._split_tokens_calculated = True
    self._split_tokens = None
    self._duration_calculated = True
    self._duration = doc[u'millis']
    self._datetime_calculated = True
    self._datetime = doc[u'ts']
    # Normalize naive timestamps to UTC so comparisons are consistent.
    if self._datetime.tzinfo is None:
        self._datetime = self._datetime.replace(tzinfo=tzutc())
    self._datetime_format = None
    self._reformat_timestamp('ctime', force=True)
    self._thread_calculated = True
    self._thread = doc['thread']
    self._operation_calculated = True
    self._operation = doc[u'op']
    self._namespace = doc[u'ns']
    self._command_calculated = True
    if self.operation == 'command':
        # NOTE(review): dict.keys()[0] only works on Python 2; on Python 3
        # keys() is a view and is not subscriptable — confirm intended
        # interpreter before modernizing.
        self._command = doc[u'command'].keys()[0]
    # query pattern for system.profile events, all three cases.
    # See SERVER-13245
    if 'query' in doc:
        if 'query' in doc['query'] and isinstance(doc['query']['query'], dict):
            self._pattern = str(doc['query']['query']).replace("'", '"')
        elif '$query' in doc['query']:
            self._pattern = str(doc['query']['$query']).replace("'", '"')
        else:
            self._pattern = str(doc['query']).replace("'", '"')
        # sort pattern (same three wrapping variants as the query pattern)
        if ('orderby' in doc['query'] and isinstance(doc['query']['orderby'], dict)):
            self._sort_pattern = str(doc['query']['orderby']).replace("'", '"')
        elif '$orderby' in doc['query']:
            self._sort_pattern = str(doc['query']['$orderby']).replace("'", '"')
        else:
            self._sort_pattern = None
    # Per-operation counters; each may be absent depending on op type.
    self._counters_calculated = True
    self._nscanned = doc[u'nscanned'] if 'nscanned' in doc else None
    self._ntoreturn = doc[u'ntoreturn'] if 'ntoreturn' in doc else None
    self._nupdated = doc[u'nupdated'] if 'nupdated' in doc else None
    self._nreturned = doc[u'nreturned'] if 'nreturned' in doc else None
    self._ninserted = doc[u'ninserted'] if 'ninserted' in doc else None
    self._ndeleted = doc[u'ndeleted'] if 'ndeleted' in doc else None
    self._numYields = doc[u'numYield'] if 'numYield' in doc else None
    # Lock statistics come in two shapes depending on server version.
    if u'lockStats' in doc:
        self._r = doc[u'lockStats'][u'timeLockedMicros'][u'r']
        self._w = doc[u'lockStats'][u'timeLockedMicros'][u'w']
        self._r_acquiring = doc[u'lockStats']['timeAcquiringMicros'][u'r']
        self._w_acquiring = doc[u'lockStats']['timeAcquiringMicros'][u'w']
        locks = 'w:%i' % self.w if self.w is not None else 'r:%i' % self.r
    elif u'locks' in doc:
        locks = json.dumps(doc[u'locks'])
    else:
        locks = ''
    # build a fake line_str resembling a mongod log line
    payload = ''
    if 'query' in doc:
        payload += ('query: %s' % str(doc[u'query']).replace("u'", "'").replace("'", '"'))
    if 'command' in doc:
        payload += ('command: %s' % str(doc[u'command']).replace("u'", "'").replace("'", '"'))
    if 'updateobj' in doc:
        payload += (' update: %s' % str(doc[u'updateobj']).replace("u'", "'").replace("'", '"'))
    scanned = 'nscanned:%i' % self._nscanned if 'nscanned' in doc else ''
    yields = 'numYields:%i' % self._numYields if 'numYield' in doc else ''
    duration = '%ims' % self.duration if self.duration is not None else ''
    self._line_str = ("[{thread}] {operation} {namespace} {payload} " "{scanned} {yields} locks(micros) {locks} " "{duration}".format(datetime=self.datetime, thread=self.thread, operation=self.operation, namespace=self.namespace, payload=payload, scanned=scanned, yields=yields, locks=locks, duration=duration))
|
def set_rss_element ( self ) :
    """Set each of the basic rss elements."""
    # Run every element setter in the original fixed order.
    setters = (
        self.set_author,
        self.set_categories,
        self.set_comments,
        self.set_creative_commons,
        self.set_description,
        self.set_enclosure,
        self.set_guid,
        self.set_link,
        self.set_published_date,
        self.set_title,
    )
    for setter in setters:
        setter()
|
def is_archlinux ( ) :
    """Return True if the current host is running Arch Linux.

    The original docstring claimed "debian like OS", but the code checks for
    the Arch-specific /etc/arch-release marker; the docstring was wrong.

    :return: True when running on Arch Linux, False otherwise.
    """
    if platform.system().lower() != 'linux':
        return False
    # platform.linux_distribution() was deprecated in 3.5 and removed in
    # 3.8; treat its absence the same as an undefined distribution so the
    # check keeps working on modern interpreters instead of raising
    # AttributeError.
    linux_distribution = getattr(platform, 'linux_distribution', lambda: ('', '', ''))
    if linux_distribution() == ('', '', ''):
        # Undefined distribution: fall back to Arch's release marker file.
        return os.path.exists('/etc/arch-release')
    return False
|
def consumer ( site , uri ) :
    """Consume URI using site config."""
    # Resolve the site's configuration and build the consume model for it.
    site_config = load_site_config(site)
    model = _get_model('consume', site_config, uri)
    # Persist both media and data through the configured storage backend.
    store = get_consumestore(
        model=model,
        method=_config.get('storage', 'file'),
        bucket=_config.get('s3_data_bucket', None),
    )
    store.save_media()
    store.save_data()
|
def get_domain ( self ) :
    """Returns the dictionary of variables with keys as variable name
    and values as domain of the variables.

    Returns
    -------
    dict: dictionary containing variables and their domains

    Example
    -------
    >>> reader = UAIReader('TestUAI.uai')
    >>> reader.get_domain()
    {'var_0': '2', 'var_1': '2', 'var_2': '3'}
    """
    # The grammar exposes one domain entry per variable, in declaration
    # order; name each variable by its position.
    variables = self.grammar.parseString(self.network)['domain_variables']
    return {"var_" + str(index): value for index, value in enumerate(variables)}
|
def get_klass_parents ( gi_name ) :
    '''Returns a sorted list of qualified symbols representing
    the parents of the klass-like symbol named gi_name'''
    parents = __HIERARCHY_GRAPH.predecessors(gi_name)
    # No predecessors means the symbol has no recorded ancestry.
    if not parents:
        return []
    # Walk the single parent chain, accumulating qualified symbols.
    ancestry = []
    __get_parent_link_recurse(parents[0], ancestry)
    return ancestry
|
def getOr(subject, predicate, *args, **kwargs):
    """Retrieve a metadata node or generate a new one.

    :param subject: Subject to which the metadata node should be connected
    :param predicate: Predicate by which the metadata node should be connected
    :return: Metadata for given node
    :rtype: Metadata
    """
    # No existing triple: build a fresh Metadata from the caller's arguments.
    if (subject, predicate, None) not in get_graph():
        return Metadata(*args, **kwargs)
    existing_node = next(get_graph().objects(subject, predicate))
    return Metadata(node=existing_node)
|
def is_opening_code_fence(line: str, parser: str = 'github'):
    r"""Determine if the given line is possibly the opening of a fenced code block.

    :parameter line: a single markdown line to evaluate.
    :parameter parser: decides rules on how to generate the anchor text.
        Defaults to ``github``.
    :type line: str
    :type parser: str
    :returns: None if the input line is not an opening code fence. Otherwise,
        returns the string which will identify the closing code fence
        according to the input parser's rules.
    :rtype: typing.Optional[str]
    :raises: a built-in exception.
    """
    # cmark, gitlab and commonmarker all follow the GFM fence rules.
    if (parser == 'github' or parser == 'cmark' or parser == 'gitlab'
            or parser == 'commonmarker'):
        markers = md_parser['github']['code fence']['marker']
        marker_min_length = md_parser['github']['code fence']['min_marker_characters']
        # An opening fence may only be indented up to the allowed limit.
        if not is_valid_code_fence_indent(line):
            return None
        line = line.lstrip(' ').rstrip('\n')
        # The line must begin with at least the minimum run of a single
        # marker character (e.g. ``` or ~~~).
        if not line.startswith((markers[0] * marker_min_length, markers[1] * marker_min_length)):
            return None
        # A line consisting entirely of marker characters has no info string.
        if line == len(line) * line[0]:
            info_string = str()
        else:
            info_string = line.lstrip(line[0])
        # Backticks or tildes in info string are explicitly forbidden.
        if markers[0] in info_string or markers[1] in info_string:
            return None
        # Solves example 107. See:
        # https://github.github.com/gfm/#example-107
        if line.rstrip(markers[0]) != line and line.rstrip(markers[1]) != line:
            return None
        # The leading marker run is what the closing fence must match.
        return line.rstrip(info_string)
    elif parser == 'redcarpet':
        # TODO: redcarpet fence detection is not implemented yet.
        return None
|
def confirm_operation(prompt, prefix=None, assume_yes=False, err=False):
    """Prompt the user for confirmation for dangerous actions.

    Returns True when the action is confirmed (or *assume_yes* is set),
    False otherwise.
    """
    if assume_yes:
        return True
    if not prefix:
        bold_word = click.style("absolutely", bold=True)
        prefix = click.style("Are you %s certain you want to" % (bold_word))
    full_prompt = "%(prefix)s %(prompt)s?" % {"prefix": prefix, "prompt": prompt}
    if click.confirm(full_prompt, err=err):
        return True
    # Declined: print a reassuring message before bailing out.
    click.echo(err=err)
    click.secho("OK, phew! Close call. :-)", fg="green", err=err)
    return False
|
def position_result_list(change_list):
    """Returns a template which iters through the models and appends a new
    position column.

    :param change_list: Django admin ChangeList for the current view.
    :return: the ``result_list`` template context with sorting disabled on
        the original columns, a new sortable "position" header, and an
        editable position ``<td>`` appended to every row.
    """
    result = result_list(change_list)
    # Remove sortable attributes
    for x in range(0, len(result['result_headers'])):
        result['result_headers'][x]['sorted'] = False
        if result['result_headers'][x]['sortable']:
            result['result_headers'][x]['class_attrib'] = mark_safe(' class="sortable"')
    # Append position <th> element
    result['result_headers'].append({
        'url_remove': '?o=',
        'sort_priority': 1,
        'sortable': True,
        'class_attrib': mark_safe(' class="sortable sorted ascending"'),
        'sorted': True,
        'text': 'position',
        'ascending': True,
        'url_primary': '?o=-1',
        'url_toggle': '?o=-1',
    })
    # Append the editable field to every result item
    for x in range(0, len(result['results'])):
        obj = change_list.result_list[x]
        # Get position object
        c_type = ContentType.objects.get_for_model(obj)
        try:
            object_position = ObjectPosition.objects.get(content_type__pk=c_type.id, object_id=obj.id)
        except ObjectPosition.DoesNotExist:
            # Lazily create a position record the first time an object is listed.
            object_position = ObjectPosition.objects.create(content_object=obj)
        # Add the <td>
        html = ('<td><input class="vTextField" id="id_position-{0}"' ' maxlength="10" name="position-{0}" type="text"' ' value="{1}" /></td>').format(object_position.id, object_position.position)
        result['results'][x].append(mark_safe(html))
    return result
|
def main():
    """Start the DQL client."""
    parser = argparse.ArgumentParser(description=main.__doc__)
    parser.add_argument("-c", "--command", help="Run this command and exit")
    default_region = os.environ.get("AWS_REGION", "us-west-1")
    parser.add_argument("-r", "--region", default=default_region,
                        help="AWS region to connect to (default %(default)s)")
    parser.add_argument("-H", "--host", default=None,
                        help="Host to connect to if using a local instance "
                             "(default %(default)s)")
    parser.add_argument("-p", "--port", default=8000, type=int,
                        help="Port to connect to "
                             "(default %(default)d)")
    parser.add_argument("--version", action="store_true",
                        help="Print the version and exit")
    args = parser.parse_args()

    # --version short-circuits everything else.
    if args.version:
        print(__version__)
        return

    logging.config.dictConfig(LOG_CONFIG)
    cli = DQLClient()
    cli.initialize(region=args.region, host=args.host, port=args.port)

    if not args.command:
        # No one-shot command: drop into the interactive shell.
        cli.start()
        return
    try:
        cli.run_command(args.command.strip())
        if cli.engine.partial:
            # Terminate any partial statement left in the engine.
            cli.run_command(";")
    except KeyboardInterrupt:
        pass
|
def _get_designation_type(self):
    """Extracts the designation type of the stored routine.

    Scans the routine's specification block for a ``-- type: <name> [args]``
    comment line. Depending on the designation type, also populates
    ``self._table_name`` and/or ``self._columns``.

    :raises LoaderException: if the type line is malformed or no designation
        type can be found.
    """
    positions = self._get_specification_positions()
    # Only scan when both start and end of the specification were located.
    if positions[0] != -1 and positions[1] != -1:
        pattern = re.compile(r'^\s*--\s+type\s*:\s*(\w+)\s*(.+)?\s*', re.IGNORECASE)
        for line_number in range(positions[0], positions[1] + 1):
            matches = pattern.findall(self._routine_source_code_lines[line_number])
            if matches:
                self._designation_type = matches[0][0].lower()
                # Optional trailing arguments after the type keyword.
                tmp = str(matches[0][1])
                if self._designation_type == 'bulk_insert':
                    # bulk_insert requires a table name and a comma-separated
                    # column list: -- type: bulk_insert <table> <col1,col2,...>
                    n = re.compile(r'([a-zA-Z0-9_]+)\s+([a-zA-Z0-9_,]+)', re.IGNORECASE)
                    info = n.findall(tmp)
                    if not info:
                        raise LoaderException('Expected: -- type: bulk_insert <table_name> <columns> in file {0}'.format(self._source_filename))
                    self._table_name = info[0][0]
                    self._columns = str(info[0][1]).split(',')
                elif self._designation_type == 'rows_with_key' or self._designation_type == 'rows_with_index':
                    # These types take only a comma-separated column list.
                    self._columns = str(matches[0][1]).split(',')
                else:
                    # Any other type must not carry trailing arguments.
                    if matches[0][1]:
                        raise LoaderException('Expected: -- type: {}'.format(self._designation_type))
    if not self._designation_type:
        raise LoaderException("Unable to find the designation type of the stored routine in file {0}".format(self._source_filename))
|
def cart2spol(X):
    """Transform cartesian phase-space coordinates to spherical polars.

    (r, phi, theta) have their usual meanings; the three velocity
    components are rotated into the (r, phi, theta) basis.

    :param X: sequence (x, y, z, vx, vy, vz).
    :return: numpy array [r, phi, theta, vr, vphi, vtheta].
    """
    x, y, z, vx, vy, vz = X
    radius = np.sqrt(x * x + y * y + z * z)
    phi = np.arctan2(y, x)
    theta = np.arccos(z / radius)
    sin_p, cos_p = np.sin(phi), np.cos(phi)
    sin_t, cos_t = np.sin(theta), np.cos(theta)
    # Velocity in the plane of phi, reused for both radial and polar parts.
    v_planar = vx * cos_p + vy * sin_p
    v_r = v_planar * sin_t + cos_t * vz
    v_phi = vy * cos_p - vx * sin_p
    v_theta = v_planar * cos_t - sin_t * vz
    return np.array([radius, phi, theta, v_r, v_phi, v_theta])
|
def load_cufflinks_fpkm_dict(*args, **kwargs):
    """Return a dictionary mapping feature identifier (either transcript or
    gene ID) to FPKM expression value."""
    df = load_cufflinks_dataframe(*args, **kwargs)
    fpkm_by_id = {}
    for _, row in df.iterrows():
        fpkm_by_id[row.id] = row.fpkm
    return fpkm_by_id
|
async def create_vm(self, preset_name: str, image: str, flavor: str,
                    security_groups: List = None, userdata: Dict = None,
                    key_name: str = None, availability_zone: str = None,
                    subnets: List = None) -> Any:
    """Create (boot) a new server.

    Abstract coroutine: concrete drivers must override this.

    :arg string preset_name: Name of vm group where vm is created.
    :arg string image: Image name.
    :arg string flavor: Flavor (or instance_type in AWS) name.
    :arg list security_groups: A list of security group names.
    :arg dict userdata: A dict of arbitrary key/value metadata to store in grains.
    :arg string key_name: (optional extension) name of previously created
        keypair to inject into the instance.
    :arg string availability_zone: Name of the availability zone for instance
        placement.
    :arg list subnets: List of the subnets for instance placement.

    Returns Any vm_id.
    """
    raise NotImplementedError
|
def _ParseCommon2003CachedEntry(self, value_data, cached_entry_offset):
    """Parses the cached entry structure common for Windows 2003, Vista and 7.

    Args:
      value_data (bytes): value data.
      cached_entry_offset (int): offset of the first cached entry data
          relative to the start of the value data.

    Returns:
      appcompatcache_cached_entry_2003_common: cached entry structure common
          for Windows 2003, Windows Vista and Windows 7.

    Raises:
      ParseError: if the value data could not be parsed.
    """
    data_type_map = self._GetDataTypeMap('appcompatcache_cached_entry_2003_common')
    try:
        entry = self._ReadStructureFromByteStream(
            value_data[cached_entry_offset:], cached_entry_offset, data_type_map)
    except (ValueError, errors.ParseError) as exception:
        raise errors.ParseError(
            'Unable to parse cached entry value with error: {0!s}'.format(exception))
    # Sanity-check the path sizes before the caller reads the path string.
    if entry.path_size > entry.maximum_path_size:
        raise errors.ParseError('Path size value out of bounds.')
    trailing_size = entry.maximum_path_size - entry.path_size
    # A valid path is non-empty and terminated by a 2-byte end-of-string.
    if entry.path_size == 0 or trailing_size != 2:
        raise errors.ParseError('Unsupported path size values.')
    return entry
|
def month_roll(self):
    """Return the default roll function to be used by the apply method.

    A prefix ending in 'S' marks a MonthBegin-style offset (roll back);
    anything else is MonthEnd-style (roll forward).
    """
    if self._prefix.endswith('S'):
        return self.m_offset.rollback
    return self.m_offset.rollforward
|
def event_loop(self):
    """Run the ZMQ IOLoop until interrupted.

    Blocks in ``start()``; a Ctrl-C (KeyboardInterrupt) stops the loop
    cleanly instead of propagating.
    """
    try:
        zmq.eventloop.ioloop.IOLoop.current().start()
    except KeyboardInterrupt:
        zmq.eventloop.ioloop.IOLoop.current().stop()
|
def autoset_margins(self):
    """auto-set margins left, bottom, right, top
    according to the specified margins (in pixels)
    and axes extent (taking into account labels,
    title, axis)"""
    # Respect an explicit opt-out of automatic margin management.
    if not self.conf.auto_margins:
        return
    # coordinates in px -> [0, 1] in figure coordinates
    # NOTE(review): `trans` is computed but never used in this block --
    # confirm whether it is dead code or used by a part of the method
    # outside this view.
    trans = self.fig.transFigure.inverted().transform
    # Static margins
    if not self.use_dates:
        self.conf.margins = l, t, r, b = self.get_default_margins()
        self.gridspec.update(left=l, top=1 - t, right=1 - r, bottom=b)
    # Axes positions update
    for ax in self.fig.get_axes():
        try:
            ax.update_params()
        except ValueError:
            # NOTE(review): ValueError is silently ignored here; presumably
            # some axes cannot update their gridspec params -- confirm.
            pass
        ax.set_position(ax.figbox)
|
def get_first_language(self, site_id=None):
    """Return the code of the first language for the current site.

    Useful for user interfaces that render the languages as tabs. Falls
    back to the default language when the site has no configuration
    (i.e. an effectively non-multilingual setup).
    """
    if site_id is None:
        site_id = getattr(settings, 'SITE_ID', None)
    try:
        return self[site_id][0]['code']
    except (KeyError, IndexError):
        # No per-site configuration: always fall back to the default language.
        return self['default']['code']
|
def enclosure_directed(self):
    """Networkx DiGraph of polygon enclosure."""
    tree_root, enclosure_graph = polygons.enclosure_tree(self.polygons_closed)
    # Cache the root so it is not recomputed by later queries.
    self._cache['root'] = tree_root
    return enclosure_graph
|
def __branch_point_dfs(dfs_data):
    """DFS that calculates the b(u) and N(u) lookups, and also reorders the adjacency lists."""
    # Seed all lookups with the DFS root before recursing.
    root = dfs_data['ordering'][0]
    large_n = {root: 0}
    stem = {root: root}
    b = {root: 1}
    __branch_point_dfs_recursive(root, large_n, b, stem, dfs_data)
    dfs_data['N_u_lookup'] = large_n
    dfs_data['b_u_lookup'] = b
|
def set_environ(inherit=True, append=None):
    """Build an environment mapping to pass to a subprocess.

    :param inherit: when True, start from a copy of the current process
        environment; when False, start from an empty mapping.
    :param append: optional dict of extra variables to add/override.
    :return: dict of environment variables.
    """
    # Copy the environment so the caller's additions never mutate the real
    # process environment (the original assigned into ``environ`` directly).
    _environ = dict(environ) if inherit else {}
    # ``append or {}`` also avoids the mutable-default-argument pitfall.
    for key, value in (append or {}).items():
        _environ[key] = value
    return _environ
|
def get_pickup_time_estimates(self, latitude, longitude, ride_type=None):
    """Get pickup time estimates (ETA) for products at a given location.

    Parameters
        latitude (float)
            The latitude component of a location.
        longitude (float)
            The longitude component of a location.
        ride_type (str)
            Optional specific ride type pickup estimate only.

    Returns
        (Response)
            A Response containing each product's pickup time estimates.
    """
    # Parameter order matters for the signed request, hence OrderedDict.
    params = OrderedDict()
    params['lat'] = latitude
    params['lng'] = longitude
    params['ride_type'] = ride_type
    return self._api_call('GET', 'v1/eta', args=params)
|
def _build_url_rewriter(cls, session: AppSession):
    '''Build URL rewriter if needed.

    Returns None (implicitly) when neither rewriting option is enabled.
    '''
    escaped = session.args.escaped_fragment
    strip_sid = session.args.strip_session_id
    if not (escaped or strip_sid):
        return
    return session.factory.new(
        'URLRewriter',
        hash_fragment=escaped,
        session_id=strip_sid,
    )
|
def _get_beacons(self, include_opts=True, include_pillar=True):
    '''Return the beacons data structure.

    Merges pillar beacons first, then opts beacons (opts win on key
    collisions), validating that each source is a dict.
    '''
    merged = {}

    def _merge(source):
        # Both sources must be dicts; anything else is a config error.
        if not isinstance(source, dict):
            raise ValueError('Beacons must be of type dict.')
        merged.update(source)

    if include_pillar:
        _merge(self.opts.get('pillar', {}).get('beacons', {}))
    if include_opts:
        _merge(self.opts.get('beacons', {}))
    return merged
|
def normalize(self):
    """Returns a new normalized (sorted and compacted) :class:`FrameSet`.

    Returns:
        :class:`FrameSet`:
    """
    # NOTE(review): compress=False despite the "compacted" wording above --
    # confirm whether compression was intentionally disabled here.
    return FrameSet(FrameSet.framesToFrameRange(self.items, sort=True, compress=False))
|
def facilityNetToMs():
    """FACILITY Section 9.3.9.1"""
    header = TpPd(pd=0x3)
    # Message type 0x3a = 0b00111010 (FACILITY).
    msg_type = MessageType(mesType=0x3a)
    facility_ie = Facility()
    return header / msg_type / facility_ie
|
def clone(self, label):
    """Clones this volume to a new volume in the same region with the given label.

    :param label: The label for the new volume.
    :returns: The new volume object.
    :raises UnexpectedResponseError: if the API response lacks an ``id`` field.
    """
    result = self._client.post(
        '{}/clone'.format(Volume.api_endpoint),
        model=self,
        data={'label': label},
    )
    # Idiomatic membership test (was ``not 'id' in result``, PEP 8 E713).
    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response cloning volume!')
    return Volume(self._client, result['id'], result)
|
def _none_accepter(validation_callable  # type: Callable
                   ):
    # type: (...) -> Callable
    """Wrap the given validation callable so that None values pass silently.

    When the wrapper receives None, the wrapped ``validation_callable`` is
    not invoked and True is returned immediately; any other value is
    validated as usual.

    Note: the wrapper is given a descriptive name based on the wrapped
    callable for more user-friendly error messages.

    :param validation_callable:
    :return:
    """
    # A plain wrapper (rather than a decorator helper) so that arbitrary
    # non-function callables are supported too.
    def accept_none(x):
        # None bypasses validation entirely; everything else is validated.
        if x is None:
            return True
        return validation_callable(x)

    # Name the wrapper after the wrapped callable for friendlier messages.
    accept_none.__name__ = 'skip_on_none({})'.format(get_callable_name(validation_callable))
    return accept_none
|
def area(boxes):
    """Compute the areas of axis-aligned boxes.

    Args:
        boxes: nx4 floatbox (x_min, y_min, x_max, y_max columns).

    Returns:
        n-length tensor of box areas.
    """
    x_min, y_min, x_max, y_max = tf.split(boxes, 4, axis=1)
    widths = x_max - x_min
    heights = y_max - y_min
    return tf.squeeze(heights * widths, [1])
|
def resend_transaction_frames(self, connection, transaction):
    """Resend the messages that were ACK'd in the specified transaction.

    This is called by the engine when there is an abort command.

    @param connection: The client connection that aborted the transaction.
    @type connection: L{coilmq.server.StompConnection}

    @param transaction: The transaction id (which was aborted).
    @type transaction: C{str}
    """
    pending_frames = self._transaction_frames[connection][transaction]
    for pending in pending_frames:
        self.send(pending)
|
def all(cls):
    """Cache and return every result of ``cls._all()``.

    >>> ec2.instances.all()
    """
    # EAFP: compute and memoize on first access only.
    try:
        return cls._cache
    except AttributeError:
        cls._cache = cls._all()
        return cls._cache
|
def children(args):
    """
    %prog children gff_file

    Get the children that have the same parent.
    """
    p = OptionParser(children.__doc__)
    p.add_option("--parents", default="gene",
                 help="list of features to extract, use comma to separate (e.g."
                      "'gene,mRNA') [default: %default]")
    opts, args = p.parse_args(args)
    if len(args) != 1:
        # Wrong arity: show usage and exit with a non-zero status.
        sys.exit(not p.print_help())
    gff_file, = args
    g = make_index(gff_file)
    parents = set(opts.parents.split(','))
    for feat in get_parents(gff_file, parents):
        # Direct (level-1) children of this parent feature.
        cc = [c.id for c in g.children(feat.id, 1)]
        # Only report parents with at least two children.
        if len(cc) <= 1:
            continue
        print("\t".join(str(x) for x in (feat.id, feat.start, feat.stop, "|".join(cc))))
|
def _close_connection(self, frame_in):
    """Connection Close.

    :param specification.Connection.Close frame_in: Amqp frame.
    :return:
    """
    self._set_connection_state(Stateful.CLOSED)
    # Reply code 200 is a clean close; anything else is an error.
    if frame_in.reply_code == 200:
        return
    reply_text = try_utf8_decode(frame_in.reply_text)
    error = AMQPConnectionError(
        'Connection was closed by remote server: %s' % reply_text,
        reply_code=frame_in.reply_code,
    )
    self._connection.exceptions.append(error)
|
def notify_mail(title, message, recipient=None, sender=None, smtp_host=None, smtp_port=None, **kwargs):
    """Mail notification method taking a *title* and a string *message*.

    *recipient*, *sender*, *smtp_host* and *smtp_port* default to the
    configuration values in the [notifications] section.
    """
    cfg = Config.instance()
    # Fall back to configured values for anything not passed explicitly.
    recipient = recipient or cfg.get_expanded("notifications", "mail_recipient")
    sender = sender or cfg.get_expanded("notifications", "mail_sender")
    smtp_host = smtp_host or cfg.get_expanded("notifications", "mail_smtp_host")
    smtp_port = smtp_port or cfg.get_expanded("notifications", "mail_smtp_port")
    if not recipient or not sender:
        logger.warning("cannot send mail notification, recipient ({}) or sender ({}) empty".format(recipient, sender))
        return False
    return send_mail(recipient, sender, title, message, smtp_host=smtp_host, smtp_port=smtp_port)
|
def global_variable_id_generator(size=10, chars=string.ascii_uppercase):
    """Create a new and unique global variable id.

    Randomly samples ``size`` characters from ``chars`` and concatenates
    them; if the resulting id already exists, a new one is drawn. The new
    id is recorded in the module-level registry before being returned.

    :param size: the length of the generated keys
    :param chars: the set of characters a sample draws from
    """
    while True:
        candidate = ''.join(random.choice(chars) for _ in range(size))
        if candidate not in used_global_variable_ids:
            break
    used_global_variable_ids.append(candidate)
    return candidate
|
def read_pixels(viewport=None, alpha=True, out_type='unsigned_byte'):
    """Read pixels from the currently selected buffer.

    Under most circumstances, this function reads from the front buffer.
    Unlike all other functions in vispy.gloo, this function directly executes
    an OpenGL command.

    Parameters
    ----------
    viewport : array-like | None
        4-element list of x, y, w, h parameters. If None (default),
        the current GL viewport will be queried and used.
    alpha : bool
        If True (default), the returned array has 4 elements (RGBA).
        If False, it has 3 (RGB).
    out_type : str | dtype
        Can be 'unsigned_byte' or 'float'. Note that this does not
        use casting, but instead determines how values are read from
        the current buffer. Can also be numpy dtypes ``np.uint8``,
        ``np.ubyte``, or ``np.float32``.

    Returns
    -------
    pixels : array
        3D array of pixels in np.uint8 or np.float32 format.
        The array shape is (h, w, 3) or (h, w, 4), with the top-left corner
        of the framebuffer at index [0, 0] in the returned array.
    """
    # Check whether the GL context is direct or remote
    context = get_current_canvas().context
    if context.shared.parser.is_remote():
        raise RuntimeError('Cannot use read_pixels() with remote GLIR parser')
    finish()  # noqa - finish first, also flushes GLIR commands
    # Map the accepted out_type spellings onto the GL read types.
    type_dict = {'unsigned_byte': gl.GL_UNSIGNED_BYTE,
                 np.uint8: gl.GL_UNSIGNED_BYTE,
                 'float': gl.GL_FLOAT,
                 np.float32: gl.GL_FLOAT}
    type_ = _check_conversion(out_type, type_dict)
    if viewport is None:
        viewport = gl.glGetParameter(gl.GL_VIEWPORT)
    viewport = np.array(viewport, int)
    if viewport.ndim != 1 or viewport.size != 4:
        raise ValueError('viewport should be 1D 4-element array-like, not %s' % (viewport,))
    x, y, w, h = viewport
    # Byte-align rows so widths that are not multiples of 4 read correctly.
    gl.glPixelStorei(gl.GL_PACK_ALIGNMENT, 1)  # PACK, not UNPACK
    fmt = gl.GL_RGBA if alpha else gl.GL_RGB
    im = gl.glReadPixels(x, y, w, h, fmt, type_)
    # Restore the default pack alignment.
    gl.glPixelStorei(gl.GL_PACK_ALIGNMENT, 4)
    # reshape, flip, and return
    if not isinstance(im, np.ndarray):
        np_dtype = np.uint8 if type_ == gl.GL_UNSIGNED_BYTE else np.float32
        im = np.frombuffer(im, np_dtype)
    im.shape = h, w, (4 if alpha else 3)  # RGBA vs RGB
    # GL rows are bottom-up; flip so [0, 0] is the top-left corner.
    im = im[::-1, :, :]  # flip the image
    return im
|
def pick_best_methods(stochastic):
    """Pick the StepMethods best suited to handle a stochastic variable.

    :param stochastic: the stochastic variable to find step methods for.
    :return: set of the most competent StepMethod classes.
    :raises ValueError: if no registered method reports competence > 0.
    """
    # Keep track of the most competent method seen so far.
    max_competence = 0
    # Empty set of appropriate StepMethods.
    best_candidates = set([])
    # Loop over StepMethodRegistry
    for method in StepMethodRegistry:
        # Parse method and its associated competence. A method that errors
        # while assessing competence is treated as incompetent rather than
        # aborting the whole selection. Was a bare ``except:``, which also
        # swallowed KeyboardInterrupt/SystemExit.
        try:
            competence = method.competence(stochastic)
        except Exception:
            competence = 0
        # If better than current best method, promote it.
        if competence > max_competence:
            best_candidates = set([method])
            max_competence = competence
        # If same competence, add it to the set of best methods.
        elif competence == max_competence:
            best_candidates.add(method)
    if max_competence <= 0:
        raise ValueError(
            'Maximum competence reported for stochastic %s is <= 0... you may need to write a custom step method class.' % stochastic.__name__)
    # print_(s.__name__ + ':', best_candidates, ' ', max_competence)
    return best_candidates
|
def __add_tokenization(self, tree):
    """adds a node for each token ID in the document"""
    token_ids = self.get_token_ids(tree)
    for tid in token_ids:
        # Register the token as a node in this namespace layer.
        self.add_node(tid, layers={self.ns})
        self.tokens.append(tid)
|
def soundex(self, name, length=8):
    '''Calculate soundex of given string.

    This function calculates soundex for Indian language strings
    as well as English strings. It is exposed as a service method
    for JSONRPC in the SILPA framework.

    :param name: String whose soundex value is to be calculated.
    :param length: Length of the final soundex string; if the soundex
                   calculated is longer than this it will be truncated
                   to ``length``.
    :return: Soundex string of ``name``.
    '''
    sndx = []
    first_char = name[0]
    # Translate the remaining characters to soundex digits.
    for char in name[1:].lower():
        digit = str(self.soundexCode(char))
        # Remove all 0s from the soundex code: they carry no information.
        if digit == '0':
            continue
        # Duplicate consecutive soundex digits are skipped.
        if not sndx or digit != sndx[-1]:
            sndx.append(digit)
    # Prepend the first character of the original string.
    sndx.insert(0, first_char)
    if get_language(name[0]) == 'en_US':
        # English strings are not padded.
        return ''.join(sndx)
    if len(sndx) < length:
        sndx.extend(repeat('0', length))
    # (The original had this same return duplicated as unreachable dead code.)
    return ''.join(sndx[:length])
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.