signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def assoc(objects, sitecol, assoc_dist, mode, asset_refs=()):
    """Associate geographic objects to a site collection.

    :param objects:
        something with .lons, .lats or ['lon'] ['lat'], or a list of lists
        of objects with a .location attribute (i.e. assets_by_site)
    :param assoc_dist: the maximum distance for association
    :param mode:
        if 'strict' fail if at least one site is not associated;
        if 'error' fail if all sites are not associated
    :returns: (filtered site collection, filtered objects)
    """
    is_geo_array = isinstance(objects, numpy.ndarray) or hasattr(objects, 'lons')
    if is_geo_array:
        # a geo array with lon/lat fields, or a mesh-like instance
        return _GeographicObjects(objects).assoc(sitecol, assoc_dist, mode)
    # otherwise: the assets_by_site list of lists
    return _GeographicObjects(sitecol).assoc2(objects, assoc_dist, mode, asset_refs)
def check_next_match(self, match, new_relations, subject_graph, one_match):
    """Check if the (onset for a) match can be a valid (part of a) ring."""
    # reject duplicate rings caused by the order of traversal
    if len(match) == 3 and match.forward[1] < match.forward[2]:
        return False
    start = match.forward[0]
    # reject duplicate rings caused by the starting point
    if any(vertex < start for vertex in new_relations.values()):
        return False
    # strong-ring check: every newly added vertex must have exactly one
    # shortest path back to the start, of the expected length
    expected_len = (len(match) + 1) // 2
    for vertex in new_relations.values():
        paths = list(subject_graph.iter_shortest_paths(vertex, start))
        if len(paths) != 1 or len(paths[0]) != expected_len:
            return False
    return True
def get_config(basedir, files):
    """Return the config object for the selected docker-compose.yml.

    This is an instance of `compose.config.config.Config`.
    """
    env = environment.Environment.from_env_file(basedir)
    config_details = config.find(basedir, files, env)
    return config.load(config_details)
def get_certificate(self, **kwargs):
    """GetCertificate.

    [Preview API]
    :param kwargs: may contain ``callback``, forwarded to the stream download.
    :rtype: object
    """
    response = self._send(http_method='GET',
                          location_id='2e0dbce7-a327-4bc0-a291-056139393f6d',
                          version='5.0-preview.1',
                          accept_media_type='application/octet-stream')
    # dict.get replaces the verbose "in"-check/else-None lookup
    callback = kwargs.get("callback")
    return self._client.stream_download(response, callback=callback)
def discover_datasource_columns(datastore_str, datasource_id):
    """Find the datasource identified by datasource_id in the datastore
    and return its columns (an empty list for RASTER datasources)."""
    datasource = DataStore(datastore_str).get_datasource(datasource_id)
    if datasource.type == "RASTER":
        # raster datasources carry no column metadata
        return []
    return datasource.list_columns()
def job_tasks(self, job_id, type=None):
    """Obtain the collection of task resources within a job.

    :param str job_id: The job id
    :param str type: task type, 'm' for map or 'r' for reduce
    :returns: API response object with JSON data
    :rtype: :py:class:`yarn_api_client.base.Response`
    """
    path = '/ws/v1/history/mapreduce/jobs/{jobid}/tasks'.format(jobid=job_id)
    # only map ('m') and reduce ('r') tasks exist
    if type is not None and type not in ('m', 'r'):
        raise IllegalArgumentError('Job type %s is illegal' % (type,))
    params = {'type': type} if type is not None else {}
    return self.request(path, **params)
def getContactItems(self, person):
    """Return an iterable of L{PhoneNumber} items associated with C{person}.

    @type person: L{Person}
    """
    store = person.store
    return store.query(PhoneNumber, PhoneNumber.person == person)
def match(self, url):
    """Return a list of all active Messages which match the given URL
    (global messages always match); duplicates are removed."""
    matched = set()
    for message in self.active():
        if message.is_global or message.match(url):
            matched.add(message)
    return list(matched)
def ToHtml(self, columns_order=None, order_by=()):
    """Render the data table as an HTML table code string.

    Args:
      columns_order: Optional list of ALL column IDs, in the order the
          columns should appear in the output table.
      order_by: Optional column name(s) to sort by; passed to _PreparedData.

    Returns:
      An HTML table code string, e.g. (without newlines):
      <html><body><table border="1"><thead>...</thead><tbody>...</tbody>
      </table></body></html>

    Raises:
      DataTableException: The data does not match the type.
    """
    table_template = "<html><body><table border=\"1\">%s</table></body></html>"
    columns_template = "<thead><tr>%s</tr></thead>"
    rows_template = "<tbody>%s</tbody>"
    row_template = "<tr>%s</tr>"
    header_cell_template = "<th>%s</th>"
    cell_template = "<td>%s</td>"
    if columns_order is None:
        columns_order = [col["id"] for col in self.__columns]
    col_dict = {col["id"]: col for col in self.__columns}
    # Header row
    columns_html = columns_template % "".join(
        header_cell_template % html.escape(col_dict[col]["label"])
        for col in columns_order)
    # Body rows, in prepared (possibly sorted) order
    rows_list = []
    for row, unused_cp in self._PreparedData(order_by):
        cells_list = []
        for col in columns_order:
            # Missing / None values render as an empty cell.
            value = ""
            if col in row and row[col] is not None:
                value = self.CoerceValue(row[col], col_dict[col]["type"])
            if isinstance(value, tuple):
                # A formatted value is available -- prefer it over the raw one.
                value = value[1]
            cells_list.append(cell_template % html.escape(self.ToString(value)))
        rows_list.append(row_template % "".join(cells_list))
    rows_html = rows_template % "".join(rows_list)
    return table_template % (columns_html + rows_html)
def _sim_prediction(self, mu, Y, h, t_z, simulations):
    """Simulate h-step-ahead mean predictions.

    Parameters
    ----------
    mu : np.ndarray
        The past predicted values
    Y : np.ndarray
        The past data
    h : int
        How many steps ahead for the prediction
    t_z : np.ndarray
        A vector of (transformed) latent variables
    simulations : int
        How many simulations to perform

    Returns
    -------
    Matrix of simulations, shape (h, simulations)
    """
    model_scale, model_shape, model_skewness = self._get_scale_and_shape(t_z)
    sim_vector = np.zeros([simulations, h])
    for sim_idx in range(simulations):
        # Work on a fresh copy of history for each simulation path
        history = Y.copy()
        for step in range(h):
            if self.ar != 0:
                # Normalize the most recent `ar` lags (newest first)
                lags = (history[-self.ar:][::-1] - self._norm_mean) / self._norm_std
                new_value = self.predict_new(np.append(1.0, lags),
                                             self.latent_variables.get_z_values())
            else:
                new_value = self.predict_new(np.array([1.0]),
                                             self.latent_variables.get_z_values())
            # Add noise scaled by the last latent variable
            new_value += np.random.randn(1) * t_z[-1]
            if self.model_name2 == "Exponential":
                mean_arg = 1.0 / self.link(new_value)
            else:
                mean_arg = self.link(new_value)
            rnd_value = self.family.draw_variable(mean_arg, model_scale,
                                                  model_shape, model_skewness, 1)[0]
            history = np.append(history, [rnd_value])
        sim_vector[sim_idx] = history[-h:]
    return np.transpose(sim_vector)
def update(self, title=None, description=None, images=None, cover=None, layout=None, privacy=None):
    """Update the album's information.

    Arguments with the value None retain their old values.

    :param title: The title of the album.
    :param description: A description of the album.
    :param images: A list of the images we want the album to contain.
        Can be Image objects, ids or a combination of the two. Images
        that you cannot set (non-existing or not owned by you) will not
        cause exceptions, but fail silently.
    :param privacy: The album's privacy level: public, hidden or secret.
    :param cover: The id of the cover image.
    :param layout: How the album is displayed: blog, grid, horizontal
        or vertical.
    """
    # NOTE: locals() is sent as the request parameters, so no extra local
    # variables may be introduced before the request is made.
    url = "{0}/3/album/{1}".format(self._imgur._base_url, self._delete_or_id_hash)
    is_updated = self._imgur._send_request(url, params=locals(), method='POST')
    if is_updated:
        self.title = title or self.title
        self.description = description or self.description
        self.layout = layout or self.layout
        self.privacy = privacy or self.privacy
        if cover is not None:
            self.cover = (cover if isinstance(cover, Image)
                          else Image({'id': cover}, self._imgur, has_fetched=False))
        if images:
            self.images = [img if isinstance(img, Image)
                           else Image({'id': img}, self._imgur, False)
                           for img in images]
    return is_updated
def dct_spectrum ( spec ) :
"""Convert a spectrum into a cepstrum via type - III DCT ( following HTK ) .
Parameters
spec : array
Spectrum to perform a DCT on .
Returns
array
Cepstrum of the input spectrum .""" | ncep = spec . shape [ 0 ]
dctm = np . zeros ( ( ncep , ncep ) )
for i in range ( ncep ) :
dctm [ i , : ] = np . cos ( i * np . arange ( 1 , 2 * ncep , 2 ) / ( 2 * ncep ) * np . pi ) * np . sqrt ( 2 / ncep )
dctm *= 0.230258509299405
cep = np . dot ( dctm , ( 10 * np . log10 ( spec + np . spacing ( 1 ) ) ) )
return cep |
def list_settings(self):
    """Get list of all appropriate settings and their default values."""
    result = super().list_settings()
    result.extend([
        (self.SETTING_FLAG_BORDER, True),
        (self.SETTING_BORDER_FORMATING, {}),
        (self.SETTING_BORDER_STYLE, 'utf8.a'),
    ])
    return result
def writer(datasets, dataset_names, output_dir):
    """Write each non-empty dataset to '<output_dir>/<name>.txt'.

    Each dataset is written one item per line, with a trailing newline.
    Empty/falsy datasets produce no file.
    """
    for dataset, dataset_name in zip(datasets, dataset_names):
        if not dataset:
            continue
        filepath = output_dir + '/' + dataset_name + '.txt'
        # 'w' instead of 'w+': the file is never read back here; the
        # encode('utf-8').decode('utf-8') round-trip was a no-op and is gone.
        with open(filepath, 'w') as out_file:
            out_file.write('\n'.join(dataset))
            out_file.write('\n')
def get_ip():
    """Return this machine's origin IP address, or None on failure."""
    try:
        resp = requests.get(HTTPBIN_URL)
        # httpbin's 'origin' field is "ip, ip" when behind a proxy
        first, second = resp.json()['origin'].split(',')
        return first if resp.status_code == 200 else None
    except requests.exceptions.ConnectionError:
        return None
def close_umanager(self, force=False):
    """Close an uManager session.

    :param force: try to close a session regardless of the connection
        object's internal state
    """
    if not force and not self.umanager_opened:
        return
    # get a fresh prompt before issuing the termination command
    self.ser.write(self.cr)
    if not self.read_loop(lambda x: x.endswith(self.umanager_prompt), self.timeout):
        log.debug("uManager already closed")
    else:
        self.ser.write(''.join((self.cmd_umanager_termination, self.cr)))
        if not self.read_loop(lambda x: x.endswith(self.buf_on_exit), self.timeout):
            raise Dam1021Error(2, "Failed to close uManager")
        log.debug("uManager closed")
    self.umanager_opened = False
def local_path(path):
    """Return the absolute path of *path* relative to this project's root."""
    root = os.path.dirname(__file__)
    return os.path.abspath(os.path.join(root, path))
def timed_grep_nodes_for_patterns(self, versions_to_patterns, timeout_seconds, filename="system.log"):
    """Search all nodes for a version-specific regular expression.

    Params:
    @versions_to_patterns: an instance of LogPatternToVersionMap, specifying
        the different log patterns based on a node's version.
    @timeout_seconds: how long to spend searching the logs for.
    @filename: the log file to search. Defaults to "system.log".

    Returns the first node where the pattern was found, along with the
    matching lines. Raises a TimeoutError if the pattern is not found
    within the timeout.
    """
    # Hoisted out of the loop: the namedtuple type never changes.
    NodeLogMatching = namedtuple('Node_Log_Matching', 'node matchings')
    end_time = time.time() + timeout_seconds
    while True:
        if time.time() > end_time:
            # %s formatting tolerates a non-string `patterns` (a container);
            # the former '+' concatenation would raise TypeError instead of
            # reporting the timeout.
            raise TimeoutError("%s Unable to find: %s in any node log within %ss" % (
                time.strftime("%d %b %Y %H:%M:%S", time.gmtime()),
                versions_to_patterns.patterns, timeout_seconds))
        for node in self.nodelist():
            pattern = versions_to_patterns(node.get_cassandra_version())
            matchings = node.grep_log(pattern, filename)
            if matchings:
                return NodeLogMatching(node=node, matchings=matchings)
        time.sleep(1)
def import_data(self, dataset, dry_run=False, raise_errors=False, use_transactions=None, collect_failed_rows=False, **kwargs):
    """Import data from a ``tablib.Dataset``. See :doc:`import_workflow`
    for a full description of the import process.

    :param dataset: A ``tablib.Dataset``
    :param raise_errors: Whether errors should be raised rather than
        reported to the end user.
    :param use_transactions: If ``True`` the import runs inside a transaction.
    :param collect_failed_rows: If ``True`` failed rows are collected.
    :param dry_run: If set (or on error), an active transaction is rolled back.
    """
    if use_transactions is None:
        use_transactions = self.get_use_transactions()
    connection = connections[DEFAULT_DB_ALIAS]
    db_supports_tx = getattr(connection.features, "supports_transactions", False)
    if use_transactions and not db_supports_tx:
        # Transactions were requested but the backend cannot provide them.
        raise ImproperlyConfigured
    using_transactions = (use_transactions or dry_run) and db_supports_tx
    with atomic_if_using_transaction(using_transactions):
        return self.import_data_inner(dataset, dry_run, raise_errors,
                                      using_transactions, collect_failed_rows,
                                      **kwargs)
def get_tasks_with_tag(self, tag, params=None, **options):
    """Return the compact task records for all tasks with the given tag.

    Tasks can have more than one tag at a time.

    Parameters
    ----------
    tag : {Id} The tag to fetch tasks from.
    [params] : {Object} Parameters for the request
    """
    path = "/tags/%s/tasks" % (tag)
    # None default instead of a shared mutable {} default argument;
    # an explicit empty dict is still passed downstream.
    return self.client.get_collection(path, params if params is not None else {}, **options)
def post_parse(self, uri, params=None, data=None):
    '''Convenience method: call post() on an arbitrary URI and parse the
    response into a JSON object. Raises an error on non-200 status.

    None defaults replace the shared mutable {} default arguments; empty
    dicts are still passed downstream for backward compatibility.
    '''
    return self._request_parse(self.post, uri,
                               params if params is not None else {},
                               data if data is not None else {})
def type(subtag, type):
    """Get a :class:`language_tags.Subtag.Subtag` by subtag and type.

    :param str subtag: subtag.
    :param str type: type of the subtag.
    :return: the Subtag if it exists, otherwise None.
    """
    key = subtag.lower()
    types = index.get(key)
    if types is not None and type in types:
        return Subtag(key, type)
    return None
def add_node(self, node):
    """Add a node to this network and let the node know which network it's on."""
    if _debug:
        Network._debug("add_node %r", node)
    self.nodes.append(node)
    node.lan = self
    if not node.name:
        # give unnamed nodes a default "<network>:<address>" name
        node.name = '{}:{}'.format(self.name, node.address)
def rename(self, name):
    """Set the machine's 'Name' tag and refresh the cached instance info."""
    name_tag = [{'Key': 'Name', 'Value': name}]
    self.ec2.create_tags(Resources=[self.instance_id], Tags=name_tag)
    self.refresh_info()
def collect_impl(self):
    """Overrides DistJarChange and DistClassChange from the underlying
    DistChange with DistJarReport and DistClassReport instances."""
    for change in DistChange.collect_impl(self):
        if isinstance(change, DistJarChange) and change.is_change():
            sub = self.reporter.subreporter(change.entry, DistJarReport.report_name)
            change = DistJarReport(change.ldata, change.rdata, change.entry, sub)
        elif isinstance(change, DistClassChange) and change.is_change():
            sub = self.reporter.subreporter(change.entry, DistClassReport.report_name)
            change = DistClassReport(change.ldata, change.rdata, change.entry, sub)
        yield change
def partition_node(node):
    """Split a 'host:port' string into a (host, int(port)) pair.

    The port defaults to 27017; IPv6 brackets around the host are stripped.
    """
    host, sep, port_str = node.rpartition(':')
    if sep:
        port = int(port_str)
    else:
        host, port = node, 27017
    if host.startswith('['):
        host = host[1:-1]
    return host, port
def _is_unordered ( collection ) :
"""Determine whether a collection appears to be unordered .
This is a conservative implementation , allowing for the possibility that
someone ' s implemented Mapping or Set , for example , and provided an
_ _ iter _ _ implementation that defines a consistent ordering of the
collection ' s elements .
: param object collection : Object to check as an unordered collection .
: return bool : Whether the given object appears to be unordered
: raises TypeError : If the given " collection " is non - iterable , it ' s
illogical to investigate whether it ' s ordered .""" | if not isinstance ( collection , Iterable ) :
raise TypeError ( "Non-iterable alleged collection: {}" . format ( type ( collection ) ) )
return isinstance ( collection , set ) or ( isinstance ( collection , dict ) and not isinstance ( collection , OrderedDict ) ) |
def hashZ(*args):
    """Hash *args into a BigInt using a cryptographic hash function."""
    TAG = "TAG_RELIC_HASH_Z"
    MESSAGE = "MESSAGE_HASH_Z"
    # Canonical string representation: stringified args joined by the tag
    text = TAG.join(str(val) for val in args)
    # Keyed hash over the canonical string; returns a byte string
    digest = hmac(text, MESSAGE)
    return BigInt(longFromString(digest))
async def connected_check(ctx: commands.Context):
    """Check whether we are connected to VC in this guild."""
    voice = ctx.guild.voice_client
    if voice and voice.is_connected():
        return True
    raise commands.CheckFailure("Not connected to VC in this guild")
def get_ip_info(ip):
    """Return geo information about an IP address as a GeoData tuple.

    `code` - ISO country code; `name` - full country name;
    `region_code` / `region_name` - ISO code / name of region;
    `city_name` - full city name. Unknown fields fall back to
    '--'/'Unknown'.
    """
    try:
        record = _mmdb_reader.get(ip) or {}
    except (maxminddb.errors.InvalidDatabaseError, ValueError):
        record = {}
    code, name = '--', 'Unknown'
    city_name = region_code = region_name = 'Unknown'
    # Prefer country data; fall back to the continent
    if 'country' in record:
        code = record['country']['iso_code']
        name = record['country']['names']['en']
    elif 'continent' in record:
        code = record['continent']['code']
        name = record['continent']['names']['en']
    if 'city' in record:
        city_name = record['city']['names']['en']
    if 'subdivisions' in record:
        subdivision = record['subdivisions'][0]
        region_code = subdivision['iso_code']
        region_name = subdivision['names']['en']
    return GeoData(code, name, region_code, region_name, city_name)
def get_league_scores(self, league, time, show_upcoming, use_12_hour_format):
    """Query the API and display scores for fixtures filtered by league
    and time window."""
    # 'n' selects upcoming (next) fixtures, 'p' past ones
    time_frame = 'n' if show_upcoming else 'p'
    if league:
        try:
            league_id = self.league_ids[league]
            req = self._get('competitions/{id}/matches?timeFrame={time_frame}{time}'.format(
                id=league_id, time_frame=time_frame, time=str(time)))
            fixtures_results = req.json()
            if not fixtures_results["matches"]:
                # nothing in the window: show a hint instead of an empty table
                click.secho("No {league} matches in the past week.".format(league=league),
                            fg="red", bold=True)
                return
            self.writer.league_scores(fixtures_results, time, show_upcoming, use_12_hour_format)
        except APIErrorException:
            click.secho("No data for the given league.", fg="red", bold=True)
        return
    # No league given: print everything available in the time frame
    try:
        req = self._get('matches?timeFrame={time_frame}{time}'.format(
            time_frame=time_frame, time=str(time)))
        self.writer.league_scores(req.json(), time, show_upcoming, use_12_hour_format)
    except APIErrorException:
        click.secho("No data available.", fg="red", bold=True)
def init(self):
    """Initialize the URL used to connect to SABnzbd."""
    fields = {'host': self.host, 'port': self.port, 'api_key': self.api_key}
    self.url = self.url.format(**fields)
def unset_key(dotenv_path, key_to_unset, quote_mode="always"):
    """Remove a given key from the given .env file.

    Warns and returns (None, key) if the file or the key does not exist;
    returns (True, key) on success.
    """
    if not os.path.exists(dotenv_path):
        warnings.warn("can't delete from %s - it doesn't exist." % dotenv_path)
        return None, key_to_unset
    removed = False
    with rewrite(dotenv_path) as (source, dest):
        for mapping in parse_stream(source):
            if mapping.key != key_to_unset:
                dest.write(mapping.original)
            else:
                removed = True
    if removed:
        return removed, key_to_unset
    warnings.warn("key %s not removed from %s - key doesn't exist." % (key_to_unset, dotenv_path))
    return None, key_to_unset
def put(self, result, decorated_function, *args, **kwargs):
    """:meth:`WCacheStorage.put` method implementation

    Stores ``result`` for ``decorated_function``, keyed by the bound
    instance (``args[0]``). Entries are held via weak references so a
    garbage-collected instance drops its cache entry automatically.
    """
    # Validate the call signature before caching (project-defined check).
    self.__check(decorated_function, *args, **kwargs)
    # Weak reference to the instance: caching must not keep it alive.
    ref = weakref.ref(args[0])
    if decorated_function not in self._storage:
        # First cache entry for this function.
        cache_entry = self._cache_record_cls.create(result, decorated_function, *args, **kwargs)
        self._storage[decorated_function] = [{'instance': ref, 'result': cache_entry}]
    else:
        # Look for an existing entry belonging to this same instance.
        instance_found = False
        for i in self._storage[decorated_function]:
            if i['instance']() == args[0]:
                # Same instance already cached: update the record in place.
                cache_entry = i['result']
                cache_entry.update(result, *args, **kwargs)
                instance_found = True
                break
        if instance_found is False:
            # New instance for an already-known function: append an entry.
            cache_entry = self._cache_record_cls.create(result, decorated_function, *args, **kwargs)
            self._storage[decorated_function].append({'instance': ref, 'result': cache_entry})

    def finalize_ref():
        # Called by weakref.finalize when the instance is collected:
        # drop the entry (and the whole function slot if it is the last one).
        if decorated_function in self._storage:
            fn_list = self._storage[decorated_function]
            if len(fn_list) == 1 and fn_list[0]['instance'] == ref:
                del self._storage[decorated_function]
            for i in range(len(fn_list)):
                if fn_list[i]['instance'] == ref:
                    fn_list.pop(i)
                    return
    weakref.finalize(args[0], finalize_ref)
def measurementReport():
    """MEASUREMENT REPORT Section 9.1.21"""
    # Protocol discriminator 0x6, message type 0x15 (0b00010101),
    # followed by the measurement results IE.
    packet = TpPd(pd=0x6) / MessageType(mesType=0x15) / MeasurementResults()
    return packet
def reader(self, sock, handler, arg):
    """Register a socket reader with the reactor. When the reader has
    messages, the reactor calls the handler with the arg. Returns 0 if
    OK, -1 on error. Registering the same socket more than once invokes
    each corresponding handler.
    """
    rc = lib.zloop_reader(self._as_parameter_, sock, handler, arg)
    return rc
def scale_flow(flow, to_unit=True):
    "Scale the coords in `flow` to -1/1 or the image size depending on `to_unit`."
    half_size = tensor([flow.size[0] / 2, flow.size[1] / 2])[None]
    if to_unit:
        flow.flow = flow.flow / half_size - 1
    else:
        flow.flow = (flow.flow + 1) * half_size
    return flow
def length(text, maxval=None, encoding=None):
    '''Count the length of a str the way Twitter does, double-counting
    "wide" characters (e.g. ideographs, emoji).

    Args:
        text (str): Text to count. Must be a unicode string.
        maxval (int): The maximum codepoint counted as 1 character.
            Defaults to 4351 (GEORGIAN LETTER LABIAL SIGN, U+10FF).
    Returns:
        int
    '''
    maxval = maxval or 4351
    # Direct isinstance check instead of `assert` + except AssertionError:
    # asserts are stripped under `python -O`, silently disabling validation.
    if isinstance(text, bytes):
        raise TypeError('helpers.length requires a unicode argument')
    return sum(2 if ord(x) > maxval else 1
               for x in unicodedata.normalize('NFC', text))
def clean_pe_name(self, nlog, root):
    """Additional name cleaning for paired-end data; returns None if no
    sample name can be extracted from the FLASH log."""
    use_output_name = getattr(config, 'flash', {}).get('use_output_name', False)
    if use_output_name:
        pattern = r'Output files\:\n\[FLASH\]\s+(.+?)\n'
    else:
        pattern = r'Input files\:\n\[FLASH\]\s+(.+?)\n'
    found = re.search(pattern, nlog)
    if found is None:
        return None
    return self.clean_s_name(found.group(1), root)
def on_for_rotations(self, steering, speed, rotations, brake=True, block=True):
    """Rotate the motors according to the provided ``steering``.

    The distance each motor travels follows the rules of
    :meth:`MoveTank.on_for_rotations`.
    """
    left_speed, right_speed = self.get_speed_steering(steering, speed)
    MoveTank.on_for_rotations(self, SpeedNativeUnits(left_speed),
                              SpeedNativeUnits(right_speed), rotations, brake, block)
def Getvar(self, var, info_cb=DEFAULT_MESSAGE_CALLBACK):
    """Return the given variable's definition.

    Args:
        var: A variable the bootloader tracks. Use 'all' to get them all.
        info_cb: See Download. Usually no messages.
    Returns:
        Value of var according to the current bootloader.
    """
    return self._SimpleCommand(b'getvar', arg=var, info_cb=info_cb)
def get_view_for_id(self, view_class, element_id, parent_item=None):
    """Search for and return the view with the given id and type.

    :param view_class: The view type to search for
    :param element_id: The id of the element of the searched view
    :param gaphas.item.Item parent_item: Restrict the search to this parent item
    :return: The view for the given id, or None if not found
    """
    from rafcon.gui.mygaphas.items.state import StateView
    from rafcon.gui.mygaphas.items.connection import DataFlowView, TransitionView
    if parent_item is None:
        items = self.get_all_items()
    else:
        items = self.get_children(parent_item)
    # Exact-class dispatch table: how to read the element id off each view type
    id_getters = {
        StateView: lambda it: it.model.state.state_id,
        TransitionView: lambda it: it.model.transition.transition_id,
        DataFlowView: lambda it: it.model.data_flow.data_flow_id,
    }
    getter = id_getters.get(view_class)
    for item in items:
        if getter is not None and isinstance(item, view_class) and getter(item) == element_id:
            return item
    return None
def remove_escapes(self):
    """Return self.string with every backslash character removed.

    (Only the backslash itself is dropped; the character that follows it
    is kept.)
    """
    return self.string.replace("\\", "")
def get_revision_of_build_configuration(revision_id, id=None, name=None):
    """Get a specific audited revision of a BuildConfiguration.

    Returns the formatted JSON list, or None if no data was found.
    """
    data = get_revision_of_build_configuration_raw(revision_id, id, name)
    return utils.format_json_list(data) if data else None
def _is_final(meta, arg):
    """Check whether the given class or method has been marked with the
    ``@final`` decorator."""
    # Of classes, only subclasses of Object (ObjectMetaclass instances)
    # can be final.
    if inspect.isclass(arg) and not isinstance(arg, ObjectMetaclass):
        return False
    # Unwrap method wrappers, such as the one introduced by @override.
    from taipan.objective.modifiers import _WrappedMethod
    target = arg.method if isinstance(arg, _WrappedMethod) else arg
    return getattr(target, '__final__', False)
def get_current(self):
    """Get the current forecast (a zero-width time window at "now")."""
    timestamp = dt.now().timestamp()
    url = build_url(self.api_key, self.spot_id, self.fields, self.unit,
                    timestamp, timestamp)
    return get_msw(url)
def header_footer_exists(filepath):
    """Return the match object (truthy) if Utils.exp matches the file's
    contents, else None.

    NOTE(review): the original docstring said "Check if
    directory-components is listed in requirements files" -- presumably
    Utils.exp encodes that pattern; confirm against Utils.
    """
    with open(filepath) as handle:
        contents = handle.read()
    return re.search(Utils.exp, contents)
def parse_uri(uri):
    '''Parse a "backend://database:table" URI into (database, table).'''
    # reject empty URIs and URIs with no (or a leading) '://'
    if not uri or '://' not in uri or uri.startswith('://'):
        raise RuntimeError('Incorrect URI definition: {}'.format(uri))
    backend, rest_uri = uri.split('://')
    if backend not in SUPPORTED_BACKENDS:
        raise RuntimeError('Unknown backend: {}'.format(backend))
    database, table = rest_uri.rsplit(':', 1)
    return database, table
def getHTML(self):
    '''getHTML - Get the full HTML as contained within this tree.
    If parsed from a document, this will contain the original whitespacing.

    @returns - <str> of html
    @see getFormattedHTML
    @see getMiniHTML
    '''
    root = self.getRoot()
    if root is None:
        raise ValueError('Did not parse anything. Use parseFile or parseStr')
    if self.doctype:
        doctypeStr = '<!%s>\n' % (self.doctype)
    else:
        doctypeStr = ''
    # 6.6.0: With a real root tag, emit the outerHTML. With a fake root tag
    # (multiple-root condition) emit the innerHTML (skipping the fake root),
    # otherwise untagged text between the multiple roots would be lost.
    # (The redundant second self.getRoot() call was removed; `root` already
    # holds the result.)
    if root.tagName == INVISIBLE_ROOT_TAG:
        return doctypeStr + root.innerHTML
    return doctypeStr + root.outerHTML
def _change_volume ( self , increase ) :
"""Change volume using amixer""" | sign = "+" if increase else "-"
delta = "%d%%%s" % ( self . volume_tick , sign )
self . _run ( [ "amixer" , "-q" , "sset" , "Master" , delta ] ) |
def Read(self, length):
    """Read up to *length* bytes from the file, clamped to the bytes
    remaining (self.size - self.offset). Stops early if a partial read
    returns nothing."""
    chunks = []
    remaining = min(int(length), self.size - self.offset)
    while remaining > 0:
        data = self._ReadPartial(remaining)
        if not data:
            break
        remaining -= len(data)
        chunks.append(data)
    # join instead of repeated bytes concatenation
    return b"".join(chunks)
def D_dt(self, H_0, Om0, Ode0=None):
    """Time delay distance.

    :param H_0: Hubble parameter [km/s/Mpc]
    :param Om0: normalized matter density at present time
    :return: float [Mpc]
    """
    cosmology = self._get_cosom(H_0, Om0, Ode0)
    return cosmology.D_dt
def metadata(request):
    """Return an XML response with the SAML 2.0 metadata for this IdP.

    The metadata is constructed on the fly from the config dict in the
    Django settings.
    """
    conf = IdPConfig()
    conf.load(copy.deepcopy(settings.SAML_IDP_CONFIG))
    xml = text_type(entity_descriptor(conf)).encode('utf-8')
    return HttpResponse(content=xml, content_type="text/xml; charset=utf8")
def rgba_to_int(cls, red, green, blue, alpha):
    """Encode the color as a signed 32-bit integer in RGBA byte order.

    Returns None if any of red, green or blue is None.  A None alpha
    defaults to 255 (fully opaque).

    :return: Integer
    :rtype: int
    """
    red = unwrap(red)
    green = unwrap(green)
    blue = unwrap(blue)
    alpha = unwrap(alpha)
    if red is None or green is None or blue is None:
        return None
    if alpha is None:
        alpha = 255
    packed = (red << 24) + (green << 16) + (blue << 8) + alpha
    # Wrap into the signed 32-bit integer range.
    if packed > (2 ** 31 - 1):
        packed -= 2 ** 32
    return packed
def homogeneity(transition_matrices, regime_names=None, class_names=None,
                title="Markov Homogeneity Test"):
    """Test for homogeneity of Markov transition probabilities across regimes.

    Parameters
    ----------
    transition_matrices : list
        of transition matrices for regimes, all matrices must have the
        same size (r, c). r is the number of rows in the transition
        matrix and c is the number of columns in the transition matrix.
    regime_names : sequence, optional
        Labels for the regimes.
    class_names : sequence, optional
        Labels for the classes/states of the Markov chain.
    title : string
        name of test.

    Returns
    -------
    : implicit
        an instance of Homogeneity_Results.
    """
    # Bug fix: the original used mutable list defaults ([]), which are
    # shared across calls; use None sentinels instead.
    if regime_names is None:
        regime_names = []
    if class_names is None:
        class_names = []
    return Homogeneity_Results(transition_matrices,
                               regime_names=regime_names,
                               class_names=class_names,
                               title=title)
def start(self):
    """Start the download.

    :raises SbgError: if the download is not in the PREPARING state.
    """
    # Guard clause: only a prepared transfer may be started.
    if self._status != TransferState.PREPARING:
        raise SbgError('Unable to start. Download not in PREPARING state.')
    self._running.set()
    super(Download, self).start()
    self._status = TransferState.RUNNING
    self._time_started = time.time()
def get_objects(self, instance):
    """Return a list of objects matching the faceted result.

    When the view paginates the queryset, a dict mirroring DRF's
    paginated response shape (count/next/previous/results) is returned;
    otherwise the plain serialized list is returned.
    """
    view = self.context["view"]
    queryset = self.context["objects"]
    page = view.paginate_queryset(queryset)
    if page is None:
        return view.get_serializer(queryset, many=True).data
    serializer = view.get_facet_objects_serializer(page, many=True)
    return OrderedDict([
        ("count", self.get_count(queryset)),
        ("next", view.paginator.get_next_link()),
        ("previous", view.paginator.get_previous_link()),
        ("results", serializer.data),
    ])
def get_output_mode(output, mode):
    """Resolve the converter for the requested output.

    From the output name and the mode, returns the function that will
    transform the intermediary representation to the output.

    :param output: output file name; its extension selects the
        converter when ``mode`` is 'auto'
    :param mode: explicit mode name, or 'auto' to infer from extension
    :raises ValueError: if an explicit mode is unknown
    """
    if mode != 'auto':
        try:
            return switch_output_mode_auto[mode]
        except KeyError:
            # Bug fix: the original raised the message with an unfilled
            # '{}' placeholder because .format(mode) was never called.
            raise ValueError('Mode "{}" is not supported.'.format(mode))
    extension = output.split('.')[-1]
    try:
        return switch_output_mode[extension]
    except KeyError:
        # Unknown extension: fall back to the schema converter.
        return intermediary_to_schema
def get_num_names_owned(state_engine, checked_ops, sender):
    """Find out how many names a given sender (i.e. a script) actually
    owns, as of this transaction.

    Counts both the NAME_REGISTRATION ops in ``checked_ops`` attributed
    to the sender and the names the state engine already records.
    """
    registrations = find_by_opcode(checked_ops, "NAME_REGISTRATION")
    count = sum(1 for reg in registrations if reg['sender'] == sender)
    count += len(state_engine.get_names_owned_by_sender(sender))
    log.debug("Sender '%s' owns %s names" % (sender, count))
    return count
def MAC(self, days, rev=0):
    """Compare today's ``days``-day moving average of the closing price
    against yesterday's.

    :param days: window size of the moving average
    :param rev: 0 returns an up/down/equal symbol, 1 returns 1/-1/0
    """
    history = self.raw_data[:]
    history.pop()  # drop today's price to obtain yesterday's series
    yesterday_ma = float(sum(history[-days:]) / days)
    today_ma = self.MA(days)
    return self.high_or_low(today_ma, yesterday_ma, rev)
def post(self, request, *args, **kwargs):
    """Handle a XML-RPC or JSON-RPC request.

    :param request: Incoming request
    :param args: Additional arguments
    :param kwargs: Additional named arguments
    :return: A HttpResponse containing XML-RPC or JSON-RPC response,
        depending on the incoming request
    """
    logger.debug('RPC request received...')
    # Try each configured handler in turn; the first one able to handle
    # the request's protocol processes it.
    for handler_cls in self.get_handler_classes():
        handler = handler_cls(request, self.entry_point)
        try:
            if not handler.can_handle():
                continue
            logger.debug('Request will be handled by {}'.format(handler_cls.__name__))
            result = handler.process_request()
            return handler.result_success(result)
        except AuthenticationFailed as e:
            # Customize HttpResponse instance used when AuthenticationFailed was raised
            logger.warning(e)
            return handler.result_error(e, HttpResponseForbidden)
        except RPCException as e:
            logger.warning('RPC exception: {}'.format(e), exc_info=settings.MODERNRPC_LOG_EXCEPTIONS)
            return handler.result_error(e)
        except Exception as e:
            # Any other exception is reported back as an internal RPC error.
            logger.error('Exception raised from a RPC method: "{}"'.format(e), exc_info=settings.MODERNRPC_LOG_EXCEPTIONS)
            return handler.result_error(RPCInternalError(str(e)))
    # No handler accepted the request.
    logger.error('Unable to handle incoming request.')
    return HttpResponse('Unable to handle your request. Please ensure you called the right entry point. If not, ' 'this could be a server error.')
def logout(self):
    """Destroy the auth session against the box.

    :return: True if your logout is successful, False otherwise
    :rtype: bool
    """
    self.bbox_auth.set_access(BboxConstant.AUTHENTICATION_LEVEL_PUBLIC,
                              BboxConstant.AUTHENTICATION_LEVEL_PUBLIC)
    self.bbox_url.set_api_name("logout", None)
    api = BboxApiCall(self.bbox_url, BboxConstant.HTTP_METHOD_POST, None,
                      self.bbox_auth)
    response = api.execute_api_request()
    if response.status_code == 200:
        self.bbox_auth.set_cookie_id(None)
        return not self.bbox_auth.is_authentified()
    # Bug fix: the original fell off the end and implicitly returned
    # None on a non-200 response, despite the documented bool return.
    return False
def first(self, **kwargs):
    """Query the database for the first result of the view.

    Tornado-style coroutine: the value is delivered by raising
    ``Return``.

    :raises exceptions.NotFound: when the view yields no rows
    :return: the first row of the result set
    """
    result = yield self.get(**kwargs)
    if not result['rows']:
        raise exceptions.NotFound()
    raise Return(result['rows'][0])
def set_continue(self, name, action, seqno, value=None, default=False, disable=False):
    """Configures the routemap continue value

    Args:
        name (string): The full name of the routemap.
        action (string): The action to take for this routemap clause.
        seqno (integer): The sequence number for the routemap clause.
        value (integer): The value to configure for the routemap continue
        default (bool): Specifies to default the routemap continue value
        disable (bool): Specifies to negate the routemap continue value

    Returns:
        True if the operation succeeds otherwise False is returned

    Raises:
        ValueError: if ``value`` is not a positive integer while neither
            ``default`` nor ``disable`` is specified
    """
    commands = ['route-map %s %s %s' % (name, action, seqno)]
    if default:
        commands.append('default continue')
    elif disable:
        commands.append('no continue')
    else:
        if not str(value).isdigit() or value < 1:
            # Bug fix: the original message referred to 'seqno', but it
            # is 'value' that is validated here.
            raise ValueError('value must be a positive integer unless '
                             'default or disable is specified')
        commands.append('continue %s' % value)
    return self.configure(commands)
def getDistinctPairs(self):
    """Return a set consisting of the unique feature/location pairs
    across all objects."""
    unique = set()
    for pairs in self.objects.itervalues():
        unique.update(pairs)
    return unique
def canonical(self):
    """Compute canonical preference representation.

    Uses the auxiliary problem of section 9.4.2, with the preference
    shock process reintroduced.  Calculates pihat, llambdahat and ubhat
    for the equivalent canonical household technology.
    """
    # Augmented transition for the stacked state [household stock h; exogenous z].
    Ac1 = np.hstack((self.deltah, np.zeros((self.nh, self.nz))))
    Ac2 = np.hstack((np.zeros((self.nz, self.nh)), self.a22))
    Ac = np.vstack((Ac1, Ac2))
    Bc = np.vstack((self.thetah, np.zeros((self.nz, self.nc))))
    Cc = np.vstack((np.zeros((self.nh, self.nw)), self.c2))
    # Quadratic objective pieces of the auxiliary LQ problem.
    Rc1 = np.hstack((self.llambda.T.dot(self.llambda), -self.llambda.T.dot(self.ub)))
    Rc2 = np.hstack((-self.ub.T.dot(self.llambda), self.ub.T.dot(self.ub)))
    Rc = np.vstack((Rc1, Rc2))
    Qc = self.pih.T.dot(self.pih)
    Nc = np.hstack((self.pih.T.dot(self.llambda), -self.pih.T.dot(self.ub)))
    lq_aux = LQ(Qc, Rc, Ac, Bc, N=Nc, beta=self.beta)
    # Stationary solution of the auxiliary problem; F1 stacks the
    # feedback on h (first nh columns) and on z (remaining columns).
    P1, F1, d1 = lq_aux.stationary_values()
    self.F_b = F1[:, 0:self.nh]
    self.F_f = F1[:, self.nh:]
    # Canonical technology coefficients; Cholesky yields the triangular factor.
    self.pihat = np.linalg.cholesky(self.pih.T.dot(self.pih) + self.beta.dot(self.thetah.T).dot(P1[0:self.nh, 0:self.nh]).dot(self.thetah)).T
    self.llambdahat = self.pihat.dot(self.F_b)
    self.ubhat = -self.pihat.dot(self.F_f)
    return
def attach(self, gui, name=None):
    """Attach the view to the GUI.

    Wires selection callbacks, actions, status-message forwarding and
    view-state persistence between this view and ``gui``.
    """
    # Disable keyboard pan so that we can use arrows as global shortcuts
    # in the GUI.
    self.panzoom.enable_keyboard_pan = False
    gui.add_view(self)
    self.gui = gui
    # Set the view state.
    self.set_state(gui.state.get_view_state(self))
    # Call on_select() asynchronously after a delay, and set a busy
    # cursor.
    self.async_caller = AsyncCaller(delay=self._callback_delay)

    @gui.connect_
    def on_select(cluster_ids, **kwargs):
        # Call this function after a delay unless there is another
        # cluster selection in the meantime.
        @self.async_caller.set
        def update_view():
            with busy_cursor():
                self.on_select(cluster_ids, **kwargs)

    self.actions = Actions(gui, name=name or self.__class__.__name__, menu=self.__class__.__name__, default_shortcuts=self.shortcuts)

    # Update the GUI status message when the `self.set_status()` method
    # is called, i.e. when the `status` event is raised by the VisPy
    # view.
    @self.connect
    def on_status(e):
        gui.status_message = e.message

    # Save the view state in the GUI state.
    @gui.connect_
    def on_close():
        gui.state.update_view_state(self, self.state)
        # NOTE: create_gui() already saves the state, but the event
        # is registered *before* we add all views.
        gui.state.save()

    self.show()
def _write_ieeg_json ( output_file ) :
"""Use only required fields""" | dataset_info = { "TaskName" : "unknown" , "Manufacturer" : "n/a" , "PowerLineFrequency" : 50 , "iEEGReference" : "n/a" , }
with output_file . open ( 'w' ) as f :
dump ( dataset_info , f , indent = ' ' ) |
def _remove ( self , obj ) :
"""Python 2.4 compatibility .""" | for idx , item in enumerate ( self . _queue ) :
if item == obj :
del self . _queue [ idx ]
break |
def _set_overlay_policy_map(self, v, load=False):
    """Setter method for overlay_policy_map, mapped from YANG variable
    /overlay_policy_map (list).

    If this variable is read-only (config: false) in the source YANG
    file, then _set_overlay_policy_map is considered as a private
    method. Backends looking to populate this variable should do so via
    calling thisObj._set_overlay_policy_map() directly.

    YANG Description: Define a policy-map [Actions on the classified packet].
    """
    # Normalize wrapped values coming from pybind-typed callers.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v, base=YANGListType("pmap_name", overlay_policy_map.overlay_policy_map, yang_name="overlay-policy-map", rest_name="overlay-policy-map", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='pmap-name', extensions={u'tailf-common': {u'info': u'Overlay Policy Map Configuration', u'cli-no-key-completion': None, u'cli-full-no': None, u'sort-priority': u'75', u'cli-suppress-list-no': None, u'cli-full-command': None, u'callpoint': u'OverlayPolicyMapCallPoint', u'cli-mode-name': u'config-overlay-policymap-$(pmap-name)'}}), is_container='list', yang_name="overlay-policy-map", rest_name="overlay-policy-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Overlay Policy Map Configuration', u'cli-no-key-completion': None, u'cli-full-no': None, u'sort-priority': u'75', u'cli-suppress-list-no': None, u'cli-full-command': None, u'callpoint': u'OverlayPolicyMapCallPoint', u'cli-mode-name': u'config-overlay-policymap-$(pmap-name)'}}, namespace='urn:brocade.com:mgmt:brocade-overlay-policy', defining_module='brocade-overlay-policy', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the pyangbind-style structured error payload.
        raise ValueError({'error-string': """overlay_policy_map must be of a type compatible with list""", 'defined-type': "list", 'generated-type': """YANGDynClass(base=YANGListType("pmap_name",overlay_policy_map.overlay_policy_map, yang_name="overlay-policy-map", rest_name="overlay-policy-map", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='pmap-name', extensions={u'tailf-common': {u'info': u'Overlay Policy Map Configuration', u'cli-no-key-completion': None, u'cli-full-no': None, u'sort-priority': u'75', u'cli-suppress-list-no': None, u'cli-full-command': None, u'callpoint': u'OverlayPolicyMapCallPoint', u'cli-mode-name': u'config-overlay-policymap-$(pmap-name)'}}), is_container='list', yang_name="overlay-policy-map", rest_name="overlay-policy-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Overlay Policy Map Configuration', u'cli-no-key-completion': None, u'cli-full-no': None, u'sort-priority': u'75', u'cli-suppress-list-no': None, u'cli-full-command': None, u'callpoint': u'OverlayPolicyMapCallPoint', u'cli-mode-name': u'config-overlay-policymap-$(pmap-name)'}}, namespace='urn:brocade.com:mgmt:brocade-overlay-policy', defining_module='brocade-overlay-policy', yang_type='list', is_config=True)""", })
    self.__overlay_policy_map = t
    # Notify the parent container, if it tracks modifications.
    if hasattr(self, '_set'):
        self._set()
def create_instance(self, name, moduleName, settings):
    """Creates an instance of <moduleName> at <name> with <settings>.

    Resolves the module's declared dependencies (creating them when
    absent from ``settings``), fills in defaults for value settings,
    instantiates the implementing class and registers the instance.

    :raises ValueError: if the name is taken, the module is unknown, or
        a dependency setting is invalid
    """
    if name in self.insts:
        raise ValueError("There's already an instance named %s" % name)
    if moduleName not in self.modules:
        raise ValueError("There's no module %s" % moduleName)
    md = self.modules[moduleName]
    deps = dict()
    # Resolve declared dependencies into live instance objects;
    # NOTE: this mutates the caller-provided `settings` dict in place.
    for k, v in six.iteritems(md.deps):
        if k not in settings:
            settings[k] = self._get_or_create_a(v.type)
        if settings[k] is None:
            if not v.allow_null:
                raise ValueError("`null' not allowed for %s" % k)
        elif settings[k] not in self.insts:
            raise ValueError("No such instance %s" % settings[k])
        else:
            settings[k] = self.insts[settings[k]].object
        deps[k] = settings[k]
    # Fill in defaults for plain value settings; warn when left unset.
    for k, v in six.iteritems(md.vsettings):
        if k not in settings:
            settings[k] = v.default
            if v.default is None:
                self.l.warn('%s:%s not set' % (name, k))
    self.l.info('create_instance %-15s %s' % (name, md.implementedBy))
    cl = get_by_path(md.implementedBy)
    il = logging.getLogger(name)
    obj = cl(settings, il)
    self.register_instance(name, moduleName, obj, settings, deps)
    return obj
def _queue_task(self, bucket, file_pair_list, transfer_config, subscribers, direction):
    """Queue the upload/download; it gets processed when resources are
    available.  Uses the class-level transfer_config when none is
    supplied."""
    effective_config = transfer_config or self._transfer_config
    call_args = CallArgs(
        bucket=bucket,
        file_pair_list=file_pair_list,
        transfer_config=effective_config,
        subscribers=subscribers,
        direction=direction,
        transfer_spec=None,
        transfer_spec_func=self._create_transfer_spec,
        transfer_id=str(uuid.uuid4()),
    )
    self._validate_args(call_args)
    return self._coordinator_controller._queue_task(call_args)
def gmean(x, weights=None):
    """Return the (optionally weighted) geometric mean of ``x``."""
    w_arr, x_arr = _preprocess_inputs(x, weights)
    mean_log = (w_arr * np.log(x_arr)).sum(axis=0) / w_arr.sum(axis=0)
    return np.exp(mean_log)
def playback(cls, filename):
    """Replay previously recorded HTTP interactions from a file.

    .. testcode::

       import io
       import json
       import requests
       import httpretty

       with httpretty.record('/tmp/ip.json'):
           data = requests.get('https://httpbin.org/ip').json()

       with io.open('/tmp/ip.json') as fd:
           assert data == json.load(fd)

    :param filename: a string
    :returns: a `context-manager <https://docs.python.org/3/reference/datamodel.html#context-managers>`_
    """
    cls.enable()
    # Bug fix: the original leaked the file handle by calling
    # open(filename).read() without ever closing the file.
    with open(filename) as fd:
        data = json.loads(fd.read())
    for item in data:
        uri = item['request']['uri']
        method = item['request']['method']
        body = item['response']['body']
        headers = item['response']['headers']
        cls.register_uri(method, uri, body=body, forcing_headers=headers)
    yield
    cls.disable()
def acquisition_function(self, x):
    """Weight the raw acquisition so that the constrained domain and
    the evaluation cost are taken into account."""
    raw_acquisition = self._compute_acq(x)
    cost, _ = self.cost_withGradients(x)
    constrained = raw_acquisition * self.space.indicator_constraints(x)
    return -constrained / cost
def state_delta(self, selector='all', power=None, duration=1.0, infrared=None,
                hue=None, saturation=None, brightness=None, kelvin=None):
    """Given a state delta, apply the modifications to lights' state
    over a given period of time.

    :param selector: String limiting which lights are controlled
    :param power: 'on' or 'off'
    :param duration: seconds the power action takes,
        range 0.0 - 3155760000.0 (100 years)
    :param infrared: maximum brightness of the infrared channel
    :param hue: rotate the hue by this angle in degrees
    :param saturation: additive saturation change; result clipped to [0, 1]
    :param brightness: additive brightness change; result clipped to [0, 1]
    :param kelvin: additive kelvin change; result clipped to [2500, 9000]
    """
    field_names = ("power", "duration", "infrared", "hue",
                   "saturation", "brightness", "kelvin")
    field_values = (power, duration, infrared, hue,
                    saturation, brightness, kelvin)
    argument_tuples = list(zip(field_names, field_values))
    return self.client.perform_request(
        method='post',
        endpoint='lights/{}/state/delta',
        endpoint_args=[selector],
        argument_tuples=argument_tuples)
def set_dims_from_tree_size(self):
    """Calculate a reasonable height and width for the tree, scaled by
    the number of tips, unless the user already set them."""
    ntips = len(self.treelist[0])
    span = 18 * ntips
    if self.style.orient in ("right", "left"):
        # tips run vertically: height is the long (tip-wise) dimension
        if not self.style.height:
            self.style.height = max(275, min(1000, span))
        if not self.style.width:
            self.style.width = max(300, min(500, span))
    else:
        # tips run horizontally: width is the long (tip-wise) dimension
        if not self.style.width:
            self.style.width = max(275, min(1000, span))
        if not self.style.height:
            self.style.height = max(225, min(500, span))
def normpath(path):
    """Normalize given path in various different forms.

    >>> normpath("/tmp/../etc/hosts")
    '/etc/hosts'
    >>> normpath("~root/t")
    '/root/t'
    """
    pipeline = [os.path.normpath, os.path.abspath]
    if "~" in path:
        # expand the user home component before normalizing
        pipeline.insert(0, os.path.expanduser)
    return chaincalls(pipeline, path)
def qhalf(options, halfspaces, interior_point):
    """Similar to the qhalf command in command-line qhull.

    Args:
        options:
            An options string. Up to two options separated by spaces
            are supported. See Qhull's qhalf help for info. Typically
            used options are: Fp
        halfspaces:
            List of Halfspaces as input.
        interior_point:
            An interior point (see qhalf documentation)

    Returns:
        Output as a list of strings.
    """
    points = [list(h.normal) + [h.offset] for h in halfspaces]
    # Assemble qhalf's stdin: interior point first, then the halfspaces.
    data = [[len(interior_point), 1],
            map(repr, interior_point),
            [len(points[0])],
            [len(points)]]
    data.extend(map(repr, row) for row in points)
    prep_str = [" ".join(map(str, line)) for line in data]
    output = hull.qhalf(options, "\n".join(prep_str))
    return [line.strip() for line in output.strip().split("\n")]
def get_hash_as_int(*args, group: cmod.PairingGroup = None):
    """Enumerate over the input tuple and generate a hash from the values.

    :param args: sequence of either group or integer elements
    :param group: pairing group used when an element is a group element
    :return: the SHA-256 digest of the serialized arguments as an int
    """
    group = group or cmod.PairingGroup(PAIRING_GROUP)
    digest = sha256()
    serialized = [group.serialize(arg) if isGroupElement(arg)
                  else cmod.Conversion.IP2OS(arg) for arg in args]
    # Sort for a stable, order-independent challenge.
    for chunk in sorted(serialized):
        digest.update(chunk)
    return bytes_to_int(digest.digest())
def check(self, *exc_classes):
    """Check if any of ``exc_classes`` caused the failure.

    Arguments of this method can be exception types or type names
    (strings **fully qualified**). If the captured exception is an
    instance of an exception of a given type, the corresponding
    argument is returned, otherwise ``None`` is returned.
    """
    for candidate in exc_classes:
        if utils.cls_to_cls_name(candidate) in self._exc_type_names:
            return candidate
    return None
def load_pyfile(self, path):
    """Load a python file as config.

    Args:
        path (string): path to the python file

    Raises:
        MalformedConfig: if the file cannot be compiled or executed
    """
    with open(path) as config_file:
        source = config_file.read()
    try:
        # NOTE: executing config code is intentional here; only trusted
        # configuration files should be loaded.
        exec(compile(source, path, 'exec'), self)
    except Exception as e:
        raise MalformedConfig(path, six.text_type(e))
def save_otfs(self, ufos, ttf=False, is_instance=False, interpolatable=False,
              use_afdko=False, autohint=None, subset=None,
              use_production_names=None,
              subroutinize=None,  # deprecated
              optimize_cff=CFFOptimization.NONE, cff_round_tolerance=None,
              remove_overlaps=True, overlaps_backend=None,
              reverse_direction=True, conversion_error=None,
              feature_writers=None, interpolate_layout_from=None,
              interpolate_layout_dir=None, output_path=None,
              output_dir=None, inplace=True, ):
    """Build OpenType binaries from UFOs.

    Args:
        ufos: Font objects to compile.
        ttf: If True, build fonts with TrueType outlines and .ttf extension.
        is_instance: If output fonts are instances, for generating paths.
        interpolatable: If output is interpolatable, for generating paths.
        use_afdko: If True, use AFDKO to compile feature source.
        autohint: Parameters to provide to ttfautohint. If not provided, the
            autohinting step is skipped.
        subset: Whether to subset the output according to data in the UFOs.
            If not provided, also determined by flags in the UFOs.
        use_production_names: Whether to use production glyph names in the
            output. If not provided, determined by flags in the UFOs.
        subroutinize: If True, subroutinize CFF outlines in output (deprecated,
            use optimize_cff).
        cff_round_tolerance (float): controls the rounding of point
            coordinates in the CFF table. It is defined as the maximum
            absolute difference between the original float and the rounded
            integer value. By default, all floats are rounded to integer
            (tolerance 0.5); a value of 0 completely disables rounding;
            values in between only round floats which are close to their
            integral part within the tolerated range. Ignored if ttf=True.
        remove_overlaps: If True, remove overlaps in glyph shapes.
        overlaps_backend: name of the library to remove overlaps. Can be
            either "booleanOperations" (default) or "pathops".
        reverse_direction: If True, reverse contour directions when
            compiling TrueType outlines.
        conversion_error: Error to allow when converting cubic CFF contours
            to quadratic TrueType contours.
        feature_writers: list of ufo2ft-compatible feature writer classes
            or pre-initialized objects that are passed on to ufo2ft's
            feature compiler to generate automatic feature code. The
            default value (None) means that ufo2ft will use its built-in
            default feature writers (for kern, mark, mkmk, etc.). An empty
            list ([]) will skip any automatic feature generation.
        interpolate_layout_from: A DesignSpaceDocument object to give varLib
            for interpolating layout tables to use in output.
        interpolate_layout_dir: Directory containing the compiled master
            fonts to use for interpolating binary layout tables.
        output_path: output font file path. Only works when the input
            'ufos' list contains a single font.
        output_dir: directory where to save output files. Mutually
            exclusive with 'output_path' argument.
    """
    assert not (output_path and output_dir), "mutually exclusive args"
    if output_path is not None and len(ufos) > 1:
        raise ValueError("output_path requires a single input")
    # Translate the deprecated 'subroutinize' flag into 'optimize_cff'.
    if subroutinize is not None:
        import warnings
        warnings.warn("the 'subroutinize' argument is deprecated, use 'optimize_cff'", UserWarning, )
        if subroutinize:
            optimize_cff = CFFOptimization.SUBROUTINIZE
        else:
            # for b/w compatibility, we still run the charstring specializer
            # even when --no-subroutinize is used. Use the new --optimize-cff
            # option to disable both specilization and subroutinization
            optimize_cff = CFFOptimization.SPECIALIZE
    ext = "ttf" if ttf else "otf"
    if interpolate_layout_from is not None:
        if interpolate_layout_dir is None:
            interpolate_layout_dir = self._output_dir(ext, is_instance=False, interpolatable=interpolatable)
        finder = partial(_varLib_finder, directory=interpolate_layout_dir, ext=ext)
        # no need to generate automatic features in ufo2ft, since here we
        # are interpolating precompiled GPOS table with fontTools.varLib.
        # An empty 'featureWriters' list tells ufo2ft to not generate any
        # automatic features.
        # TODO: Add an argument to ufo2ft.compileOTF/compileTTF to
        # completely skip compiling features into OTL tables
        feature_writers = []
    compiler_options = dict(useProductionNames=use_production_names, reverseDirection=reverse_direction, cubicConversionError=conversion_error, featureWriters=feature_writers, inplace=True,  # avoid extra copy
                            )
    if use_afdko:
        compiler_options["featureCompilerClass"] = FDKFeatureCompiler
    if interpolatable:
        if not ttf:
            raise NotImplementedError("interpolatable CFF not supported yet")
        logger.info("Building interpolation-compatible TTFs")
        fonts = ufo2ft.compileInterpolatableTTFs(ufos, **compiler_options)
    else:
        fonts = self._iter_compile(ufos, ttf, removeOverlaps=remove_overlaps, overlapsBackend=overlaps_backend, optimizeCFF=optimize_cff, roundTolerance=cff_round_tolerance, **compiler_options)
    do_autohint = ttf and autohint is not None
    for font, ufo in zip(fonts, ufos):
        if interpolate_layout_from is not None:
            # Interpolate GPOS at this instance's location and copy the
            # layout tables from the closest compiled master.
            master_locations, instance_locations = self._designspace_locations(interpolate_layout_from)
            loc = instance_locations[_normpath(ufo.path)]
            gpos_src = interpolate_layout(interpolate_layout_from, loc, finder, mapped=True)
            font["GPOS"] = gpos_src["GPOS"]
            gsub_src = TTFont(finder(self._closest_location(master_locations, loc)))
            if "GDEF" in gsub_src:
                font["GDEF"] = gsub_src["GDEF"]
            if "GSUB" in gsub_src:
                font["GSUB"] = gsub_src["GSUB"]
        if do_autohint:
            # if we are autohinting, we save the unhinted font to a
            # temporary path, and the hinted one to the final destination
            fd, otf_path = tempfile.mkstemp("." + ext)
            os.close(fd)
        elif output_path is None:
            otf_path = self._output_path(ufo, ext, is_instance, interpolatable, output_dir=output_dir)
        else:
            otf_path = output_path
        logger.info("Saving %s", otf_path)
        font.save(otf_path)
        # 'subset' is an Optional[bool], can be None, True or False.
        # When False, we never subset; when True, we always do; when
        # None (default), we check the presence of custom parameters
        if subset is False:
            pass
        elif subset is True or ((KEEP_GLYPHS_OLD_KEY in ufo.lib or KEEP_GLYPHS_NEW_KEY in ufo.lib) or any(glyph.lib.get(GLYPH_EXPORT_KEY, True) is False for glyph in ufo)):
            self.subset_otf_from_ufo(otf_path, ufo)
        if not do_autohint:
            continue
        if output_path is not None:
            hinted_otf_path = output_path
        else:
            hinted_otf_path = self._output_path(ufo, ext, is_instance, interpolatable, autohinted=True, output_dir=output_dir, )
        try:
            ttfautohint(otf_path, hinted_otf_path, args=autohint)
        except TTFAError:
            # copy unhinted font to destination before re-raising error
            shutil.copyfile(otf_path, hinted_otf_path)
            raise
        finally:
            # must clean up temp file
            os.remove(otf_path)
def _recv_close(self, method_frame):
    """Receive a close command from the broker and finalize the channel."""
    frame_args = method_frame.args
    # Dict-literal values evaluate in source order, preserving the
    # required read order from the frame payload.
    self.channel._close_info = {
        'reply_code': frame_args.read_short(),
        'reply_text': frame_args.read_shortstr(),
        'class_id': frame_args.read_short(),
        'method_id': frame_args.read_short(),
    }
    self.channel._closed = True
    self.channel._closed_cb(final_frame=MethodFrame(self.channel_id, 20, 41))
def nsmallest(self, n=5, keep='first'):
    """Return the smallest `n` elements.

    Parameters
    ----------
    n : int, default 5
        Return this many ascending sorted values.
    keep : {'first', 'last', 'all'}, default 'first'
        When there are duplicate values that cannot all fit in a
        Series of `n` elements:

        - ``first`` : return the first `n` occurrences in order
          of appearance.
        - ``last`` : return the last `n` occurrences in reverse
          order of appearance.
        - ``all`` : keep all occurrences. This can result in a Series of
          size larger than `n`.

    Returns
    -------
    Series
        The `n` smallest values in the Series, sorted in increasing order.

    See Also
    --------
    Series.nlargest : Get the `n` largest elements.
    Series.sort_values : Sort Series by values.
    Series.head : Return the first `n` rows.

    Notes
    -----
    Faster than ``.sort_values().head(n)`` for small `n` relative to
    the size of the ``Series`` object.

    Examples
    --------
    >>> s = pd.Series({"Monserat": 5200, "Nauru": 11300, "Italy": 5900000})
    >>> s.nsmallest(2)
    Monserat     5200
    Nauru       11300
    dtype: int64
    """
    selector = algorithms.SelectNSeries(self, n=n, keep=keep)
    return selector.nsmallest()
def sentinel_get_master_ip(master, host=None, port=None, password=None):
    '''
    Get ip for sentinel master

    .. versionadded:: 2016.3.0

    CLI Example:

    .. code-block:: bash

        salt '*' redis.sentinel_get_master_ip 'mymaster'
    '''
    conn = _sconnect(host, port, password)
    addr = conn.sentinel_get_master_addr_by_name(master)
    # Pair the (host, port) tuple returned by redis with descriptive keys.
    return dict(zip(('master_host', 'master_port'), addr))
def path_join(*args):
    """Wrapper around `os.path.join`.

    Normalizes every component to the same (text) type before joining,
    so mixed inputs cannot make `os.path.join` fail.
    """
    text_parts = [paramiko.py3compat.u(part) for part in args]
    return os.path.join(*text_parts)
def _check_custom_url_parameters(self):
    """Checks if custom url parameters are valid parameters.

    Throws ValueError if the provided parameter is not a valid parameter.
    """
    for candidate in self.custom_url_params:
        if candidate not in CustomUrlParam:
            raise ValueError('Parameter %s is not a valid custom url parameter. Please check and fix.' % candidate)
    # FIS requests carry their geometry elsewhere; reject it as a url param.
    if CustomUrlParam.GEOMETRY in self.custom_url_params and self.service_type is ServiceType.FIS:
        raise ValueError('{} should not be a custom url parameter of a FIS request'.format(CustomUrlParam.GEOMETRY))
def EMAIL_VERIFICATION(self):
    """See e-mail verification method"""
    method = self._setting("EMAIL_VERIFICATION", self.EmailVerificationMethod.OPTIONAL)
    # Legacy boolean values map onto the enum: True meant mandatory
    # verification, False meant optional.
    if method is True:
        return self.EmailVerificationMethod.MANDATORY
    if method is False:
        return self.EmailVerificationMethod.OPTIONAL
    return method
def adjust(self, ln):
    """Converts a parsing line number into an original line number.

    Walks ``self.skips`` (ascending original line numbers that were
    skipped during parsing) and shifts *ln* past every skip that
    precedes it.
    """
    result = ln
    pending = 0
    for skipped in self.skips:
        if skipped <= ln:
            pending += 1
        elif result + pending < skipped:
            # this and all later skips lie beyond the adjusted position
            break
        else:
            pending -= skipped - result - 1
            result = skipped
    return result + pending
def BuildDefaultGlobals():
    """Create a dictionary containing all the default globals for
    SConstruct and SConscript files."""
    global GlobalDict
    if GlobalDict is None:
        # Populate the cache once with every non-module attribute
        # exported by SCons.Script.
        import SCons.Script
        script_dict = SCons.Script.__dict__
        module_type = type(SCons.Script)
        GlobalDict = {
            name: script_dict[name]
            for name in dir(SCons.Script)
            if not isinstance(script_dict[name], module_type)
        }
    # Hand out a copy so callers cannot mutate the cached dictionary.
    return GlobalDict.copy()
def from_dict(self, document):
    """Create image group object from JSON document retrieved from database.

    Parameters
    ----------
    document : JSON
        Json document in database

    Returns
    -------
    ImageGroupHandle
        Handle for image group object
    """
    identifier = str(document['_id'])
    # Build the group image list; each image's path lives under the
    # directory managed for its identifier.
    images = [
        GroupImage(
            img['identifier'],
            img['folder'],
            img['name'],
            os.path.join(self.image_manager.get_directory(img['identifier']), img['name'])
        )
        for img in document['images']
    ]
    # Copy properties and record the group size alongside them.
    properties = document['properties']
    properties[PROPERTY_GROUPSIZE] = len(document['images'])
    # Directories are simply named by object identifier.
    directory = os.path.join(self.directory, identifier)
    return ImageGroupHandle(
        identifier,
        properties,
        directory,
        images,
        attribute.attributes_from_dict(document['options']),
        timestamp=datetime.datetime.strptime(document['timestamp'], '%Y-%m-%dT%H:%M:%S.%f'),
        is_active=document['active']
    )
def path_helper(self, operations, view, **kwargs):
    """Path helper that allows passing a bottle view function."""
    # Merge operations declared in the view's docstring.
    doc_operations = yaml_utils.load_operations_from_docstring(view.__doc__)
    operations.update(doc_operations)
    bottle_app = kwargs.get('app', _default_app)
    matched_route = self._route_for_view(bottle_app, view)
    return self.bottle_path_to_openapi(matched_route.rule)
def load_dataset(self, ds_str):
    """Returns an instantiated instance of either a netCDF file or an SOS
    mapped DS object.

    :param str ds_str: URL of the resource to load
    """
    # A URL with a network location is remote; anything else is treated
    # as a local resource.
    is_remote = bool(urlparse(ds_str).netloc)
    loader = self.load_remote_dataset if is_remote else self.load_local_dataset
    return loader(ds_str)
def set_vm_ip(name=None, ipv4_cidr=None, ipv4_gw=None, session=None, call=None):
    '''Set the IP address on a virtual interface (vif)

    :param name: name of the VM whose vif(s) should be configured
    :param ipv4_cidr: IPv4 address in CIDR notation to assign
    :param ipv4_gw: IPv4 gateway address
    :param session: existing XenAPI session; a new one is created when None
    :param call: salt-cloud invocation type; must be an action (-a/--action)
    :returns: True (failures are logged, not raised)
    '''
    # only static assignment is implemented here
    mode = 'static'
    # TODO: Need to add support for IPv6
    if call == 'function':
        raise SaltCloudException('The function must be called with -a or --action.')
    log.debug('Setting name: %s ipv4_cidr: %s ipv4_gw: %s mode: %s', name, ipv4_cidr, ipv4_gw, mode)
    if session is None:
        log.debug('New session being created')
        session = _get_session()
    vm = _get_vm(name, session)
    # -- try to get ip from vif
    # TODO: for now will take first interface
    #       addition consideration needed for
    #       multiple interface (vif) VMs
    vifs = session.xenapi.VM.get_VIFs(vm)
    if vifs is not None:
        log.debug('There are %s vifs.', len(vifs))
        # NOTE(review): every vif receives the same static settings,
        # not just the first one -- confirm this is intended
        for vif in vifs:
            record = session.xenapi.VIF.get_record(vif)
            log.debug(record)
            try:
                session.xenapi.VIF.configure_ipv4(vif, mode, ipv4_cidr, ipv4_gw)
            except XenAPI.Failure:
                # best-effort: log and keep going on XenAPI errors
                log.info('Static IP assignment could not be performed.')
    return True
def read(calc_id, mode='r', datadir=None):
    """Read the datastore, if it exists and it is accessible.

    :param calc_id: calculation ID or filename
    :param mode: 'r' or 'w'
    :param datadir: the directory where to look
    :returns: the corresponding DataStore instance
    """
    datadir = datadir or get_datadir()
    dstore = DataStore(calc_id, datadir, mode=mode)
    try:
        parent_id = dstore['oqparam'].hazard_calculation_id
    except KeyError:
        # no oqparam stored yet, so there is no parent calculation
        parent_id = None
    if parent_id:
        # recursively attach the parent datastore from the same directory
        dstore.parent = read(parent_id, datadir=os.path.dirname(dstore.filename))
    return dstore
def pdf(x, mu, std):
    """Probability density function (normal distribution)."""
    variance = std ** 2
    normalizer = 1.0 / (std * sqrt(2 * pi))
    return normalizer * np.exp(-((x - mu) ** 2) / (2 * variance))
def check_failhard(self, low, running):
    '''Check if the low data chunk should send a failhard signal'''
    tag = _gen_tag(low)
    # test runs never failhard
    if self.opts.get('test', False):
        return False
    # only signal when failhard is requested and the state already ran
    if not low.get('failhard', self.opts['failhard']) or tag not in running:
        return False
    outcome = running[tag]['result']
    # a None result means "still undecided": do not failhard on it
    return outcome is not None and not outcome
def begin_access(self, cursor=None, offset=0, size=sys.maxsize, flags=0):
    """Call this before the first use of this instance. The method was already
    called by the constructor in case sufficient information was provided.

    For more information on the parameters, see the __init__ method.

    :param cursor: if None, the existing cursor will be reused
    :return: True if the buffer can be used
    """
    if cursor:
        self._c = cursor
    cur = self._c
    # without an associated cursor there is nothing to map
    if cur is None or not cur.is_associated():
        return False
    usable = cur.use_region(offset, size, flags).is_valid()
    if usable:
        # If the given size is too large (or the default), compute a proper
        # size from the file; a smaller size is trusted as the caller's
        # deliberate choice of offset/size.
        if size > cur.file_size():
            size = cur.file_size() - offset
        self._size = size
    return usable
def convolution(A):
    """Apply a fixed 3x3 convolution kernel to the 2-D array *A*.

    :param A: 2-D numpy array of shape (m, n)
    :returns: new (m, n) array B where each interior cell holds the
        weighted sum of its 3x3 neighbourhood; the one-cell border
        remains zero
    """
    m = A.shape[0]
    n = A.shape[1]
    B = numpy.zeros((m, n))
    # Kernel coefficients: c<col><row> weights the neighbour at that offset.
    c11 = 2.0
    c21 = 5.0
    c31 = -8.0
    c12 = -3.0
    c22 = 6.0
    c32 = -9.0
    c13 = 4.0
    c23 = 7.0
    c33 = 10.0
    # Directive consumed by Pythran/OpenMP-aware tooling; a no-op string
    # statement in plain Python.
    "omp parallel for private ( j , i )"
    # range (not the Python-2-only xrange) keeps this Python 3 compatible
    for i in range(1, m - 1):
        for j in range(1, n - 1):
            B[i, j] = (c11 * A[i - 1, j - 1] + c12 * A[i + 0, j - 1] + c13 * A[i + 1, j - 1]
                       + c21 * A[i - 1, j + 0] + c22 * A[i + 0, j + 0] + c23 * A[i + 1, j + 0]
                       + c31 * A[i - 1, j + 1] + c32 * A[i + 0, j + 1] + c33 * A[i + 1, j + 1])
    return B
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.