signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def prep_seq2c_bed(data):
    """Select, clean and annotate the BED file used as Seq2C input.

    Region source preference: a background CNV reference configured for
    seq2c, then structural-variant regions, then the sample's variant
    regions.  The result is guaranteed to carry gene names (column 4),
    one gene per line, with empty/placeholder names filtered out.

    :param data: sample data dictionary.
    :returns: path to the cleaned annotated BED file, or None when no
        region file is available at all.
    :raises ValueError: when the input is a 3-column BED and no
        transcript data exists to add gene names.
    """
    if dd.get_background_cnv_reference(data, "seq2c"):
        bed_file = _background_to_bed(dd.get_background_cnv_reference(data, "seq2c"), data)
    else:
        bed_file = regions.get_sv_bed(data)
    if bed_file:
        bed_file = bedutils.clean_file(bed_file, data, prefix="svregions-")
    else:
        # No SV regions: fall back to the sample's variant regions.
        bed_file = bedutils.clean_file(dd.get_variant_regions(data), data)
    if not bed_file:
        return None
    col_num = bt.BedTool(bed_file).field_count()
    if col_num < 4:
        # 3-column BED: try to attach gene names from transcript data.
        annotated_file = annotate.add_genes(bed_file, data, max_distance=0)
        if annotated_file == bed_file:
            # add_genes handing back the input unchanged means there was
            # nothing to annotate with -- Seq2C cannot run without names.
            raise ValueError("BED file for Seq2C must be annotated with gene names, "
                             "however the input BED is 3-columns and we have no transcript "
                             "data to annotate with " + bed_file)
        annotated_file = annotate.gene_one_per_line(annotated_file, data)
    else:
        annotated_file = bed_file
    ready_file = "%s-seq2cclean.bed" % (utils.splitext_plus(annotated_file)[0])
    if not utils.file_uptodate(ready_file, annotated_file):
        bed = bt.BedTool(annotated_file)
        if col_num > 4 and col_num != 8:
            # Keep only chrom/start/end/name.  8-column inputs are passed
            # through untrimmed -- presumably an already-accepted annotated
            # layout; TODO confirm which format that corresponds to.
            bed = bed.cut(range(4))
        # Drop intervals without a usable gene name.
        bed = bed.filter(lambda x: x.name not in ["", ".", "-"])
        with file_transaction(data, ready_file) as tx_out_file:
            bed.saveas(tx_out_file)
        logger.debug("Saved Seq2C clean annotated ready input BED into " + ready_file)
    return ready_file
def make_auth_headers():
    """Build the Authorization header required by the Appveyor API.

    Reads the API token from a ``.appveyor.token`` file in the current
    directory and wraps it as a Bearer-auth header dict.

    :returns: dict with a single ``Authorization`` entry.
    :raises RuntimeError: when the token file does not exist.
    """
    token_path = ".appveyor.token"
    if not os.path.exists(token_path):
        raise RuntimeError(
            "Please create a file named `.appveyor.token` in the current directory. "
            "You can get the token from https://ci.appveyor.com/api-token"
        )
    with open(token_path) as token_file:
        token = token_file.read().strip()
    return {'Authorization': 'Bearer {}'.format(token)}
def cache_file(file_name):
    """Cache a given file for further use (by storing it on disk).

    Copies ``file_name`` into the octogrid cache directory under the
    user's home directory, keeping the same file name.

    :param file_name: path of the file to cache.
    """
    cache_path = join(expanduser('~'), OCTOGRID_DIRECTORY, file_name)
    # The original ``try: ... except Exception, e: raise e`` was both a
    # Python-2-only syntax (a SyntaxError under Python 3) and a no-op
    # wrapper that destroyed the traceback; just let errors propagate.
    copyfile(file_name, cache_path)
def createSite(self, username, password, fullname, email, description, securityQuestionIdx, secuirtyQuestionAns, contentDir):
    """Initialize and configure Portal for ArcGIS (the "create site" call).

    Must be the first operation invoked after installation.  Creating a
    new site involves: creating the initial administrator account (which
    doubles as the database administrator account), creating token shared
    keys, and registering directories.  The operation is time consuming,
    as the database is initialized and populated with default templates
    and content; a non-empty database directory is migrated to the
    current version while keeping its data intact.  At the end, the web
    server that hosts the API is restarted.

    Inputs:
        username - the initial administrator account name
        password - the password for the initial administrator account
        fullname - the full name for the initial administrator account
        email - the account email address
        description - an optional description for the account
        securityQuestionIdx - index of the secret question used to
            retrieve a forgotten password
        secuirtyQuestionAns - the answer to the secret question.
            NOTE(review): the misspelled key is sent on the wire as-is;
            confirm the service expects this spelling before renaming.
        contentDir - path to the location of the site's content
    """
    request_params = dict(
        username=username,
        password=password,
        fullname=fullname,
        email=email,
        description=description,
        secuirtyQuestionAns=secuirtyQuestionAns,
        securityQuestionIdx=securityQuestionIdx,
        contentDir=contentDir,
    )
    endpoint = self._url + "/createNewSite"
    return self._get(url=endpoint, param_dict=request_params)
def build_hazard_stats(pgetter, N, hstats, individual_curves, monitor):
    """
    :param pgetter: an :class:`openquake.commonlib.getters.PmapGetter`
    :param N: the total number of sites
    :param hstats: a list of pairs (statname, statfunc)
    :param individual_curves: if True, also build the individual curves
    :param monitor: instance of Monitor
    :returns: a dictionary kind -> ProbabilityMap

    The "kind" is a string of the form 'rlz-XXX' or 'mean' or 'quantile-XXX'
    used to specify the kind of output.
    """
    with monitor('combine pmaps'):
        pgetter.init()  # if not already initialized
        try:
            pmaps = pgetter.get_pmaps()
        except IndexError:  # no data
            return {}
        if sum(len(pmap) for pmap in pmaps) == 0:  # no data
            return {}
    R = len(pmaps)  # number of realizations
    imtls, poes, weights = pgetter.imtls, pgetter.poes, pgetter.weights
    pmap_by_kind = {}
    hmaps_stats = []
    hcurves_stats = []
    with monitor('compute stats'):
        for statname, stat in hstats.items():
            pmap = compute_pmap_stats(pmaps, [stat], weights, imtls)
            hcurves_stats.append(pmap)
            if pgetter.poes:
                hmaps_stats.append(
                    calc.make_hmap(pmap, pgetter.imtls, pgetter.poes))
            if statname == 'mean' and R > 1 and N <= FEWSITES:
                # for small numbers of sites also record, per site, the
                # realization closest to the mean curve
                pmap_by_kind['rlz_by_sid'] = rlz = {}
                for sid, pcurve in pmap.items():
                    rlz[sid] = util.closest_to_ref(
                        [pm.setdefault(sid, 0).array for pm in pmaps],
                        pcurve.array)['rlz']
    if hcurves_stats:
        pmap_by_kind['hcurves-stats'] = hcurves_stats
    if hmaps_stats:
        pmap_by_kind['hmaps-stats'] = hmaps_stats
    # NOTE(review): `and` binds tighter than `or`, so this reads as
    # (R > 1 and individual_curves) or (not hstats) -- confirm intended.
    if R > 1 and individual_curves or not hstats:
        pmap_by_kind['hcurves-rlzs'] = pmaps
        if pgetter.poes:
            with monitor('build individual hmaps'):
                pmap_by_kind['hmaps-rlzs'] = [
                    calc.make_hmap(pmap, imtls, poes) for pmap in pmaps]
    return pmap_by_kind
def list_all_eq_to(list_, val, strict=True):
    """Check whether every item of ``list_`` equals ``val``.

    Args:
        list_ (list): items to test
        val: value to compare each item against
        strict (bool): when False, fall back to comparing ``repr``
            strings if direct comparison raises ValueError

    Returns:
        True if all items in the list are equal to val
    """
    if util_type.HAVE_NUMPY and isinstance(val, np.ndarray):
        return all(np.all(item == val) for item in list_)
    try:
        # Silence FutureWarning about elementwise comparison to `None`.
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', category=FutureWarning)
            comparisons = [item == val for item in list_]
        reduced = []
        for flag in comparisons:
            # Array-like comparison results must be collapsed with np.all
            reduced.append(np.all(flag) if hasattr(flag, '__array__') else flag)
        return all(reduced)
    except ValueError:
        if strict:
            raise
        return all(repr(item) == repr(val) for item in list_)
def revoke(self):
    """Mark this certificate as revoked.

    Sets the ``revoked`` flag, records the revocation timestamp in
    ``revoked_at`` and persists the change.
    """
    timestamp = timezone.now()
    self.revoked = True
    self.revoked_at = timestamp
    self.save()
def cli(env, zonefile, dry_run):
    """Import zone based off a BIND zone file.

    Parses the file, reports every parsed/unparsed line, then (unless
    ``dry_run``) creates the zone if needed and creates each record,
    reporting per-record success or failure without aborting the run.
    """
    manager = SoftLayer.DNSManager(env.client)
    with open(zonefile) as zone_f:
        zone_contents = zone_f.read()
    zone, records, bad_lines = parse_zone_details(zone_contents)
    env.out("Parsed: zone=%s" % zone)
    for record in records:
        env.out("Parsed: %s" % RECORD_FMT.format(**record))
    # Report any lines the parser could not understand.
    for line in bad_lines:
        env.out("Unparsed: %s" % line)
    if dry_run:
        return
    # Find zone id or create the zone if it doesn't exist
    try:
        zone_id = helpers.resolve_id(manager.resolve_ids, zone, name='zone')
    except exceptions.CLIAbort:
        zone_id = manager.create_zone(zone)['id']
        env.out(click.style("Created: %s" % zone, fg='green'))
    # Attempt to create each record; a failing record is reported but
    # does not stop the remaining records from being imported.
    for record in records:
        try:
            manager.create_record(zone_id, record['record'], record['type'],
                                  record['data'], record['ttl'])
            env.out(click.style("Created: %s" % RECORD_FMT.format(**record),
                                fg='green'))
        except SoftLayer.SoftLayerAPIError as ex:
            env.out(click.style("Failed: %s" % RECORD_FMT.format(**record),
                                fg='red'))
            env.out(click.style(str(ex), fg='red'))
    env.out(click.style("Finished", fg='green'))
def annealing_cos(start: Number, end: Number, pct: float) -> Number:
    "Cosine anneal from `start` to `end` as pct goes from 0.0 to 1.0."
    # Half-cosine weight: 2 at pct=0 (full `start`), 0 at pct=1 (full `end`).
    scale = 1 + np.cos(np.pi * pct)
    return end + (start - end) / 2 * scale
def fetchPageInfo(self, *page_ids):
    """Get pages' info from IDs, unordered.

    .. warning::
        Sends two requests, to fetch all available info!

    :param page_ids: One or more page ID(s) to query
    :return: :class:`models.Page` objects, labeled by their ID
    :rtype: dict
    :raises: FBchatException if request failed
    """
    threads = self.fetchThreadInfo(*page_ids)
    pages = {}
    for thread_id, thread in threads.items():
        # Anything that is not a page is a caller error.
        if thread.type != ThreadType.PAGE:
            raise FBchatUserError("Thread {} was not a page".format(thread))
        pages[thread_id] = thread
    return pages
def do_resource_delete(client, args):
    """Remove every resource listed in ``args.uris``.

    Deletes each URI through the client (optionally purging) and reports
    every deletion on stdout.

    :returns: True once all deletions have been issued.
    """
    purge = args.purge
    for uri in args.uris:
        client.delete_resource(uri, purge=purge)
        print("Deleted {}".format(uri))
    return True
def update_service_endpoint(self, endpoint, project, endpoint_id, operation=None):
    """UpdateServiceEndpoint.

    [Preview API] Update a service endpoint.

    :param endpoint: ServiceEndpoint to update.
    :param str project: Project ID or project name.
    :param str endpoint_id: Id of the service endpoint to update.
    :param str operation: Operation for the service endpoint.
    :rtype: ServiceEndpoint
    """
    # Only serialize route values that were actually provided.
    route_values = {}
    for route_key, arg_name, value in (('project', 'project', project),
                                       ('endpointId', 'endpoint_id', endpoint_id)):
        if value is not None:
            route_values[route_key] = self._serialize.url(arg_name, value, 'str')
    query_parameters = {}
    if operation is not None:
        query_parameters['operation'] = self._serialize.query('operation', operation, 'str')
    content = self._serialize.body(endpoint, 'ServiceEndpoint')
    response = self._send(http_method='PUT',
                          location_id='e85f1c62-adfc-4b74-b618-11a150fb195e',
                          version='5.0-preview.2',
                          route_values=route_values,
                          query_parameters=query_parameters,
                          content=content)
    return self._deserialize('ServiceEndpoint', response)
def image_show(self, image_id):
    '''Show image details and metadata.

    Fetches the image from the compute connection and returns a plain
    dict of its fields, with links flattened to a rel->href mapping.
    '''
    conn = self.compute_conn
    image = conn.images.get(image_id)
    links = {link['rel']: link['href'] for link in image.links}
    ret = {
        'name': image.name,
        'id': image.id,
        'status': image.status,
        'progress': image.progress,
        'created': image.created,
        'updated': image.updated,
        'metadata': image.metadata,
        'links': links,
    }
    # Size attributes are only included when the image object exposes them.
    for optional_attr in ('minDisk', 'minRam'):
        if hasattr(image, optional_attr):
            ret[optional_attr] = getattr(image, optional_attr)
    return ret
def apply(self, func, mapping=None, new_dtype=None, **kwargs):
    """Apply an element-wise UDF to the Series.

    There are currently 6 options for using a UDF.  First 4 are lazy,
    other 2 are eager and require the use of the raw decorator:

    - One of the predefined functions in baloo.functions.
    - Implementing a function which encodes the result.  kwargs are
      automatically passed to it.
    - Pure Weld code and mapping.
    - Weld code and mapping along with a dynamically linked C++ lib
      containing the UDF.
    - Using a NumPy function, which however is EAGER and hence requires
      self.values to be raw.  Additionally, NumPy does not support kwargs
      in (all) functions so must use raw decorator to strip away
      weld_type.
    - Implementing an eager function with the same precondition as above.
      Use the raw decorator to check this.

    Parameters
    ----------
    func : function or str
        Weld code as a str to encode or function from baloo.functions.
    mapping : dict, optional
        Additional mappings in the weld_template to replace on execution.
        self is added by default to reference to this Series.
        NOTE(review): a caller-supplied mapping dict is mutated in place.
    new_dtype : numpy.dtype, optional
        Specify the new dtype of the result Series.
        If None, it assumes it's the same dtype as before the apply.

    Returns
    -------
    Series
        With UDF result.

    Examples
    --------
    >>> import baloo as bl
    >>> sr = bl.Series([1, 2, 3])
    >>> weld_template = 'map({self}, |e| e + {scalar})'
    >>> mapping = {'scalar': '2L'}
    >>> print(sr.apply(weld_template, mapping).evaluate())
    <BLANKLINE>
    0    3
    1    4
    2    5
    >>> weld_template2 = 'map({self}, |e| e + 3L)'
    >>> print(sr.apply(weld_template2).evaluate())
    <BLANKLINE>
    0    4
    1    5
    2    6
    >>> print(bl.Series([1., 4., 100.]).apply(bl.sqrt).evaluate())  # lazy predefined function
    <BLANKLINE>
    0    1
    1    2
    2    10
    >>> sr = bl.Series([4, 2, 3, 1])
    >>> print(sr.apply(bl.sort, kind='q').evaluate())  # eager wrapper over np.sort (which uses raw decorator)
    <BLANKLINE>
    0    1
    1    2
    2    3
    3    4
    >>> print(sr.apply(bl.raw(np.sort, kind='q')).evaluate())  # np.sort directly
    <BLANKLINE>
    0    1
    1    2
    2    3
    3    4
    >>> print(sr.apply(bl.raw(lambda x: np.sort(x, kind='q'))).evaluate())  # lambda also works, with x=np.array
    <BLANKLINE>
    0    1
    1    2
    2    3
    3    4

    See tests/core/cudf/* and tests/core/test_series.test_cudf for a
    C UDF example.
    """
    if callable(func):
        # Eager/encoding path: func receives the raw values plus the
        # weld_type and is expected to produce the new values itself.
        return Series(func(self.values, weld_type=self.weld_type, **kwargs),
                      self.index, self.dtype, self.name)
    elif isinstance(func, str):
        check_type(mapping, dict)
        check_dtype(new_dtype)
        # `self` always maps to this Series' values in the weld template;
        # user-supplied mappings cannot override it (update runs last).
        default_mapping = {'self': self.values}
        if mapping is None:
            mapping = default_mapping
        else:
            mapping.update(default_mapping)
        if new_dtype is None:
            new_dtype = self.dtype
        return Series(weld_udf(func, mapping), self.index, new_dtype, self.name)
    else:
        raise TypeError('Expected function or str defining a weld_template')
def finish(self, width=1, color=None, fill=None, roundCap=False, dashes=None,
           even_odd=False, morph=None, closePath=True):
    """Finish the current drawing segment.

    Notes:
        Apply stroke and fill colors, dashes, line style and width, or
        morphing.  Also determines whether any open path should be closed
        by a connecting line to its start point.

    The accumulated operator string in ``self.draw_cont`` is wrapped in a
    q/Q graphics-state pair, appended to ``self.totalcont`` and reset.
    """
    if self.draw_cont == "":  # treat empty contents as no-op
        return
    color_str = ColorCode(color, "c")
    # ensure proper color string
    fill_str = ColorCode(fill, "f")
    # ensure proper fill string
    if width != 1:
        self.draw_cont += "%g w\n" % width  # line width operator
    if roundCap:
        # J sets the line cap style, j the line join style
        self.draw_cont += "%i J %i j\n" % (roundCap, roundCap)
    if dashes is not None and len(dashes) > 0:
        self.draw_cont += "%s d\n" % dashes  # dash pattern
    if closePath:
        self.draw_cont += "h\n"  # close the current subpath
        self.lastPoint = None
    if color is not None:
        self.draw_cont += color_str
    if fill is not None:
        self.draw_cont += fill_str
        # B fills and strokes; B* uses the even-odd fill rule
        if not even_odd:
            self.draw_cont += "B\n"
        else:
            self.draw_cont += "B*\n"
    else:
        self.draw_cont += "S\n"  # stroke only
    if CheckMorph(morph):
        # Prepend a transformation matrix computed around the fixed
        # point morph[0] (with the y-axis flipped via self.height)
        m1 = Matrix(1, 0, 0, 1, morph[0].x + self.x,
                    self.height - morph[0].y - self.y)
        mat = ~m1 * morph[1] * m1
        self.draw_cont = "%g %g %g %g %g %g cm\n" % JM_TUPLE(mat) + self.draw_cont
    # Wrap the finished segment in save/restore graphics state and reset
    # the per-segment buffer for the next drawing sequence.
    self.totalcont += "\nq\n" + self.draw_cont + "Q\n"
    self.draw_cont = ""
    self.lastPoint = None
    return
def register(self):
    """Register a new user.

    Prompts for every user attribute and creates the user on the Napps
    server once all required fields are filled.

    Returns:
        result (string): Response of user registration process.
    """
    separator = '--------------------------------------------------------------'
    print(separator)
    print('Welcome to the user registration process.')
    print(separator)
    print("To continue you must fill the following fields.")
    user = {}
    for attribute, value in self.attributes.items():
        field_name = value['field_name']
        pattern = value['pattern']
        if attribute == 'password':
            # Password input is hidden rather than required-checked.
            user[attribute] = self.ask_question(field_name, pattern,
                                                password=True)
        else:
            required = attribute in self.required
            user[attribute] = self.ask_question(field_name, pattern, required)
    return self._users_client.register(user)
def urlForViewState(self, person, viewState):
    """Return a url for L{OrganizerFragment} which will display C{person}
    in state C{viewState}.

    @type person: L{Person}
    @type viewState: L{ORGANIZER_VIEW_STATES} constant.
    @rtype: L{url.URL}
    """
    # ideally there would be a more general mechanism for encoding state
    # like this in a url, rather than ad-hoc query arguments for each
    # fragment which needs to do it.
    organizerURL = self._webTranslator.linkTo(self.storeID)
    pathSegments = organizerURL.split('/')[1:]
    querySegments = (('initial-person', person.name),
                     ('initial-state', viewState))
    return url.URL(netloc='', scheme='',
                   pathsegs=pathSegments, querysegs=querySegments)
def innerHTML(self):
    '''innerHTML - Returns an HTML string of the inner contents of this
        tag, including children.

        @return - String of inner contents HTML
    '''
    # A self-closing tag has no inner contents by definition.
    if self.isSelfClosing is True:
        return ''
    # Tags contribute their full outer HTML (start tag, contents, end
    # tag); text nodes are appended verbatim.
    pieces = [block.outerHTML if isinstance(block, AdvancedTag) else block
              for block in self.blocks]
    return ''.join(pieces)
def precision_pct_pred_pos_curve(self, interval=False, delta_tau=0.001):
    """Computes the relationship between precision
    and the percent of positively classified datapoints.

    :param interval: when False, sweep the threshold through the sorted
        predicted probabilities; when True, sweep it on a fixed grid of
        step ``delta_tau``.
    :param delta_tau: grid step used when ``interval`` is True.
    :returns: tuple of parallel lists (precisions, pct_pred_pos, taus).
    """
    # compute thresholds based on the sorted probabilities
    # NOTE(review): this method temporarily mutates self.threshold to
    # reuse the self.precision / self.pct_pred_pos properties, and
    # restores the original value at the end.
    orig_thresh = self.threshold
    sorted_labels, sorted_probs = self.sorted_values
    precisions = []
    pct_pred_pos = []
    taus = []
    tau = 0
    if not interval:
        for k in range(len(sorted_labels)):  # compute new accuracy
            self.threshold = tau
            precisions.append(self.precision)
            pct_pred_pos.append(self.pct_pred_pos)
            taus.append(tau)
            # update threshold
            tau = sorted_probs[k]
    else:
        while tau < 1.0:  # compute new accuracy
            self.threshold = tau
            precisions.append(self.precision)
            pct_pred_pos.append(self.pct_pred_pos)
            taus.append(tau)
            # update threshold
            tau += delta_tau
    # add last datapoint
    tau = 1.0
    self.threshold = tau
    precisions.append(self.precision)
    pct_pred_pos.append(self.pct_pred_pos)
    taus.append(tau)
    # terminal point just past tau = 1, where nothing is classified
    # positive; precision is pinned to 1.0 by convention there
    precisions.append(1.0)
    pct_pred_pos.append(0.0)
    taus.append(1.0 + 1e-12)
    self.threshold = orig_thresh
    return precisions, pct_pred_pos, taus
def avail_images():
    '''Available images.

    Queries the grid image list and returns the records keyed by their
    friendly name.
    '''
    response = _query('grid', 'image/list')
    return {item['friendlyName']: item for item in response['list']}
def register(self, app, options):
    """Register the blueprint to the sanic app.

    :param app: Instance of :class:`sanic.app.Sanic` class
    :param options: Options to be used while registering the
        blueprint into the app.
        *url_prefix* - URL Prefix to override the blueprint prefix
    """
    url_prefix = options.get("url_prefix", self.url_prefix)
    # Routes
    for future in self.routes:
        # attach the blueprint name to the handler so that it can be
        # prefixed properly in the router
        future.handler.__blueprintname__ = self.name
        # Prepend the blueprint URI prefix if available
        uri = url_prefix + future.uri if url_prefix else future.uri
        # A route-level version overrides the blueprint-level one.
        version = future.version or self.version
        app.route(
            uri=uri[1:] if uri.startswith("//") else uri,
            methods=future.methods,
            host=future.host or self.host,
            strict_slashes=future.strict_slashes,
            stream=future.stream,
            version=version,
            name=future.name,
        )(future.handler)
    for future in self.websocket_routes:
        # attach the blueprint name to the handler so that it can be
        # prefixed properly in the router
        future.handler.__blueprintname__ = self.name
        # Prepend the blueprint URI prefix if available
        uri = url_prefix + future.uri if url_prefix else future.uri
        app.websocket(
            uri=uri,
            host=future.host or self.host,
            strict_slashes=future.strict_slashes,
            name=future.name,
        )(future.handler)
    # Middleware
    for future in self.middlewares:
        if future.args or future.kwargs:
            app.register_middleware(future.middleware,
                                    *future.args, **future.kwargs)
        else:
            app.register_middleware(future.middleware)
    # Exceptions
    for future in self.exceptions:
        app.exception(*future.args, **future.kwargs)(future.handler)
    # Static Files
    for future in self.statics:
        # Prepend the blueprint URI prefix if available
        uri = url_prefix + future.uri if url_prefix else future.uri
        app.static(uri, future.file_or_directory,
                   *future.args, **future.kwargs)
    # Event listeners
    for event, listeners in self.listeners.items():
        for listener in listeners:
            app.listener(event)(listener)
def _match_path(pathname, included_patterns, excluded_patterns, case_sensitive=True):
    """Internal function same as :func:`match_path` but does not check
    arguments.

    Doctests::
        >>> _match_path("/users/gorakhargosh/foobar.py", ["*.py"], ["*.PY"], True)
        True
        >>> _match_path("/users/gorakhargosh/FOOBAR.PY", ["*.py"], ["*.PY"], True)
        False
        >>> _match_path("/users/gorakhargosh/foobar/", ["*.py"], ["*.txt"], False)
        False
        >>> _match_path("/users/gorakhargosh/FOOBAR.PY", ["*.py"], ["*.PY"], False)
        Traceback (most recent call last):
        ValueError: conflicting patterns `set(['*.py'])` included and excluded
    """
    if case_sensitive:
        include = set(included_patterns)
        exclude = set(excluded_patterns)
    else:
        # Normalize both pattern sets to lower case for comparison.
        include = set(map(_string_lower, included_patterns))
        exclude = set(map(_string_lower, excluded_patterns))
    conflicting = include & exclude
    if conflicting:
        raise ValueError('conflicting patterns `%s` included and excluded' % conflicting)
    if not match_path_against(pathname, include, case_sensitive):
        return False
    return not match_path_against(pathname, exclude, case_sensitive)
def register(self, params, target):
    """Expect observation with known target.

    Records the (params, target) observation in the internal target
    space, then fires the optimization-step event to notify subscribers.
    """
    self._space.register(params, target)
    # NOTE(review): 'OPTMIZATION_STEP' is presumably the (misspelled)
    # name of the constant on Events -- confirm before "fixing" locally.
    self.dispatch(Events.OPTMIZATION_STEP)
async def _poll(self):
    """Poll status of operation so long as operation is incomplete and
    we have an endpoint to query.

    :raises: OperationFailed if operation status 'Failed' or 'Cancelled'.
    :raises: BadStatus if response status invalid.
    :raises: BadResponse if response invalid.
    """
    # Keep polling (with a delay between requests) until finished.
    while not self.finished():
        await self._delay()
        await self.update_status()
    if failed(self._operation.status):
        raise OperationFailed("Operation failed or cancelled")
    elif self._operation.should_do_final_get():
        # POST operations with a location URL are re-queried there;
        # everything else re-queries the original request URL.
        if self._operation.method == 'POST' and self._operation.location_url:
            final_get_url = self._operation.location_url
        else:
            final_get_url = self._operation.initial_response.request.url
        self._response = await self.request_status(final_get_url)
        self._operation.get_status_from_resource(self._response)
def debug(self, debug):
    """Enables/Disables debugging for this NIO.

    :param debug: debug value (0 = disable, 1 = enable)
    """
    # Generator-based coroutine: forwards the setting to the hypervisor.
    yield from self._hypervisor.send("nio set_debug {name} {debug}".format(name=self._name, debug=debug))
def _long_by_position ( self , syllable , sentence ) :
"""Check if syllable is long by position .
Long by position includes :
1 ) Next syllable begins with two consonants , unless those consonants
are a stop + liquid combination
2 ) Next syllable begins with a double consonant
3 ) Syllable ends with a consonant and the next syllable begins with a
consonant
: param syllable : Current syllable
: param sentence : Current sentence
: return : True if syllable is long by position
: rtype : bool""" | try :
next_syll = sentence [ sentence . index ( syllable ) + 1 ]
# Long by position by case 1
if ( next_syll [ 0 ] in self . sing_cons and next_syll [ 1 ] in self . sing_cons ) and ( next_syll [ 0 ] not in self . stops and next_syll [ 1 ] not in self . liquids ) :
return True
# Long by position by case 2
elif syllable [ - 1 ] in self . vowels and next_syll [ 0 ] in self . doub_cons :
return True
# Long by position by case 3
elif syllable [ - 1 ] in self . sing_cons and ( next_syll [ 0 ] in self . sing_cons ) :
return True
else :
pass
except IndexError :
logger . info ( "IndexError while checking if syllable '%s' is long. Continuing." , syllable ) |
def _extractCreations ( self , dataSets ) :
"""Find the elements of C { dataSets } which represent the creation of new
objects .
@ param dataSets : C { list } of C { dict } mapping C { unicode } form submission
keys to form submission values .
@ return : iterator of C { tuple } s with the first element giving the opaque
identifier of an object which is to be created and the second
element giving a C { dict } of all the other creation arguments .""" | for dataSet in dataSets :
modelObject = self . _objectFromID ( dataSet [ self . _IDENTIFIER_KEY ] )
if modelObject is self . _NO_OBJECT_MARKER :
dataCopy = dataSet . copy ( )
identifier = dataCopy . pop ( self . _IDENTIFIER_KEY )
yield identifier , dataCopy |
def _dict_lookup(cls, item, **kwargs):
    '''Retrieve the key or value from a dict based on ``item``.

    kwarg lookup: dict to search for item
    kwarg value_lookup: bool; when True compare ``item`` against values
        (returning the matching key), otherwise against keys (returning
        the matching value)
    '''
    log.debug('item == %s', item)
    by_value = kwargs.get('value_lookup', False)
    if 'lookup' in kwargs:
        # Case-insensitive comparison on the text form of item.
        needle = six.text_type(item).lower()
        for key, value in six.iteritems(kwargs['lookup']):
            if by_value:
                if six.text_type(value).lower() == needle:
                    log.debug('returning key %s', key)
                    return key
            elif six.text_type(key).lower() == needle:
                log.debug('returning value %s', value)
                return value
    return 'Invalid Value'
def changelist_view(self, request, extra_context=None):
    """If we only have a single preference object redirect to it,
    otherwise display listing."""
    model = self.model
    if model.objects.all().count() <= 1:
        # Single (or no) object: jump straight to its change form.
        obj = model.singleton.get()
        change_url = reverse(
            'admin:jmbo_sitemap_%s_change' % model._meta.module_name,
            args=(obj.id,))
        return redirect(change_url)
    return super(HTMLSitemapAdmin, self).changelist_view(request)
def add_channel(self, channel, owner, package=None, version=None, filename=None):
    '''Add a channel to the specified files.

    :param channel: channel to add
    :param owner: the user to add the channel to (all files of all
        packages for this user)
    :param package: the package to add the channel to (all files in this
        package)
    :param version: the version to add the channel to (all files in this
        version of the package)
    :param filename: the exact file to add the channel to
    '''
    endpoint = '%s/channels/%s/%s' % (self.domain, owner, channel)
    payload, headers = jencode(package=package, version=version, basename=filename)
    response = self.session.post(endpoint, data=payload, headers=headers)
    self._check_response(response, [201])
def verification_checks(self):
    """Access the verification_checks.

    :returns: twilio.rest.preview.acc_security.service.verification_check.VerificationCheckList
    :rtype: twilio.rest.preview.acc_security.service.verification_check.VerificationCheckList
    """
    # Build the list resource lazily, the first time it is requested.
    if self._verification_checks is None:
        self._verification_checks = VerificationCheckList(
            self._version,
            service_sid=self._solution['sid'],
        )
    return self._verification_checks
def attr_matches(self, text):
    """Compute matches when text contains a dot.

    Assuming the text is of the form NAME.NAME....[NAME], and is
    evaluable in self.namespace, it will be evaluated and its attributes
    (as revealed by dir()) are used as possible completions.  (For class
    instances, class members are also considered.)

    WARNING: this can still invoke arbitrary C code, if an object
    with a __getattr__ hook is evaluated.
    """
    def _method_or_attr(thisobject, item):
        # decide whether to append a '(' to the end of the attr based
        # on whether its callable
        if hasattr(getattr(thisobject, item), '__call__'):
            return item + '('
        else:
            return item
    # Completable commands mapped to the filter kwargs forwarded to
    # filter_or_get when completing the twig argument of that command.
    tb_compl_commands = {'.': {}, '[': {}, '.get(': {}, '.set(': {},
                         '.filter(': {}, '.filter_or_get(': {},
                         '.get_parameter(': {}, '.remove_parameter(': {},
                         '.remove_parameters_all(': {}, '.get_value(': {},
                         '.set_value(': {}, '.set_value_all(': {},
                         # TODO: default_unit, adjust, prior, posterior, enabled?
                         '.get_history(': {'context': 'history'},
                         '.remove_history(': {'context': 'history'},
                         '.get_component(': {'context': 'system'},
                         '.remove_component(': {'context': 'system'},
                         '.get_mesh(': {'context': 'mesh'},
                         '.remove_mesh(': {'context': 'mesh'},
                         '.get_constraint(': {'context': 'constraint'},
                         '.remove_constraint(': {'context': 'constraint'},
                         '.flip_constraint(': {'context': 'constraint'},
                         '.run_constraint(': {'context': 'constraint'},
                         '.get_compute(': {'context': 'compute'},
                         '.remove_compute(': {'context': 'compute'},
                         '.run_compute(': {'context': 'compute'},
                         '.get_prior(': {'context': 'prior'},
                         # TODO: remove_prior, run_prior, enable_prior, disable_prior
                         '.get_fitting(': {'context': 'fitting'},
                         '.remove_fitting(': {'context': 'fitting'},
                         '.run_fitting(': {'context': 'fitting'},
                         '.get_posterior(': {'context': 'posterior'},
                         # TODO: remove_posterior, draw_from_posterior
                         '.get_feedback(': {'context': 'feedback'},
                         '.remove_feedback(': {'context': 'feedback'},
                         # TODO: plots, plugins
                         }
    expr = None
    # Split the input on the first completable command found in it.
    for cmd, filter_kwargs in tb_compl_commands.items():
        if cmd in text:
            expr, attr = text.rsplit(cmd, 1)
            # ~ if len(attr) == 0:
            # ~     return []
            if attr[0] not in ["'", '"'] and cmd != '.':
                # method arguments must be quoted strings to complete
                return []
            else:
                if cmd == '.':
                    # then we're just looking for attributes and don't
                    # need to offset for the ' or "
                    stringchar = ''
                    attr = attr
                else:
                    # then we're the first argument of some method
                    # and need to account for the starting ' or "
                    stringchar = attr[0]
                    attr = attr[1:]
            break
    if expr is None:  # then we haven't found a match
        return []
    try:
        # NOTE(review): eval of user-typed text against self.namespace --
        # acceptable only because this is an interactive completer.
        thisobject = eval(expr, self.namespace)
    except Exception:
        return []
    if cmd == '.':
        # then we're looking for attributes of thisobject (PS or bundle)
        # that start with attr
        words = [_method_or_attr(thisobject, item) for item in dir(thisobject) if item[:len(attr)] == attr]
    else:
        # then we're looking to autocomplete the twig attr for thisobject
        # (PS or bundle)
        words = thisobject.filter_or_get(attr, autocomplete=True, **filter_kwargs)
    matches = []
    n = len(attr)  # NOTE(review): n is never used below; kept for fidelity
    for word in words:
        matches.append('{}{}{}{}'.format(expr, cmd, stringchar, word))
    return matches
def get_values(self, set, selected_meta):
    """Retrieve the selected metadata values for the samples in the given set.

    :param set: cluster (DataFrame-like) whose index identifies the samples
    :param selected_meta: name of the metadata column to retrieve
    :return: the metadata values of the cluster, cast to float when possible
    """
    warnings.warn("\n\nThis method assumes that the last level of the index is the sample_id.\n"
                  "In case of single index, the index itself should be the sample_id")
    # The innermost index level is expected to hold the sample ids.
    ids = set.index.get_level_values(-1)
    values = self.meta.loc[ids][selected_meta]
    try:
        return values.astype(float)
    except ValueError:
        print("the values should be numeric")
        return values
def _process_event(event: Event, sdp_state: SDPState, service_states: List[ServiceState]):
    """Process a SDP state change event.

    :param event: state-change event read from the events queue
    :param sdp_state: object holding the overall SDP current/target state
    :param service_states: per-service state objects for the SDP services
    """
    LOG.debug('Event detected! (id : "%s", type: "%s", data: "%s")',
              event.object_id, event.type, event.data)
    if event.object_id == 'SDP' and event.type == 'current_state_updated':
        LOG.info('SDP current state updated, no action required!')
    if event.object_id == 'SDP' and event.type == 'target_state_updated':
        LOG.info("SDP target state changed to '%s'", sdp_state.target_state)
        # If the sdp is already in the target state do nothing
        if sdp_state.target_state == sdp_state.current_state:
            LOG.warning('SDP already in %s state', sdp_state.current_state)
            return
        # Check that a transition to the target state is allowed in the
        # current state.
        if not sdp_state.is_target_state_allowed(sdp_state.target_state):
            LOG.error('Transition to %s is not allowed when in state %s',
                      sdp_state.target_state, sdp_state.current_state)
            # Disallowed transition: reset the target back to the current state.
            sdp_state.target_state = sdp_state.current_state
            return
        _update_services_target_state(sdp_state.target_state)
        # If asking SDP to turn off, also turn off services.
        if sdp_state.target_state == 'off':
            LOG.info('Turning off services!')
            for service_state in service_states:
                service_state.update_target_state('off')
                service_state.update_current_state('off')
        LOG.info('Processing target state change request ...')
        # NOTE(review): the sleep appears to stand in for real processing
        # work — TODO confirm.
        time.sleep(0.1)
        LOG.info('Done processing target state change request!')
        # Assuming that the SDP has responding to the target
        # target state command by now, set the current state
        # to the target state.
        sdp_state.update_current_state(sdp_state.target_state)
        if sdp_state.current_state == 'alarm':
            LOG.debug('raising SDP state alarm')
            SIP_STATE_ALARM.set(1)
        else:
            SIP_STATE_ALARM.set(0)
        try:  # FIXME(BMo) the pushgateway host should not be hardcoded!
            push_to_gateway('platform_pushgateway:9091', job='SIP',
                            registry=COLLECTOR_REGISTRY)
        except urllib.error.URLError:
            # Best-effort metrics push; a missing gateway is not fatal.
            LOG.warning("Unable to connect to the Alarms service!")
def read(filename):
    """Return the whitespace-stripped content of `filename`.

    The file is resolved relative to the module-level ``here`` directory.
    """
    with open(join(here, filename), "r") as handle:
        return handle.read().strip()
def _maybe_append_chunk(chunk_info, line_index, column, contents, chunks):
    """Append chunk_info to chunks if it is set."""
    if not chunk_info:
        return
    start, end = chunk_info[0], chunk_info[1]
    chunks.append(_chunk_from_ranges(contents, start, end, line_index, column))
def failback_from_replicant(self, volume_id, replicant_id):
    """Failback from a volume replicant.

    :param integer volume_id: The id of the volume
    :param integer replicant_id: ID of replicant to failback from
    :return: Returns whether failback was successful or not
    """
    # Delegates to the SoftLayer Network_Storage API: the replicant id is
    # the positional argument; the volume id selects the API object.
    return self.client.call('Network_Storage', 'failbackFromReplicant', replicant_id, id=volume_id)
def adjustNorthPointer(self):
    '''Adjust the position and orientation of
    the north pointer.'''
    self.headingNorthText.set_size(self.fontSize)
    rotate = (mpl.transforms.Affine2D().rotate_deg_around(0.0, 0.0, self.heading)
              + self.axes.transData)
    self.headingNorthText.set_transform(rotate)
    # Keep the 'N' label upright when the pointer is on the lower half.
    if 90 < self.heading < 270:
        textRotation = self.heading - 180
    else:
        textRotation = self.heading
    self.headingNorthText.set_rotation(textRotation)
    self.headingNorthTri.set_transform(rotate)
    # Hide the label when it would overlap with the heading pointer.
    if self.heading <= 10.0 or self.heading >= 350.0:
        self.headingNorthText.set_text('')
    else:
        self.headingNorthText.set_text('N')
def _iter_unfolded_lines(self):
    """Iter input unfolded lines. Skip comments.

    Physical lines beginning with a space are "folded" continuation
    lines and are joined onto the preceding line with the leading space
    dropped. Lines starting with ``#`` are skipped entirely.
    """
    line = self._input_file.readline()
    while line:
        # NOTE(review): the counters only account for the first physical
        # line of a folded group; continuation lines read below are not
        # counted — confirm this is intentional.
        self.line_counter += 1
        self.byte_counter += len(line)
        line = self._strip_line_sep(line)
        nextline = self._input_file.readline()
        while nextline and nextline[:1] == b' ':
            # Fold: append continuation content minus its leading space.
            line += self._strip_line_sep(nextline)[1:]
            nextline = self._input_file.readline()
        if not line.startswith(b'#'):
            yield line
        # The lookahead line becomes the next line to process.
        line = nextline
def parse_filename(filename):
    """Parse media filename for metadata.

    :param str filename: the name of media file
    :returns: dict of metadata attributes found in filename
        or None if no matching expression.
    :rtype: dict
    """
    for matcher in patterns.get_expressions():
        match = matcher.match(filename)
        if not match:
            continue
        groups = match.groupdict().keys()
        # First matching expression wins.
        return {
            'pattern': matcher.pattern,
            'series_name': match.group('seriesname'),
            'season_number': _get_season_no(match, groups),
            'episode_numbers': _get_episodes(match, groups),
        }
    return None
def find_element(list, index, index2=1):
    """When you have list like: a = [(0, 10), (1, 20), (2, 30)] and you need to get value from tuple with first value == index

    Usage:
    {% find_element 1 %} will return 20
    """
    # The parameter name `list` shadows the builtin but is kept as part of
    # the template-tag interface.
    return next((item[index2] for item in list if item[0] == index), None)
def process_dynamodb_differ_record(record, current_model, durable_model, diff_func=None):
    """Processes a DynamoDB NewImage record (for Differ events).

    This will ONLY process the record if the record exists in one of the regions defined by the PROXY_REGIONS of
    the current Proxy function.

    :param record: a single DynamoDB stream record (dict)
    :param current_model: model class for the "Current" table
    :param durable_model: model class for the "Durable" table
    :param diff_func: optional diffing callable; defaults to ``default_diff``
    """
    diff_func = diff_func or default_diff
    # Nothing special needs to be done for deletions as far as items that are too big for SNS are concerned.
    # This is because the deletion will remove the `configuration` field and save the item without it.
    if record['eventName'] == 'REMOVE':
        # We are *ONLY* tracking the deletions from the DynamoDB TTL service.
        # Why? Because when we process deletion records, we are first saving a new "empty" revision to the "Current"
        # table. The "empty" revision will then trigger this Lambda as a "MODIFY" event. Then, right after it saves
        # the "empty" revision, it will then delete the item from the "Current" table. At that point,
        # we have already saved the "deletion revision" to the "Durable" table. Thus, no need to process
        # the deletion events -- except for TTL expirations (which should never happen -- but if they do, you need
        # to investigate why...)
        if record.get('userIdentity'):
            if record['userIdentity']['type'] == 'Service':
                if record['userIdentity']['principalId'] == 'dynamodb.amazonaws.com':
                    LOG.error(f"[TTL] We received a TTL delete. Old Image: {record['dynamodb']['OldImage']}")
                    old_image = remove_current_specific_fields(record['dynamodb']['OldImage'])
                    delete_differ_record(old_image, durable_model)
    if record['eventName'] in ['INSERT', 'MODIFY']:
        arn = record['dynamodb']['Keys']['arn']['S']
        current_revision = deserialize_current_record_to_durable_model(record, current_model, durable_model)
        if not current_revision:
            LOG.error(f'[?] Received item too big for SNS, and was not able to find the original item with ARN: {arn}')
            return
        if record['eventName'] == 'INSERT':
            current_revision.save()
            LOG.debug('[+] Saving new revision to durable table.')
        elif record['eventName'] == 'MODIFY':
            # Diff against the previous durable revision before saving.
            modify_record(durable_model, current_revision, arn, current_revision.eventTime, diff_func)
def encode_request(name, expected, updated):
    """Encode request into client_message.

    Builds a Hazelcast client message carrying the name plus the expected
    and updated long values.
    """
    size = calculate_size(name, expected, updated)
    message = ClientMessage(payload_size=size)
    message.set_message_type(REQUEST_TYPE)
    message.set_retryable(RETRYABLE)
    message.append_str(name)
    message.append_long(expected)
    message.append_long(updated)
    # Finalize the frame header now that the payload is complete.
    message.update_frame_length()
    return message
def _log_exception ( self , exception ) :
"""Logs an exception .
: param Exception exception : The exception .
: rtype : None""" | self . _io . error ( str ( exception ) . strip ( ) . split ( os . linesep ) ) |
def create_dev_vlan(devid, vlanid, vlan_name):
    """function takes devid and vlanid vlan_name of specific device and 802.1q VLAN tag and issues a RESTFUL call to add the
    specified VLAN from the target device. VLAN Name MUST be valid on target device.

    :param devid: int or str value of the target device
    :param vlanid: int or str value of target 802.1q VLAN
    :param vlan_name: str value of the target 802.1q VLAN name. MUST be valid name on target device.
    :return: HTTP Status code of 201 with no values.
    """
    import json
    if auth is None or url is None:  # checks to see if the imc credentials are already available
        set_imc_creds()
    create_dev_vlan_url = "/imcrs/vlan?devId=" + str(devid)
    f_url = url + create_dev_vlan_url
    # Build the payload with json.dumps so vlan names containing quotes,
    # backslashes or other special characters are escaped correctly; the
    # previous string concatenation produced invalid JSON for such names.
    payload = json.dumps({"vlanId": str(vlanid), "vlanName": str(vlan_name)})
    r = requests.post(f_url, data=payload, auth=auth, headers=headers)
    print(r.status_code)
    if r.status_code == 201:
        print('Vlan Created')
        return r.status_code
    elif r.status_code == 409:
        return '''Unable to create VLAN.\nVLAN Already Exists\nDevice does not support VLAN function'''
    else:
        # NOTE(review): falls through returning None on other statuses;
        # kept for backward compatibility.
        print("An Error has occured")
def order_by_first_occurrence(self):
    """:return: The events ordered by their earliest occurrence; events
    without any occurrence sort last."""
    def sort_key(event):
        occurrences = event.occurrence_list
        if occurrences:
            return occurrences[0].start
        # No occurrences: use a far-future timestamp so the event sorts last.
        return localize(datetime.max - timedelta(days=365))
    return sorted(list(self), key=sort_key)
def get_fields(cls):
    """Returns a dictionary of fields and field instances for this schema."""
    fields = {}
    for attr_name in dir(cls):
        attr = getattr(cls, attr_name)
        if not isinstance(attr, Field):
            continue
        # A field may expose itself under a different name via `field_name`.
        fields[attr.field_name or attr_name] = attr
    return fields
async def get_pushed_stream_ids(self, parent_stream_id: int) -> List[int]:
    """Return a list of all streams pushed by the remote peer that are
    children of the specified stream. If no streams have been pushed when
    this method is called, waits until at least one stream has been pushed.

    :param parent_stream_id: id of the stream whose pushed children to return
    :raises NoSuchStreamError: if ``parent_stream_id`` is unknown
    """
    if parent_stream_id not in self._streams:
        logger.error(f'Parent stream {parent_stream_id} unknown to this connection')
        raise NoSuchStreamError(parent_stream_id)
    parent = self._get_stream(parent_stream_id)
    # Block until the remote peer has pushed at least one child stream.
    await parent.pushed_streams_available.wait()
    pushed_streams_ids = self._pushed_stream_ids[parent.id]
    stream_ids: List[int] = []
    if len(pushed_streams_ids) > 0:
        stream_ids.extend(pushed_streams_ids)
        # Drain the pending ids and reset the event so a subsequent call
        # waits for the next push.
        pushed_streams_ids.clear()
        parent.pushed_streams_available.clear()
    return stream_ids
def version_downloads(self):
    """Return a dictionary of version: download_count pairs."""
    counts = OrderedDict()
    for release, files in self.release_info:
        counts[release] = sum(entry['downloads'] for entry in files)
    return counts
def svg(self, file, scale=1, module_color='#000', background=None, quiet_zone=4, xmldecl=True, svgns=True, title=None, svgclass='pyqrcode', lineclass='pyqrline', omithw=False, debug=False):
    """This method writes the QR code out as an SVG document. The
    code is drawn by drawing only the modules corresponding to a 1. They
    are drawn using a line, such that contiguous modules in a row
    are drawn with a single line.

    The *file* parameter is used to specify where to write the document
    to. It can either be a writable stream or a file path.

    The *scale* parameter sets how large to draw a single module. By
    default one pixel is used to draw a single module. This may make the
    code too small to be read efficiently. Increasing the scale will make
    the code larger. Unlike the png() method, this method will accept
    fractional scales (e.g. 2.5).

    Note, three things are done to make the code more appropriate for
    embedding in a HTML document. The "white" part of the code is actually
    transparent. The code itself has a class given by *svgclass* parameter.
    The path making up the QR code uses the class set using the *lineclass*.
    These should make the code easier to style using CSS.

    By default the output of this function is a complete SVG document. If
    only the code itself is desired, set the *xmldecl* to false. This will
    result in a fragment that contains only the "drawn" portion of the code.
    Likewise, you can set the *title* of the document. The SVG name space
    attribute can be suppressed by setting *svgns* to False.

    When True the *omithw* indicates if width and height attributes should
    be omitted. If these attributes are omitted, a ``viewBox`` attribute
    will be added to the document.

    You can also set the colors directly using the *module_color* and
    *background* parameters. The *module_color* parameter sets what color to
    use for the data modules (the black part on most QR codes). The
    *background* parameter sets what color to use for the background (the
    white part on most QR codes). The parameters can be set to any valid
    SVG or HTML color. If the background is set to None, then no background
    will be drawn, i.e. the background will be transparent. Note, many color
    combinations are unreadable by scanners, so be careful.

    The *quiet_zone* parameter sets how wide the quiet zone around the code
    should be. According to the standard this should be 4 modules. It is
    left settable because such a wide quiet zone is unnecessary in many
    applications where the QR code is not being printed.

    Example:
        >>> code = pyqrcode.create('Hello. Uhh, can we have your liver?')
        >>> code.svg('live-organ-transplants.svg', 3.6)
        >>> code.svg('live-organ-transplants.svg', scale=4,
                     module_color='brown', background='0xFFFFF')
    """
    # Pure delegation: all rendering logic lives in the builder module.
    builder._svg(self.code, self.version, file, scale=scale, module_color=module_color, background=background, quiet_zone=quiet_zone, xmldecl=xmldecl, svgns=svgns, title=title, svgclass=svgclass, lineclass=lineclass, omithw=omithw, debug=debug)
def _commitData ( self , changeset ) :
"""Get data from a commit object
: param changeset : tuple with changeset data
: type changeset : tuple""" | ( rev , node , tags , branch , author , desc , date ) = changeset
ts = int ( time . mktime ( date . timetuple ( ) ) )
return { "hexsha" : node , "adate" : ts , "cdate" : ts , "author" : author , "message" : desc } |
def api_start(working_dir, host, port, thread=True):
    """Start the global API server.

    Returns the API server thread
    """
    server = BlockstackdAPIServer(working_dir, host, port)
    log.info("Starting API server on port {}".format(port))
    # Only spawn the background thread when requested; the caller may want
    # to drive the server itself.
    if thread:
        server.start()
    return server
def poll(self):
    """Check if the pod is still running.

    Uses the same interface as subprocess.Popen.poll(): if the pod is
    still running, returns None. If the pod has exited, return the
    exit code if we can determine it, or 1 if it has exited but we
    don't know how. These are the return values JupyterHub expects.

    Note that a clean exit will have an exit code of zero, so it is
    necessary to check that the returned value is None, rather than
    just Falsy, to determine that the pod is still running.
    """
    # have to wait for first load of data before we have a valid answer
    if not self.pod_reflector.first_load_future.done():
        yield self.pod_reflector.first_load_future
    data = self.pod_reflector.pods.get(self.pod_name, None)
    if data is not None:
        if data.status.phase == 'Pending':
            return None
        ctr_stat = data.status.container_statuses
        if ctr_stat is None:  # No status, no container (we hope)
            # This seems to happen when a pod is idle-culled.
            return 1
        for c in ctr_stat:  # return exit code if notebook container has terminated
            if c.name == 'notebook':
                if c.state.terminated:  # call self.stop to delete the pod
                    if self.delete_stopped_pods:
                        yield self.stop(now=True)
                    return c.state.terminated.exit_code
                break
        # None means pod is running or starting up
        return None
    # pod doesn't exist or has been deleted
    return 1
def deprecatedmethod(classname='', info=''):
    """Defines a particular method as being deprecated - the
    method will exist for backwards compatibility, but will
    contain information as to how update code to become
    compatible with the current system.

    Code that is deprecated will only be supported through the
    end of a minor release cycle and will be cleaned during a
    major release upgrade.

    :usage  |from projex.decorators import deprecated
            |class A(object):
            |   @deprecatedmethod('A', 'Use A.printout instead')
            |   def format(self):
            |       print 'test'
            |   def printout(self):
            :       print 'new test'
    """
    def decorated(func):
        @wraps(func)
        def wrapped(*args, **kwds):
            frame = last_frame = None
            try:
                # Inspect the caller's frame so the warning reports where
                # the deprecated method was invoked from.
                frame = inspect.currentframe()
                last_frame = frame.f_back
                fname = last_frame.f_code.co_filename
                # NOTE(review): func.func_code is Python 2 only — confirm
                # this module is not expected to run on Python 3.
                func_file = func.func_code.co_filename
                opts = {'func': func.__name__, 'line': last_frame.f_lineno, 'file': fname, 'class': classname, 'info': info, 'package': projex.packageFromPath(func_file)}
                msg = 'Deprecated method called from %(file)s, line %(line)d.' '\n %(package)s.%(class)s.%(func)s is deprecated.' ' %(info)s' % opts
                logger.warning(errors.DeprecatedMethodWarning(msg))
            finally:
                # Explicitly drop frame references to break reference cycles.
                del frame
                del last_frame
            return func(*args, **kwds)
        wrapped.__name__ = func.__name__
        wrapped.__doc__ = ':warning This method is deprecated! %s\n\n' % info
        if func.__doc__:
            wrapped.__doc__ += func.__doc__
        wrapped.__dict__.update(func.__dict__)
        # Tag the wrapper so tooling can identify deprecated methods.
        wrapped.__dict__['func_type'] = 'deprecated method'
        return wrapped
    return decorated
def get_event(self, timeout=None, block=True):
    """Fetch the next available :class:`Event` from any source, or raise
    :class:`mitogen.core.TimeoutError` if no value is available within
    `timeout` seconds.

    On success, the message's :attr:`receiver
    <mitogen.core.Message.receiver>` attribute is set to the receiver.

    :param float timeout:
        Timeout in seconds.
    :param bool block:
        If :data:`False`, immediately raise
        :class:`mitogen.core.TimeoutError` if the select is empty.
    :return:
        :class:`Event`.
    :raises mitogen.core.TimeoutError:
        Timeout was reached.
    :raises mitogen.core.LatchError:
        :meth:`close` has been called, and the underlying latch is no
        longer valid.
    """
    if not self._receivers:
        raise Error(self.empty_msg)
    event = Event()
    while True:
        recv = self._latch.get(timeout=timeout, block=block)
        try:
            if isinstance(recv, Select):
                # Nested select: delegate to the child without blocking.
                event = recv.get_event(block=False)
            else:
                event.source = recv
                event.data = recv.get(block=False)
            if self._oneshot:
                self.remove(recv)
            if isinstance(recv, mitogen.core.Receiver):
                # Remove in 0.3.x.
                event.data.receiver = recv
            return event
        except mitogen.core.TimeoutError:
            # A receiver may have been queued with no result if another
            # thread drained it before we woke up, or because another
            # thread drained it between add() calling recv.empty() and
            # self._put(). In this case just sleep again.
            continue
def fromexportunreg(cls, bundle, exporterid, rsid, export_ref, exception, endpoint):
    # type: (Bundle, Tuple[str, str], Tuple[Tuple[str, str], int], ExportReference, Optional[Tuple[Any, Any, Any]], EndpointDescription) -> RemoteServiceAdminEvent
    """Creates a RemoteServiceAdminEvent object from the departure of an
    ExportRegistration.
    """
    # Alternate constructor: fixes the event type to EXPORT_UNREGISTRATION
    # and forwards all remaining details verbatim.
    return RemoteServiceAdminEvent(typ=RemoteServiceAdminEvent.EXPORT_UNREGISTRATION, bundle=bundle, cid=exporterid, rsid=rsid, export_ref=export_ref, exception=exception, endpoint=endpoint, )
def get_functions_cards(self, what, allfuncs):
    """Return a pair of lists of Card widgets for used and unused functions.

    :param what: a string: 'trigger', 'prereq', or 'action'
    :param allfuncs: a sequence of functions' (name, sourcecode, signature)
    """
    if not self.rule:
        return [], []
    used_names = getattr(self.rule, what + 's')
    midline = what.capitalize()
    unused = []
    for name, source, sig in allfuncs:
        if name in used_names:
            continue
        unused.append(Card(
            ud={'type': what, 'funcname': name, 'signature': sig},
            headline_text=name, show_art=False,
            midline_text=midline, text=source))
    used = []
    for name in used_names:
        # Render the live function object registered on the engine.
        func = getattr(getattr(self.engine, what), name)
        used.append(Card(
            ud={'type': what, 'funcname': name},
            headline_text=name, show_art=False,
            midline_text=midline, text=str(func)))
    return used, unused
def angle_iter(self, g_nums, ats_1, ats_2, ats_3, invalid_error=False):
    """Iterator over selected atomic angles.

    Angles are in degrees as with :meth:`angle_single`.  See
    `above <toc-generators_>`_ for more information on calling options.

    Parameters
    ----------
    g_nums
        |int| or iterable |int| or |None| -- Index of the desired geometry
    ats_1
        |int| or iterable |int| or |None| -- Index of the first atom
    ats_2
        |int| or iterable |int| or |None| -- Index of the second atom
    ats_3
        |int| or iterable |int| or |None| -- Index of the third atom
    invalid_error
        |bool|, optional -- If |False| (the default), |None| values are
        returned for results corresponding to invalid indices. If |True|,
        exceptions are raised per normal.

    Yields
    ------
    angle
        |npfloat_| -- Spanning angles in degrees between corresponding
        `ats_1`-`ats_2`-`ats_3`, from geometry/geometries `g_nums`

    Raises
    ------
    ~exceptions.IndexError
        If an invalid (out-of-range) `g_num` or `at_#` is provided.
    ~exceptions.ValueError
        If all iterable objects are not the same length.
    ~exceptions.ValueError
        If any `ats_2` element is equal to either the corresponding `ats_1`
        or `ats_3` element.
    """
    # Suitability of the ats_n indices is checked inside angle_single(),
    # so no validation is needed here.
    from .utils import pack_tups
    if _DEBUG:  # pragma: no cover
        for label, vals in (("g_nums", g_nums), ("ats_1", ats_1),
                            ("ats_2", ats_2), ("ats_3", ats_3)):
            print("{0} = {1}".format(label, vals))
    # Substitute None arguments, then expand/pack everything into aligned
    # tuples of per-call arguments.
    tups = pack_tups(*self._none_subst(g_nums, ats_1, ats_2, ats_3))
    if _DEBUG:  # pragma: no cover
        print(tups)
    for tup in tups:
        if _DEBUG:  # pragma: no cover
            print(tup)
        yield self._iter_return(tup, self.angle_single, invalid_error)
def show_current_number(parser, token):
    """Show the current page number, or insert it in the context.

    This tag can for example be useful to change the page title according to
    the current page number.

    To just show current page number:

    .. code-block:: html+django

        {% show_current_number %}

    If you use multiple paginations in the same page, you can get the page
    number for a specific pagination using the querystring key, e.g.:

    .. code-block:: html+django

        {% show_current_number using mykey %}

    The default page when no querystring is specified is 1. If you changed it
    in the `paginate`_ template tag, you have to call ``show_current_number``
    according to your choice, e.g.:

    .. code-block:: html+django

        {% show_current_number starting from page 3 %}

    This can be also achieved using a template variable you passed to the
    context, e.g.:

    .. code-block:: html+django

        {% show_current_number starting from page page_number %}

    You can of course mix it all (the order of arguments is important):

    .. code-block:: html+django

        {% show_current_number starting from page 3 using mykey %}

    If you want to insert the current page number in the context, without
    actually displaying it in the template, use the *as* argument, i.e.:

    .. code-block:: html+django

        {% show_current_number as page_number %}
        {% show_current_number
            starting from page 3 using mykey as page_number %}
    """
    # Validate args.
    try:
        tag_name, args = token.contents.split(None, 1)
    except ValueError:
        # No arguments given: fall back to all defaults.
        key = None
        number = None
        # Bug fix: the original assigned token.contents[0], which is only
        # the first *character* of the tag, not the tag name.
        tag_name = token.contents
        var_name = None
    else:
        # Use a regexp to catch args.
        match = SHOW_CURRENT_NUMBER_EXPRESSION.match(args)
        if match is None:
            msg = 'Invalid arguments for %r tag' % tag_name
            raise template.TemplateSyntaxError(msg)
        # Retrieve objects.
        groupdict = match.groupdict()
        key = groupdict['key']
        number = groupdict['number']
        var_name = groupdict['var_name']
    # Call the node.
    return ShowCurrentNumberNode(number, key, var_name)
def reload(self, client=None, retry=DEFAULT_RETRY):
    """API call: refresh job properties via a GET request.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/get

    :type client: :class:`~google.cloud.bigquery.client.Client` or
                  ``NoneType``
    :param client: the client to use. If not passed, falls back to the
                   ``client`` stored on the current dataset.

    :type retry: :class:`google.api_core.retry.Retry`
    :param retry: (Optional) How to retry the RPC.
    """
    client = self._require_client(client)
    # The job location must accompany the GET when it is known.
    extra_params = {"location": self.location} if self.location else {}
    api_response = client._call_api(retry, method="GET", path=self.path, query_params=extra_params)
    self._set_properties(api_response)
def get_iso_packet_buffer_list(transfer_p):
    """Python-specific helper extracting a list of iso packet buffers."""
    transfer = transfer_p.contents
    buffers = []
    offset = 0
    # Packets are laid out back-to-back in the transfer buffer; walk them
    # by accumulating each packet's declared length.
    for iso_transfer in _get_iso_packet_list(transfer):
        packet_length = iso_transfer.length
        buffers.append(_get_iso_packet_buffer(transfer, offset, packet_length))
        offset += packet_length
    return buffers
def start_shell(local_ns: Dict = None, banner: str = ''):
    """Create and immediately drop into a Python shell.

    If IPython version 5 or greater is available it will be used instead
    of the built-in python shell.

    :param local_ns: An optional dict containing the global namespace of
        the newly created shell.
    :param banner: An optional banner to render when terminal starts.
    """
    if not IPYTHON_SHELL_AVAILABLE:
        code.interact(banner=banner, local=local_ns)
        return
    # Don't try to stop IPython from displaying its banner, since it's
    # different in every major version.
    terminal = embed.InteractiveShellEmbed(user_ns={})
    terminal.mainloop(local_ns=local_ns)
def disvecinf(self, x, y, aq=None):
    '''Can be called with only one x, y value
    Returns array (nparam, self.aq.naq) with order
    order 0, layer[0]
    order 0, layer[1]
    order 1, layer[0]
    order 1, layer[1]
    etc
    '''
    if aq is None:
        aq = self.model.aq.find_aquifer_data(x, y)
    # Result: (2 components qx/qy, nparam, naq); zero outside this
    # element's own aquifer.
    rv = np.zeros((2, self.nparam, aq.naq))
    if aq == self.aq:
        qxqyrv = rv.reshape((2, self.order + 1, self.nlayers, aq.naq))
        # clever way of using a reshaped rv here
        qxqy = np.zeros((2 * (self.order + 1), aq.naq))
        qxqy[:, :] = self.bessel.disbesldv(float(x), float(y), self.z1, self.z2, aq.lab, self.order, aq.ilap, aq.naq)
        # NOTE(review): first half of qxqy appears to hold the x-components
        # and the second half the y-components — TODO confirm against
        # bessel.disbesldv.
        qxqyrv[0, :] = self.aq.coef[self.layers] * qxqy[:self.order + 1, np.newaxis, :]
        qxqyrv[1, :] = self.aq.coef[self.layers] * qxqy[self.order + 1:, np.newaxis, :]
    return rv
def showDecidePage(request, openid_request):
    """Render a page to the user so a trust decision can be made.

    @type openid_request: openid.server.server.CheckIDRequest

    NOTE: this module uses Python 2 ``except X, err`` syntax.
    """
    trust_root = openid_request.trust_root
    return_to = openid_request.return_to
    try:
        # Stringify because template's ifequal can only compare to strings.
        trust_root_valid = verifyReturnTo(trust_root, return_to) and "Valid" or "Invalid"
    except DiscoveryFailure, err:
        trust_root_valid = "DISCOVERY_FAILED"
    except HTTPFetchingError, err:
        trust_root_valid = "Unreachable"
    pape_request = pape.Request.fromOpenIDRequest(openid_request)
    return direct_to_template(request, 'server/trust.html', {'trust_root': trust_root, 'trust_handler_url': getViewURL(request, processTrustResult), 'trust_root_valid': trust_root_valid, 'pape_request': pape_request, })
def validate_options(self, k, v):
    """Validate options.

    :param k: option name ('errors' or 'normalize' are checked here)
    :param v: option value
    :raises ValueError: if the value is not acceptable for the option
    """
    super().validate_options(k, v)
    # Bug fix: `self.__class__.__name` raised AttributeError when building
    # the error message; the correct attribute is `__name__`.
    if k == 'errors' and v.lower() not in ('strict', 'replace', 'ignore', 'backslashreplace'):
        raise ValueError("{}: '{}' is not a valid value for '{}'".format(self.__class__.__name__, v, k))
    if k == 'normalize' and v.upper() not in ('NFC', 'NFKC', 'NFD', 'NFKD'):
        raise ValueError("{}: '{}' is not a valid value for '{}'".format(self.__class__.__name__, v, k))
def _CheckInstallSuccess(self):
    """Checks if the installer installed correctly.

    :raises RuntimeError: if the install directory is missing, or if the
        Windows service state does not match ``self.expect_service_running``.
    """
    if not os.path.exists(self.install_path):
        raise RuntimeError("Install failed, no files at: %s" % self.install_path)
    try:
        # Query the Windows Service Control Manager for the service state.
        output = subprocess.check_output(["sc", "query", self.service_name])
        # NOTE(review): check_output returns bytes on Python 3, so this
        # substring test assumes Python 2 / str output — TODO confirm.
        service_running = "RUNNING" in output
    except subprocess.CalledProcessError as e:
        if e.returncode == 1060:
            # 1060 means: The specified service does not exist as an installed
            # service.
            service_running = False
        else:
            raise
    if self.expect_service_running:
        if not service_running:
            raise RuntimeError("GRR service not running after install, sc query output: %s" % output)
    else:
        if service_running:
            raise RuntimeError("GRR service running after install with expect_service_running == " "False, sc query output: %s" % output)
def add_output(self, address, value, unit='satoshi'):
    """Add an output (a person who will receive funds via this tx).

    If no unit is specified, satoshi is implied.
    """
    satoshis = self.from_unit_to_satoshi(value, unit)
    if self.verbose:
        print("Adding output of: %s satoshi (%.8f)" % (satoshis, (satoshis / 1e8)))
    self.outs.append({'address': address, 'value': satoshis})
def write_dict_to_file(file_path, obj):
    """Write a dictionary of string keys to a file.

    Each entry is written as ``key:repr(value)`` on its own line.
    """
    with open(file_path, 'w+') as out:
        for key, value in obj.items():
            out.write(key + ':' + repr(value) + '\n')
    return None
def by_own_time_per_call(stat):
    """Sorting by exclusive elapsed time per call in descending order."""
    # Stats that were never hit fall back to total own time; negation gives
    # descending order, with deep time per call as the tie-breaker.
    if stat.own_hits:
        primary = -stat.own_time_per_call
    else:
        primary = -stat.own_time
    return (primary, by_deep_time_per_call(stat))
def setDisallowed(self, laneID, disallowedClasses):
    """setDisallowed(string, list) -> None
    Sets a list of disallowed vehicle classes."""
    if isinstance(disallowedClasses, str):
        disallowedClasses = [disallowedClasses]
    # Payload size: 1 byte type id + 4 byte element count + per string a
    # 4-byte length prefix plus its characters.
    payload_len = 1 + 4 + 4 * len(disallowedClasses) + sum(len(c) for c in disallowedClasses)
    self._connection._beginMessage(
        tc.CMD_SET_LANE_VARIABLE, tc.LANE_DISALLOWED, laneID, payload_len)
    self._connection._packStringList(disallowedClasses)
    self._connection._sendExact()
def decrypt(self, k, a, iv, e, t):
    """Decrypt according to the selected encryption and hashing functions.

    MAC-then-decrypt: the authentication tag is verified in constant time
    over the AAD/IV/ciphertext *before* any decryption is attempted, so the
    statement order here is security-critical.

    :param k: Encryption key (optional)
    :param a: Additional Authenticated Data
    :param iv: Initialization Vector
    :param e: Ciphertext
    :param t: Authentication Tag

    Returns plaintext or raises an error
    """
    # Split the key material: first part is the HMAC key, the rest is the
    # AES key.  NOTE(review): _inbytes suggests self.keysize is in bits --
    # confirm against the class definition.
    hkey = k[:_inbytes(self.keysize)]
    dkey = k[_inbytes(self.keysize):]
    # verify mac (constant-time comparison to avoid timing side channels)
    if not constant_time.bytes_eq(t, self._mac(hkey, a, iv, e)):
        raise InvalidSignature('Failed to verify MAC')
    # decrypt (AES-CBC), then strip PKCS#7 padding
    cipher = Cipher(algorithms.AES(dkey), modes.CBC(iv), backend=self.backend)
    decryptor = cipher.decryptor()
    d = decryptor.update(e) + decryptor.finalize()
    unpadder = PKCS7(self.blocksize).unpadder()
    return unpadder.update(d) + unpadder.finalize()
def zinnia_breadcrumbs(context, root_name='', template='zinnia/tags/breadcrumbs.html'):
    """Return a breadcrumb for the application."""
    request_path = context['request'].path
    first_object = get_context_first_object(
        context, ['object', 'category', 'tag', 'author'])
    page = context.get('page_obj')
    return {
        'template': template,
        'breadcrumbs': retrieve_breadcrumbs(request_path, first_object, page, root_name),
    }
def _filter_matrix_columns(cls, matrix, phandango_header, csv_header):
    '''phandango_header, csv_header, matrix = output from _to_matrix'''
    # Keep only those columns where at least one row carries an informative
    # value (anything other than 'NA' or 'no').
    keep = set()
    for row in matrix:
        keep.update(i for i, cell in enumerate(row) if cell not in {'NA', 'no'})
    keep = sorted(keep)
    for i, row in enumerate(matrix):
        matrix[i] = [row[j] for j in keep]
    phandango_header = [phandango_header[j] for j in keep]
    csv_header = [csv_header[j] for j in keep]
    return phandango_header, csv_header, matrix
def hacking_has_only_comments(physical_line, filename, lines, line_number):
    """Check for empty files with only comments
    H104 empty file with only comments"""
    # Only report once, anchored to the first line of the file.
    if line_number != 1:
        return None
    if all(EMPTY_LINE_RE.match(line) for line in lines):
        return (0, "H104: File contains nothing but comments")
def load_object(self, obj):
    """Find and return the template associated to the given object.

    Arguments:
        obj: an instance of a user-defined class.
    """
    locator = self._make_locator()
    template_path = locator.find_object(obj, self.search_dirs)
    return self.read(template_path)
def cap17(msg):
    """Extract capacities from BDS 1,7 message

    Args:
        msg (String): 28 bytes hexadecimal message string

    Returns:
        list: list of supported BDS codes
    """
    allbds = ['05', '06', '07', '08', '09', '0A', '20', '21', '40', '41',
              '42', '43', '44', '45', '48', '50', '51', '52', '53', '54',
              '55', '56', '5F', '60', 'NA', 'NA', 'E1', 'E2']
    d = hex2bin(data(msg))
    idx = [i for i, v in enumerate(d[:28]) if v == '1']
    # BUG FIX: use '!=' (value comparison) instead of 'is not' -- identity
    # comparison against a string literal relies on CPython interning and
    # raises SyntaxWarning on Python 3.8+.
    capacity = ['BDS' + allbds[i] for i in idx if allbds[i] != 'NA']
    return capacity
def remove_state_machine(self, state_machine_id):
    """Remove the state machine for a specified state machine id from the list of registered state machines.

    :param state_machine_id: the id of the state machine to be removed
    :return: the removed state machine, or None if no state machine with the
        given id is registered
    """
    # NOTE(review): core_singletons is imported here but not referenced in
    # this method body -- presumably kept for an import side effect; confirm.
    import rafcon.core.singleton as core_singletons
    removed_state_machine = None
    if state_machine_id in self._state_machines:
        logger.debug("Remove state machine with id {0}".format(state_machine_id))
        removed_state_machine = self._state_machines.pop(state_machine_id)
    else:
        # Unknown id: log and return None without touching anything.
        logger.error("There is no state_machine with state_machine_id: %s" % state_machine_id)
        return removed_state_machine
    # destroy execution history
    removed_state_machine.destroy_execution_histories()
    return removed_state_machine
def execution_engine_model_changed(self, model, prop_name, info):
    """Active observation of state machine and show and hide widget."""
    if not self._view_initialized:
        return
    manager = rafcon.gui.singleton.state_machine_manager_model.state_machine_manager
    if manager.active_state_machine_id is None:
        # No state machine has an active execution -> hide the widget.
        self.disable()
    else:
        # At least one state machine is executing -> show the widget.
        self.check_configuration()
def pprint(self, initials_only=False):
    """Pretty print the name.

    Args:
        initials_only (bool): ``True`` if we want the first names to be
            displayed with only the initial followed by a dot.
            ``False`` otherwise.

    For example ``ParsedName('Lieber, Stanley Martin').pprint()`` gives
    ``u'Stanley Martin Lieber'`` and, with ``initials_only=True``,
    ``u'S.M. Lieber'``.
    """
    suffix_part = ', ' + self.suffix if self.suffix else ''
    if initials_only and self.last != u'':
        given_names = self.first_initials
    else:
        given_names = self.first
    return u'{} {}{}'.format(given_names, self.last, suffix_part).strip()
def main(args):
    """main entry point for the FDR script.

    :param args: the arguments for this script, as a list of string. Should
                 already have had things like the script name stripped. That
                 is, if there are no args provided, this should be an empty
                 list.
    """
    # get options and arguments
    ui = getUI(args)
    if ui.optionIsSet("test"):
        # just run unit tests
        unittest.main(argv=[sys.argv[0]])
    elif ui.optionIsSet("help"):
        # just show help
        ui.usage()
    else:
        verbose = (ui.optionIsSet("verbose") is True) or DEFAULT_VERBOSITY
        # header?
        header = ui.optionIsSet("header")
        # get field value -- user-facing field numbers are 1-based, converted
        # here to a 0-based index
        field = ui.getValue("field") - 1
        # get output handle; default is stdout
        out_fh = sys.stdout
        if ui.optionIsSet("output"):
            out_fh = open(ui.getValue("output"), "w")
        # get input file-handle; default is stdin
        in_fh = sys.stdin
        if ui.hasArgument(0):
            in_fh = open(ui.getArgument(0))
        delim = DEFAULT_DELIM
        # load data, do conversion (p-value correction in place on the chosen
        # column), write out results.
        data_table = DataTable()
        data_table.load(in_fh, header, delim, verbose)
        data_table.frame[field] = correct_pvals(data_table.frame[field], verbose=verbose)
        data_table.write(out_fh, delim, verbose)
def create_bool(help_string=NO_HELP, default=NO_DEFAULT):
    # type: (str, Union[bool, NO_DEFAULT_TYPE]) -> bool
    """Create a bool parameter.

    :param help_string: help text shown for the parameter.
    :param default: default value, or NO_DEFAULT when the parameter is required.
    :return: the parameter descriptor (typed as bool for the checker).
    """
    # noinspection PyTypeChecker
    return ParamFunctions(
        help_string=help_string,
        default=default,
        type_name="bool",
        function_s2t=convert_string_to_bool,
        function_t2s=convert_bool_to_string,
    )
def add_member(self, host_name, hostgroup_name):
    """Add a host string to a hostgroup member;
    if the host group does not exist, create it.

    :param host_name: host name
    :type host_name: str
    :param hostgroup_name: hostgroup name
    :type hostgroup_name: str
    :return: None
    """
    hostgroup = self.find_by_name(hostgroup_name)
    if hostgroup:
        hostgroup.add_members(host_name)
    else:
        # Unknown group: create it with the host as its first member.
        self.add(Hostgroup({'hostgroup_name': hostgroup_name,
                            'alias': hostgroup_name,
                            'members': host_name}))
def progress(count, total, suffix=''):
    '''Display progress bar
    sources: https://gist.github.com/vladignatyev/06860ec2040cb497f0f3'''
    bar_len = 60
    fraction = count / float(total)
    filled = int(round(bar_len * fraction))
    percents = round(100.0 * fraction, 1)
    bar = '=' * filled + '-' * (bar_len - filled)
    # '\r' keeps the cursor on the same line so the bar updates in place.
    sys.stdout.write('[%s] %s%s %s\r' % (bar, percents, '%', suffix))
    sys.stdout.flush()
def get(self, end_ix):
    """Returns
    out : A np.ndarray of the equity pricing up to end_ix after adjustments
    and rounding have been applied."""
    # Fast path: the window is already positioned at end_ix.
    if self.most_recent_ix == end_ix:
        return self.current
    seek_target = end_ix - self.cal_start - self.offset + 1
    self.current = self.window.seek(seek_target)
    self.most_recent_ix = end_ix
    return self.current
def locate_config(confname, app_name, location=None, prefix='etc', verbose=False):
    """Returns absolute path on the filesystem to a config file named *confname*.

    Candidate locations are checked in priority order: the explicit
    *location*, the interpreter prefix, the system ``/<prefix>`` tree, and
    finally the current working directory.  The first existing file wins;
    None is returned when no candidate exists.
    """
    candidates = []

    def consider(path):
        # Normalize, and keep the path only if a real file exists there.
        path = os.path.normpath(path)
        if os.path.isfile(path):
            candidates.append(path)

    if location:
        consider(os.path.join(location, confname))
    consider(os.path.join(os.path.dirname(os.path.dirname(sys.executable)),
                          prefix, app_name, confname))
    consider('/%s/%s/%s' % (prefix, app_name, confname))
    consider(os.path.join(os.getcwd(), confname))
    if not candidates:
        LOGGER.warning("config '%s' was not found.", confname)
        return None
    if verbose:
        LOGGER.info("config loaded from '%s'", candidates[0])
    return candidates[0]
def parsexml(self, xmlstring, modules, source=None):
    """Parses the docstrings out of the specified xml file.

    :arg xmlstring: the XML document contents to parse.
    :arg modules: the modules whose members the parsed docstrings decorate.
    :arg source: the path to the file from which the XML string was extracted.
    """
    result = {}
    from fortpy.utility import XML_fromstring
    xmlroot = XML_fromstring(xmlstring, source)
    if xmlroot.tag == "fortpy" and "mode" in xmlroot.attrib and xmlroot.attrib["mode"] == "docstring":
        # First, cycle through the kids to find the <globals> tag (if any
        # exist). Its children will apply to any of the other tags we find
        # and we will have to update their attributes accordingly.
        xmlglobals = {}
        for child in xmlroot.iterfind("globals"):
            _update_globals(list(child), xmlglobals)
        _set_global_defaults(xmlglobals)
        # We fill the dictionary with decorated names as keys and lists
        # of the xml docstring elements as values.
        for child in xmlroot:
            if child.tag == "globals":
                continue
            xmltags = []
            # BUG FIX: previously 'decorates' was left unbound when a child
            # matched neither branch below, causing a NameError on the first
            # such child (or silently reusing a stale value afterwards).
            decorates = None
            if child.tag == "decorates" and "name" in child.attrib:
                decorates = child.attrib["name"]
                xmltags.extend(list(child))
            elif "decorates" in child.attrib:
                decorates = child.attrib["decorates"]
                xmltags.append(child)
            if decorates is None:
                continue
            for xtag in xmltags:
                _update_from_globals(xtag, xmlglobals, child)
            if decorates in result:
                result[decorates].extend(xmltags)
            else:
                result[decorates] = xmltags
        # Loop through all the docstrings we found and team them up with
        # their respective module members.
        self._xml_update_modules(result, modules)
async def connect_service(bus_name, object_path, interface):
    """Connect to the service object on DBus, return InterfaceProxy."""
    # Properties and signals are managed elsewhere, so skip loading both.
    flags = (Gio.DBusProxyFlags.DO_NOT_LOAD_PROPERTIES
             | Gio.DBusProxyFlags.DO_NOT_CONNECT_SIGNALS)
    proxy = await proxy_new_for_bus(
        Gio.BusType.SYSTEM,
        flags,
        info=None,
        name=bus_name,
        object_path=object_path,
        interface_name=interface,
    )
    return InterfaceProxy(proxy)
def visual_callback_2d(background, fig=None):
    """Returns a callback than can be passed as the argument `iter_callback`
    of `morphological_geodesic_active_contour` and
    `morphological_chan_vese` for visualizing the evolution
    of the levelsets. Only works for 2D images.

    Parameters
    ----------
    background : (M, N) array
        Image to be plotted as the background of the visual evolution.
    fig : matplotlib.figure.Figure
        Figure where results will be drawn. If not given, a new figure
        will be created.

    Returns
    -------
    callback : Python function
        A function that receives a levelset and updates the current plot
        accordingly. This can be passed as the `iter_callback` argument of
        `morphological_geodesic_active_contour` and
        `morphological_chan_vese`.
    """
    # Prepare the visual environment: background on the left, levelset on
    # the right.
    if fig is None:
        fig = plt.figure()
    fig.clf()
    ax_bg = fig.add_subplot(1, 2, 1)
    ax_bg.imshow(background, cmap=plt.cm.gray)
    ax_ls = fig.add_subplot(1, 2, 2)
    levelset_image = ax_ls.imshow(np.zeros_like(background), vmin=0, vmax=1)
    plt.pause(0.001)

    def callback(levelset):
        # Drop the previous contour (if any) before drawing the new one.
        if ax_bg.collections:
            del ax_bg.collections[0]
        ax_bg.contour(levelset, [0.5], colors='r')
        levelset_image.set_data(levelset)
        fig.canvas.draw()
        plt.pause(0.001)

    return callback
def get_installed_extension_by_name(self, publisher_name, extension_name, asset_types=None):
    """GetInstalledExtensionByName.
    [Preview API] Get an installed extension by its publisher and extension name.
    :param str publisher_name: Name of the publisher. Example: "fabrikam".
    :param str extension_name: Name of the extension. Example: "ops-tools".
    :param [str] asset_types:
    :rtype: :class:`<InstalledExtension> <azure.devops.v5_0.extension_management.models.InstalledExtension>`
    """
    route_values = {}
    if publisher_name is not None:
        route_values['publisherName'] = self._serialize.url(
            'publisher_name', publisher_name, 'str')
    if extension_name is not None:
        route_values['extensionName'] = self._serialize.url(
            'extension_name', extension_name, 'str')
    query_parameters = {}
    if asset_types is not None:
        # Multiple asset types are sent as one colon-separated string.
        query_parameters['assetTypes'] = self._serialize.query(
            'asset_types', ":".join(asset_types), 'str')
    response = self._send(http_method='GET',
                          location_id='fb0da285-f23e-4b56-8b53-3ef5f9f6de66',
                          version='5.0-preview.1',
                          route_values=route_values,
                          query_parameters=query_parameters)
    return self._deserialize('InstalledExtension', response)
def upload(client, source_dir):
    """Upload inappproducts to play store.

    :param client: play-store API client exposing ``package_name``,
        ``list_inappproducts``, ``update_inappproduct`` and
        ``insert_inappproduct``.
    :param source_dir: directory containing a ``products`` sub-folder with
        one JSON file per in-app product.
    """
    print('')
    print('upload inappproducs')
    print('---------------------')
    products_folder = os.path.join(source_dir, 'products')
    product_files = filter(os.path.isfile, list_dir_abspath(products_folder))
    # BUG FIX: materialize the SKUs.  Under Python 3, `map` returns a one-shot
    # iterator, so the repeated `sku in current_product_skus` membership tests
    # below would consume it and give wrong answers after the first product.
    current_product_skus = [product['sku'] for product in client.list_inappproducts()]
    print(current_product_skus)
    for product_file in product_files:
        with open(product_file) as fh:
            product = json.load(fh)
        # check if the product is new
        sku = product['sku']
        product['packageName'] = client.package_name
        print(sku)
        if sku in current_product_skus:
            print("update product {0}".format(sku))
            client.update_inappproduct(product, sku)
        else:
            print("create product {0}".format(sku))
            client.insert_inappproduct(product)
def set_pixels(self, pixels):
    """Set the image data.

    Will not work if the new image has a different shape than the current image.

    Parameters
    ----------
    pixels : numpy.ndarray
        New image data

    Returns
    -------
    None
    """
    if pixels.shape != self._pixels.shape:
        raise AssertionError(
            "Shape mismatch between pixels supplied {0} and existing image pixels {1}".format(
                pixels.shape, self._pixels.shape))
    self._pixels = pixels
    # Invalidate the cached RMS so it is recalculated on next access.
    self._rms = None
    return
def _double_fork(self):
    """Do the UNIX double-fork magic.

    See Stevens' "Advanced Programming in the UNIX Environment" for details
    (ISBN 0201563177)
    http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
    """
    try:
        pid = os.fork()
        if pid > 0:
            # Exit first parent.
            sys.exit(0)
            # NOTE(review): unreachable after sys.exit; presumably kept as a
            # defensive return for tests that stub sys.exit -- confirm.
            return None
    except OSError as err:
        LOG.exception("Fork #1 failed: {0} ({1})".format(err.errno, err.strerror,),)
        sys.exit(exit.DAEMONIZE_FAILED)
        return None
    # Decouple from parent environment.
    os.chdir("/")
    os.setsid()
    os.umask(0)
    # Do second fork.
    try:
        pid = os.fork()
        if pid > 0:
            # Exit from second parent.
            sys.exit(0)
    except OSError as err:
        LOG.exception("Fork #2 failed: {0} ({1})".format(err.errno, err.strerror,),)
        sys.exit(exit.DAEMONIZE_FAILED)
    return None
def read_file(filename, alt=None):
    """Read the contents of filename or give an alternative result instead.

    On success the file's full text is returned; on IOError the *alt* value
    is returned (an empty list when no *alt* is given).
    """
    try:
        with open(filename, encoding='utf-8') as fh:
            return fh.read()
    except IOError:
        return [] if alt is None else alt
def rating(self, value):
    """Set the rating parameter and regenerate the thumbnail link."""
    self._rating = value
    # NOTE(review): rebuilds the thumbnail immediately -- presumably
    # _link_to_img reads self._rating; confirm against the class definition.
    self._thumb = self._link_to_img()
def repeat_call(func, retries, *args, **kwargs):
    '''Retry a callable, making up to ``retries`` + 1 attempts in total.

    The first ``retries`` attempts swallow any ``Exception`` (except
    ``KeyboardInterrupt``, which is always re-raised); the final attempt runs
    without a handler so its exception propagates to the caller.  Negative or
    non-integer ``retries`` values are clamped via ``max(0, int(retries))``.
    '''
    retries = max(0, int(retries))
    for _ in range(retries):
        try:
            return func(*args, **kwargs)
        except KeyboardInterrupt:
            # User interrupts must never be swallowed by the retry loop.
            raise
        except Exception:
            pass
    # Last attempt: let any exception propagate.
    return func(*args, **kwargs)
def getConId(self, contract_identifier):
    """Get contracts conId"""
    details = self.contractDetails(contract_identifier)
    # Ambiguous (multi-contract) identifiers resolve to the underlying conId.
    multiple = len(details["contracts"]) > 1
    return details["m_underConId"] if multiple else details["m_summary"]["m_conId"]
def parse_cluster(self, global_params, region, cluster):
    """Parse a single EMR cluster

    :param global_params: Parameters shared for all regions
    :param region: Name of the AWS region
    :param cluster: EMR cluster
    """
    cluster_id = cluster['Id']
    # Re-fetch the full description; the listing only carries a summary.
    cluster = api_clients[region].describe_cluster(ClusterId=cluster_id)['Cluster']
    cluster['id'] = cluster.pop('Id')
    cluster['name'] = cluster.pop('Name')
    # The EMR API won't disclose the VPC ID, so wait until all configs have
    # been fetched and look up the VPC based on the subnet ID.
    vpc_id = 'TODO'
    manage_dictionary(self.vpcs, vpc_id, VPCConfig(self.vpc_resource_types))
    self.vpcs[vpc_id].clusters[cluster_id] = cluster
def to_json(self):
    """Serialize this object to a JSON string, extending the base payload
    with the weekday, hour and minute fields.

    :return: str
    """
    payload = self.to_json_basic()
    payload['wday'] = self._wday
    payload['hour'] = self._hour
    payload['min'] = self._min
    return json.dumps(payload)
def valid_domain ( domain ) :
"Validate a cookie domain ASCII string" | # Using encoding on domain would confuse browsers into not sending cookies .
# Generate UnicodeDecodeError up front if it can ' t store as ASCII .
domain . encode ( 'ascii' )
# Domains starting with periods are not RFC - valid , but this is very common
# in existing cookies , so they should still parse with DOMAIN _ AV .
if Definitions . DOMAIN_RE . match ( domain ) :
return True
return False |
def MakeRanges(codes):
    """Turn a list like [1,2,3,7,8,9] into a range list [[1,3],[7,9]].

    Assumes *codes* is sorted ascending; consecutive values are merged into
    one inclusive [start, end] pair.
    """
    ranges = []
    prev = None
    for code in codes:
        # BUG FIX: the old sentinel `last = -100` made a leading -99 look
        # consecutive and index into an empty `ranges` (IndexError).  An
        # explicit first-iteration guard has no such magic value.
        if ranges and prev is not None and code == prev + 1:
            ranges[-1][1] = code
        else:
            ranges.append([code, code])
        prev = code
    return ranges
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.