signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def insert_object(self, db_object):
    """Create a new entry in the database.

    Parameters
    ----------
    db_object : (sub-class of) ObjectHandle
        Handle that is serialized via ``to_dict()`` before insertion.
    """
    # Serialize the handle, flag the document as active, then persist it.
    document = self.to_dict(db_object)
    document['active'] = True
    self.collection.insert_one(document)
|
def normalize_os_name(os_name):
    """:API: public

    Map an operating-system alias to its canonical name.  Names that are
    already canonical (keys of ``OS_ALIASES``) come back unchanged;
    unknown names are logged and returned as-is.
    """
    if os_name in OS_ALIASES:
        return os_name
    for canonical, aliases in OS_ALIASES.items():
        if os_name in aliases:
            return canonical
    logger.warning(
        'Unknown operating system name: {bad}, known names are: {known}'.format(
            bad=os_name, known=', '.join(sorted(known_os_names()))))
    return os_name
|
def binarize(obj, threshold=0):
    """Return a copy of the object with binarized piano-roll(s).

    Parameters
    ----------
    threshold : int or float
        Threshold to binarize the piano-roll(s). Defaults to zero.
    """
    _check_supported(obj)
    # Work on a deep copy so the caller's object stays untouched.
    result = deepcopy(obj)
    result.binarize(threshold)
    return result
|
def read_sealevel_pressure(self, altitude_m=0.0):
    """Calculate the sea-level pressure given a known altitude in meters.

    Returns the equivalent pressure at sea level, in Pascals, derived
    from the current sensor reading via the barometric formula.
    """
    measured = float(self.read_pressure())
    # Invert the barometric altitude formula to project down to sea level.
    sealevel = measured / pow(1.0 - altitude_m / 44330.0, 5.255)
    self._logger.debug('Sealevel pressure {0} Pa'.format(sealevel))
    return sealevel
|
def authors_json(soup):
    """Authors list in article JSON format."""
    authors = []
    contributors_data = contributors(soup, "full")
    contributions = author_contributions(soup, None)
    competing = competing_interests(soup, None)
    correspondence = full_correspondence(soup)
    # Result unused downstream; call retained in case of side effects.
    authors_non_byline(soup)
    equal_contrib_map = map_equal_contributions(contributors_data)
    addresses = present_addresses(soup)
    foot_notes = other_foot_notes(soup)

    # Pass 1: build the basic author structures from the contributors.
    for contributor in contributors_data:
        author_json = None
        if contributor["type"] == "author" and contributor.get("collab"):
            author_json = author_group(
                contributor, contributions, correspondence, competing,
                equal_contrib_map, addresses, foot_notes)
        elif contributor.get("on-behalf-of"):
            author_json = author_on_behalf_of(contributor)
        elif contributor["type"] == "author" and not contributor.get("group-author-key"):
            author_json = author_person(
                contributor, contributions, correspondence, competing,
                equal_contrib_map, addresses, foot_notes)
        if author_json:
            authors.append(author_json)

    # Pass 2: attach byline member data to the matching group authors.
    collab_map = collab_to_group_author_key_map(contributors_data)
    byline_members = [c for c in contributors_data
                      if c.get("group-author-key") and not c.get("collab")]
    group_authors = [a for a in authors if a.get('type') == 'group']
    for contributor in byline_members:
        for group_author in group_authors:
            group_key = collab_map.get(group_author["name"])
            if contributor.get("group-author-key") != group_key:
                continue
            author_json = author_person(
                contributor, contributions, correspondence, competing,
                equal_contrib_map, addresses, foot_notes)
            sub_group = contributor.get("sub-group")
            if sub_group:
                group_author.setdefault("groups", OrderedDict())
                group_author["groups"].setdefault(sub_group, [])
                group_author["groups"][sub_group].append(author_json)
            else:
                group_author.setdefault("people", [])
                group_author["people"].append(author_json)

    return elifetools.json_rewrite.rewrite_json("authors_json", soup, authors)
|
def update(self, identifier, new_instance):
    """Update an encryptable field, making sure that:

    * We won't change the encryption context key
    * The new value is going to be encrypted
    * The returned instance.plaintext is the updated one

    Note: Will expunge the returned instance.
    """
    old_instance = self.retrieve(identifier)
    # Remember the old ciphertext id so we can delete it after a re-encrypt.
    old_encrypted_identifier = old_instance.encrypted_identifier
    if (new_instance.encryption_context_key and old_instance.encryption_context_key != new_instance.encryption_context_key):
        raise ValueError("Cannot change encryption context key")
    # If updating a non encrypted field - skip
    if new_instance.plaintext is None and new_instance.encrypted_relationship is None:
        result = super().update(identifier, new_instance)
        self.expunge(result)
        return result
    # Verify that the new instance is encrypted if it should be
    # If it's not - encrypt it with the old key
    # If it is - save the expected new plaintext
    if new_instance.plaintext is not None:
        expected_new_plaintext = new_instance.plaintext
        new_instance = self.reencrypt_instance(new_instance, old_instance.encryption_context_key)
    else:
        # NOTE(review): first tuple element from decrypt_instance is unused
        # here; only the recovered plaintext is needed for the return value.
        decrypt, expected_new_plaintext = decrypt_instance(new_instance)
    result = super().update(identifier, new_instance)
    # Delete the old encrypted value (instead of using sqlalchemy cascade)
    if old_encrypted_identifier != new_instance.encrypted_identifier:
        self.encrypted_store.delete(old_encrypted_identifier)
    # Update the return result, super().update() won't do it.
    self.expunge(result)
    result.plaintext = expected_new_plaintext
    return result
|
def visit_Index ( self , node : ast . Index ) -> Any :
"""Visit the node ' s ` ` value ` ` ."""
|
result = self . visit ( node = node . value )
self . recomputed_values [ node ] = result
return result
|
def departments_name_delete(self, name, **kwargs):
    """Delete a chat department by name.

    See https://developer.zendesk.com/rest_api/docs/chat/departments#delete-department-by-name
    """
    return self.call(
        "/api/v2/departments/name/{name}".format(name=name),
        method="DELETE",
        **kwargs)
|
def _to_array ( self , itaper , normalization = '4pi' , csphase = 1 ) :
"""Return the spherical harmonic coefficients of taper i as an
array , where i = 0 is the best concentrated ."""
|
if self . coeffs is None :
coeffs = _np . copy ( self . _taper2coeffs ( itaper ) )
else :
if itaper > self . nwinrot - 1 :
raise ValueError ( 'itaper must be less than or equal to ' + 'nwinrot - 1. itaper = {:d}, nwinrot = {:d}' . format ( itaper , self . nwinrot ) )
coeffs = _shtools . SHVectorToCilm ( self . coeffs [ : , itaper ] )
if normalization == 'schmidt' :
for l in range ( self . lwin + 1 ) :
coeffs [ : , l , : l + 1 ] *= _np . sqrt ( 2.0 * l + 1.0 )
elif normalization == 'ortho' :
coeffs *= _np . sqrt ( 4.0 * _np . pi )
if csphase == - 1 :
for m in range ( self . lwin + 1 ) :
if m % 2 == 1 :
coeffs [ : , : , m ] = - coeffs [ : , : , m ]
return coeffs
|
def _ensure_paths_and_types(index: Dict[str, str]) -> Dict[str, Path]:
    """Take the direct results of loading the config and make sure the
    filesystem reflects them.

    Unknown keys (from older config versions) are silently dropped.
    """
    known = {element.name: element for element in CONFIG_ELEMENTS}
    resolved: Dict[str, Path] = {}
    for key, raw_path in index.items():
        element = known.get(key)
        if element is None:
            # Stale key left over from an old config - ignore it.
            continue
        target = Path(raw_path)
        if element.kind == ConfigElementType.FILE:
            # For files, make sure the containing directory exists.
            target.parent.mkdir(parents=True, exist_ok=True)
        elif element.kind == ConfigElementType.DIR:
            target.mkdir(parents=True, exist_ok=True)
        else:
            raise RuntimeError(f"unhandled kind in ConfigElements: {key}: "
                               f"{known[key].kind}")
        resolved[key] = target
    return resolved
|
def evalfunc(cls, func, *args, **kwargs):
    """Evaluate a function with Monte-Carlo error propagation.

    Inputs:
        ``func``: callable
            the function to be evaluated. Should return either a number
            or a np.ndarray.
        ``*args``: other positional arguments of func. Arguments which
            are not instances of `ErrorValue` are taken as constants.

    Keyword arguments supported:
        ``NMC``: number of Monte-Carlo steps. If not defined, defaults
            to 1000.
        ``exceptions_to_repeat``: list of exception types to ignore: if
            one of these is raised, the given MC step is repeated once
            again. Notice that this might induce an infinite loop!  The
            exception types in this list should be subclasses of
            ``Exception``.  (Previously mis-documented as
            ``exceptions_to_retry``; the implementation has always read
            ``exceptions_to_repeat``.)
        ``exceptions_to_skip``: list of exception types to skip: if one
            of these is raised, the given MC step is skipped, never to
            be repeated. The exception types in this list should be
            subclasses of ``Exception``.

    Output:
        ``result``: an `ErrorValue` with the result. The error is
        estimated via a Monte-Carlo approach to Gaussian error
        propagation.
    """
    def randomized(x):
        # Draw a random sample for ErrorValue-like arguments; plain
        # constants pass through untouched.
        return x.random() if isinstance(x, cls) else x

    kwargs.setdefault('NMC', 1000)
    kwargs.setdefault('exceptions_to_skip', [])
    kwargs.setdefault('exceptions_to_repeat', [])
    meanvalue = func(*args)
    # Multiplying by zero yields an accumulator of matching type/shape
    # (plain number or np.ndarray).
    stdcollector = meanvalue * 0
    mciters = 0
    while mciters < kwargs['NMC']:
        try:
            stdcollector += (func(*[randomized(a) for a in args]) - meanvalue) ** 2
            mciters += 1
        except Exception as e:
            if any(isinstance(e, etype) for etype in kwargs['exceptions_to_skip']):
                # Skip this step entirely by shrinking the target count.
                kwargs['NMC'] -= 1
            elif any(isinstance(e, etype) for etype in kwargs['exceptions_to_repeat']):
                # Repeat the step: mciters is left unchanged.
                pass
            else:
                raise
    return cls(meanvalue, stdcollector ** 0.5 / (kwargs['NMC'] - 1))
|
def parse_params(self, layer=None, eps=0.3, eps_iter=0.05, nb_iter=10, ord=np.inf, clip_min=None, clip_max=None, **kwargs):
    """Take in a dictionary of parameters and apply attack-specific
    checks before saving them as attributes.

    Attack-specific parameters:
    :param layer: (required str) name of the layer to target.
    :param eps: (optional float) maximum distortion of adversarial
        example compared to original input
    :param eps_iter: (optional float) step size for each attack iteration
    :param nb_iter: (optional int) Number of attack iterations.
    :param ord: (optional) Order of the norm (mimics Numpy).
        Possible values: np.inf, 1 or 2.
    :param clip_min: (optional float) Minimum input component value
    :param clip_max: (optional float) Maximum input component value
    """
    # Store every attack-specific parameter on the instance.
    for attr, value in (('layer', layer), ('eps', eps),
                        ('eps_iter', eps_iter), ('nb_iter', nb_iter),
                        ('ord', ord), ('clip_min', clip_min),
                        ('clip_max', clip_max)):
        setattr(self, attr, value)
    # Only these norm orders are supported by the current implementation.
    if self.ord not in [np.inf, 1, 2]:
        raise ValueError("Norm order must be either np.inf, 1, or 2.")
    if kwargs:
        warnings.warn("kwargs is unused and will be removed on or after "
                      "2019-04-26.")
    return True
|
def ValidatePassword(self, password):
    """Validate that the provided password matches the stored password.

    Args:
        password (string): a password.

    Returns:
        bool: True when the provided password matches the stored
        password hash.
    """
    # Compare the SHA-256 digest of the derived AES key against the
    # stored hash.
    hashed = hashlib.sha256(to_aes_key(password)).digest()
    return hashed == self.LoadStoredData('PasswordHash')
|
def IOR(type, nr, size):
    """Build an ioctl request number with read parameters.

    size (ctype type or instance)
        Type/structure of the argument passed to ioctl's "arg" argument.
    """
    arg_size = IOC_TYPECHECK(size)
    return IOC(IOC_READ, type, nr, arg_size)
|
def filter_keys(d, keys, use_wildcards=False, list_of_dicts=False, deepcopy=True):
    """Filter a dict by certain keys.

    Parameters
    ----------
    d : dict
    keys : list
    use_wildcards : bool
        if true, can use * (matches everything) and ? (matches any
        single character)
    list_of_dicts : bool
        treat list of dicts as additional branches
    deepcopy : bool
        deepcopy values

    Examples
    --------
    >>> from pprint import pprint
    >>> d = {1: {"a": "A"}, 2: {"b": "B"}, 4: {5: {6: 'a', 7: 'b'}}}
    >>> pprint(filter_keys(d, ['a', 6]))
    {1: {'a': 'A'}, 4: {5: {6: 'a'}}}
    >>> d = {1: {"axxxx": "A"}, 2: {"b": "B"}}
    >>> pprint(filter_keys(d, ['a*'], use_wildcards=True))
    {1: {'axxxx': 'A'}}
    """
    list_of_dicts = '__list__' if list_of_dicts else None
    flattened = flatten(d, list_of_dicts=list_of_dicts)

    # Choose the key-matching strategy once, up front.
    if use_wildcards:
        def matches(key, path):
            for path_key in path:
                try:
                    if key == path_key or fnmatch(path_key, key):
                        return True
                except Exception:
                    # Unhashable / incomparable path elements never match.
                    pass
            return False
    else:
        def matches(key, path):
            try:
                return key in path
            except Exception:
                return False

    kept = {path: value for path, value in flattened.items()
            if any(matches(k, path) for k in keys)}
    return unflatten(kept, list_of_dicts=list_of_dicts, deepcopy=deepcopy)
|
def send_one_ping(self, current_socket):
    """Send one ICMP ECHO_REQUEST."""
    # Header layout: type (8), code (8), checksum (16), id (16), sequence (16).
    # Build a dummy header with a zero checksum first.
    header = struct.pack("!BBHHH", ICMP_ECHO, 0, 0,
                         self.own_id, self.seq_number)
    # Payload: consecutive byte values starting at 0x42, wrapped to 0-255.
    start_val = 0x42
    data = bytes(i & 0xff for i in range(start_val, start_val + self.packet_size))
    # Compute the real checksum over the dummy header plus payload, then
    # rebuild the header with it (checksum is in network order).
    checksum = calculate_checksum(header + data)
    header = struct.pack("!BBHHH", ICMP_ECHO, 0, checksum,
                         self.own_id, self.seq_number)
    packet = header + data
    send_time = default_timer()
    try:
        # Port number is irrelevant for ICMP.
        current_socket.sendto(packet, (self.destination, 1))
    except socket.error as e:
        print("General failure (%s)" % (e.args[1]))
        current_socket.close()
        return
    return send_time
|
def get_area_info(bbox, date_interval, maxcc=None):
    """Get information about all images from specified area and time range.

    :param bbox: bounding box of requested area
    :type bbox: geometry.BBox
    :param date_interval: a pair of time strings in ISO8601 format
    :type date_interval: tuple(str)
    :param maxcc: filter images by maximum percentage of cloud coverage
    :type maxcc: float in range [0, 1] or None
    :return: list of dictionaries containing info provided by Opensearch REST service
    :rtype: list(dict)
    """
    result_list = search_iter(bbox=bbox, start_date=date_interval[0],
                              end_date=date_interval[1])
    # BUGFIX: use an explicit None check -- ``maxcc == 0.0`` is a valid
    # filter (zero cloud coverage allowed) and must not be skipped as a
    # falsy value, as ``if maxcc:`` previously did.
    if maxcc is not None:
        return reduce_by_maxcc(result_list, maxcc)
    return result_list
|
def delete(self, path, auth=None, **kwargs):
    """Manually make a DELETE request.

    :param str path: relative url of the request (e.g. `/users/username`)
    :param auth.Authentication auth: authentication object
    :param kwargs dict: Extra arguments for the request, as supported by
        the `requests <http://docs.python-requests.org/>`_ library.
    :raises NetworkFailure: if there is an error communicating with the server
    :raises ApiFailure: if the request cannot be serviced
    """
    response = self._delete(path, auth=auth, **kwargs)
    return self._check_ok(response)
|
def adjacency_projections(mesh):
    """Test if a mesh is convex by projecting the vertices of a triangle
    onto the normal of its adjacent face.

    Parameters
    ----------
    mesh : Trimesh
        Input geometry

    Returns
    -------
    projection : (len(mesh.face_adjacency),) float
        Distance of projection of adjacent vertex onto plane
    """
    adjacency = mesh.face_adjacency
    # Plane normals come from the first face of each adjacent pair.
    plane_normals = mesh.face_normals[adjacency[:, 0]]
    # Plane origins: one vertex on the shared edge of each pair.
    plane_origins = mesh.vertices[mesh.face_adjacency_edges[:, 0]]
    # The unshared vertex of the second face of each pair.
    unshared = mesh.vertices[mesh.face_adjacency_unshared[:, 1]]
    # Signed distance of that vertex from the plane, via a dot product.
    return util.diagonal_dot(unshared - plane_origins, plane_normals)
|
def invoke(tok: str, props: Inputs, opts: InvokeOptions = None) -> Awaitable[Any]:
    """invoke dynamically invokes the function, tok, which is offered by a
    provider plugin. The inputs can be a bag of computed values (Ts or
    Awaitable[T]s), and the result is an Awaitable[Any] that resolves when
    the invoke finishes.
    """
    log.debug(f"Invoking function: tok={tok}")
    if opts is None:
        opts = InvokeOptions()

    async def do_invoke():
        # If a parent was provided, but no provider was provided, use the
        # parent's provider if one was specified.
        if opts.parent is not None and opts.provider is None:
            opts.provider = opts.parent.get_provider(tok)
        # Construct a provider reference from the given provider, if one
        # was provided to us.
        provider_ref = None
        if opts.provider is not None:
            provider_urn = await opts.provider.urn.future()
            provider_id = (await opts.provider.id.future()) or rpc.UNKNOWN
            provider_ref = f"{provider_urn}::{provider_id}"
            log.debug(f"Invoke using provider {provider_ref}")
        monitor = get_monitor()
        inputs = await rpc.serialize_properties(props, {})
        version = opts.version or ""
        log.debug(f"Invoking function prepared: tok={tok}")
        req = provider_pb2.InvokeRequest(tok=tok, args=inputs, provider=provider_ref, version=version)

        # FIX: this synchronous helper used to be named ``do_invoke`` as
        # well, shadowing the enclosing coroutine; renamed for clarity.
        def call_monitor():
            try:
                return monitor.Invoke(req)
            except grpc.RpcError as exn:
                # gRPC-python gets creative with their exceptions: grpc.RpcError
                # as a type is useless; the usefulness comes from the fact that
                # it is polymorphically also a grpc.Call and thus has the
                # .code() member. Pylint doesn't know this because it's not
                # known statically. Neither pylint nor I are the only ones who
                # find this confusing:
                # https://github.com/grpc/grpc/issues/10885#issuecomment-302581315
                # pylint: disable=no-member
                if exn.code() == grpc.StatusCode.UNAVAILABLE:
                    sys.exit(0)
                details = exn.details()
                raise Exception(details)

        # Run the blocking gRPC call off the event loop.
        resp = await asyncio.get_event_loop().run_in_executor(None, call_monitor)
        log.debug(f"Invoking function completed successfully: tok={tok}")
        # If the invoke failed, raise an error.
        if resp.failures:
            raise Exception(f"invoke of {tok} failed: {resp.failures[0].reason} ({resp.failures[0].property})")
        # Otherwise, return the output properties. ``return`` is a Python
        # keyword, hence the getattr.
        ret_obj = getattr(resp, 'return')
        if ret_obj:
            return rpc.deserialize_properties(ret_obj)
        return {}

    return asyncio.ensure_future(RPC_MANAGER.do_rpc("invoke", do_invoke)())
|
def directory_create_temp(self, template_name, mode, path, secure):
    """Create a temporary directory in the guest.

    in template_name of type str
        Template for the name of the directory to create. This must
        contain at least one 'X' character. The first group of
        consecutive 'X' characters in the template will be replaced by a
        random alphanumeric string to produce a unique name.

    in mode of type int
        The UNIX-style access mode mask to create the directory with.
        Whether/how all three access groups and associated access rights
        are realized is guest OS dependent. The API does the best it can
        on each OS. This parameter is ignored if the @a secure parameter
        is set to @c true. It is strongly recommended to use 0700.

    in path of type str
        The path to the directory in which the temporary directory
        should be created. Guest path style.

    in secure of type bool
        Whether to fail if the directory cannot be securely created.
        Currently this means that another unprivileged user cannot
        manipulate the path specified or remove the temporary directory
        after it has been created. Also causes the mode specified to be
        ignored. May not be supported on all guest types.

    return directory of type str
        On success this will contain the full path to the created
        directory. Guest path style.

    raises :class:`VBoxErrorNotSupported`
        The operation is not possible as requested on this particular
        guest type.
    raises :class:`OleErrorInvalidarg`
        Invalid argument. This includes an incorrectly formatted
        template, or a non-absolute path.
    raises :class:`VBoxErrorIprtError`
        The temporary directory could not be created. Possible reasons
        include a non-existing path or an insecure path when the secure
        option was requested.
    """
    # Validate argument types before crossing the API boundary.
    validations = (
        (template_name, basestring,
         "template_name can only be an instance of type basestring"),
        (mode, baseinteger,
         "mode can only be an instance of type baseinteger"),
        (path, basestring,
         "path can only be an instance of type basestring"),
        (secure, bool,
         "secure can only be an instance of type bool"),
    )
    for value, expected_type, message in validations:
        if not isinstance(value, expected_type):
            raise TypeError(message)
    directory = self._call("directoryCreateTemp",
                           in_p=[template_name, mode, path, secure])
    return directory
|
def enable_vxlan_feature(self, nexus_host, nve_int_num, src_intf):
    """Enable VXLAN on the switch.

    Configures the "feature" commands and the NVE interface (without the
    "member" subcommand configuration).  The Nexus 9K will not allow the
    "interface nve" configuration until the "feature nv overlay" command
    is issued and installed, so the XML snippets are sent down
    separately to avoid the N9K failing on the "interface nve" command.
    """
    starttime = time.time()
    # CLI 'feature nv overlay'
    self.send_edit_string(nexus_host, snipp.PATH_VXLAN_STATE,
                          snipp.BODY_VXLAN_STATE % "enabled")
    # CLI 'feature vn-segment-vlan-based'
    self.send_edit_string(nexus_host, snipp.PATH_VNSEG_STATE,
                          snipp.BODY_VNSEG_STATE % "enabled")
    # CLI 'int nve1' to create nve1
    self.send_edit_string(nexus_host,
                          snipp.PATH_NVE_CREATE % nve_int_num,
                          snipp.BODY_NVE_CREATE % nve_int_num)
    # CLI 'no shut' + 'source-interface loopback %s' beneath int nve1
    self.send_edit_string(nexus_host,
                          snipp.PATH_NVE_CREATE % nve_int_num,
                          snipp.BODY_NVE_ADD_LOOPBACK % ("enabled", src_intf))
    self.capture_and_print_timeshot(starttime, "enable_vxlan",
                                    switch=nexus_host)
|
def _parse_relationships ( self , relationships ) :
"""Ensure compliance with the spec ' s relationships section
Specifically , the relationships object of the single resource
object . For modifications we only support relationships via
the ` data ` key referred to as Resource Linkage .
: param relationships :
dict JSON API relationships object"""
|
link = 'jsonapi.org/format/#document-resource-object-relationships'
if not isinstance ( relationships , dict ) :
self . fail ( 'The JSON API resource object relationships key MUST ' 'be a hash & comply with the spec\'s resource linkage ' 'section.' , link )
for key , val in relationships . items ( ) :
if not isinstance ( val , dict ) or 'data' not in val :
self . fail ( 'Relationship key %s MUST be a hash & contain ' 'a `data` field compliant with the spec\'s ' 'resource linkage section.' % key , link )
elif isinstance ( val [ 'data' ] , dict ) :
data = val [ 'data' ]
rid = isinstance ( data . get ( 'id' ) , unicode )
rtype = isinstance ( data . get ( 'type' ) , unicode )
if not rid or not rtype :
self . fail ( '%s relationship\'s resource linkage MUST ' 'contain `id` & `type` fields. Additionally, ' 'they must both be strings.' % key , link )
elif isinstance ( val [ 'data' ] , list ) :
abort ( exceptions . ModificationDenied ( ** { 'detail' : 'Modifying the %s relationship or any to-many ' 'relationships for that matter are is not ' 'currently supported. Instead, modify the ' 'to-one side directly.' % key , 'links' : link , } ) )
elif val [ 'data' ] :
self . fail ( 'The relationship key %s is malformed & impossible ' 'for us to understand your intentions. It MUST be ' 'a hash & contain a `data` field compliant with ' 'the spec\'s resource linkage section or null if ' 'you want to unset the relationship.' % key , link )
|
def get(self, measurement_class):
    """Return the latest measurement for the given class, or None if
    nothing has been received from the vehicle.
    """
    return self._construct_measurement(
        Measurement.name_from_class(measurement_class))
|
def td_taper(out, start, end, beta=8, side='left'):
    """Apply a taper to the given TimeSeries.

    A half-kaiser window is used for the roll-off.

    Parameters
    ----------
    out : TimeSeries
        The ``TimeSeries`` to taper.
    start : float
        The time (in s) to start the taper window.
    end : float
        The time (in s) to end the taper window.
    beta : int, optional
        The beta parameter to use for the Kaiser window. See
        ``scipy.signal.kaiser`` for details. Default is 8.
    side : {'left', 'right'}
        The side to apply the taper to. If ``'left'`` (``'right'``), the
        taper will roll up (down) between ``start`` and ``end``, with
        all values before ``start`` (after ``end``) set to zero.
        Default is ``'left'``.

    Returns
    -------
    TimeSeries
        The tapered time series.
    """
    tapered = out.copy()
    # Full kaiser window; only one half of it is applied.
    winlen = 2 * int((end - start) / tapered.delta_t)
    window = Array(signal.get_window(('kaiser', beta), winlen))
    xmin = int((start - tapered.start_time) / tapered.delta_t)
    half = winlen // 2
    xmax = xmin + half
    if side == 'left':
        # Roll up: first half of the window, zero everything before it.
        tapered[xmin:xmax] *= window[:half]
        if xmin > 0:
            tapered[:xmin].clear()
    elif side == 'right':
        # Roll down: second half of the window, zero everything after it.
        tapered[xmin:xmax] *= window[half:]
        if xmax < len(tapered):
            tapered[xmax:].clear()
    else:
        raise ValueError("unrecognized side argument {}".format(side))
    return tapered
|
def equally_accessible_windows(is_accessible, size, start=0, stop=None, step=None):
    """Create windows each containing the same number of accessible bases.

    Parameters
    ----------
    is_accessible : array_like, bool, shape (n_bases,)
        Array defining accessible status of all bases on a
        contig/chromosome.
    size : int
        Window size (number of accessible bases).
    start : int, optional
        The genome position at which to start.
    stop : int, optional
        The genome position at which to stop.
    step : int, optional
        The number of accessible sites between start positions of
        windows. If not given, defaults to the window size, i.e.,
        non-overlapping windows. Use half the window size to get
        half-overlapping windows.

    Returns
    -------
    windows : ndarray, int, shape (n_windows, 2)
        Window start/stop positions (1-based).
    """
    # 1-based genomic positions of all accessible sites.
    positions = np.nonzero(is_accessible)[0] + 1
    # N.B., start and stop are genomic positions bounding the windows.
    if start:
        positions = positions[positions >= start]
    if stop:
        positions = positions[positions <= stop]
    # Each window spans the first to last of `size` accessible sites.
    return moving_statistic(positions, lambda v: [v[0], v[-1]],
                            size=size, step=step)
|
def blueprint(self) -> Optional[str]:
    """Return the blueprint the matched endpoint belongs to.

    This can be None if the request has not been matched or the endpoint
    is not in a blueprint.
    """
    endpoint = self.endpoint
    if endpoint is None or '.' not in endpoint:
        return None
    # Everything before the final dot is the blueprint name.
    return endpoint.rsplit('.', 1)[0]
|
def iter_features(self, stanza=None):
    """Return an iterator which yields the features of the node.

    :param stanza: The IQ request stanza
    :type stanza: :class:`~aioxmpp.IQ`
    :rtype: iterable of :class:`str`
    :return: :xep:`30` features of this node

    `stanza` is the :class:`aioxmpp.IQ` stanza of the request. This can
    be used to filter the list according to who is asking (not
    recommended).  `stanza` may be :data:`None` if the features are
    queried without a specific request context. In that case,
    implementors should assume that the result is visible to everybody.

    .. note::

        Subclasses must allow :data:`None` for `stanza` and default it
        to :data:`None`.

    The features are returned as strings. The features demanded by
    :xep:`30` are always returned.
    """
    # Static (always-present) features first, then the dynamic ones.
    return itertools.chain(self.STATIC_FEATURES, self._features)
|
def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None, scope=None, syntax=None):
    """Adds the proto to the pool in the specified package.

    Args:
      desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
      package: The package the proto should be located in.
      file_desc: The file containing this message.
      scope: Dict mapping short and full symbols to message and enum types.
      syntax: string indicating syntax of the file ("proto2" or "proto3")

    Returns:
      The added descriptor.
    """
    # Fully-qualified name: prefix with the package when one is given.
    if package:
        desc_name = '.'.join((package, desc_proto.name))
    else:
        desc_name = desc_proto.name
    if file_desc is None:
        file_name = None
    else:
        file_name = file_desc.name
    if scope is None:
        scope = {}
    # Convert nested messages and enums first so that the field
    # construction below can resolve them through `scope`.
    nested = [self._ConvertMessageDescriptor(nested, desc_name, file_desc, scope, syntax) for nested in desc_proto.nested_type]
    enums = [self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, scope) for enum in desc_proto.enum_type]
    fields = [self._MakeFieldDescriptor(field, desc_name, index) for index, field in enumerate(desc_proto.field)]
    extensions = [self._MakeFieldDescriptor(extension, desc_name, index, is_extension=True) for index, extension in enumerate(desc_proto.extension)]
    # Oneofs start with empty field lists; fields are linked in below.
    oneofs = [descriptor.OneofDescriptor(desc.name, '.'.join((desc_name, desc.name)), index, None, [], desc.options) for index, desc in enumerate(desc_proto.oneof_decl)]
    extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
    if extension_ranges:
        is_extendable = True
    else:
        is_extendable = False
    desc = descriptor.Descriptor(name=desc_proto.name, full_name=desc_name, filename=file_name, containing_type=None, fields=fields, oneofs=oneofs, nested_types=nested, enum_types=enums, extensions=extensions, options=_OptionsOrNone(desc_proto), is_extendable=is_extendable, extension_ranges=extension_ranges, file=file_desc, serialized_start=None, serialized_end=None, syntax=syntax)
    # Back-link the children to their containing message descriptor.
    for nested in desc.nested_types:
        nested.containing_type = desc
    for enum in desc.enum_types:
        enum.containing_type = desc
    # Wire each field declared inside a oneof to its OneofDescriptor,
    # and vice versa.
    for field_index, field_desc in enumerate(desc_proto.field):
        if field_desc.HasField('oneof_index'):
            oneof_index = field_desc.oneof_index
            oneofs[oneof_index].fields.append(fields[field_index])
            fields[field_index].containing_oneof = oneofs[oneof_index]
    # Register the descriptor in the caller's scope and in the pool.
    scope[_PrefixWithDot(desc_name)] = desc
    self._descriptors[desc_name] = desc
    return desc
|
def get_from_env():
    """Get a Resource from environment variables.

    :rtype: :class:`Resource`
    :return: A resource with type and labels from the environment, or
        None when no resource type is configured.
    """
    resource_type = os.getenv(OC_RESOURCE_TYPE)
    if resource_type is None:
        return None
    resource_type = resource_type.strip()
    raw_labels = os.getenv(OC_RESOURCE_LABELS)
    if raw_labels is None:
        return Resource(resource_type)
    return Resource(resource_type, parse_labels(raw_labels))
|
def tmpl_asciify(text):
    """* synopsis: ``%asciify{text}``
    * description: Translate non-ASCII characters to their ASCII equivalents.
      For example, "café" becomes "cafe". Uses the mapping provided by the
      unidecode module, with German umlauts pre-mapped to their conventional
      two-letter transliterations first.
    """
    # unidecode would map ä -> a; substitute the conventional ae/oe/ue forms
    # before handing the text over.
    umlaut_map = {u'ä': 'ae', u'ö': 'oe', u'ü': 'ue',
                  u'Ä': 'Ae', u'Ö': 'Oe', u'Ü': 'Ue'}
    for umlaut, transliteration in umlaut_map.items():
        text = text.replace(umlaut, transliteration)
    # unidecode marks unmappable characters as '[?]'; drop those markers.
    return str(unidecode(text).replace('[?]', ''))
|
def convert_code(in_file, out_file, in_alg='taudem', out_alg='arcgis', datatype=None):
    """Convert D8 flow direction code from one algorithm to another.

    Args:
        in_file: input raster file path
        out_file: output raster file path
        in_alg: available algorithms are in FlowModelConst.d8_dirs.
            "taudem" is the default
        out_alg: same as in_alg. "arcgis" is the default
        datatype: default is None and use the datatype of the in_file
    """
    FileClass.check_file_exists(in_file)
    in_alg = in_alg.lower()
    out_alg = out_alg.lower()
    if in_alg not in FlowModelConst.d8_dirs or out_alg not in FlowModelConst.d8_dirs:
        raise RuntimeError('The input algorithm name should one of %s' %
                           ', '.join(list(FlowModelConst.d8_dirs.keys())))
    in_code = FlowModelConst.d8_dirs.get(in_alg)
    out_code = FlowModelConst.d8_dirs.get(out_alg)
    assert len(in_code) == len(out_code)
    # Positional mapping: code i of the source scheme becomes code i of the target.
    convert_dict = dict(zip(in_code, out_code))
    if datatype is not None and datatype in GDALDataType:
        RasterUtilClass.raster_reclassify(in_file, convert_dict, out_file, datatype)
    else:
        RasterUtilClass.raster_reclassify(in_file, convert_dict, out_file)
|
def execute_query(cmd, client, application, analytics_query, start_time=None, end_time=None, offset='1h', resource_group_name=None):
    """Executes a query against the provided Application Insights application.

    :param cmd: CLI command context; only ``cmd.cli_ctx`` is used here.
    :param client: Application Insights data client exposing ``query.execute``.
    :param application: application identifier(s); resolved into query targets.
    :param analytics_query: the analytics query string to run.
    :param start_time: optional query window start.
    :param end_time: optional query window end.
    :param offset: time window size, default ``'1h'``.
    :param resource_group_name: optional resource group used to resolve targets.
    :return: the raw query response from the service.
    """
    from .vendored_sdks.applicationinsights.models import QueryBody
    # The first resolved target is queried directly; the rest are passed as
    # additional cross-resource applications.
    targets = get_query_targets(cmd.cli_ctx, application, resource_group_name)
    return client.query.execute(targets[0], QueryBody(query=analytics_query, timespan=get_timespan(cmd.cli_ctx, start_time, end_time, offset), applications=targets[1:]))
|
def I_R_simga2(self, R, kwargs_mass, kwargs_light, kwargs_anisotropy):
    """Equation A15 in Mamon & Lokas 2005 as a numerical integral
    (logarithmic grid if the option is chosen), modulo pre-factor 2*G.

    :param R: 2d projected radius (in angular units)
    :param kwargs_mass: mass model parameters (following lenstronomy lens model conventions)
    :param kwargs_light: deflector light parameters (following lenstronomy light model conventions)
    :param kwargs_anisotropy: anisotropy parameters, may vary according to anisotropy type chosen.
        We refer to the Anisotropy() class for details on the parameters.
    :return: integral of A15 in Mamon & Lokas 2005
    """
    # Clamp to the minimum integration radius to avoid the singular limit.
    R = max(R, self._min_integrate)
    if self._log_int is True:
        # Log-spaced grid from just above R out to the maximum radius.
        log_min = np.log10(R + 0.001)
        log_max = np.log10(self._max_integrate)
        r_array = np.logspace(log_min, log_max, self._interp_grid_num)
        # Constant log-step of the grid, converted to natural log.
        dlog_r = (np.log10(r_array[2]) - np.log10(r_array[1])) * np.log(10)
        integrand = self._integrand_A15(r_array, R, kwargs_mass, kwargs_light, kwargs_anisotropy)
        # d r = r * d ln r for the log-spaced Riemann sum.
        contributions = integrand * dlog_r * r_array
    else:
        r_array = np.linspace(R + 0.001, self._max_integrate, self._interp_grid_num)
        dr = r_array[2] - r_array[1]
        contributions = self._integrand_A15(r_array, R, kwargs_mass, kwargs_light, kwargs_anisotropy) * dr
    return np.sum(contributions)
|
def _restore_expansion_state(self):
    """Iterate recursively over all tree items and restore their expansion state.

    Reads the previously stored expansion flags for the root state machine
    from ``self._expansion_state`` and re-expands the matching tree paths.
    """
    def restore_tree_expansion(child_tree_iter, expansion_state):
        # Walk one subtree: expand this node if it was expanded before,
        # then recurse into its children.
        tree_item_path = self.history_tree_store.get_path(child_tree_iter)
        history_item = self.get_history_item_for_tree_iter(child_tree_iter)
        # restore expansion state if tree item path is valid and expansion state was not stored already
        if tree_item_path and history_item in expansion_state:
            if expansion_state[history_item]:
                self.history_tree.expand_to_path(tree_item_path)
        for n in range(self.history_tree_store.iter_n_children(child_tree_iter)):
            child_iter = self.history_tree_store.iter_nth_child(child_tree_iter, n)
            restore_tree_expansion(child_iter, expansion_state)
    root_iter = self.history_tree_store.get_iter_first()
    if not root_iter:
        # Empty tree: nothing to restore.
        return
    # All roots belong to the same state machine; use the first to look up
    # the stored expansion map.
    state_machine = self.get_history_item_for_tree_iter(root_iter).state_reference.get_state_machine()
    if state_machine.state_machine_id not in self._expansion_state:
        return
    while root_iter:
        restore_tree_expansion(root_iter, self._expansion_state[state_machine.state_machine_id])
        root_iter = self.history_tree_store.iter_next(root_iter)
|
def check(source):
    """Check code for syntax errors and pyflakes findings.

    A syntax error propagates as an exception from ``compile``; pyflakes
    warnings are written to stderr via its Reporter.
    """
    # Fail fast on syntax errors before running the linter.
    compile(source, '<string>', 'exec', dont_inherit=True)
    flake_reporter = pyflakes.reporter.Reporter(sys.stderr, sys.stderr)
    pyflakes.api.check(source, filename='<string>', reporter=flake_reporter)
|
def compile_rcc(self, namespace, unknown):
    """Compile qt resource files

    :param namespace: namespace containing arguments from the launch parser
    :type namespace: Namespace
    :param unknown: list of unknown arguments (not used here)
    :type unknown: list
    :returns: None
    :rtype: None
    :raises: None
    """
    # namespace.rccfile appears to be an open file handle (e.g. from
    # argparse FileType); only its path is used — TODO confirm with parser setup.
    rccfile = namespace.rccfile.name
    qtcompile.compile_rcc(rccfile)
|
def read(self, name):
    """Read migration from file.

    Loads ``<migrate_dir>/<name>.py``, executes it, and returns its
    ``migrate`` and ``rollback`` callables (``VOID`` when absent).
    """
    open_kwargs = dict()
    if os.name == 'nt' and sys.version_info >= (3, 0):
        # Windows defaults to a locale encoding; force utf-8 instead.
        open_kwargs['encoding'] = 'utf-8'
    migration_path = os.path.join(self.migrate_dir, name + '.py')
    with open(migration_path, **open_kwargs) as source_file:
        code = source_file.read()
    scope = {}
    exec_in(code, scope)
    return scope.get('migrate', VOID), scope.get('rollback', VOID)
|
def __LoginBySSPI(host, port, service, adapter, version, path, keyFile, certFile, thumbprint, sslContext, b64token, connectionPoolTimeout=CONNECTION_POOL_IDLE_TIMEOUT_SEC):
    """Private method that performs the actual Connect and returns a
    connected service instance object.

    @param host: Which host to connect to.
    @type host: string
    @param port: Port
    @type port: int
    @param service: Service
    @type service: string
    @param adapter: Adapter
    @type adapter: string
    @param version: Version
    @type version: string
    @param path: Path
    @type path: string
    @param keyFile: ssl key file path
    @type keyFile: string
    @param certFile: ssl cert file path
    @type certFile: string
    @param thumbprint: host cert thumbprint
    @type thumbprint: string
    @param sslContext: SSL Context describing the various SSL options. It is only
                       supported in Python 2.7.9 or higher.
    @type sslContext: SSL.Context
    @param b64token: base64 encoded token
    @type b64token: string
    @param connectionPoolTimeout: Timeout in secs for idle connections to close,
                                  specify negative numbers for never closing
                                  the connections
    @type connectionPoolTimeout: int
    """
    content, si, stub = __RetrieveContent(host, port, adapter, version, path, keyFile, certFile, thumbprint, sslContext, connectionPoolTimeout)
    if b64token is None:
        raise Exception('Token is not defined for sspi login')
    # Login. Any failure (including vim.fault.InvalidLogin) propagates to the
    # caller unchanged; the previous try/except clauses only re-raised and the
    # session object returned by LoginBySSPI was unused, so both were removed.
    content.sessionManager.LoginBySSPI(b64token)
    return si, stub
|
def getcloud(site, feed_id=None):
    """Returns the tag cloud for a site or a site's subscriber."""
    # Per-site clouds are cached; build and store them on a miss.
    cloudict = fjcache.cache_get(site.id, 'tagclouds')
    if not cloudict:
        cloudict = cloudata(site)
        fjcache.cache_set(site, 'tagclouds', cloudict)
    if not feed_id:
        # The site-wide tag cloud lives under key 0.
        return cloudict[0]
    # A subscriber's tag cloud has been requested.
    feed_key = int(feed_id)
    if feed_key in cloudict:
        return cloudict[feed_key]
    return []
|
def optional(p, default_value=None):
    '''Make a parser optional. On success return the result, otherwise return
    default_value silently, without raising any exception. If default_value is
    not provided, None is returned instead.'''
    @Parser
    def optional_parser(text, index):
        res = p(text, index)
        if not res.status:
            # Swallow the failure and substitute the default value.
            return Value.success(res.index, default_value)
        return Value.success(res.index, res.value)
    return optional_parser
|
def process_keys(self):
    """Process all the keys in the `input_queue`.

    (To be called after `feed`.)
    Note: because of the `feed`/`process_keys` separation, it is possible
    to call `feed` from inside a key binding. This function keeps looping
    until the queue is empty.
    """
    while self.input_queue:
        key_press = self.input_queue.popleft()
        # CPR responses are terminal bookkeeping, not real key presses,
        # so the before/after hooks are skipped for them.
        fire_hooks = key_press.key != Keys.CPRResponse
        if fire_hooks:
            self.beforeKeyPress.fire()
        self._process_coroutine.send(key_press)
        if fire_hooks:
            self.afterKeyPress.fire()
    # Invalidate user interface.
    cli = self._cli_ref()
    if cli:
        cli.invalidate()
|
def pack(self):
    '''Pack this exception into a serializable dictionary that is safe for
    transport via msgpack.'''
    if six.PY3:
        return {'message': six.text_type(self), 'args': self.args}
    # Python 2: use the unicode representation explicitly.
    return {'message': self.__unicode__(), 'args': self.args}
|
def _get_shells():
    '''Return the valid shells on this system.

    The result of ``cmd.shells`` is cached in ``__context__`` for 5 seconds
    to avoid re-running the lookup on every call.
    '''
    start = time.time()
    if 'sh.last_shells' in __context__:
        if start - __context__['sh.last_shells'] > 5:
            # BUGFIX: the cache is stale, so refresh both the timestamp AND
            # the shell list. Previously only the timestamp was updated here
            # (returning stale data), while a fresh cache re-fetched the
            # shells on every call — the logic was inverted.
            __context__['sh.last_shells'] = start
            __context__['sh.shells'] = __salt__['cmd.shells']()
    else:
        # First call: populate the cache.
        __context__['sh.last_shells'] = start
        __context__['sh.shells'] = __salt__['cmd.shells']()
    return __context__['sh.shells']
|
def iter_encode(self, obj):
    '''The iterative version of `arff.ArffEncoder.encode`.

    This encodes iteratively a given object and returns, one-by-one, the
    lines of the ARFF file.

    :param obj: the object containing the ARFF information.
    :return: (yields) the ARFF file as unicode strings.
    :raises BadObject: when the relation name or attribute declarations are
        missing or malformed.
    '''
    # DESCRIPTION — each description line becomes an ARFF comment line.
    if obj.get('description', None):
        for row in obj['description'].split('\n'):
            yield self._encode_comment(row)
    # RELATION — mandatory.
    if not obj.get('relation'):
        raise BadObject('Relation name not found or with invalid value.')
    yield self._encode_relation(obj['relation'])
    yield u''
    # ATTRIBUTES — mandatory; each is validated before being emitted.
    if not obj.get('attributes'):
        raise BadObject('Attributes not found.')
    attribute_names = set()
    for attr in obj['attributes']:
        # Verify for bad object format: each attribute must be a
        # (name, type) pair with a string name.
        if not isinstance(attr, (tuple, list)) or len(attr) != 2 or not isinstance(attr[0], basestring):
            raise BadObject('Invalid attribute declaration "%s"' % str(attr))
        if isinstance(attr[1], basestring):
            # Verify for invalid types — string types must be one of the
            # simple ARFF types.
            if attr[1] not in _SIMPLE_TYPES:
                raise BadObject('Invalid attribute type "%s"' % str(attr))
        # Verify for bad object format — non-string types must be a list
        # of nominal values.
        elif not isinstance(attr[1], (tuple, list)):
            raise BadObject('Invalid attribute type "%s"' % str(attr))
        # Verify attribute name is not used twice
        if attr[0] in attribute_names:
            raise BadObject('Trying to use attribute name "%s" for the ' 'second time.' % str(attr[0]))
        else:
            attribute_names.add(attr[0])
        yield self._encode_attribute(attr[0], attr[1])
    yield u''
    attributes = obj['attributes']
    # DATA — the @DATA marker is always emitted, rows only when present.
    yield _TK_DATA
    if 'data' in obj:
        data = _get_data_object_for_encoding(obj.get('data'))
        for line in data.encode_data(obj.get('data'), attributes):
            yield line
    yield u''
|
def get_pltpat(self, plt_ext="svg"):
    """Return plot filename pattern: {BASE}.png {BASE}_pruned.png {BASE}_upper_pruned.png"""
    desc = self.ntplt.desc
    if desc == "":
        return "{{BASE}}.{}".format(plt_ext)
    return "{{BASE}}_{}.{}".format(desc, plt_ext)
|
def setCommInfo(infostr):
    """Set common information, update MagBlock.comminfo.

    :param infostr: should meet one of the following options:
        * infostr is a dict, {k1: v1, k2: v2}
        * infostr is a string, with format like: "k1=v1, k2=v2"
    """
    if isinstance(infostr, dict):
        pairs = infostr
    elif isinstance(infostr, str):
        pairs = MagBlock.str2dict(infostr)
    else:
        # Unsupported input type: report and leave comminfo untouched.
        print("Information string ERROR.")
        return
    for key, value in pairs.items():
        MagBlock.comminfo[key] = value
|
def extract_status_code(error):
    """Extract an error code from a message.

    Tries ``error.code``, then ``error.status_code``, then ``error.errno``,
    returning the first one that converts to an int; defaults to 500.
    """
    for attr_name in ('code', 'status_code', 'errno'):
        try:
            return int(getattr(error, attr_name))
        except (AttributeError, TypeError, ValueError):
            continue
    return 500
|
def set_code(self, key, code):
    """Sets code of cell key, marks grid as changed.

    :param key: cell key into the grid's code array
    :param code: new code string for the cell
    """
    old_code = self.grid.code_array(key)
    try:
        # Python 2 path: normalize byte strings to unicode so the
        # comparison below is meaningful.
        old_code = unicode(old_code, encoding="utf-8")
    except TypeError:
        # Already unicode (or None) — nothing to convert.
        pass
    if code == old_code:
        # No change: do not touch the grid or fire events.
        return
    if not (old_code is None and not code) and code != old_code:
        # Mark content as changed — skipped when an empty cell is being
        # set to an empty value.
        post_command_event(self.main_window, self.ContentChangedMsg)
    # Set cell code
    self.grid.code_array.__setitem__(key, code)
|
def break_iterable(iterable, pred):
    """Break an iterable on the items that match the predicate into lists.

    The item that matched the predicate is not included in the result.

    >>> list(break_iterable([1, 2, 3, 4], lambda x: x == 3))
    [[1, 2], [4]]
    """
    chunk = []
    for item in iterable:
        if pred(item):
            # Matching item acts as a delimiter: emit the chunk, drop the item.
            yield chunk
            chunk = []
        else:
            chunk.append(item)
    # The trailing chunk is always emitted, even when empty.
    yield chunk
|
def wrap_default(self, data, renderer_context):
    """Convert native data to a JSON API resource collection.

    This wrapper expects a standard DRF data object (a dict-like
    object with a `fields` dict-like attribute), or a list of
    such data objects.
    """
    wrapper = self.dict_class()
    view = renderer_context.get("view", None)
    request = renderer_context.get("request", None)
    # Derive the JSON API resource type from the view's model.
    model = self.model_from_obj(view)
    resource_type = self.model_to_resource_type(model)
    if isinstance(data, list):
        many = True
        resources = data
    else:
        many = False
        resources = [data]
    items = []
    links = self.dict_class()
    linked = self.dict_class()
    meta = self.dict_class()
    for resource in resources:
        # Each resource contributes its item plus any links/linked/meta
        # side-channel data, which are accumulated across the collection.
        converted = self.convert_resource(resource, data, request)
        item = converted.get('data', {})
        linked_ids = converted.get('linked_ids', {})
        if linked_ids:
            item["links"] = linked_ids
        items.append(item)
        links.update(converted.get('links', {}))
        linked = self.update_nested(linked, converted.get('linked', {}))
        meta.update(converted.get('meta', {}))
    # A list input yields a list under the resource type; a single object
    # is emitted bare.
    if many:
        wrapper[resource_type] = items
    else:
        wrapper[resource_type] = items[0]
    if links:
        # Link keys are namespaced with the resource type per JSON API.
        links = self.prepend_links_with_name(links, resource_type)
        wrapper["links"] = links
    if linked:
        wrapper["linked"] = linked
    if meta:
        wrapper["meta"] = meta
    return wrapper
|
def _resolve_subkeys ( key , separator = '.' ) :
"""Given a key which may actually be a nested key , return the top level
key and any nested subkeys as separate values .
Args :
key ( str ) : A string that may or may not contain the separator .
separator ( str ) : The namespace separator . Defaults to ` . ` .
Returns :
Tuple [ str , str ] : The key and subkey ( s ) ."""
|
subkey = None
if separator in key :
index = key . index ( separator )
subkey = key [ index + 1 : ]
key = key [ : index ]
return key , subkey
|
def pop(self, timeout=0):
    '''Pop an item.

    With a positive *timeout*, block up to that many seconds waiting for
    an item; otherwise pop non-blocking. Returns ``None`` when empty.
    '''
    if timeout > 0:
        raw = self.server.brpop(self.key, timeout)
        # brpop returns a (key, value) tuple on success.
        if isinstance(raw, tuple):
            raw = raw[1]
    else:
        raw = self.server.rpop(self.key)
    if raw:
        return self._decode_item(raw)
|
def get_description(self, obj):
    """Set search entry description for object.

    Uses the model's configured ``search_description`` template when set,
    formatted with the object's attributes; otherwise falls back to the
    parent implementation.
    """
    template = self.get_model_config_value(obj, 'search_description')
    if template:
        return template.format(**obj.__dict__)
    return super().get_description(obj)
|
def connection_lost(self, exc):
    """Handle lost connection."""
    _LOGGER.debug('Connection lost with %s', self.transport)
    # Cancel any pending connection check and drop the cancel handle.
    cancel_check = self.gateway.cancel_check_conn
    if cancel_check:
        cancel_check()
        self.gateway.cancel_check_conn = None
    if exc:
        # Abnormal loss: log it and notify the registered callback.
        _LOGGER.error(exc)
        self.conn_lost_callback()
    self.transport = None
|
def generalize_sql(sql):
    """Removes most variables from an SQL query and replaces them with X or N for numbers.

    Based on Mediawiki's DatabaseBase::generalizeSQL.

    :type sql str|None
    :rtype: str
    """
    if sql is None:
        return None
    # multiple spaces -> single space
    sql = re.sub(r'\s{2,}', ' ', sql)
    # MW comments
    # e.g. /* CategoryDataService::getMostVisited N.N.N.N */
    sql = remove_comments_from_sql(sql)
    # handle LIKE statements
    sql = normalize_likes(sql)
    # Strip escape sequences so escaped quotes don't break the quoted-string
    # patterns below (order matters: backslashes first).
    sql = re.sub(r"\\\\", '', sql)
    sql = re.sub(r"\\'", '', sql)
    sql = re.sub(r'\\"', '', sql)
    # Quoted string literals -> X
    sql = re.sub(r"'[^\']*'", 'X', sql)
    sql = re.sub(r'"[^\"]*"', 'X', sql)
    # All newlines, tabs, etc replaced by single space
    sql = re.sub(r'\s+', ' ', sql)
    # All numbers => N
    sql = re.sub(r'-?[0-9]+', 'N', sql)
    # Collapse IN/VALUES lists with multiple entries, e.g.
    # WHERE foo IN ('880987', '882618', '708228', '522330')
    sql = re.sub(r' (IN|VALUES)\s*\([^,]+,[^)]+\)', ' \\1 (XYZ)', sql, flags=re.IGNORECASE)
    return sql.strip()
|
def quote_first_command_arg(self, arg):
    """There's a bug in Windows when running an executable that's
    located inside a path with a space in it. This method handles
    that case, or on non-Windows systems or an executable with no
    spaces, it just leaves well enough alone.
    """
    # The workaround is only needed on Windows AND when the path has a space.
    if sys.platform != 'win32' or ' ' not in arg:
        return arg
    try:
        import win32api
    except ImportError:
        raise ValueError("The executable %r contains a space, and in order to " "handle this issue you must have the win32api module " "installed" % arg)
    # Convert to the 8.3 short form, which contains no spaces.
    return win32api.GetShortPathName(arg)
|
def from_hsl(h, s, l, alpha=1.0, wref=_DEFAULT_WREF):
    """Create a new instance based on the specified HSL values.

    Parameters:
      :h: The Hue component value [0...1]
      :s: The Saturation component value [0...1]
      :l: The Lightness component value [0...1]
      :alpha: The color transparency [0...1], default is opaque
      :wref: The whitepoint reference, default is 2° D65.

    Returns:
      A grapefruit.Color instance.

    >>> Color.from_hsl(30, 1, 0.5)
    Color(1.0, 0.5, 0.0, 1.0)
    >>> Color.from_hsl(30, 1, 0.5, 0.5)
    Color(1.0, 0.5, 0.0, 0.5)
    """
    hsl_values = (h, s, l)
    return Color(hsl_values, 'hsl', alpha, wref)
|
def on_message(self, headers, body):
    """See :py:meth:`ConnectionListener.on_message`

    Special case: if the header 'filename' is present, the content is
    written out as a file.
    """
    self.__sysout('')
    if 'filename' not in headers:
        self.__print_async("MESSAGE", headers, body)
        return
    content = base64.b64decode(body.encode())
    fname = headers['filename']
    if os.path.exists(fname):
        # Avoid clobbering an existing file: append a timestamp suffix.
        fname = '%s.%s' % (headers['filename'], int(time.time()))
    with open(fname, 'wb') as out_file:
        out_file.write(content)
    self.__print_async("MESSAGE", headers, "Saved file: %s" % fname)
|
def items(self, start=None, stop=None):
    """Return an iterator yielding pairs.

    If *start* is specified, iteration starts at the first pair with a key
    that is larger than or equal to *start*. If not specified, iteration
    starts at the first pair in the list.
    If *stop* is specified, iteration stops at the last pair that is
    smaller than *stop*. If not specified, iteration ends with the last pair
    in the list.
    """
    # Nodes appear to be [key, value, next-links...] records of the
    # underlying skip list — TODO confirm against the class internals.
    if start is None:
        # Begin at the first real node after the head sentinel.
        node = self._head[2]
    else:
        # Position the search path just before *start*, then step to the
        # first node >= start.
        self._find_lt(start)
        node = self._path[0][2]
    while node is not self._tail and (stop is None or node[0] < stop):
        yield (node[0], node[1])
        node = node[2]
|
def get_log_entry_ids_by_log(self, log_id):
    """Gets the list of ``LogEntry`` ``Ids`` associated with a ``Log``.

    arg:    log_id (osid.id.Id): ``Id`` of a ``Log``
    return: (osid.id.IdList) - list of related logEntry ``Ids``
    raise:  NotFound - ``log_id`` is not found
    raise:  NullArgument - ``log_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.ResourceBinSession.get_resource_ids_by_bin
    # BUGFIX: the loop previously referenced the undefined name ``log_ids``;
    # the parameter is ``log_id``, which raised NameError on every call.
    id_list = []
    for log_entry in self.get_log_entries_by_log(log_id):
        id_list.append(log_entry.get_id())
    return IdList(id_list)
|
def decrypt(self, pkt, verify=True):
    """Decrypt (and decapsulate) an IP(v6) packet containing ESP or AH.

    @param pkt: the packet to decrypt
    @param verify: if False, do not perform the integrity check
    @return: the decrypted/decapsulated packet
    @raise IPSecIntegrityError: if the integrity check fails
    """
    if not isinstance(pkt, self.SUPPORTED_PROTOS):
        raise TypeError('cannot decrypt %s, supported protos are %s' % (pkt.__class__, self.SUPPORTED_PROTOS))
    # Dispatch on the SA's configured protocol, requiring the matching layer.
    if self.proto is ESP and pkt.haslayer(ESP):
        return self._decrypt_esp(pkt, verify=verify)
    if self.proto is AH and pkt.haslayer(AH):
        return self._decrypt_ah(pkt, verify=verify)
    raise TypeError('%s has no %s layer' % (pkt, self.proto.name))
|
def switch_toggle(context, ain):
    """Toggle an actor's power state."""
    context.obj.login()
    actor = context.obj.get_actor_by_ain(ain)
    if not actor:
        click.echo("Actor not found: {}".format(ain))
        return
    # Flip the current state: on -> off, off -> on.
    if actor.get_state():
        actor.switch_off()
        click.echo("State for {} is now OFF".format(ain))
    else:
        actor.switch_on()
        click.echo("State for {} is now ON".format(ain))
|
def _get_system_tz(self):
    '''Get the system timezone for use when no timezone is explicitly provided.

    Requires pytz; if not available then no timezone will be set when not
    explicitly provided. Tries /etc/localtime first, then /etc/timezone;
    returns ``None`` when neither yields a usable timezone.
    '''
    if not HAS_PYTZ:
        return None

    def _etc_localtime():
        # Parse the binary tzfile directly.
        try:
            with open('/etc/localtime', 'rb') as fp:
                return pytz.tzfile.build_tzinfo('system', fp)
        except OSError as exc:
            # A missing file is expected on some systems; only log other errors.
            if exc.errno != errno.ENOENT:
                self.logger.error('Unable to read from /etc/localtime: %s', exc.strerror)
        except pytz.UnknownTimeZoneError:
            self.logger.error('/etc/localtime contains unrecognized tzinfo')
        return None

    def _etc_timezone():
        # Fall back to the timezone *name* stored in /etc/timezone.
        try:
            with open('/etc/timezone', 'r') as fp:
                tzname = fp.read().strip()
            return pytz.timezone(tzname)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                # BUGFIX: this message previously said /etc/localtime,
                # copy-pasted from the helper above.
                self.logger.error('Unable to read from /etc/timezone: %s', exc.strerror)
        except pytz.UnknownTimeZoneError:
            self.logger.error('/etc/timezone contains unrecognized timezone \'%s\'', tzname)
        return None

    return _etc_localtime() or _etc_timezone()
|
def emit(self, record):
    """Emit a record.

    If the stream associated with this handler provides a tty then the
    record that is emitted will be formatted to include escape sequences
    for appropriate styling.
    """
    try:
        message = self.format(record)
        if not self.is_colorized:
            self.stream.write(message)
        else:
            # Pick the style (escape-sequence wrapper) matching the level.
            style = self._get_style_function_for_level(record.levelno)
            self.stream.write(style(message))
        # Mirror logging.StreamHandler: default terminator is newline.
        self.stream.write(getattr(self, 'terminator', '\n'))
        self.flush()
    except (KeyboardInterrupt, SystemExit):
        raise
    except:
        # Standard logging-handler convention: never let logging errors
        # propagate; delegate to handleError instead.
        self.handleError(record)
|
def add_spectrogram(self, view=None):
    """Add a spectrogram layer to the environment.

    Kwargs:
        view (<DOM Element: view>): environment view used to display the
            spectrogram; if set to None, a new view is created.

    Returns:
        <DOM Element: view>: the view used to store the spectrogram.
    """
    spectro_layer = self.__add_spectrogram(0)
    time_ruler = self.__add_time_ruler()
    if view is None:
        view = self.__add_view()
    # Ruler first, then the spectrogram layer itself.
    self.__add_layer_reference(view, time_ruler)
    self.__add_layer_reference(view, spectro_layer)
    return view
|
def bulleted_list(items, max_count=None, indent=2):
    """Format a bulleted list of values.

    When *max_count* is given and exceeded, the output keeps the first
    ``max_count - 1`` entries, then an ellipsis, then the final entry.
    """
    if max_count is not None and len(items) > max_count:
        full = list(items)
        items = full[:max_count - 1]
        items.append('...')
        items.append(full[-1])
    prefix = " " * indent
    return "\n".join("{}- {}".format(prefix, value) for value in items)
|
def remove(self, options=None, sub_job_num=None):
    """Removes a job from the job queue, or from being executed.

    Args:
        options (list of str, optional): A list of command line options for the
            condor_rm command. For details on valid options see:
            http://research.cs.wisc.edu/htcondor/manual/current/condor_rm.html.
            Defaults to no extra options.
        sub_job_num (int, optional): The number of the sub_job to remove rather
            than the whole cluster. Defaults to None.

    Returns:
        tuple: (stdout, stderr) from the condor_rm invocation.
    """
    args = ['condor_rm']
    # BUGFIX (mutable default): default was ``options=[]``; None is used now.
    args.extend(options or [])
    # BUGFIX: use an explicit None check so sub-job number 0 is honored;
    # ``if sub_job_num`` treated sub-job 0 as "remove the whole cluster".
    if sub_job_num is not None:
        job_id = '%s.%s' % (self.cluster_id, sub_job_num)
    else:
        job_id = str(self.cluster_id)
    args.append(job_id)
    out, err = self._execute(args)
    return out, err
|
def mutate(self, node, index):
    """Modify the numeric value on `node` by the offset selected by `index`."""
    assert index < len(OFFSETS), 'received count with no associated offset'
    assert isinstance(node, parso.python.tree.Number)
    # node.value is a numeric literal token, so eval is bounded here.
    new_value = eval(node.value) + OFFSETS[index]  # pylint: disable=W0123
    # Leading space keeps the token separated from what precedes it.
    return parso.python.tree.Number(' ' + str(new_value), node.start_pos)
|
def _post(self, path, **kwargs):
    """POST *kwargs* as JSON to ``<path>.json`` and return the decoded dict.

    :param path: API path without the ``.json`` suffix.
    :param kwargs: payload fields; ``None`` and empty-string values are dropped.
    :return: the JSON-decoded response body (also for HTTP error responses).
    """
    # clean kwargs (filter None and empty string)
    clean_kwargs = clean_dict(kwargs)
    data = bytes(json.dumps(clean_kwargs), encoding='UTF-8')
    # change content type on post
    self._headers['Content-Type'] = 'application/json'
    api = self._api('%s.json' % path)
    req = request.Request(api, headers=self._headers, data=data, method='POST')
    try:
        # FIX: the body is already attached to the Request; it was previously
        # also passed to urlopen, which redundantly re-supplied the same data.
        resp = request.urlopen(req).read()
    except urllib.error.HTTPError as e:
        # Error responses still carry a JSON body worth returning.
        resp = e.fp.read()
    # reset content type
    self._headers['Content-Type'] = 'text/json'
    return json.loads(resp.decode())
|
def subcmd_bootstrap_parser(subcmd):
    """bootstrap subcommand: register its command-line arguments on *subcmd*."""
    subcmd.add_argument(
        '--broker', action='store', dest='broker',
        help=u'Route to the Ansible Service Broker')
    subcmd.add_argument(
        '--secure', action='store_true', dest='verify', default=False,
        help=u'Verify SSL connection to Ansible Service Broker')
    subcmd.add_argument(
        '--ca-path', action='store', dest='cert', default=None,
        help=u'CA cert to use for verifying SSL connection to Ansible Service Broker')
    subcmd.add_argument(
        '--no-relist', action='store_true', dest='no_relist', default=False,
        help=u'Do not relist the catalog after bootstrapping the broker')
    subcmd.add_argument(
        '--username', '-u', action='store', dest='basic_auth_username', default=None,
        help=u'Specify the basic auth username to be used')
    subcmd.add_argument(
        '--password', '-p', action='store', dest='basic_auth_password', default=None,
        help=u'Specify the basic auth password to be used')
    subcmd.add_argument(
        '--broker-name', action='store', dest='broker_name', default=u'ansible-service-broker',
        help=u'Name of the ServiceBroker k8s resource')
|
def remove_plugin(self, name, force=False):
    """Remove an installed plugin.

    Args:
        name (string): Name of the plugin to remove. The ``:latest``
            tag is optional, and is the default if omitted.
        force (bool): Disable the plugin before removing. This may
            result in issues if the plugin is in use by a container.

    Returns:
        ``True`` if successful
    """
    endpoint = self._url('/plugins/{0}', name)
    response = self._delete(endpoint, params={'force': force})
    self._raise_for_status(response)
    return True
|
def frange(start, end, inc=1.0):
    """A range function that accepts float increments and reversed direction.

    See also numpy.linspace().
    """
    start = float(start)
    end = float(end)
    inc = float(inc)
    # A zero increment would never terminate; just return the endpoints.
    if not inc:
        return _n.array([start, end])
    # Flip the increment if it points away from the end value.
    if (end - start) / inc < 0.0:
        inc = -inc
    # Integer step indices, inclusive of the final reachable step.
    step_count = int((end - start) / inc) + 1
    steps = _n.array(list(range(0, step_count)))
    return start + steps * inc
|
def join_event_view(request, id):
    """Join event page. If a POST request, actually add or remove the attendance
    of the current user. Otherwise, display a page with confirmation.

    id: event id
    """
    event = get_object_or_404(Event, id=id)
    if request.method == "POST":
        if not event.show_attending:
            # Attendance tracking disabled for this event; bounce back.
            return redirect("events")
        if "attending" in request.POST:
            # The form posts "true"/"false" strings; coerce to bool.
            attending = request.POST.get("attending")
            attending = (attending == "true")
            if attending:
                event.attending.add(request.user)
            else:
                event.attending.remove(request.user)
            return redirect("events")
    # GET (or POST without "attending"): render the confirmation page.
    context = {"event": event, "is_events_admin": request.user.has_admin_permission('events')}
    return render(request, "events/join_event.html", context)
|
def _proxy(self):
    """Generate an instance context for the instance; the context is capable of
    performing various actions. All instance actions are proxied to the context.

    :returns: WorkerChannelContext for this WorkerChannelInstance
    :rtype: twilio.rest.taskrouter.v1.workspace.worker.worker_channel.WorkerChannelContext
    """
    # Lazily build the context once and reuse it on subsequent calls.
    if self._context is None:
        solution = self._solution
        self._context = WorkerChannelContext(
            self._version,
            workspace_sid=solution['workspace_sid'],
            worker_sid=solution['worker_sid'],
            sid=solution['sid'],
        )
    return self._context
|
def exitcode(self):
    """Process exit code.

    :const:`0` when the process exited successfully, a positive number when
    an exception occurred, a negative number when the process was signaled,
    and :data:`None` when the process has not exited yet.
    """
    process = self._process
    if process is None:
        raise ProcessError("Process '%s' has not been started yet" % self.name)
    return process.exitcode
|
def create(container, portal_type, *args, **kwargs):
    """Creates an object in Bika LIMS.

    This code uses most of the parts from the TypesTool,
    see: `Products.CMFCore.TypesTool._constructInstance`

    :param container: container
    :type container: ATContentType/DexterityContentType/CatalogBrain
    :param portal_type: The portal type to create, e.g. "Client"
    :type portal_type: string
    :param title: The title for the new content object
    :type title: string
    :returns: The new created object
    """
    from bika.lims.utils import tmpID
    if kwargs.get("title") is None:
        kwargs["title"] = "New {}".format(portal_type)
    # generate a temporary ID
    tmp_id = tmpID()
    # get the fti
    types_tool = get_tool("portal_types")
    fti = types_tool.getTypeInfo(portal_type)
    if fti.product:
        # Archetypes-based content: construct via the product factory.
        obj = _createObjectByType(portal_type, container, tmp_id)
    else:
        # newstyle factory (Dexterity-style IFactory utility)
        factory = getUtility(IFactory, fti.factory)
        obj = factory(tmp_id, *args, **kwargs)
        if hasattr(obj, '_setPortalTypeName'):
            obj._setPortalTypeName(fti.getId())
        notify(ObjectCreatedEvent(obj))
    # notifies ObjectWillBeAddedEvent, ObjectAddedEvent and
    # ContainerModifiedEvent
    container._setObject(tmp_id, obj)
    # we get the object here with the current object id, as it might be
    # renamed already by an event handler
    obj = container._getOb(obj.getId())
    # handle AT Content
    if is_at_content(obj):
        obj.processForm()
    # Edit after processForm; processForm does AT unmarkCreationFlag.
    obj.edit(**kwargs)
    # explicit notification
    modified(obj)
    return obj
|
def sanitize_array(data, index, dtype=None, copy=False, raise_cast_failure=False):
    """
    Sanitize input data to an ndarray, copy if specified, coerce to the
    dtype if specified.

    Parameters
    ----------
    data : array-like, ExtensionArray, list, tuple, range or scalar
    index : Index or None
        Used to broadcast scalars / length-1 arrays to the index length.
    dtype : dtype or None
        Target dtype; when None the dtype is inferred from ``data``.
    copy : bool, default False
        Whether the result must be a copy of ``data``.
    raise_cast_failure : bool, default False
        Whether a failed cast to ``dtype`` raises instead of falling back
        to object dtype.

    Returns
    -------
    np.ndarray, ExtensionArray, or scalar (when ``data`` is 0-dim and
    ``index`` is None)
    """
    if dtype is not None:
        dtype = pandas_dtype(dtype)

    if isinstance(data, ma.MaskedArray):
        mask = ma.getmaskarray(data)
        if mask.any():
            data, fill_value = maybe_upcast(data, copy=True)
            # set hardmask False if it was True
            data.soften_mask()
            data[mask] = fill_value
        else:
            data = data.copy()

    data = extract_array(data, extract_numpy=True)

    # GH#846
    if isinstance(data, np.ndarray):
        if dtype is not None:
            subarr = np.array(data, copy=False)
            # possibility of nan -> garbage
            if is_float_dtype(data.dtype) and is_integer_dtype(dtype):
                try:
                    subarr = _try_cast(data, True, dtype, copy, True)
                except ValueError:
                    if copy:
                        subarr = data.copy()
            else:
                subarr = _try_cast(data, True, dtype, copy, raise_cast_failure)
        elif isinstance(data, Index):
            # don't coerce Index types
            # e.g. indexes can have different conversions (so don't fast path
            # them)
            # GH#6140
            subarr = sanitize_index(data, index, copy=copy)
        else:
            # we will try to copy by-definition here
            subarr = _try_cast(data, True, dtype, copy, raise_cast_failure)

    elif isinstance(data, ExtensionArray):
        if isinstance(data, ABCPandasArray):
            # We don't want to let people put our PandasArray wrapper
            # (the output of Series/Index.array), into a Series. So
            # we explicitly unwrap it here.
            subarr = data.to_numpy()
        else:
            subarr = data
        # everything else in this block must also handle ndarray's,
        # because we've unwrapped PandasArray into an ndarray.
        if dtype is not None:
            subarr = subarr.astype(dtype)
        # BUG FIX: previously this was ``subarr = data.copy()``, which both
        # discarded the ``astype(dtype)`` result above and re-wrapped the
        # unwrapped PandasArray. Copy the working value instead.
        if copy:
            subarr = subarr.copy()
        return subarr

    elif isinstance(data, (list, tuple)) and len(data) > 0:
        if dtype is not None:
            try:
                subarr = _try_cast(data, False, dtype, copy, raise_cast_failure)
            except Exception:
                if raise_cast_failure:  # pragma: no cover
                    raise
                subarr = np.array(data, dtype=object, copy=copy)
                subarr = lib.maybe_convert_objects(subarr)
        else:
            subarr = maybe_convert_platform(data)
        subarr = maybe_cast_to_datetime(subarr, dtype)

    elif isinstance(data, range):
        # GH#16804
        arr = np.arange(data.start, data.stop, data.step, dtype='int64')
        subarr = _try_cast(arr, False, dtype, copy, raise_cast_failure)
    else:
        subarr = _try_cast(data, False, dtype, copy, raise_cast_failure)

    # scalar like, GH
    if getattr(subarr, 'ndim', 0) == 0:
        if isinstance(data, list):  # pragma: no cover
            subarr = np.array(data, dtype=object)
        elif index is not None:
            value = data
            # figure out the dtype from the value (upcast if necessary)
            if dtype is None:
                dtype, value = infer_dtype_from_scalar(value)
            else:
                # need to possibly convert the value here
                value = maybe_cast_to_datetime(value, dtype)
            # broadcast the scalar across the index
            subarr = construct_1d_arraylike_from_scalar(value, len(index), dtype)
        else:
            return subarr.item()

    # the result that we want
    elif subarr.ndim == 1:
        if index is not None:
            # a 1-element ndarray is broadcast to the index length
            if len(subarr) != len(index) and len(subarr) == 1:
                subarr = construct_1d_arraylike_from_scalar(
                    subarr[0], len(index), subarr.dtype)

    elif subarr.ndim > 1:
        if isinstance(data, np.ndarray):
            raise Exception('Data must be 1-dimensional')
        else:
            subarr = com.asarray_tuplesafe(data, dtype=dtype)

    # This is to prevent mixed-type Series getting all casted to
    # NumPy string type, e.g. NaN --> '-1#IND'.
    if issubclass(subarr.dtype.type, str):
        # GH#16605
        # If not empty convert the data to dtype
        # GH#19853: If data is a scalar, subarr has already the result
        if not lib.is_scalar(data):
            if not np.all(isna(data)):
                data = np.array(data, dtype=dtype, copy=False)
            subarr = np.array(data, dtype=object, copy=copy)

    if is_object_dtype(subarr.dtype) and dtype != 'object':
        inferred = lib.infer_dtype(subarr, skipna=False)
        if inferred == 'period':
            try:
                subarr = period_array(subarr)
            except IncompatibleFrequency:
                pass

    return subarr
|
def map(self, f, preservesPartitioning=False):
    """Return a new DStream by applying a function to each element of DStream."""
    def apply_to_partition(iterator):
        # Lazily transform every element of one partition.
        for element in iterator:
            yield f(element)
    return self.mapPartitions(apply_to_partition, preservesPartitioning)
|
def unpack(cls, msg, client, server, request_id):
    """Parse message and return an `OpGetMore`.

    Takes the client message as bytes, the client and server socket objects,
    and the client request id.
    """
    # Wire layout: int32 flags | cstring namespace | int32 numberToReturn
    # | int64 cursorID
    flags, = _UNPACK_INT(msg[:4])
    namespace, offset = _get_c_string(msg, 4)
    num_to_return, = _UNPACK_INT(msg[offset:offset + 4])
    offset += 4
    cursor_id, = _UNPACK_LONG(msg[offset:offset + 8])
    return OpGetMore(
        namespace=namespace,
        flags=flags,
        _client=client,
        num_to_return=num_to_return,
        cursor_id=cursor_id,
        request_id=request_id,
        _server=server,
    )
|
def batch_predict(dataset, model_dir, output_csv, output_bq_table):
    """Batch predict running locally."""
    import apache_beam as beam
    from google.datalab.utils import LambdaJob
    from . import _predictor

    if output_csv is None and output_bq_table is None:
        raise ValueError('output_csv and output_bq_table cannot both be None.')

    timestamp = datetime.datetime.now().strftime('%y%m%d-%H%M%S')
    job_id = 'batch-predict-image-classification-' + timestamp

    # Project is needed for bigquery data source, even in local run.
    pipeline_options = beam.pipeline.PipelineOptions(
        flags=[], project=_util.default_project())
    pipeline = beam.Pipeline('DirectRunner', options=pipeline_options)
    _predictor.configure_pipeline(
        pipeline, dataset, model_dir, output_csv, output_bq_table)
    # Defer execution: the job runs the pipeline when started.
    return LambdaJob(lambda: pipeline.run().wait_until_finish(), job_id)
|
def can_fetch(self, useragent, url):
    """using the parsed robots.txt decide if useragent can fetch url"""
    # Blanket rules short-circuit everything else.
    if self.disallow_all:
        return False
    if self.allow_all:
        return True

    # Normalize the URL down to path/params/query/fragment and re-quote it
    # so it is comparable with the rule paths from robots.txt.
    parsed = urllib.parse.urlparse(urllib.parse.unquote(url))
    normalized = urllib.parse.urlunparse(
        ('', '', parsed.path, parsed.params, parsed.query, parsed.fragment))
    normalized = urllib.parse.quote(normalized) or "/"

    # search for given user agent matches; the first match counts
    for entry in self.entries:
        if entry.applies_to(useragent):
            return entry.allowance(normalized)

    # try the default entry last
    if self.default_entry:
        return self.default_entry.allowance(normalized)

    # agent not found ==> access granted
    return True
|
def parse(self, spec=None, spec_params=None):
    """Parses the contents generically, or using a spec with optional params

    :param spec:
        A class derived from Asn1Value that defines what class_ and tag the
        value should have, and the semantics of the encoded value. The
        return value will be of this type. If omitted, the encoded value
        will be decoded using the standard universal tag based on the
        encoded tag number.

    :param spec_params:
        A dict of params to pass to the spec object

    :return:
        An object of the type spec, or if not present, a child of Asn1Value
    """
    # Cached result is a (parsed_value, spec, spec_params) triple; re-parse
    # if nothing is cached or if a different spec/spec_params is requested.
    if self._parsed is None or self._parsed[1:3] != (spec, spec_params):
        try:
            passed_params = spec_params or {}
            _tag_type_to_explicit_implicit(passed_params)
            # Propagate this value's explicit tagging into the child parse,
            # prepending our explicit tags to any the caller supplied.
            if self.explicit is not None:
                if 'explicit' in passed_params:
                    passed_params['explicit'] = self.explicit + passed_params['explicit']
                else:
                    passed_params['explicit'] = self.explicit
            # Re-parse the full original encoding (header + body + trailer).
            contents = self._header + self.contents + self._trailer
            parsed_value, _ = _parse_build(contents, spec=spec, spec_params=passed_params)
            self._parsed = (parsed_value, spec, spec_params)
            # Once we've parsed the Any value, clear any attributes from this object
            # since they are now duplicate
            self.tag = None
            self.explicit = None
            self.implicit = False
            self._header = b''
            self.contents = contents
            self._trailer = b''
        except (ValueError, TypeError) as e:
            # Annotate parse errors with the type being parsed for context.
            args = e.args[1:]
            e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
            raise e
    return self._parsed[0]
|
def _reference_info ( references ) :
"""Get information about document references .
Helper for : meth : ` ~ . firestore _ v1beta1 . client . Client . get _ all ` .
Args :
references ( List [ . DocumentReference , . . . ] ) : Iterable of document
references .
Returns :
Tuple [ List [ str , . . . ] , Dict [ str , . DocumentReference ] ] : A two - tuple of
* fully - qualified documents paths for each reference in ` ` references ` `
* a mapping from the paths to the original reference . ( If multiple
` ` references ` ` contains multiple references to the same document ,
that key will be overwritten in the result . )"""
|
document_paths = [ ]
reference_map = { }
for reference in references :
doc_path = reference . _document_path
document_paths . append ( doc_path )
reference_map [ doc_path ] = reference
return document_paths , reference_map
|
def split_fasta(f, id2f):
    """split fasta file into separate fasta files based on list of scaffolds
    that belong to each separate file"""
    # Maps output name -> output file path, so each path is computed once.
    out_paths = {}
    for record in parse_fasta(f):
        scaffold_id = record[0].split('>')[1].split()[0]
        # Skip scaffolds that were not assigned to any output file.
        if scaffold_id not in id2f:
            continue
        fasta = id2f[scaffold_id]
        if fasta not in out_paths:
            out_paths[fasta] = '%s.fa' % fasta
        record[1] += '\n'
        # Append so multiple scaffolds accumulate in the same output file.
        with open(out_paths[fasta], 'a+') as handle:
            handle.write('\n'.join(record))
|
def loginlogs_get(self, service_staff_id, start_date, end_date, session):
    '''taobao.wangwang.eservice.loginlogs.get -- fetch login logs.

    Queries login logs by user id for the user itself or its sub-accounts:
    the main account can query its own logs and those of the shop's
    sub-accounts; a group administrator can query its own logs and those of
    sub-accounts in its group; a non-administrator sub-account can only
    query its own logs.'''
    request = TOPRequest('taobao.wangwang.eservice.loginlogs.get')
    for key, value in (('service_staff_id', service_staff_id),
                       ('start_date', start_date),
                       ('end_date', end_date)):
        request[key] = value
    self.create(self.execute(request, session))
    return self.loginlogs
|
def get_certificate_issuer_config_by_id(self, certificate_issuer_configuration_id, **kwargs):  # noqa: E501
    """Get certificate issuer configuration.  # noqa: E501

    Provides the configured certificate issuer.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass asynchronous=True

    >>> thread = api.get_certificate_issuer_config_by_id(certificate_issuer_configuration_id, asynchronous=True)
    >>> result = thread.get()

    :param asynchronous bool
    :param str certificate_issuer_configuration_id: The ID of the certificate issuer configuration. (required)
    :return: CertificateIssuerConfigResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The delegate returns the request thread when asynchronous=True and
    # the response data otherwise, so a single call covers both modes.
    return self.get_certificate_issuer_config_by_id_with_http_info(
        certificate_issuer_configuration_id, **kwargs)  # noqa: E501
|
def linear_extrapolation_plot(*args, **kwargs):
    """Deprecation wrapper"""
    deprecation_message = (
        "`linear_extrapolation_plot` has moved to "
        "`cleverhans.plot.pyplot_image`. "
        "cleverhans.utils.linear_extrapolation_plot may be removed on "
        "or after 2019-04-24.")
    warnings.warn(deprecation_message)
    # Import lazily so the warning fires even if matplotlib is missing.
    # pylint: disable=line-too-long
    from cleverhans.plot.pyplot_image import linear_extrapolation_plot as new_linear_extrapolation_plot
    return new_linear_extrapolation_plot(*args, **kwargs)
|
def plot(self):
    """Return a matplotlib figure of the dose-response dataset.

    Examples
    --------
    >>> fig = dataset.plot()
    >>> fig.show()
    >>> fig.clear()

    Returns
    -------
    out : matplotlib.figure.Figure
        A matplotlib figure representation of the dataset.
    """
    figure = plotting.create_empty_figure()
    axes = figure.gca()
    # Axis labels can be overridden through the dataset kwargs.
    axes.set_xlabel(self.kwargs.get("xlabel", "Dose"))
    axes.set_ylabel(self.kwargs.get("ylabel", "Response"))
    axes.scatter(
        self.individual_doses,
        self.responses,
        label="Data",
        **plotting.DATASET_INDIVIDUAL_FORMAT,
    )
    axes.margins(plotting.PLOT_MARGINS)
    axes.set_title(self._get_dataset_name())
    axes.legend(**settings.LEGEND_OPTS)
    return figure
|
def load_case(self, testcase):
    """Load a TestSuite containing all TestCase instances for all tests in
    a TestCase subclass.

    Parameters
    ----------
    testcase : type
        A subclass of :class:`unittest.TestCase`
    """
    loaded = []
    for method_name in self.find_test_method_names(testcase):
        loaded.append(self.load_test(testcase, method_name))
    return self.create_suite(loaded)
|
def __setDeviceMode(self, mode):
    """set thread device mode:

    Args:
        mode: thread device mode. 15=rsdn, 13=rsn, 4=s
              r: rx-on-when-idle
              s: secure IEEE 802.15.4 data request
              d: full thread device
              n: full network data

    Returns:
        True: successful to set the device mode
        False: fail to set the device mode
    """
    print 'call __setDeviceMode'
    try:
        cmd = WPANCTL_CMD + 'setprop Thread:DeviceMode %d' % mode
        # wpanctl signals failure by printing 'Fail' as the first output line
        return self.__sendCommand(cmd)[0] != 'Fail'
    except Exception, e:  # Python 2 except syntax; this file targets Python 2
        ModuleHelper.WriteIntoDebugLogger('setDeviceMode() Error: ' + str(e))
        # NOTE(review): on exception this falls through and returns None
        # (falsy), not the documented False -- confirm callers only rely on
        # truthiness of the result.
|
def _wait_for_js(self):
    """Class method added by the decorators to allow decorated classes to
    manually re-check JavaScript dependencies.

    Expect that `self` is a class that:

    1) Has been decorated with either `js_defined` or `requirejs`
    2) Has a `browser` property

    If either (1) or (2) is not satisfied, then do nothing.
    """
    # No Selenium browser available, so return without doing anything
    if not hasattr(self, 'browser'):
        return

    # pylint: disable=protected-access
    if getattr(self, '_js_vars', None):
        # Wait for JavaScript variables to be defined
        EmptyPromise(
            lambda: _are_js_vars_defined(self.browser, self._js_vars),
            u"JavaScript variables defined: {0}".format(", ".join(self._js_vars)),
        ).fulfill()

    if getattr(self, '_requirejs_deps', None):
        # Wait for RequireJS dependencies to load
        EmptyPromise(
            lambda: _are_requirejs_deps_loaded(self.browser, self._requirejs_deps),
            u"RequireJS dependencies loaded: {0}".format(", ".join(self._requirejs_deps)),
            try_limit=5,
        ).fulfill()
|
def save(self, target, format=None, encoding=None, **options):
    '''Save stream to the local filesystem.

    Args:
        target (str): Path where to save the stream.
        format (str, optional): The format the stream will be saved as. If
            None, detects from the ``target`` path. Defaults to None.
        encoding (str, optional): Saved file encoding. Defaults to
            ``config.DEFAULT_ENCODING``.
        **options: Extra options passed to the writer.
    '''
    # Fall back to defaults for encoding/format
    if encoding is None:
        encoding = config.DEFAULT_ENCODING
    if format is None:
        format = helpers.detect_scheme_and_format(target)[1]

    # Resolve the writer class; custom writers take precedence
    writer_class = self.__custom_writers.get(format)
    if writer_class is None:
        if format not in config.WRITERS:
            raise exceptions.FormatError('Format "%s" is not supported' % format)
        writer_class = helpers.import_attribute(config.WRITERS[format])

    # Split off the options the writer understands; anything left is an error
    writer_options = helpers.extract_options(options, writer_class.options)
    if options:
        raise exceptions.TabulatorException(
            'Not supported options "%s" for format "%s"'
            % (', '.join(options), format))

    # Write data to target
    writer = writer_class(**writer_options)
    writer.write(self.iter(), target, headers=self.headers, encoding=encoding)
|
def make_labels(mapping):
    """Convert aesthetic mapping into text labels"""
    # Work on a copy so the caller's mapping (and its type) is preserved.
    labels = mapping.copy()
    for aesthetic in list(labels):
        labels[aesthetic] = strip_calculated_markers(labels[aesthetic])
    return labels
|
def match_resource_id(self, resource_id, match):
    """Sets the resource ``Id`` for this query.

    arg:    resource_id (osid.id.Id): a resource ``Id``
    arg:    match (boolean): ``true`` if a positive match, ``false``
            for a negative match
    raise:  NullArgument - ``resource_id`` is ``null``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Only genuine Id instances are acceptable query arguments.
    if isinstance(resource_id, Id):
        self._add_match('resourceId', str(resource_id), match)
    else:
        raise errors.InvalidArgument()
|
def get_parser(self):
    """Returns :class:`monolith.cli.Parser` instance for this
    *ExecutionManager*."""
    parser = self.parser_cls(
        prog=self.prog_name,
        usage=self.get_usage(),
        stream=self.stderr,
    )
    subparsers = parser.add_subparsers(title='subcommands')
    # Register one subcommand parser per command in the registry.
    for name, command in self.registry.items():
        subparser = subparsers.add_parser(name, help=command.help)
        for argument in command.get_args():
            subparser.add_argument(*argument.args, **argument.kwargs)
        # Give the command a chance to customize both parsers, then route
        # this subcommand to its handler.
        command.setup_parser(parser, subparser)
        subparser.set_defaults(func=command.handle)
    return parser
|
def _run_main(main, argv):
    """Calls main, optionally with pdb or profiler."""
    if FLAGS.run_with_pdb:
        sys.exit(pdb.runcall(main, argv))

    if not (FLAGS.run_with_profiling or FLAGS.profile_file):
        # Plain run: exit with whatever main returns.
        sys.exit(main(argv))

    # Avoid import overhead since most apps (including performance-sensitive
    # ones) won't be run with profiling.
    import atexit
    if FLAGS.use_cprofile_for_profiling:
        import cProfile as profile
    else:
        import profile
    profiler = profile.Profile()
    # Emit the profile at interpreter exit: to a file if requested,
    # otherwise to stdout.
    if FLAGS.profile_file:
        atexit.register(profiler.dump_stats, FLAGS.profile_file)
    else:
        atexit.register(profiler.print_stats)
    sys.exit(profiler.runcall(main, argv))
|
def from_name(cls, name):
    """Retrieve a disk id associated to a name."""
    matches = cls.list({'name': name})
    if not matches:
        # Unknown name: nothing to return.
        return None
    if len(matches) > 1:
        raise DuplicateResults('disk name %s is ambiguous.' % name)
    return matches[0]['id']
|
def visit_Call(self, node):
    # type: (ast.Call) -> None
    """python3.7+ breakpoint()"""
    func = node.func
    # Only bare ``breakpoint(...)`` calls (a plain Name) are flagged.
    if isinstance(func, ast.Name) and func.id == 'breakpoint':
        self.breakpoints.append(
            Debug(node.lineno, node.col_offset, func.id, 'called'))
    self.generic_visit(node)
|
def set_minmax(field, render_kw=None, force=False):
    """Returns *render_kw* with *min* and *max* set if validators use them.

    Sets *min* and/or *max* keys if a `Length` or `NumberRange` validator is
    using them.

    .. note::
        This won't change keys already present unless *force* is used.
    """
    if render_kw is None:
        render_kw = {}
    for validator in field.validators:
        if not isinstance(validator, MINMAX_VALIDATORS):
            continue
        for key in ('min', 'max'):
            if key in render_kw and not force:
                continue
            bound = getattr(validator, key, -1)
            # Both -1 and None mean "no bound configured" on the validator.
            if bound not in (-1, None):
                render_kw[key] = bound
    return render_kw
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.