signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def get_fields(self):
    """Build the serializer field mapping for the search result.

    Combines the fields declared by every index class in
    ``Meta.index_classes``, honouring ``Meta.fields`` / ``Meta.exclude`` /
    ``Meta.ignore_fields``, and overlays any explicitly declared serializer
    fields on top.
    """
    meta = self.Meta
    declared_fields = copy.deepcopy(self._declared_fields)
    # Overlapping fields on multiple indices are supported by internally
    # prefixing the field names with the index class to which they belong
    # or, optionally, a user-provided alias for the index.
    use_prefixes = len(meta.index_classes) > 1
    field_mapping = OrderedDict()
    for index_cls in meta.index_classes:
        prefix = "_%s__" % self._get_index_class_name(index_cls) if use_prefixes else ""
        for orig_name, field_type in six.iteritems(index_cls.fields):
            field_name = "%s%s" % (prefix, orig_name)
            # Skip any field ruled out by the Meta options, matching on
            # either the raw or the prefixed name.
            if orig_name in meta.ignore_fields or field_name in meta.ignore_fields:
                continue
            if meta.exclude and (orig_name in meta.exclude or field_name in meta.exclude):
                continue
            if meta.fields and (orig_name not in meta.fields and field_name not in meta.fields):
                continue
            # Look up the field attributes on the current index model in
            # order to correctly instantiate the serializer field.
            model = index_cls().get_model()
            kwargs = self._get_default_field_kwargs(model, field_type)
            kwargs['prefix_field_names'] = use_prefixes
            field_mapping[field_name] = self._field_mapping[field_type](**kwargs)
    # Explicitly declared fields *win* on any naming collision.
    for field_name, field in declared_fields.items():
        field_mapping[field_name] = field
    return field_mapping
|
def filter_data(data, filter_dict):
    """Filter a data dictionary in place, keeping only values that match.

    For each ``key -> regex`` pair in ``filter_dict``: list values keep
    only matching items, dict values keep only entries whose *key* matches.
    Unknown top-level keys are logged and skipped; any other value type
    raises MiuraException.
    """
    for key, match_string in filter_dict.items():
        if key not in data:
            logger.warning("{0} doesn't match a top level key".format(key))
            continue
        pattern = re.compile(match_string)
        values = data[key]
        if isinstance(values, list):
            filtered = [item for item in values if pattern.search(item)]
        elif isinstance(values, dict):
            filtered = {k: v for k, v in values.items() if pattern.search(k)}
        else:
            raise MiuraException("cannot filter a {0}".format(type(values)))
        data[key] = filtered
|
def create_notification_channel(self, callback_url, calendar_ids=()):
    """Create a new channel for receiving push notifications.

    :param string callback_url: The url that will receive push notifications.
        Must not be longer than 128 characters and should be HTTPS.
    :param tuple calendar_ids: Calendar ids to create notification channels
        for. (Optional. Default empty tuple)
    :return: Channel id and channel callback
    :rtype: ``dict``
    """
    payload = {'callback_url': callback_url}
    # Only send the filter when calendars were actually requested.
    if calendar_ids:
        payload['filters'] = {'calendar_ids': calendar_ids}
    response = self.request_handler.post('channels', data=payload)
    return response.json()['channel']
|
def graphics(self) -> typing.List[Graphic]:
    """Return the graphics attached to this data item.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    # Wrap each underlying display-item graphic in the public Graphic facade.
    return list(Graphic(item) for item in self.__display_item.graphics)
|
def generate_synthetic_observation_trajectory(self, length, initial_Pi=None):
    """Generate a synthetic realization of observables.

    Parameters
    ----------
    length : int
        Length of synthetic state trajectory to be generated.
    initial_Pi : np.array of shape (nstates,), optional, default=None
        The initial probability distribution, if samples are not to be
        taken from equilibrium.

    Returns
    -------
    o_t : np.array of shape (nstates,) of dtype=np.float32
        The trajectory of observations.
    s_t : np.array of shape (nstates,) of dtype=np.int32
        The trajectory of hidden states, each element in range(0, nstates).
    """
    # Sample the hidden-state trajectory first, then draw observations
    # conditioned on those states from the output model.
    state_traj = self.generate_synthetic_state_trajectory(length, initial_Pi=initial_Pi)
    obs_traj = self.output_model.generate_observation_trajectory(state_traj)
    return [obs_traj, state_traj]
|
def get_app_model_classes(self):
    """Helper method that returns a list of model classes for the current app.

    Each entry of ``self.models`` is a dotted path 'package.module.Class';
    the module is imported and the class object is resolved from it.
    """
    classes = []
    for dotted_path in self.models:
        module_path, class_name = dotted_path.rsplit('.', 1)
        module = import_module(module_path)
        classes.append(getattr(module, class_name))
    return classes
|
def get_link(href, value=None, **kwargs):
    """Returns a well-formed link. If href is None/empty, returns an empty string.

    :param href: value to be set for attribute href
    :param value: the text to be displayed. If None, the href itself is used
    :param kwargs: additional attributes and values
    :return: a well-formed html anchor
    """
    if not href:
        return ""
    # `value and value or href` was the pre-ternary and/or idiom; `or`
    # is the modern equivalent (falls back to href for any falsy value).
    anchor_value = value or href
    attr = render_html_attributes(**kwargs)
    return '<a href="{}" {}>{}</a>'.format(href, attr, anchor_value)
|
def get_assets_by_record_type(self, asset_record_type=None):
    """Gets an ``AssetList`` containing the given asset record ``Type``.

    In plenary mode, the returned list contains all known assets or an
    error results. Otherwise, the returned list may contain only those
    assets that are accessible through this session.

    arg:    asset_record_type (osid.type.Type): an asset record type
    return: (osid.repository.AssetList) - the returned ``AssetList``
    raise:  NullArgument - ``asset_record_type`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    provider_assets = self._provider_session.get_assets_by_record_type(asset_record_type)
    return AssetList(provider_assets, self._config_map)
|
def _convert_service_properties_to_xml ( logging , hour_metrics , minute_metrics , cors , target_version = None , delete_retention_policy = None , static_website = None ) :
'''< ? xml version = " 1.0 " encoding = " utf - 8 " ? >
< StorageServiceProperties >
< Logging >
< Version > version - number < / Version >
< Delete > true | false < / Delete >
< Read > true | false < / Read >
< Write > true | false < / Write >
< RetentionPolicy >
< Enabled > true | false < / Enabled >
< Days > number - of - days < / Days >
< / RetentionPolicy >
< / Logging >
< HourMetrics >
< Version > version - number < / Version >
< Enabled > true | false < / Enabled >
< IncludeAPIs > true | false < / IncludeAPIs >
< RetentionPolicy >
< Enabled > true | false < / Enabled >
< Days > number - of - days < / Days >
< / RetentionPolicy >
< / HourMetrics >
< MinuteMetrics >
< Version > version - number < / Version >
< Enabled > true | false < / Enabled >
< IncludeAPIs > true | false < / IncludeAPIs >
< RetentionPolicy >
< Enabled > true | false < / Enabled >
< Days > number - of - days < / Days >
< / RetentionPolicy >
< / MinuteMetrics >
< Cors >
< CorsRule >
< AllowedOrigins > comma - separated - list - of - allowed - origins < / AllowedOrigins >
< AllowedMethods > comma - separated - list - of - HTTP - verb < / AllowedMethods >
< MaxAgeInSeconds > max - caching - age - in - seconds < / MaxAgeInSeconds >
< ExposedHeaders > comma - seperated - list - of - response - headers < / ExposedHeaders >
< AllowedHeaders > comma - seperated - list - of - request - headers < / AllowedHeaders >
< / CorsRule >
< / Cors >
< DeleteRetentionPolicy >
< Enabled > true | false < / Enabled >
< Days > number - of - days < / Days >
< / DeleteRetentionPolicy >
< StaticWebsite >
< Enabled > true | false < / Enabled >
< IndexDocument > < / IndexDocument >
< ErrorDocument404Path > < / ErrorDocument404Path >
< / StaticWebsite >
< / StorageServiceProperties >'''
|
service_properties_element = ETree . Element ( 'StorageServiceProperties' )
# Logging
if logging :
logging_element = ETree . SubElement ( service_properties_element , 'Logging' )
ETree . SubElement ( logging_element , 'Version' ) . text = logging . version
ETree . SubElement ( logging_element , 'Delete' ) . text = str ( logging . delete )
ETree . SubElement ( logging_element , 'Read' ) . text = str ( logging . read )
ETree . SubElement ( logging_element , 'Write' ) . text = str ( logging . write )
retention_element = ETree . SubElement ( logging_element , 'RetentionPolicy' )
_convert_retention_policy_to_xml ( logging . retention_policy , retention_element )
# HourMetrics
if hour_metrics :
hour_metrics_element = ETree . SubElement ( service_properties_element , 'HourMetrics' )
_convert_metrics_to_xml ( hour_metrics , hour_metrics_element )
# MinuteMetrics
if minute_metrics :
minute_metrics_element = ETree . SubElement ( service_properties_element , 'MinuteMetrics' )
_convert_metrics_to_xml ( minute_metrics , minute_metrics_element )
# CORS
# Make sure to still serialize empty list
if cors is not None :
cors_element = ETree . SubElement ( service_properties_element , 'Cors' )
for rule in cors :
cors_rule = ETree . SubElement ( cors_element , 'CorsRule' )
ETree . SubElement ( cors_rule , 'AllowedOrigins' ) . text = "," . join ( rule . allowed_origins )
ETree . SubElement ( cors_rule , 'AllowedMethods' ) . text = "," . join ( rule . allowed_methods )
ETree . SubElement ( cors_rule , 'MaxAgeInSeconds' ) . text = str ( rule . max_age_in_seconds )
ETree . SubElement ( cors_rule , 'ExposedHeaders' ) . text = "," . join ( rule . exposed_headers )
ETree . SubElement ( cors_rule , 'AllowedHeaders' ) . text = "," . join ( rule . allowed_headers )
# Target version
if target_version :
ETree . SubElement ( service_properties_element , 'DefaultServiceVersion' ) . text = target_version
# DeleteRetentionPolicy
if delete_retention_policy :
policy_element = ETree . SubElement ( service_properties_element , 'DeleteRetentionPolicy' )
ETree . SubElement ( policy_element , 'Enabled' ) . text = str ( delete_retention_policy . enabled )
if delete_retention_policy . enabled :
ETree . SubElement ( policy_element , 'Days' ) . text = str ( delete_retention_policy . days )
# StaticWebsite
if static_website :
static_website_element = ETree . SubElement ( service_properties_element , 'StaticWebsite' )
ETree . SubElement ( static_website_element , 'Enabled' ) . text = str ( static_website . enabled )
if static_website . enabled :
if static_website . index_document is not None :
ETree . SubElement ( static_website_element , 'IndexDocument' ) . text = str ( static_website . index_document )
if static_website . error_document_404_path is not None :
ETree . SubElement ( static_website_element , 'ErrorDocument404Path' ) . text = str ( static_website . error_document_404_path )
# Add xml declaration and serialize
try :
stream = BytesIO ( )
ETree . ElementTree ( service_properties_element ) . write ( stream , xml_declaration = True , encoding = 'utf-8' , method = 'xml' )
except :
raise
finally :
output = stream . getvalue ( )
stream . close ( )
return output
|
def init_git_pillar(opts):
    '''Clear out the ext pillar caches, used when the master starts.

    Returns the list of GitPillar objects built from every ``git`` entry
    in the ``ext_pillar`` config.
    '''
    ret = []
    for opts_dict in opts.get('ext_pillar', []):
        if 'git' not in opts_dict:
            continue
        try:
            ret.append(
                salt.utils.gitfs.GitPillar(
                    opts,
                    opts_dict['git'],
                    per_remote_overrides=git_pillar.PER_REMOTE_OVERRIDES,
                    per_remote_only=git_pillar.PER_REMOTE_ONLY,
                    global_only=git_pillar.GLOBAL_ONLY))
        except salt.exceptions.FileserverConfigError:
            # Only fail hard when config verification was requested;
            # otherwise log and continue with the remaining remotes.
            if opts.get('git_pillar_verify_config', True):
                raise
            log.critical('Could not initialize git_pillar')
    return ret
|
def login():
    '''Log in as administrator.

    You can use either basic auth or form based login (via POST).

    :param username: The administrator's username
    :type username: string
    :param password: The administrator's password
    :type password: string
    '''
    username = None
    password = None
    # Renamed from `next` so the builtin is not shadowed.
    next_url = flask.request.args.get('next')
    auth = flask.request.authorization
    # Form login takes the credentials from the POST body ...
    if flask.request.method == 'POST':
        username = flask.request.form['username']
        password = flask.request.form['password']
    # ... but basic-auth credentials, when present, win.
    if auth and auth.type == 'basic':
        username = auth.username
        password = auth.password
    if not flogin.current_user.is_active:
        error = 'You have to login with proper credentials'
        if username and password:
            if check_auth(username, password):
                user = _users.get(username)
                if user:
                    if flogin.login_user(user):
                        return flask.redirect(next_url or flask.url_for("user"))
                    error = 'Could not log in user.'
                else:
                    error = 'User not found.'
            else:
                error = 'Wrong username or password.'
        else:
            error = 'No username or password.'
        # 401 challenge so browsers prompt for basic auth.
        return flask.Response(
            'Could not verify your access level for that URL.\n {}'.format(error),
            401,
            {str('WWW-Authenticate'): str('Basic realm="Login Required"')})
    return flask.redirect(next_url or flask.url_for("user"))
|
def trim_occluded_throats(network, mask='all'):
    r"""Remove throats with zero area from the network and also remove
    pores that are isolated (as a result or otherwise).

    Parameters
    ----------
    network : OpenPNM Network Object
    mask : string
        Applies routine only to pores and throats with this label
    """
    occluded = network['throat.area'] == 0
    if sp.sum(occluded) > 0:
        # Restrict trimming to throats carrying the requested label.
        occluded = occluded * network["throat." + mask]
        trim(network=network, throats=occluded)
|
def place_limit_order(self, side: Side, amount: Number, price: Number) -> Order:
    """Place a limit order.

    :param side: buy or sell side of the order
    :param amount: quantity to trade
    :param price: limit price
    :return: the Order created by ``place_order``
    """
    return self.place_order(side, OrderType.LIMIT, amount, price)
|
def get_value(self, source):
    """Apply ``self.convert`` to the source.

    If ``self.source_name`` is set, the named value is read from ``source``
    (via ``has_value``) and converted; otherwise ``source`` itself is
    converted directly. A missing/None value raises ValueError when the
    field is required, else yields None.
    """
    if self.source_name is None:
        present = True
        value = self.convert(source)
        already_converted = True
    else:
        present, value = has_value(source, self.source_name)
        already_converted = False
    if not present or value is None:
        if self.is_required:
            raise ValueError("required value not present")
        return None
    # Avoid converting twice when the whole source was already converted.
    return value if already_converted else self.convert(value)
|
def add_pool_member(hostname, username, password, name, member):
    '''A function to connect to a bigip device and add a new member to an existing pool.

    hostname
        The host/address of the bigip device
    username
        The iControl REST username
    password
        The iControl REST password
    name
        The name of the pool to modify
    member
        The name of the member to add, i.e. 10.1.1.2:80

    CLI Example::

        salt '*' bigip.add_pool_members bigip admin admin my-pool 10.2.2.1:80
    '''
    # for states
    if isinstance(member, dict):
        # check for state alternative name 'member_state', replace with state
        if 'member_state' in member:
            member['state'] = member.pop('member_state')
        # BUG FIX: the original popped/reinserted keys while iterating the
        # dict, which raises RuntimeError on Python 3. Build a new dict with
        # underscores replaced by dashes instead.
        payload = {key.replace('_', '-'): value for key, value in member.items()}
    # for execution
    else:
        payload = {'name': member, 'address': member.split(':')[0]}
    # build session
    bigip_session = _build_session(username, password)
    # post to REST
    try:
        response = bigip_session.post(
            BIG_IP_URL_BASE.format(host=hostname) + '/ltm/pool/{name}/members'.format(name=name),
            data=salt.utils.json.dumps(payload))
    except requests.exceptions.ConnectionError as e:
        return _load_connection_error(hostname, e)
    return _load_response(response)
|
def _save_file_and_pos ( self ) :
"""Save current position into file"""
|
if not self . _pos_changed :
return
with open ( self . pos_storage_filename , 'w+' ) as f :
_pos = '%s:%s' % ( self . _log_file , self . _log_pos )
_logger . debug ( 'Saving position %s to file %s' % ( _pos , self . pos_storage_filename ) )
f . write ( _pos )
self . _pos_changed = False
|
async def getNodesBy(self, full, valu=None, cmpr='='):
    '''The main function for retrieving nodes by prop.

    Args:
        full (str): The property/tag name.
        valu (obj): A lift compatible value for the type.
        cmpr (str): An optional alternate comparator.

    Yields:
        (synapse.lib.node.Node): Node instances.
    '''
    if self.debug:
        await self.printf(f'get nodes by: {full} {cmpr} {valu!r}')
    # Dispatch order matters: by-type lift (*type=) first, then tag lift
    # (#tag), then form#tag lift, and finally a plain property lift.
    if cmpr == '*type=':
        genr = self._getNodesByType(full, valu=valu)
    elif full.startswith('#'):
        genr = self._getNodesByTag(full, valu=valu, cmpr=cmpr)
    else:
        parts = full.split('#', 1)
        if len(parts) > 1:
            form, tag = parts
            genr = self._getNodesByFormTag(form, tag, valu=valu, cmpr=cmpr)
        else:
            genr = self._getNodesByProp(full, valu=valu, cmpr=cmpr)
    async for node in genr:
        yield node
|
def get_route_edge_attributes(G, route, attribute=None, minimize_key='length', retrieve_default=None):
    """Get a list of attribute values for each edge in a path.

    Parameters
    ----------
    G : networkx multidigraph
    route : list
        list of nodes in the path
    attribute : string
        the name of the attribute to get the value of for each edge.
        If not specified, the complete data dict is returned for each edge.
    minimize_key : string
        if there are parallel edges between two nodes, select the one with
        the lowest value of minimize_key
    retrieve_default : Callable[Tuple[Any, Any], Any]
        Function called with the edge nodes as parameters to retrieve a
        default value if the edge does not contain the given attribute.
        Per default, a `KeyError` is raised.

    Returns
    -------
    attribute_values : list
        list of edge attribute values
    """
    attribute_values = []
    for u, v in zip(route[:-1], route[1:]):
        # Parallel edges: keep the one minimizing `minimize_key`.
        data = min(G.get_edge_data(u, v).values(), key=lambda edge: edge[minimize_key])
        if attribute is None:
            attribute_values.append(data)
        elif retrieve_default is not None:
            attribute_values.append(data.get(attribute, retrieve_default(u, v)))
        else:
            attribute_values.append(data[attribute])
    return attribute_values
|
def has_target(alias, target):
    '''Return true if the alias/target is set.

    CLI Example:

    .. code-block:: bash

        salt '*' aliases.has_target alias target
    '''
    if target == '':
        raise SaltInvocationError('target can not be an empty string')
    aliases = list_aliases()
    if alias not in aliases:
        return False
    # A list of targets is stored as a comma-joined string.
    expected = ', '.join(target) if isinstance(target, list) else target
    return expected == aliases[alias]
|
def multipart_encoder(**kwargs):
    """Initialize MultipartEncoder with uploading fields.

    Each keyword value that resolves to an existing file (absolute, or
    relative to PWD) is attached as a (filename, content, mime-type)
    triple; any other value is passed through as a plain field.
    """
    def get_filetype(file_path):
        # filetype sniffs magic bytes; fall back to text/html when unknown.
        file_type = filetype.guess(file_path)
        return file_type.mime if file_type else "text/html"

    fields_dict = {}
    for key, value in kwargs.items():
        if os.path.isabs(value):
            _file_path = value
        else:
            global PWD
            _file_path = os.path.join(PWD, value)
        # BUG FIX: the original assumed every *absolute* path was an
        # existing file and crashed in open() when it was not; check
        # isfile() uniformly for both absolute and relative paths.
        if os.path.isfile(_file_path):
            filename = os.path.basename(_file_path)
            with open(_file_path, 'rb') as f:
                fields_dict[key] = (filename, f.read(), get_filetype(_file_path))
        else:
            fields_dict[key] = value
    return MultipartEncoder(fields=fields_dict)
|
def get_mimetype(path):
    """Guesses the mime type of a file. If mime type cannot be detected,
    plain text is assumed.

    :param path: path of the file
    :return: the corresponding mime type.
    """
    basename = os.path.split(path)[1]
    # guess_type returns (type, encoding); None type means "unknown".
    mimetype = mimetypes.guess_type(basename)[0] or 'text/x-plain'
    _logger().debug('mimetype detected: %s', mimetype)
    return mimetype
|
def OnFind(self, event):
    """Find functionality, called from toolbar, returns find position."""
    # The search starts in the cell after the current cursor position.
    start_pos = list(self.grid.actions.cursor)
    text, flags = event.text, event.flags
    findpos = self.grid.actions.find(start_pos, text, flags)
    if findpos is None:
        # Nothing found: just report it in the status bar.
        statustext = _("'{text}' not found.").format(text=text)
    else:
        # Found: move the cursor to the match and report the location.
        self.grid.actions.cursor = findpos
        statustext = _(u"Found '{text}' in cell {key}.").format(text=text, key=findpos)
    post_command_event(self.grid.main_window, self.grid.StatusBarMsg, text=statustext)
    event.Skip()
|
def simxPackFloats(floatList):
    '''Please have a look at the function description/documentation in the V-REP user manual'''
    if sys.version_info[0] == 3:
        # Pack every float little-endian in a single call and return a
        # mutable buffer, matching the per-element packing of the original.
        packed = struct.pack('<%uf' % len(floatList), *floatList)
        return bytearray(packed)
    # Python 2: build a plain byte string.
    packed = ''
    for value in floatList:
        packed += struct.pack('<f', value)
    return packed
|
def get_goea_nts_prt(self, fldnames=None, **usr_kws):
    """Return list of namedtuples removing fields which are redundant or verbose."""
    kws = usr_kws.copy()
    # Default to dropping verbose/redundant fields and to report formatting,
    # unless the caller already chose otherwise.
    kws.setdefault('not_fldnames', ['goterm', 'parents', 'children', 'id'])
    kws.setdefault('rpt_fmt', True)
    return self.get_goea_nts_all(fldnames, **kws)
|
def set_session(self, headers=None):
    """Init session with default or custom headers.

    Args:
        headers: A dict of headers (default None, thus using the default
            header to init the session)

    Raises:
        TypeError: if ``headers`` is neither None nor a dict.
    """
    if headers is None:
        # Default to a desktop Chrome user-agent.
        headers = {
            'User-Agent': ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_3)'
                           ' AppleWebKit/537.36 (KHTML, like Gecko) '
                           'Chrome/48.0.2564.116 Safari/537.36')
        }
    elif not isinstance(headers, dict):
        raise TypeError('"headers" must be a dict object')
    self.session = Session(self.proxy_pool)
    self.session.headers.update(headers)
|
def _restore_auto_increment ( self , table ) :
"""restore the auto increment value for the table to what it was previously"""
|
query , seq_table , seq_column , seq_name = self . _get_auto_increment_info ( table )
if query :
queries = [ query , "select nextval('{}')" . format ( seq_name ) ]
return self . _run_queries ( queries )
|
def who_likes(obj):
    """Return the Like queryset for the given object.

    Usage:
        {% who_likes obj as var %}
    """
    # Likes are keyed by (content type, object id) generic relation.
    content_type = ContentType.objects.get_for_model(obj)
    return Like.objects.filter(receiver_content_type=content_type,
                               receiver_object_id=obj.pk)
|
def parse_cropbox(cropbox):
    """Returns x, y, x2, y2 tuple for cropping.

    Accepts either a text value like "10, 20, 30, 40" or any iterable of
    coordinates, which is passed through as a tuple.
    """
    if isinstance(cropbox, six.text_type):
        return tuple(int(part.strip()) for part in cropbox.split(','))
    return tuple(cropbox)
|
def __do_case_6_work(d_w, d_u, case_1, case_2, case_3, dfs_data):
    """Encapsulates the work that will be done for case 6 of __embed_frond,
    since it gets used in more than one place.

    Returns True when the frond was embedded, False for u-case 2 (which
    should never reach this function).
    """
    # --We should only ever see u-cases 1 and 3
    if case_2:
        # --We should never get here
        return False
    comp_d_w = abs(d_w)
    # --Add the frond to the right side
    __insert_frond_RF(d_w, d_u, dfs_data)
    # --Add uw to Rm: widen the current right block to cover (d_w, d_u)
    m = dfs_data['FG']['m']
    Rm = R(m, dfs_data)
    if comp_d_w < Rm['x']:
        Rm['x'] = d_w
    if d_u > Rm['y']:
        Rm['y'] = d_u
    # --Case 3 requires a bit of extra work
    if case_3:
        Rm['x'] = d_w
        u_m1 = u(m - 1, dfs_data)
        # Merge conforming blocks until |d_w| >= u_{m-1} holds again;
        # merge_Fm updates FG['m'], so both m and u_m1 are re-read each pass.
        while comp_d_w < u_m1:
            merge_Fm(dfs_data)
            m = dfs_data['FG']['m']
            u_m1 = u(m - 1, dfs_data)
    # else:
    #     print "Case 6 work, u-case 1"
    return True
|
def album_infos(self, album_id):
    """Fetch album details from the API.

    :param album_id: identifier of the album to fetch
    :return: the 'album' payload dict when the response code is 200,
        otherwise None (implicit).
    """
    endpoint = uri + '/album/' + str(album_id)
    payload = self.request('GET', endpoint)
    if payload['code'] == 200:
        return payload['album']
|
def cmd_devid(args):
    '''show parameters'''
    params = mestate.mlog.params
    for param_name in sorted(params.keys()):
        # Compass and INS sensor parameters carry encoded device ids.
        if param_name.startswith('COMPASS_DEV_ID'):
            mp_util.decode_devid(params[param_name], param_name)
        if param_name.startswith('INS_') and param_name.endswith('_ID'):
            mp_util.decode_devid(params[param_name], param_name)
|
def pattern(name, pattern):
    """Function to put a name on a pyparsing pattern.

    Just for ease of debugging/tracing parse errors.

    :param name: label attached to the pattern (shows up in parse traces)
    :param pattern: the pyparsing element to name; returned for chaining
    """
    pattern.setName(name)
    astracing.maybe_trace(pattern)
    return pattern
|
def sky2pix(self, pos):
    """Convert sky coordinates into pixel coordinates.

    Parameters
    ----------
    pos : (float, float)
        The (ra, dec) sky coordinates (degrees)

    Returns
    -------
    pixel : (float, float)
        The (x, y) pixel coordinates
    """
    first, second = self.wcs.wcs_world2pix([pos], 1)[0]
    # wcs and pyfits have opposite ideas of x/y, so swap the axes.
    return [second, first]
|
def _handle_sigint(self, signum: int, frame: Any) -> None:
    """Shutdown after processing current task.

    :param signum: signal number delivered by the runtime
    :param frame: current stack frame at delivery time (unused)
    """
    # Fixed typo in the log message ("Catched" -> "Caught").
    logger.warning("Caught SIGINT")
    self.shutdown()
|
def _validate_annotation ( self , annotation ) :
'''Ensures that the annotation has the right fields .'''
|
required_keys = set ( self . _required_keys )
keys = set ( key for key , val in annotation . items ( ) if val )
missing_keys = required_keys . difference ( keys )
if missing_keys :
error = 'Annotation missing required fields: {0}' . format ( missing_keys )
raise AnnotationError ( error )
|
def appendAssayToStudy(assay, studyNum, pathToISATABFile):
    """This function appends an Assay object to a study in an ISA file.

    Typically, you should use the exploreISA function to check the contents
    of the ISA file and retrieve the assay and study number you are
    interested in!

    :param assay: The Assay
    :type assay: ISA Assay object
    :param studyNum: The Study number (notice it's not zero-based index).
    :type studyNum: int
    :param pathToISATABFile: The path to the ISATAB file
    :type pathToISATABFile: string
    :raise FileNotFoundError: If pathToISATABFile does not contain file
        'i_Investigation.txt'.
    """
    from isatools import isatab
    # Note: the original wrapped this in `except FileNotFoundError: raise err`,
    # a no-op handler; letting the exception propagate is equivalent.
    isa = isatab.load(pathToISATABFile, skip_load_tables=True)
    study = isa.studies[studyNum - 1]
    # Make the assay filename unique by inserting the current assay count
    # before the extension (e.g. a_assay.txt -> a_assay2.txt).
    stem, ext = os.path.splitext(os.path.basename(assay.filename))
    assay.filename = '{0}{1}{2}'.format(stem, len(study.assays), ext)
    study.assays.append(assay)
    isatab.dump(isa_obj=isa, output_path=pathToISATABFile)
|
def process_ekb_file(fname):
    """Processes an EKB file produced by CWMS.

    Parameters
    ----------
    fname : str
        Path to the EKB file to process.

    Returns
    -------
    cp : indra.sources.cwms.CWMSProcessor
        A CWMSProcessor, which contains a list of INDRA statements in its
        statements attribute.
    """
    # Read the raw EKB XML and delegate to the string-based processor.
    with open(fname, 'rb') as fh:
        return process_ekb(fh.read().decode('utf-8'))
|
def _AddOption ( self , name ) :
"""Add an option to this Value .
Args :
name : ( str ) , the name of the Option to add .
Raises :
TextFSMTemplateError : If option is already present or
the option does not exist ."""
|
# Check for duplicate option declaration
if name in [ option . name for option in self . options ] :
raise TextFSMTemplateError ( 'Duplicate option "%s"' % name )
# Create the option object
try :
option = self . _options_cls . GetOption ( name ) ( self )
except AttributeError :
raise TextFSMTemplateError ( 'Unknown option "%s"' % name )
self . options . append ( option )
|
def _partial_extraction_fixed ( self , idx , extra_idx = 0 ) :
"""Private method for a single extraction on a fixed - type tab file"""
|
myarray = np . array ( [ ] )
with open ( self . abspath ) as fobj :
contents = fobj . readlines ( ) [ idx + extra_idx : ]
for line in contents :
try :
vals = re . findall ( r' *[\w\-\+\.]*' , line )
temp = np . array ( [ float ( val ) for val in vals if val not in ( '' , ' ' ) ] )
myarray = np . hstack ( ( myarray , temp ) )
except ValueError :
break
return myarray
|
def run(items, background=None):
    """Detect copy number variations from tumor/normal samples using Battenberg."""
    paired = vcfutils.get_paired_bams([x["align_bam"] for x in items], items)
    batout = None
    # Battenberg needs a human, paired tumor/normal input; skip otherwise.
    if not paired or not paired.normal_bam:
        logger.warn("Battenberg only works on paired tumor/normal inputs, skipping %s"
                    % dd.get_sample_name(items[0]))
    elif not tz.get_in(["genome_resources", "aliases", "human"], paired.tumor_data):
        logger.warn("Battenberg only works on human data, skipping %s"
                    % dd.get_sample_name(items[0]))
    else:
        batout = _do_run(paired)
        batout["variantcaller"] = "battenberg"
    out = []
    for data in items:
        # Attach the Battenberg result to the tumor sample only.
        if batout and dd.get_sample_name(data) == paired.tumor_name:
            if "sv" not in data:
                data["sv"] = []
            data["sv"].append(batout)
        out.append(data)
    return out
|
def netconf_config_change_changed_by_server_or_user_by_user_session_id(self, **kwargs):
    """Auto Generated Code.

    Builds the <config> tree for
    netconf-config-change/changed-by/server-or-user/by-user/session-id
    and hands it to the callback (kwarg 'callback', default self._callback).
    """
    config = ET.Element("config")
    netconf_config_change = ET.SubElement(
        config, "netconf-config-change",
        xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-notifications")
    changed_by = ET.SubElement(netconf_config_change, "changed-by")
    server_or_user = ET.SubElement(changed_by, "server-or-user")
    by_user = ET.SubElement(server_or_user, "by-user")
    ET.SubElement(by_user, "session-id").text = kwargs.pop('session_id')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
def get_object(connection, object_meta_data: dict, dirname: str):
    """Download object from objectstore.

    object_meta_data is an object returned when using
    'get_full_container_list'; only its 'name' entry is used here.
    """
    # get_object returns (headers, body); callers only want the body.
    response = connection.get_object(dirname, object_meta_data['name'])
    return response[1]
|
def initialize_plot(self, ranges=None):
    """Initializes a new plot object with the last available frame."""
    # Render the plot for the most recent key, then mark it as drawn.
    figure = self.generate_plot(self.keys[-1], ranges)
    self.drawn = True
    return figure
|
def set(self, name: str, value: str) -> None:
    """Overwrite a header on the request; not recommended.

    Header names are case-insensitive, so the name is casefolded before
    being stored.
    """
    self._headers[name.casefold()] = value
|
def _get_upstream(self):
    """Return the remote and remote merge branch for the current branch.

    Results are cached on self._remote / self._branch; git config is only
    consulted when either is still unset.
    """
    if not self._remote or not self._branch:
        branch = self.branch_name
        if not branch:
            raise Scm.LocalException('Failed to determine local branch')

        def read_config(key):
            # --local restricts the lookup to the repository's own config.
            value = self._check_output(['config', '--local', '--get', key],
                                       raise_type=Scm.LocalException)
            return value.strip()

        self._remote = self._remote or read_config('branch.{}.remote'.format(branch))
        self._branch = self._branch or read_config('branch.{}.merge'.format(branch))
    return self._remote, self._branch
|
def get_field_class(qs, field_name):
    """Given a queryset and a field name, return the field's class name.

    Returns None when the field does not exist (possible while annotating).
    """
    try:
        return qs.model._meta.get_field(field_name).__class__.__name__
    except FieldDoesNotExist:
        # While annotating, it's possible that the field does not exist.
        return None
|
def gethost(self, ip_addr):
    """Do a reverse DNS lookup on an IP address, caching in self.hostsmap.

    Inputs that already look like hostnames (leading ASCII letter) are
    returned unchanged.  IPv4-mapped IPv6 prefixes (``::ffff:``) are
    stripped before lookup.  On lookup failure the address itself is
    cached and returned.
    """
    # Handle silly fake ipv6 addresses such as ::ffff:1.2.3.4
    try:
        if ip_addr[:7] == '::ffff:':
            ip_addr = ip_addr[7:]
    except TypeError:
        pass
    # Fix: string.letters is Python 2 only; ascii_letters works on both.
    if ip_addr[0] in string.ascii_letters:
        return ip_addr
    try:
        return self.hostsmap[ip_addr]
    except KeyError:
        pass
    try:
        name = socket.gethostbyaddr(ip_addr)[0]
    except socket.error:
        name = ip_addr
    self.hostsmap[ip_addr] = name
    return name
|
def update(self, callback_method=values.unset, callback_url=values.unset, friendly_name=values.unset):
    """Update the TriggerInstance.

    :param unicode callback_method: The HTTP method to use to call callback_url
    :param unicode callback_url: The URL we call when the trigger fires
    :param unicode friendly_name: A string to describe the resource
    :returns: Updated TriggerInstance
    :rtype: twilio.rest.api.v2010.account.usage.trigger.TriggerInstance
    """
    data = values.of({
        'CallbackMethod': callback_method,
        'CallbackUrl': callback_url,
        'FriendlyName': friendly_name,
    })
    payload = self._version.update('POST', self._uri, data=data, )
    return TriggerInstance(
        self._version,
        payload,
        account_sid=self._solution['account_sid'],
        sid=self._solution['sid'],
    )
|
def window(self, windowDuration, slideDuration=None):
    """Return a new DStream in which each RDD contains all the elements seen
    in a sliding window of time over this DStream.

    @param windowDuration: width of the window; must be a multiple of this
                           DStream's batching interval
    @param slideDuration:  sliding interval of the window (i.e., the interval
                           after which the new DStream will generate RDDs);
                           must be a multiple of this DStream's batching
                           interval
    """
    self._validate_window_param(windowDuration, slideDuration)
    window_jduration = self._ssc._jduration(windowDuration)
    if slideDuration is None:
        jdstream = self._jdstream.window(window_jduration)
    else:
        slide_jduration = self._ssc._jduration(slideDuration)
        jdstream = self._jdstream.window(window_jduration, slide_jduration)
    return DStream(jdstream, self._ssc, self._jrdd_deserializer)
|
def _symm_current ( C ) :
"""To get rid of NaNs produced by _ scalar2array , symmetrize operators
where C _ ijkl = C _ klij"""
|
nans = np . isnan ( C )
C [ nans ] = np . einsum ( 'klij' , C ) [ nans ]
return C
|
def get_user_push_restrictions(self):
    """:calls: `GET /repos/:owner/:repo/branches/:branch/protection/restrictions/users <https://developer.github.com/v3/repos/branches>`_
    :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.NamedUser.NamedUser`
    """
    url = self.protection_url + "/restrictions/users"
    return github.PaginatedList.PaginatedList(
        github.NamedUser.NamedUser, self._requester, url, None)
|
def _set_preferred_infinite ( self , v , load = False ) :
    """Setter method for preferred_infinite, mapped from YANG variable
    /interface/fortygigabitethernet/ipv6/ipv6_nd_ra/ipv6_intf_cmds/nd/prefix/lifetime/preferred/preferred_infinite (empty).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_preferred_infinite is considered a private method.  Backends
    looking to populate this variable should do so via calling
    thisObj._set_preferred_infinite() directly.
    """
    # Let union-typed wrappers coerce the value to the matching member type.
    if hasattr ( v , "_utype" ) :
        v = v . _utype ( v )
    try :
        # Wrap the raw value in the generated YANG dynamic class; this
        # validates it against the 'empty' (boolean presence) leaf type.
        t = YANGDynClass ( v , base = YANGBool , is_leaf = True , yang_name = "preferred-infinite" , rest_name = "infinite" , parent = self , choice = ( u'ch-preferred-type' , u'ca-preferred-infinite' ) , path_helper = self . _path_helper , extmethods = self . _extmethods , register_paths = True , extensions = { u'tailf-common' : { u'info' : u'Infinite preferred lifetime' , u'alt-name' : u'infinite' } } , namespace = 'urn:brocade.com:mgmt:brocade-ipv6-nd-ra' , defining_module = 'brocade-ipv6-nd-ra' , yang_type = 'empty' , is_config = True )
    except ( TypeError , ValueError ) :
        # Re-raise with the generated-type description so callers can see
        # exactly which constraint failed.
        raise ValueError ( { 'error-string' : """preferred_infinite must be of a type compatible with empty""" , 'defined-type' : "empty" , 'generated-type' : """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="preferred-infinite", rest_name="infinite", parent=self, choice=(u'ch-preferred-type', u'ca-preferred-infinite'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Infinite preferred lifetime', u'alt-name': u'infinite'}}, namespace='urn:brocade.com:mgmt:brocade-ipv6-nd-ra', defining_module='brocade-ipv6-nd-ra', yang_type='empty', is_config=True)""" , } )
    self . __preferred_infinite = t
    # Notify the object of the change if it supports change notification.
    if hasattr ( self , '_set' ) :
        self . _set ( )
|
def visit_emptynode(self, node, parent):
    """Visit an EmptyNode node by returning a fresh instance of it."""
    lineno = getattr(node, "lineno", None)
    col_offset = getattr(node, "col_offset", None)
    return nodes.EmptyNode(lineno, col_offset, parent)
|
def download(url, file_handle, chunk_size=1024):
    """Download a given URL into an already-open file, showing progress.

    Parameters
    ----------
    url : str
        URL to download.
    file_handle : file
        Where to save the downloaded URL.
    """
    response = requests.get(url, stream=True)
    total_length = response.headers.get('content-length')
    maxval = UnknownLength if total_length is None else int(total_length)
    with progress_bar(name=file_handle.name, maxval=maxval) as bar:
        for i, chunk in enumerate(response.iter_content(chunk_size)):
            if total_length:
                bar.update(i * chunk_size)
            file_handle.write(chunk)
|
def _calcOnAxisFactor ( self , aLocation , deltaAxis , deltasOnSameAxis , deltaLocation ) :
    """Calculate the on-axis factors.

    Computes the interpolation factor (0..1) contributed by the delta at
    ``deltaLocation`` along ``deltaAxis`` when evaluating at ``aLocation``,
    given all other deltas on the same axis.
    """
    if deltaAxis == "origin" :
        f = 0
        v = 0
    else :
        # f: evaluation position on this axis; v: this delta's position.
        f = aLocation [ deltaAxis ]
        v = deltaLocation [ deltaAxis ]
    i = [ ]
    iv = { }
    # Collect the unique axis values of all deltas on this axis.
    for value in deltasOnSameAxis :
        iv [ Location ( value ) [ deltaAxis ] ] = 1
    i = sorted ( iv . keys ( ) )
    r = 0
    # Partition the delta positions into Below / Matching / Above f.
    B , M , A = [ ] , [ ] , [ ]
    mA , mB , mM = None , None , None
    for value in i :
        if value < f :
            B . append ( value )
        elif value > f :
            A . append ( value )
        else :
            M . append ( value )
    if len ( B ) > 0 :
        mB = max ( B )
        B . sort ( )
    if len ( A ) > 0 :
        mA = min ( A )
        A . sort ( )
    if len ( M ) > 0 :
        mM = min ( M )
        M . sort ( )
    if mM is not None :
        # A delta sits exactly at f: full weight iff v coincides with f
        # (within _EPSILON tolerance).
        if ( ( f - _EPSILON < v ) and ( f + _EPSILON > v ) ) or f == v :
            r = 1
        else :
            r = 0
    elif mB is not None and mA is not None :
        # f lies between deltas: interpolate linearly between neighbours.
        if v < mB or v > mA :
            r = 0
        else :
            if v == mA :
                r = float ( f - mB ) / ( mA - mB )
            else :
                r = float ( f - mA ) / ( mB - mA )
    elif mB is None and mA is not None :
        # Only deltas above f: extrapolate from the two nearest ones.
        # NOTE(review): assumes len(A) >= 2 here -- confirm with callers.
        if v == A [ 1 ] :
            r = float ( f - A [ 0 ] ) / ( A [ 1 ] - A [ 0 ] )
        elif v == A [ 0 ] :
            r = float ( f - A [ 1 ] ) / ( A [ 0 ] - A [ 1 ] )
        else :
            r = 0
    elif mB is not None and mA is None :
        # Only deltas below f: extrapolate from the two nearest ones.
        if v == B [ - 2 ] :
            r = float ( f - B [ - 1 ] ) / ( B [ - 2 ] - B [ - 1 ] )
        elif v == mB :
            r = float ( f - B [ - 2 ] ) / ( B [ - 1 ] - B [ - 2 ] )
        else :
            r = 0
    return r
|
def to_dict(self):
    """Return the information from the pedigree file as a dictionary.

    Family id is the key; the value is a list with one dictionary per
    individual in that family.

    Returns:
        families (dict): A dictionary with the families
    """
    self.logger.debug("Return the information as a dictionary")
    families = {}
    for family_id, family_obj in self.families.items():
        members = []
        for individual_id, individual in family_obj.individuals.items():
            members.append(individual.to_json())
            self.logger.debug("Adding individual {0} to family {1}".format(individual_id, family_id))
        self.logger.debug("Adding family {0}".format(family_id))
        families[family_id] = members
    return families
|
def get_shop(self, shop_id=0):
    """Query a shop's WiFi information.

    http://mp.weixin.qq.com/wiki/15/bcfb5d4578ea818b89913472cf2bbf8f.html

    :param shop_id: shop ID
    :return: the decoded JSON payload
    """
    return self._post(
        'shop/get',
        data={'shop_id': shop_id, },
        result_processor=lambda x: x['data'],
    )
|
def AddToFileNameTable(self, fileName, showID):
    """Add an entry to the FileName table.

    If the file name and show id combination already exists in the table
    a fatal error is raised.

    Parameters
    ----------
    fileName : string
        File name.
    showID : int
        Show id.
    """
    goodlogging.Log.Info("DB", "Adding filename string match '{0}'={1} to database".format(fileName, showID), verbosity=self.logVerbosity)
    if self.SearchFileNameTable(fileName) is None:
        self._ActionDatabase("INSERT INTO FileName (FileName, ShowID) VALUES (?,?)", (fileName, showID))
    else:
        goodlogging.Log.Fatal("DB", "An entry for '{0}' already exists in the FileName table".format(fileName))
|
def app_cache_restorer():
    """A context manager that restores the model cache state as it was
    before entering the context."""
    snapshot = _app_cache_deepcopy(apps.__dict__)
    try:
        yield snapshot
    finally:
        with apps_lock():
            apps.__dict__ = snapshot
            # Rebind the app registry models cache to
            # individual app config ones.
            for app_config in apps.get_app_configs():
                app_config.models = apps.all_models[app_config.label]
            apps.clear_cache()
|
def profile_cancel(self, query_id, timeout=10):
    """Cancel the query that has the given query id.

    :param query_id: The UUID of the query in standard UUID format that Drill assigns to each query.
    :param timeout: int
    :return: pydrill.client.Result
    """
    request = {
        'method': 'GET',
        'url': '/profiles/cancel/{0}'.format(query_id),
        'params': {'request_timeout': timeout},
    }
    return Result(*self.perform_request(**request))
|
def insert_instance(instance, table, **kwargs):
    """Insert an object's values into a given table, skipping None values.

    @param instance: Instance of an object to insert
    @param table: Table in which to insert instance values
    @return: ID of the last inserted row
    """
    # Fix: dict.iteritems() does not exist on Python 3; build the filtered
    # dict in one pass instead of copying and mutating while iterating.
    instancedict = {k: v for k, v in instance.__dict__.items() if v is not None}
    keys, values = CoyoteDb.get_insert_fields_and_values_from_dict(instancedict)
    sql = """INSERT INTO {table} ({keys}) VALUES ({values})""".format(table=table, keys=keys, values=values)
    return CoyoteDb.insert(sql=sql, **kwargs)
|
def simxReadStringStream(clientID, signalName, operationMode):
    '''Please have a look at the function description/documentation in the V-REP user manual'''
    signalLength = ct.c_int()
    signalValue = ct.POINTER(ct.c_ubyte)()
    # On Python 3 the C layer needs the signal name as bytes.
    if (sys.version_info[0] == 3) and (type(signalName) is str):
        signalName = signalName.encode('utf-8')
    ret = c_ReadStringStream(clientID, signalName, ct.byref(signalValue),
                             ct.byref(signalLength), operationMode)
    data = bytearray()
    if ret == 0:
        for i in range(signalLength.value):
            data.append(signalValue[i])
        if sys.version_info[0] != 3:
            data = str(data)
    return ret, data
|
def kwargs(self):
    """Combine GET and POST params to be passed to the controller.

    POST (body) values take precedence over GET (query) values.
    """
    combined = dict(self.query_kwargs)
    combined.update(self.body_kwargs)
    return combined
|
def vcenter_discovery_ignore_delete_all_response_always(self, **kwargs):
    """Build the vcenter/discovery/ignore-delete-all-response/always config
    element and pass it to the callback (auto-generated)."""
    config = ET.Element("config")
    vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
    ET.SubElement(vcenter, "id").text = kwargs.pop('id')
    discovery = ET.SubElement(vcenter, "discovery")
    ignore = ET.SubElement(discovery, "ignore-delete-all-response")
    ET.SubElement(ignore, "always")
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
def get_job_definition(self, identifier):
    """Get a job definition by name or ARN.

    :param identifier: Name or ARN
    :type identifier: str
    :return: Job definition or None
    :rtype: JobDefinition or None
    """
    by_arn = self.get_job_definition_by_arn(identifier)
    if by_arn is not None:
        return by_arn
    return self.get_job_definition_by_name(identifier)
|
def _loadSources(self):
    """Create a trigdict and populate it with data from self.authorityFiles."""
    self.confstems = {}
    self.sourceDict = newtrigdict.Trigdict()
    for file_name in self.authorityFiles:
        self._loadOneSource(file_name)
    # We want to allow naked bibstems in references, too
    for stem in self.sourceDict.values():
        self._addPub(stem, stem.replace(".", "").upper())
|
def write(self, data):
    """Write data to the stream.

    :data: the data to write to the stream
    :returns: None
    """
    if not self.padded:
        if len(data) == 0:
            # nothing to do here
            return
        self.write_bits(bytes_to_bits(data))
        return
    # Padded mode: flush out any queued bits before writing whole bytes.
    if len(self._bits) > 0:
        self._flush_bits_to_stream()
    self._stream.write(data)
|
def is_noise(self):
    """Is this module just noise? (too common either at the top or the
    bottom of the graph)."""
    if self.in_degree and self.out_degree:
        # Connected in both directions -- never considered noise.
        return False
    return self.degree > self.args['noise_level']
|
def count_substring_occurrences(text: str, subtext: str) -> int:
    """Determine the number of occurrences of a specific substring within a
    given string.  Overlapping instances are included.

    Examples:
        >>> count_substring_occurrences('', 'a')
        0
        >>> count_substring_occurrences('aaa', 'a')
        3
        >>> count_substring_occurrences('aaaa', 'aa')
        3

    Args:
        text: The original string
        subtext: The substring to search for

    Returns:
        The count of subtext occurrences within the original string
    """
    # Fix: the doctest examples were missing their expected outputs, so
    # they would always fail under doctest; outputs added above.
    sub_len = len(subtext)
    return sum(1 for i in range(len(text) - sub_len + 1) if text[i:i + sub_len] == subtext)
|
def permission_request_approve_link(context, perm):
    """Render an HTML link to the approve view of the given permission request.

    Returns no content if the request-user has no permission to delete
    foreign permissions.
    """
    user = context['request'].user
    if user.is_authenticated() and user.has_perm('authority.approve_permission_requests'):
        return base_link(context, perm, 'authority-approve-permission-request')
    return {'url': None}
|
def parse_devicelist(data_str):
    """Parse the BT Home Hub 5 data format."""
    parser = HTMLTableParser()
    parser.feed(data_str)
    devices = {}
    # Table 9 holds the known devices; a valid row has 5 cells and a
    # non-empty third cell (the device identifier).
    for row in parser.tables[9]:
        if len(row) == 5 and row[2] != '':
            devices[row[2]] = row[1]
    return devices
|
def synopsis(case_id):
    """Update the case synopsis from the submitted form, then redirect back."""
    new_text = request.form['text']
    case_obj = app.db.case(case_id)
    app.db.update_synopsis(case_obj, new_text)
    return redirect(request.referrer)
|
def optimize_orientations(fwtour, clm, phase, cpus):
    """Optimize the orientations of contigs by using heuristic flipping."""
    tour_contigs = clm.active_contigs
    tour = clm.tour
    oo = clm.oo

    def log_tour(label):
        # Record the tour state under a phase-tagged label.
        print_tour(fwtour, tour, "{0}{1}".format(label, phase), tour_contigs, oo, signs=clm.signs)

    log_tour("FLIPALL")
    tag1 = clm.flip_whole(tour)
    log_tour("FLIPWHOLE")
    tag2 = clm.flip_one(tour)
    log_tour("FLIPONE")
    return tag1, tag2
|
def round(value, decimal=None, digits=None, places=None):
    """Format a rounded number as a string.

    :param value: THE VALUE TO ROUND
    :param decimal: NUMBER OF DECIMAL PLACES TO ROUND (NEGATIVE IS LEFT-OF-DECIMAL)
    :param digits: ROUND TO SIGNIFICANT NUMBER OF digits
    :param places: SAME AS digits
    :return:
    """
    value = float(value)
    if value == 0.0:
        return "0"
    digits = coalesce(digits, places)
    if digits is not None:
        # Translate a significant-digits request into a decimal-place count.
        left_of_decimal = int(math.ceil(math.log10(abs(value))))
        decimal = digits - left_of_decimal
    right_of_decimal = max(decimal, 0)
    fmt = "{:." + text_type(right_of_decimal) + "f}"
    return fmt.format(_round(value, decimal))
|
def raise_for_status(self):
    '''Raise Postmark-specific HTTP errors. If there isn't one, the
    standard HTTP error is raised.

    HTTP 401 raises :class:`UnauthorizedError`
    HTTP 422 raises :class:`UnprocessableEntityError`
    HTTP 500 raises :class:`InternalServerError`
    '''
    status = self.status_code
    if status == 401:
        raise UnauthorizedError(self._requests_response)
    if status == 422:
        raise UnprocessableEntityError(self._requests_response)
    if status == 500:
        raise InternalServerError(self._requests_response)
    # No Postmark-specific error: defer to the underlying response.
    return self._requests_response.raise_for_status()
|
def parse_content_type ( header ) :
    """Parse the "Content-Type" header.

    Returns a ``(ctype, options)`` tuple where ``ctype`` is the
    "type/subtype" portion (or '' when the subtype is missing) and
    ``options`` maps parameter names to their values.
    """
    typ = subtyp = None ;
    options = { }
    # "type/subtype" -- each side is a token.
    typ , pos = expect_re ( re_token , header , 0 )
    _ , pos = expect_lit ( '/' , header , pos )
    subtyp , pos = expect_re ( re_token , header , pos )
    ctype = header [ : pos ] if subtyp else ''
    # Parse ";name=value" parameters until the header is exhausted.
    while pos < len ( header ) :
        _ , pos = accept_ws ( header , pos )
        _ , pos = expect_lit ( ';' , header , pos )
        _ , pos = accept_ws ( header , pos )
        name , pos = expect_re ( re_token , header , pos )
        _ , pos = expect_lit ( '=' , header , pos )
        char = lookahead ( header , pos )
        if char == '"' :
            # Quoted-string value: keep the raw match, then unescape
            # quoted pairs (backslash sequences).
            value , pos = expect_re ( re_qstring , header , pos )
            value = re_qpair . sub ( '\\1' , value )
        elif char :
            value , pos = expect_re ( re_token , header , pos )
        # NOTE(review): if `char` is falsy on the first iteration, `value`
        # is unbound here -- presumably expect_* guarantees progress; verify.
        if name and value is not None :
            options [ name ] = value
    return ctype , options
|
def update_campaign_metadata_list(self, campaign_id, **kwargs):  # noqa: E501
    """List all campaign device metadata.  # noqa: E501

    Get campaign device metadata.  This method makes a synchronous HTTP
    request by default.  To make an asynchronous HTTP request, please pass
    asynchronous=True

    >>> thread = api.update_campaign_metadata_list(campaign_id, asynchronous=True)
    >>> result = thread.get()

    :param asynchronous bool
    :param str campaign_id: The update campaign ID (required)
    :param int limit: How many objects to retrieve in the page
    :param str order: ASC or DESC
    :param str after: The ID of the the item after which to retrieve the next page
    :param str include: A comma-separated list of data fields to return. Currently supported: total_count
    :return: CampaignDeviceMetadataPage
        If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Synchronous and asynchronous calls delegate to the same helper; in
    # the asynchronous case the returned value is the request thread.
    return self.update_campaign_metadata_list_with_http_info(campaign_id, **kwargs)  # noqa: E501
|
def weight_variable(shape):
    """Generate a TF weight variable of a given shape, initialized from a
    truncated normal distribution with stddev 0.1."""
    return tf.Variable(tf.truncated_normal(shape, stddev=0.1))
|
def contribute_to_class ( self , cls , name ) :
    """Swap out any reference to ``KeywordsField`` with the
    ``KEYWORDS_FIELD_string`` field in ``search_fields``.
    """
    super ( KeywordsField , self ) . contribute_to_class ( cls , name )
    # Name of the generated string field that backs this keywords field.
    string_field_name = list ( self . fields . keys ( ) ) [ 0 ] % self . related_field_name
    if hasattr ( cls , "search_fields" ) and name in cls . search_fields :
        try :
            # Mapping form: search_fields[name] is the field's weight.
            weight = cls . search_fields [ name ]
        except TypeError : # search _ fields is a sequence .
            # Replace the entry in place, preserving the sequence type.
            index = cls . search_fields . index ( name )
            search_fields_type = type ( cls . search_fields )
            cls . search_fields = list ( cls . search_fields )
            cls . search_fields [ index ] = string_field_name
            cls . search_fields = search_fields_type ( cls . search_fields )
        else :
            # Mapping form: re-key the weight under the string field name.
            del cls . search_fields [ name ]
            cls . search_fields [ string_field_name ] = weight
|
import math
def calculate_geometric_series_sum(first_term: int, series_len: int, ratio: int) -> int:
    """Calculate the sum of a geometric progression series.

    Args:
        first_term (int): The first term in the geometric progression series.
        series_len (int): The number of terms in the series.
        ratio (int): The common ratio between terms in the series.

    Returns:
        int: Sum of the geometric progression series.

    Examples:
        >>> calculate_geometric_series_sum(1, 5, 2)
        31
        >>> calculate_geometric_series_sum(1, 5, 4)
        341
        >>> calculate_geometric_series_sum(2, 6, 3)
        728
    """
    # Fix: the float formula returned e.g. 31.0 instead of the documented
    # int 31, and raised ZeroDivisionError for ratio == 1.  For integer
    # inputs (r**n - 1) is exactly divisible by (r - 1), so integer
    # division is exact.
    if ratio == 1:
        return first_term * series_len
    return first_term * (ratio ** series_len - 1) // (ratio - 1)
|
def firsthash ( frame , removedupes = False ) :
    '''Hashes the first time step. Only will work as long as
    the hash can fit in a uint64.

    Parameters:
      frame: first frame.

    Keywords:
      removedupes: specify duplicates for the given frame.

    Returns a dictionary of everything needed
    to generate hashes from the genhash function.
    '''
    # hashes must have i8 available
    # otherwise, we'll have overflow
    def avgdiff ( d ) :
        # Average nonzero gap between sorted coordinate values; falls back
        # to 1.0 when every value coincides (all gaps zero).
        d = np . sort ( d ) ;
        d = d [ 1 : ] - d [ : - 1 ]
        ret = np . average ( d [ np . nonzero ( d ) ] ) ;
        if np . isnan ( ret ) :
            return 1.0 ;
        return ret ;
    def hasextent ( l , eps = 1e-10 ) : # will I one day make pic sims on the pm scale ? ?
        # True when the coordinate field spans more than eps.
        dim = frame [ 'data' ] [ l ] ;
        return np . abs ( dim . max ( ) - dim . min ( ) ) > eps ;
    fields = list ( frame [ 'data' ] . dtype . names ) ;
    # Keep only the spatial dimensions that are present and non-degenerate.
    dims = [ i for i in [ 'xi' , 'yi' , 'zi' ] if i in fields and hasextent ( i ) ] ;
    ip = np . array ( [ frame [ 'data' ] [ l ] for l in dims ] ) . T ;
    avgdiffs = np . array ( [ avgdiff ( a ) for a in ip . T ] ) ;
    mins = ip . min ( axis = 0 ) ;
    # Quantize each coordinate onto an integer grid of its average spacing.
    ips = ( ( ( ip - mins ) / avgdiffs ) . round ( ) . astype ( 'uint64' ) )
    # Decimal digits needed per dimension, turned into positional weights
    # so the per-dimension integers pack into one uint64.
    pws = np . floor ( np . log10 ( ips . max ( axis = 0 ) ) ) . astype ( 'uint64' ) + 1
    pws = list ( pws ) ;
    pw = [ 0 ] + [ ipw + jpw for ipw , jpw in zip ( [ 0 ] + pws [ : - 1 ] , pws [ : - 1 ] ) ] ;
    pw = 10 ** np . array ( pw ) ;
    # . astype ( ' int64 ' ) ;
    # the dictionary used for hashing
    d = dict ( dims = dims , mins = mins , avgdiffs = avgdiffs , pw = pw ) ;
    hashes = genhash ( frame , removedupes = False , ** d ) ;
    if removedupes : # consider if the negation of this is faster for genhash
        # Mark every hash that occurs more than once with -1.
        uni , counts = np . unique ( hashes , return_counts = True ) ;
        d [ 'dupes' ] = uni [ counts > 1 ]
        dupei = np . in1d ( hashes , d [ 'dupes' ] ) ;
        hashes [ dupei ] = - 1 ;
        d [ 'removedupes' ] = True ;
    return hashes , d
|
def _get_validation_labels(val_path):
    """Return labels for validation.

    Args:
      val_path: path to TAR file containing validation images. It is used to
        retrieve the name of pictures and associate them to labels.

    Returns:
      dict, mapping from image name (str) to label (str).
    """
    labels_path = tfds.core.get_tfds_path(_VALIDATION_LABELS_FNAME)
    with tf.io.gfile.GFile(labels_path) as labels_f:
        labels = labels_f.read().strip().split('\n')
    with tf.io.gfile.GFile(val_path, 'rb') as tar_f_obj:
        archive = tarfile.open(mode='r:', fileobj=tar_f_obj)
        # Labels file is ordered by sorted image name.
        image_names = sorted(archive.getnames())
    return dict(zip(image_names, labels))
|
def type_with_ranges(self, tchain, p_elem, rangekw, gen_data):
    """Handle types with 'range' or 'length' restrictions.

    `tchain` is the chain of type definitions from which the ranges may
    need to be extracted.  `rangekw` is the statement keyword determining
    the range type (either 'range' or 'length').  `gen_data` is a function
    that generates the output schema node (a RELAX NG <data> pattern).
    """
    ranges = self.get_ranges(tchain, rangekw)
    if not ranges:
        return p_elem.subnode(gen_data())
    if len(ranges) > 1:
        # Multiple disjoint ranges become alternatives under a <choice>.
        p_elem = SchemaNode.choice(p_elem)
        p_elem.occur = 2
    for rng in ranges:
        d_elem = gen_data()
        for param in self.range_params(rng, rangekw):
            d_elem.subnode(param)
        p_elem.subnode(d_elem)
|
def _on_stream_disconnect ( self , stream ) :
    """Respond to disconnection of a local stream by propagating DEL_ROUTE for
    any contexts we know were attached to it.
    """
    # During a stream crash it is possible for disconnect signal to fire
    # twice , in which case ignore the second instance .
    routes = self . _routes_by_stream . pop ( stream , None )
    if routes is None :
        return
    LOG . debug ( '%r: %r is gone; propagating DEL_ROUTE for %r' , self , stream , routes )
    for target_id in routes :
        # Drop the local routing entry, then notify both neighbours.
        self . router . del_route ( target_id )
        self . _propagate_up ( mitogen . core . DEL_ROUTE , target_id )
        self . _propagate_down ( mitogen . core . DEL_ROUTE , target_id )
        # Fire 'disconnect' on the context object only if one already
        # exists locally (create=False avoids instantiating a new one).
        context = self . router . context_by_id ( target_id , create = False )
        if context :
            mitogen . core . fire ( context , 'disconnect' )
|
def add_log_fields(self, fields: Dict[str, Any]):
    """Add the provided log fields.

    If a key is already present, then it is ignored.

    :param fields: the log fields to add
    """
    self._log_fields.add_fields(fields)
|
def execute(self):
    """Main entry point: prepare models and the worker, then run it."""
    self.prepare_models()
    self.prepare_worker()
    if self.options.print_options:
        # Only echo the configuration when explicitly requested.
        self.print_options()
    self.run()
|
def get_filename_from_url(url):
    """Get a filename from a URL.

    >>> from planet.api import utils
    >>> urls = [
    ...     'https://planet.com/',
    ...     'https://planet.com/path/to/',
    ...     'https://planet.com/path/to/example.tif',
    ...     'https://planet.com/path/to/example.tif?foo=f6f1&bar=baz',
    ...     'https://planet.com/path/to/example.tif?foo=f6f1&bar=baz#quux',
    ... ]
    >>> for url in urls:
    ...     print('{} -> {}'.format(url, utils.get_filename_from_url(url)))
    https://planet.com/ -> None
    https://planet.com/path/to/ -> None
    https://planet.com/path/to/example.tif -> example.tif
    https://planet.com/path/to/example.tif?foo=f6f1&bar=baz -> example.tif
    https://planet.com/path/to/example.tif?foo=f6f1&bar=baz#quux -> example.tif

    :returns: a filename (i.e. ``basename``)
    :rtype: str or None
    """
    # Fix: the doctest's list literal was never closed (missing "... ]"),
    # so the example raised a SyntaxError under doctest.
    path = urlparse(url).path
    name = path[path.rfind('/') + 1:]
    return name or None
|
def maskname(mask):
    """Returns the event name associated to mask. IN_ISDIR is appended to
    the result when appropriate. Note: only one event is returned, because
    only one event can be raised at a given time.

    @param mask: mask.
    @type mask: int
    @return: event name.
    @rtype: str
    """
    if mask & IN_ISDIR:
        # Strip the directory flag before looking up the base event name.
        return '%s|IN_ISDIR' % EventsCodes.ALL_VALUES[mask - IN_ISDIR]
    return '%s' % EventsCodes.ALL_VALUES[mask]
|
def read2dict ( cls , filename , info ) :
    """Read the control parameters from the given path (and its
    auxiliary paths, where appropriate) and store them in the given
    |dict| object `info`.

    Note that the |dict| `info` can be used to feed information
    into the execution of control files.  Use this method only if you
    are completely sure on how the control parameter import of HydPy
    works.  Otherwise, you should most probably prefer to use
    |ControlManager.load_file|.
    """
    if not filename . endswith ( '.py' ) :
        filename += '.py'
    path = os . path . join ( cls . _workingpath , filename )
    try :
        # Cache file contents in the class registry so repeated loads of
        # the same control file skip disk access.
        if path not in cls . _registry :
            with open ( path ) as file_ :
                cls . _registry [ path ] = file_ . read ( )
        # Execute the control file; it populates `info` as its namespace.
        exec ( cls . _registry [ path ] , { } , info )
    except BaseException :
        objecttools . augment_excmessage ( 'While trying to load the control file `%s`' % path )
    if 'model' not in info :
        raise IOError ( 'Model parameters cannot be loaded from control file `%s`. ' 'Please refer to the HydPy documentation on how to prepare ' 'control files properly.' % path )
|
def resize_image_folder(bucket, key_prefix, pil_size):
    """Resize every image under a key prefix of an S3 bucket in place."""
    connection = boto.connect_s3()
    s3_bucket = connection.get_bucket(bucket)
    for listed in s3_bucket.list(key_prefix):
        key = s3_bucket.get_key(listed.name)
        if 'image' not in key.content_type:
            continue
        if key.get_metadata('size') == str(pil_size):
            # Already resized to the requested size.
            continue
        # Download the original, resize it, and upload the result.
        with tempfile.TemporaryFile() as big, tempfile.TemporaryFile() as small:
            key.get_contents_to_file(big)
            big.flush()
            big.seek(0)
            img = Image.open(big)
            img.thumbnail(pil_size, Image.ANTIALIAS)
            img.save(small, img.format)
            small.flush()
            small.seek(0)
            key.set_metadata('size', str(pil_size))
            key.set_contents_from_file(small, headers={'Content-Type': key.content_type})
|
def dist_docs():
    "create a documentation bundle"
    dist_dir = path("dist")
    docs_package = path("%s/%s-%s-docs.zip" % (dist_dir.abspath(), options.setup.name, options.setup.version))
    if not dist_dir.exists():
        dist_dir.makedirs()
    if docs_package.exists():
        docs_package.remove()
    sh(r'cd build/apidocs && zip -qr9 %s .' % (docs_package,))
    print('')
    print("Upload @ http://pypi.python.org/pypi?:action=pkg_edit&name=%s" % (options.setup.name,))
    print(docs_package)
|
def offset(self, offset):
    """Apply an OFFSET to the query and return the newly resulting Query."""
    clone = self._copy()
    clone._offset = offset
    return clone
|
def read_namespaced_resource_quota_status(self, name, namespace, **kwargs):  # noqa: E501
    """read_namespaced_resource_quota_status  # noqa: E501

    read status of the specified ResourceQuota.  This method makes a
    synchronous HTTP request by default.  To make an asynchronous HTTP
    request, please pass async_req=True

    >>> thread = api.read_namespaced_resource_quota_status(name, namespace, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the ResourceQuota (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1ResourceQuota
        If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Synchronous and asynchronous calls delegate to the same helper; in
    # the async case the returned value is the request thread.
    return self.read_namespaced_resource_quota_status_with_http_info(name, namespace, **kwargs)  # noqa: E501
|
def getproxies_environment():
    """Return a dictionary of scheme -> proxy server URL mappings.

    Scan the environment for variables named <scheme>_proxy; this seems to
    be the standard convention.  If you need a different way, you can pass
    a proxies dictionary to the [Fancy]URLopener constructor.
    """
    proxies = {}
    for name, value in os.environ.items():
        lowered = name.lower()
        if value and lowered.endswith('_proxy'):
            proxies[lowered[:-6]] = value
    return proxies
|
def _node_add_with_peer_list(self, child_self, child_other):
    '''_node_add_with_peer_list

    Low-level api: Apply delta child_other to child_self when child_self is
    the peer of child_other. Element child_self and child_other are list
    nodes. Element child_self will be modified during the process. RFC6020
    section 7.8.6 is a reference of this method.

    Parameters
    ----------
    child_self : `Element`
        A child of a config node in a config tree.
    child_other : `Element`
        A child of a config node in another config tree. child_self is
        the peer of child_other.

    Returns
    -------
    None
        There is no return of this method.
    '''
    parent_self = child_self.getparent()
    s_node = self.device.get_schema_node(child_self)
    # Positioning phase: only "ordered-by user" lists honor the NETCONF
    # insert attribute, and delete/remove operations carry no position, so
    # child_self is repositioned among its siblings before the operation
    # itself is applied.
    if child_other.get(operation_tag) != 'delete' and child_other.get(operation_tag) != 'remove' and s_node.get('ordered-by') == 'user' and child_other.get(insert_tag) is not None:
        if child_other.get(insert_tag) == 'first':
            # Move child_self to the head of the sequence of same-tag entries.
            scope = parent_self.getchildren()
            siblings = self._get_sequence(scope, child_other.tag, parent_self)
            if siblings[0] != child_self:
                siblings[0].addprevious(child_self)
        elif child_other.get(insert_tag) == 'last':
            # Move child_self to the tail of the sequence of same-tag entries.
            scope = parent_self.getchildren()
            siblings = self._get_sequence(scope, child_other.tag, parent_self)
            if siblings[-1] != child_self:
                siblings[-1].addnext(child_self)
        elif child_other.get(insert_tag) == 'before':
            # insert="before" requires a key attribute identifying the anchor
            # list entry; report an error if it is absent or unresolvable.
            if child_other.get(key_tag) is None:
                _inserterror('before', self.device.get_xpath(child_other), 'key')
            sibling = parent_self.find(child_other.tag + child_other.get(key_tag), namespaces=child_other.nsmap)
            if sibling is None:
                path = self.device.get_xpath(child_other)
                key = child_other.get(key_tag)
                _inserterror('before', path, 'key', key)
            if sibling != child_self:
                sibling.addprevious(child_self)
        elif child_other.get(insert_tag) == 'after':
            # insert="after": same anchor lookup, but place child_self after it.
            if child_other.get(key_tag) is None:
                _inserterror('after', self.device.get_xpath(child_other), 'key')
            sibling = parent_self.find(child_other.tag + child_other.get(key_tag), namespaces=child_other.nsmap)
            if sibling is None:
                path = self.device.get_xpath(child_other)
                key = child_other.get(key_tag)
                _inserterror('after', path, 'key', key)
            if sibling != child_self:
                sibling.addnext(child_self)
    # Operation phase: apply the actual edit now that positioning is done.
    if child_other.get(operation_tag) is None or child_other.get(operation_tag) == 'merge':
        # Default operation is merge: recurse into the node contents.
        self.node_add(child_self, child_other)
    elif child_other.get(operation_tag) == 'replace':
        # Replace child_self wholesale with a copy of child_other stripped of
        # its edit-control attributes.
        e = deepcopy(child_other)
        parent_self.replace(child_self, self._del_attrib(e))
    elif child_other.get(operation_tag) == 'create':
        # Creating a node that already has a peer is a data-exists error.
        raise ConfigDeltaError('data-exists: try to create node {} but ' 'it already exists'.format(self.device.get_xpath(child_other)))
    elif child_other.get(operation_tag) == 'delete' or child_other.get(operation_tag) == 'remove':
        parent_self.remove(child_self)
    else:
        raise ConfigDeltaError("unknown operation: node {} contains " "operation '{}'".format(self.device.get_xpath(child_other), child_other.get(operation_tag)))
|
def clear_cache(self):
    """Clear all kinds of internal caches to release resources.

    Currently persistent commands will be interrupted.

    :return: self
    """
    # First force immediate finalization of any live persistent command
    # processes, then drop both references in a second pass.
    for attr_name in ("cat_file_all", "cat_file_header"):
        proc = getattr(self, attr_name)
        if proc:
            proc.__del__()
    self.cat_file_all = None
    self.cat_file_header = None
    return self
|
def set_render_manager(self, agent: BaseAgent):
    """Sets the render manager for the agent.

    :param agent: An instance of an agent.
    """
    # Obtain a rendering manager scoped to this bot's index and team, then
    # hand it to the agent for its own drawing calls.
    renderer = self.game_interface.renderer
    manager = renderer.get_rendering_manager(self.index, self.team)
    agent._set_renderer(manager)
|
def lss(inlist):
    """Squares each value in the passed list, adds up these squares and
    returns the result.

    Usage:   lss(inlist)
    """
    # sum() starts from 0, so an empty list yields 0 exactly like the
    # original running-total accumulator.
    return sum(item * item for item in inlist)
|
def flatten(sequence, levels=1):
    """Example:
    >>> nested = [[1,2], [[3]]]
    >>> list(flatten(nested))
    [1, 2, [3]]
    """
    # levels counts how many layers of nesting to peel off; at 0 the
    # sequence is emitted as-is.
    if levels == 0:
        yield from sequence
    else:
        for inner in sequence:
            yield from flatten(inner, levels - 1)
|
def _piped_realign_gatk(data, region, cl, out_base_file, tmp_dir, prep_params):
    """Perform realignment with GATK, using input commandline.

    GATK requires writing to disk and indexing before realignment.

    :param data: sample data dictionary; must contain a "config" key and be
        accepted by dd.get_ref_file / dd.get_variation_resources.
    :param region: region specifier passed through region_to_gatk.
    :param cl: upstream command line whose output is written to the
        pre-alignment BAM.
    :param out_base_file: output path used to derive the prealign BAM name.
    :param tmp_dir: temporary directory handed to the realignment command.
    :param prep_params: NOTE(review): unused in this function body — confirm
        it is kept only for call-signature compatibility.
    :return: tuple of (pre-alignment BAM path, GATK realignment command line).
    """
    broad_runner = broad.runner_from_config(data["config"])
    # Derive "<base>-prealign<ext>" from the final output name.
    pa_bam = "%s-prealign%s" % os.path.splitext(out_base_file)
    if not utils.file_exists(pa_bam):
        # Run the upstream command into a transactional temp file so a failed
        # run leaves no partial pre-alignment BAM behind.
        with file_transaction(data, pa_bam) as tx_out_file:
            cmd = "{cl} -o {tx_out_file}".format(**locals())
            do.run(cmd, "GATK re-alignment {0}".format(region), data)
    # GATK requires an indexed BAM before realignment can run.
    bam.index(pa_bam, data["config"])
    realn_file = realign.gatk_realigner_targets(broad_runner, pa_bam, dd.get_ref_file(data), data["config"], region=region_to_gatk(region), known_vrns=dd.get_variation_resources(data))
    realn_cl = realign.gatk_indel_realignment_cl(broad_runner, pa_bam, dd.get_ref_file(data), realn_file, tmp_dir, region=region_to_gatk(region), known_vrns=dd.get_variation_resources(data))
    # The realignment command line is returned for the caller to execute.
    return pa_bam, realn_cl
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.