| signature (string, lengths 29–44.1k) | implementation (string, lengths 0–85.2k) |
|---|---|
def API_GET(self, courseid, taskid, submissionid):  # pylint: disable=arguments-differ
    """List the submissions that the connected user made for a task.

    Returns a list of dicts of the form::

        {
            "id": "submission_id1",
            "submitted_on": "date",
            "status": "done",        # "done", "waiting" or "error"
            "grade": 0.0,
            "input": {},             # input data; files are base64 encoded
            "result": "success",     # only if status == done
            "feedback": "",          # only if status == done; HTML global feedback
            "problems_feedback": {}  # only if status == done; HTML feedback per problem
        }

    When a submissionid is given, the list contains exactly one entry or the
    page returns 404 Not Found.
    """
    include_input = "input" in web.input()
    return _get_submissions(
        self.course_factory,
        self.submission_manager,
        self.user_manager,
        self.app._translations,
        courseid,
        taskid,
        include_input,
        submissionid,
    )
|
def spec_sum(ph2):
    """Compute the total spectral sum of the real spectral quantity ``ph^2``.

    Interior entries along the last axis are weighted by 2 while the first
    and last entries keep weight 1 — presumably compensating for the
    half-spectrum storage of a real-to-complex FFT (TODO confirm against the
    model's transform convention).

    Parameters
    ----------
    ph2 : real array
        The field on which to compute the sum.

    Returns
    -------
    var_dens : float or array
        The weighted sum of ``ph2`` over its last two axes.
    """
    # `2. * ph2` allocates a new array, so the caller's input is not mutated.
    ph2 = 2. * ph2
    ph2[..., 0] = ph2[..., 0] / 2.
    ph2[..., -1] = ph2[..., -1] / 2.
    return ph2.sum(axis=(-1, -2))
|
def configfile(f):
    """Decorator that parses a YAML configuration file and stores the
    resulting dictionary in ``ctx.config``.

    The path of the configuration file is read from ``ctx.obj["configfile"]``.
    """
    @click.pass_context
    def new_func(ctx, *args, **kwargs):
        # Use a context manager so the file handle is closed promptly, and
        # safe_load so arbitrary Python objects cannot be instantiated from
        # an untrusted configuration file (yaml.load without a Loader is
        # unsafe and deprecated).
        with open(ctx.obj["configfile"]) as cfg:
            ctx.config = yaml.safe_load(cfg)
        return ctx.invoke(f, *args, **kwargs)
    return update_wrapper(new_func, f)
|
def request_will_echo(self):
    """Tell the distant end (DE) that we would like to echo their text.

    Sends an IAC WILL ECHO option negotiation (RFC 857) and records that a
    reply for the ECHO option is pending.
    """
    self._iac_will(ECHO)
    self._note_reply_pending(ECHO, True)
    # Set local echo state immediately; the peer's acknowledgement is
    # tracked separately via the pending-reply bookkeeping above.
    self.telnet_echo = True
|
def _create_overrides_from_config ( config ) :
"""Creates a two - level dictionary of d [ section ] [ option ] from a config parser object ."""
|
d = { }
for s in config . sections ( ) :
d [ s ] = { }
for opt in config . options ( s ) :
d [ s ] [ opt ] = config . get ( s , opt )
return d
|
def materialize_as_ndarray(a):
    """Convert distributed arrays to ndarrays."""
    if type(a) not in (list, tuple):
        return np.asarray(a)
    # Materialize any dask arrays in a single compute() pass so shared
    # intermediates are evaluated only once.
    if da is not None and any(isinstance(arr, da.Array) for arr in a):
        return da.compute(*a, sync=True)
    return tuple(np.asarray(arr) for arr in a)
|
def to_excel(self, *args):
    """Dump all trend data to an Excel file.

    The output file name is derived from ``self.fname`` (``.tpl`` becomes
    ``_tpl`` with an ``.xlsx`` suffix).  An output directory may be passed
    as the first positional argument and is created if missing; otherwise
    the file is written to ``self.path``.
    """
    fname = self.fname.replace(".tpl", "_tpl") + ".xlsx"
    # Extract every trend before exporting.
    for idx in self.filter_trends(""):
        self.extract(idx)
    data_df = pd.DataFrame(self.data)
    data_df.columns = self.label.values()
    data_df.insert(0, "Time [s]", self.time)
    if len(args) > 0 and args[0] != "":
        path = args[0]
        # `if os.path.exists(path) == False` replaced with the idiomatic
        # negation; the unused `path = os.getcwd()` default was dead code.
        if not os.path.exists(path):
            os.mkdir(path)
        data_df.to_excel(path + os.sep + fname)
    else:
        data_df.to_excel(self.path + os.sep + fname)
|
def get_parties(self, obj):
    """Serialize and return every party."""
    queryset = Party.objects.all()
    return PartySerializer(queryset, many=True).data
|
def system_info(url, auth, verify_ssl):
    """Retrieve SDC system information.

    Args:
        url (str): the host url.
        auth (tuple): a tuple of username, and password.
        verify_ssl (bool): whether to verify the server's TLS certificate.

    Returns:
        dict: the parsed JSON body of the ``/info`` endpoint.
    """
    response = requests.get(url + '/info', headers=X_REQ_BY, auth=auth, verify=verify_ssl)
    # Surface HTTP errors to the caller instead of returning a bad payload.
    response.raise_for_status()
    return response.json()
|
def get_reports(self, **params):
    """List all reports.

    See https://developers.coinbase.com/api/v2#list-all-reports
    """
    return self._make_api_object(self._get('v2', 'reports', data=params), Report)
|
def get_default_client(path=None, ui=None, **kwargs):
    """Get a client for a connected Trezor device.

    Returns a TrezorClient instance with minimum fuss.

    If no path is specified, finds the first connected Trezor; otherwise
    performs a prefix-search for the specified device.  If no UI is
    supplied, instantiates the default CLI UI.
    """
    # Imported lazily to avoid pulling in transport/UI deps at module load.
    from .transport import get_transport
    from .ui import ClickUI

    if ui is None:
        ui = ClickUI()
    transport = get_transport(path, prefix_search=True)
    return TrezorClient(transport, ui, **kwargs)
|
def typelogged_module(md):
    """Works like typelogged, but is only applicable to modules (by explicit call).

    md must be a module or a module name contained in sys.modules.
    Returns the (possibly wrapped) module object or the original argument.
    """
    if not pytypes.typelogging_enabled:
        return md
    if isinstance(md, str):
        if md in sys.modules:
            md = sys.modules[md]
            if md is None:
                return md
        elif md in pytypes.typechecker._pending_modules:
            # if import is pending, we just store this call for later
            pytypes.typechecker._pending_modules[md].append(typelogged_module)
            return md
    assert(ismodule(md))
    if md.__name__ in pytypes.typechecker._pending_modules:
        # if import is pending, we just store this call for later
        pytypes.typechecker._pending_modules[md.__name__].append(typelogged_module)
        # we already process the module now as far as possible for its internal use
        # todo: Issue warning here that not the whole module might be covered yet
    assert(ismodule(md))
    # Skip work when a previous pass already covered every current member.
    if md.__name__ in _fully_typelogged_modules and _fully_typelogged_modules[md.__name__] == len(md.__dict__):
        return md
    # To play it safe we avoid to modify the dict while iterating over it,
    # so we previously cache keys.
    # For this we don't use keys() because of Python 3.
    # Todo: Better use inspect.getmembers here
    keys = [key for key in md.__dict__]
    for key in keys:
        memb = md.__dict__[key]
        # Only wrap members defined in this module, not re-exported names.
        if _check_as_func(memb) and memb.__module__ == md.__name__:
            setattr(md, key, typelogged_func(memb))
        elif isclass(memb) and memb.__module__ == md.__name__:
            typelogged_class(memb)
    # Record full coverage only when no import for this module is pending.
    if not md.__name__ in pytypes.typechecker._pending_modules:
        _fully_typelogged_modules[md.__name__] = len(md.__dict__)
    return md
|
def _write_rigid_information ( xml_file , rigid_bodies ) :
"""Write rigid body information .
Parameters
xml _ file : file object
The file object of the hoomdxml file being written
rigid _ bodies : list , len = n _ particles
The rigid body that each particle belongs to ( - 1 for none )"""
|
if not all ( body is None for body in rigid_bodies ) :
xml_file . write ( '<body>\n' )
for body in rigid_bodies :
if body is None :
body = - 1
xml_file . write ( '{}\n' . format ( int ( body ) ) )
xml_file . write ( '</body>\n' )
|
def write_diversity_metrics(data, sample_ids, fp=None):
    """Given a dictionary of diversity calculations (keyed by method),
    write out the data to a tab-separated file.

    :param data: two-level mapping of group -> {sample id -> value}.
    :param sample_ids: unused; kept for interface compatibility.
    :param fp: output path; defaults to ``./diversity_data.txt``.
    """
    if fp is None:
        fp = "./diversity_data.txt"
    with open(fp, "w") as outf:
        out = csv.writer(outf, delimiter="\t")
        out.writerow(["SampleID", "Group", "Calculation"])
        # dict.iteritems() was Python 2 only and raises AttributeError on
        # Python 3; items() behaves the same on both.
        for group, d in data.items():
            for sid, value in d.items():
                out.writerow([sid, group, value])
|
def format_options(self, ctx, formatter):
    """Write SCL related options into the formatter as a separate group."""
    super(Pyp2rpmCommand, self).format_options(ctx, formatter)
    scl_opts = [param.get_scl_help_record(ctx)
                for param in self.get_params(ctx)
                if isinstance(param, SclizeOption)]
    if scl_opts:
        with formatter.section('SCL related options'):
            formatter.write_dl(scl_opts)
|
def on_site(self, site_id=None):
    """Return a :class:`QuerySet` of pages that are published on the site
    defined by the ``SITE_ID`` setting.

    :param site_id: specify the id of the site object to filter with;
        defaults to ``settings.SITE_ID`` when falsy.
    """
    if not settings.PAGE_USE_SITE_ID:
        # Site filtering disabled: every page qualifies.
        return self.all()
    return self.filter(sites=site_id or settings.SITE_ID)
|
def get_user_token():
    """Return the authenticated user's auth token.

    Returns the empty string when no user is attached to the current
    request context or the user record carries no token.
    """
    context = stack.top
    if not hasattr(context, 'current_user'):
        return ''
    return context.current_user.get('token', '')
|
def convertTranscriptEffect(self, annStr, hgvsG):
    """Takes the ANN string of a SnpEff generated VCF, splits it
    and returns a populated GA4GH transcript effect object.

    :param annStr: String - one pipe-delimited annotation entry
    :param hgvsG: String - genomic HGVS notation for the variant
    :return: effect protocol.TranscriptEffect()
    """
    effect = self._createGaTranscriptEffect()
    effect.hgvs_annotation.CopyFrom(protocol.HGVSAnnotation())
    annDict = dict()
    # Field order differs between annotation sources; pick the matching
    # header list to label the pipe-separated values.
    if self._annotationType == ANNOTATIONS_SNPEFF:
        annDict = dict(zip(self.SNPEFF_FIELDS, annStr.split("|")))
    elif self._annotationType == ANNOTATIONS_VEP_V82:
        annDict = dict(zip(self.VEP_FIELDS, annStr.split("|")))
    else:
        annDict = dict(zip(self.CSQ_FIELDS, annStr.split("|")))
    annDict["hgvs_annotation.genomic"] = hgvsG if hgvsG else u''
    for key, val in annDict.items():
        try:
            # deepSetAttr follows dotted keys into nested message fields.
            protocol.deepSetAttr(effect, key, val)
        except AttributeError:
            # Fields without a dedicated message slot land in the generic
            # attributes map, unless explicitly excluded or empty.
            if val and key not in self.EXCLUDED_FIELDS:
                protocol.setAttribute(effect.attributes.attr[key].values, val)
    effect.effects.extend(self.convertSeqOntology(annDict.get('effects')))
    self.addLocations(effect, annDict.get('protPos'), annDict.get('cdnaPos'))
    # The id is derived from the populated effect, so it must be set last.
    effect.id = self.getTranscriptEffectId(effect)
    return effect
|
def endpoint_present(name, publicurl=None, internalurl=None, adminurl=None, region=None, profile=None, url=None, interface=None, **connection_args):
    '''Ensure the specified endpoints exists for service

    name
        The Service name
    publicurl
        The public url of service endpoint (for V2 API)
    internalurl
        The internal url of service endpoint (for V2 API)
    adminurl
        The admin url of the service endpoint (for V2 API)
    region
        The region of the endpoint
    url
        The endpoint URL (for V3 API)
    interface
        The interface type, which describes the visibility
        of the endpoint. (for V3 API)
    '''
    # Standard salt state return structure.
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    # Called for its side effect of setting _OS_IDENTITY_API_VERSION.
    _api_version(profile=profile, **connection_args)
    endpoint = __salt__['keystone.endpoint_get'](name, region, profile=profile, interface=interface, **connection_args)

    def _changes(desc):
        # Append a change description to the accumulated comment.
        return ret.get('comment', '') + desc + '\n'

    def _create_endpoint():
        # V3 endpoints take url/interface; V2 takes the three url kinds.
        if _OS_IDENTITY_API_VERSION > 2:
            ret['changes'] = __salt__['keystone.endpoint_create'](name, region=region, url=url, interface=interface, profile=profile, **connection_args)
        else:
            ret['changes'] = __salt__['keystone.endpoint_create'](name, region=region, publicurl=publicurl, adminurl=adminurl, internalurl=internalurl, profile=profile, **connection_args)

    if endpoint and 'Error' not in endpoint and endpoint.get('region') == region:
        # An endpoint already exists in this region: diff it field by field.
        if _OS_IDENTITY_API_VERSION > 2:
            change_url = False
            change_interface = False
            if endpoint.get('url', None) != url:
                ret['comment'] = _changes('URL changes from "{0}" to "{1}"'.format(endpoint.get('url', None), url))
                change_url = True
            if endpoint.get('interface', None) != interface:
                ret['comment'] = _changes('Interface changes from "{0}" to "{1}"'.format(endpoint.get('interface', None), interface))
                change_interface = True
            # In test mode report the pending update without applying it.
            if __opts__.get('test') and (change_url or change_interface):
                ret['result'] = None
                ret['changes']['Endpoint'] = 'Will be updated'
                ret['comment'] += 'Endpoint for service "{0}" will be updated'.format(name)
                return ret
            if change_url:
                ret['changes']['url'] = url
            if change_interface:
                ret['changes']['interface'] = interface
        else:
            change_publicurl = False
            change_adminurl = False
            change_internalurl = False
            if endpoint.get('publicurl', None) != publicurl:
                change_publicurl = True
                ret['comment'] = _changes('Public URL changes from "{0}" to "{1}"'.format(endpoint.get('publicurl', None), publicurl))
            if endpoint.get('adminurl', None) != adminurl:
                change_adminurl = True
                ret['comment'] = _changes('Admin URL changes from "{0}" to "{1}"'.format(endpoint.get('adminurl', None), adminurl))
            if endpoint.get('internalurl', None) != internalurl:
                change_internalurl = True
                ret['comment'] = _changes('Internal URL changes from "{0}" to "{1}"'.format(endpoint.get('internalurl', None), internalurl))
            # In test mode report the pending update without applying it.
            if __opts__.get('test') and (change_publicurl or change_adminurl or change_internalurl):
                ret['result'] = None
                ret['comment'] += 'Endpoint for service "{0}" will be updated'.format(name)
                ret['changes']['Endpoint'] = 'Will be updated'
                return ret
            if change_publicurl:
                ret['changes']['publicurl'] = publicurl
            if change_adminurl:
                ret['changes']['adminurl'] = adminurl
            if change_internalurl:
                ret['changes']['internalurl'] = internalurl
        if ret['comment']:  # changed
            # Keystone endpoints are immutable here: delete then recreate.
            __salt__['keystone.endpoint_delete'](name, region, profile=profile, interface=interface, **connection_args)
            _create_endpoint()
            ret['comment'] += 'Endpoint for service "{0}" has been updated'.format(name)
    else:  # Add new endpoint
        if __opts__.get('test'):
            ret['result'] = None
            ret['changes']['Endpoint'] = 'Will be created'
            ret['comment'] = 'Endpoint for service "{0}" will be added'.format(name)
            return ret
        _create_endpoint()
        ret['comment'] = 'Endpoint for service "{0}" has been added'.format(name)
    if ret['comment'] == '':  # => no changes
        ret['comment'] = 'Endpoint for service "{0}" already exists'.format(name)
    return ret
|
def getRelativePath(basepath, path):
    """Get a path that is relative to the given base path.

    Walks the components of ``basepath``: shared leading components are
    dropped from ``path`` and every remaining base component contributes
    one ``..``.  Returns ``os.curdir`` when the paths are identical.
    """
    basepath = splitpath(os.path.abspath(basepath))
    path = splitpath(os.path.abspath(path))
    afterCommon = False
    for c in basepath:
        # `not path` guards the case where `path` is an ancestor of
        # `basepath`; the original raised IndexError on `path[0]` here.
        if afterCommon or not path or path[0] != c:
            path.insert(0, os.path.pardir)
            afterCommon = True
        else:
            del path[0]
    if not path:
        # Identical paths: os.path.join(*[]) would raise TypeError.
        return os.curdir
    return os.path.join(*path)
|
def cut_video_stream(stream, start, end, fmt):
    """Cut a video stream from `start` to `end` time.

    Parameters
    ----------
    stream : bytes
        video file content
    start : float
        start time
    end : float
        end time
    fmt : str
        file suffix selecting the container format (e.g. ".mp4")

    Returns
    -------
    result : bytes
        content of the cut video, or the original `stream` unchanged when
        ffmpeg is unavailable or exits with an error
    """
    with TemporaryDirectory() as workdir:
        src = Path(workdir) / f"in{fmt}"
        dst = Path(workdir) / f"out{fmt}"
        src.write_bytes(stream)
        try:
            proc = subprocess.run(
                ["ffmpeg", "-ss", f"{start}", "-i", f"{src}",
                 "-to", f"{end}", "-c", "copy", f"{dst}"],
                capture_output=True,
            )
        except FileNotFoundError:
            # ffmpeg is not installed; fall back to the uncut stream.
            return stream
        if proc.returncode:
            return stream
        return dst.read_bytes()
|
def generate_module(spec, out):
    """Given an AMQP spec parsed into an xml.etree.ElementTree,
    and a file-like 'out' object to write to, generate
    the skeleton of a Python module.
    """
    # HACK THE SPEC so that 'access' is handled by 'channel' instead of 'connection'
    for amqp_class in spec.findall('class'):
        if amqp_class.attrib['name'] == 'access':
            amqp_class.attrib['handler'] = 'channel'
    # Build up some helper dictionaries
    for domain in spec.findall('domain'):
        domains[domain.attrib['name']] = domain.attrib['type']
    for amqp_class in spec.findall('class'):
        for amqp_method in amqp_class.findall('method'):
            method_name_map[(amqp_class.attrib['name'], amqp_method.attrib['name'])] = (
                amqp_class.attrib['index'],
                amqp_method.attrib['index'],
                amqp_class.attrib['handler'].capitalize() + '.'
                + _fixup_method_name(amqp_class, amqp_method),
            )
    #### Actually generate output
    for amqp_class in spec.findall('class'):
        if amqp_class.attrib['handler'] == amqp_class.attrib['name']:
            generate_class(spec, amqp_class, out)
    out.write('_METHOD_MAP = {\n')
    for amqp_class in spec.findall('class'):
        # Fixed: the Python 2 `print amqp_class.attrib` statement was a
        # SyntaxError under Python 3; use the print() function.
        print(amqp_class.attrib)
        # for chassis in amqp_class.findall('chassis'):
        #     print('  ', chassis.attrib)
        for amqp_method in amqp_class.findall('method'):
            # for chassis in amqp_method.findall('chassis'):
            #     print('  ', chassis.attrib)
            # Only methods the client side receives go into the dispatch map.
            chassis = [x.attrib['name'] for x in amqp_method.findall('chassis')]
            if 'client' in chassis:
                out.write(" (%s, %s): (%s, %s._%s),\n" % (amqp_class.attrib['index'], amqp_method.attrib['index'], amqp_class.attrib['handler'].capitalize(), amqp_class.attrib['handler'].capitalize(), _fixup_method_name(amqp_class, amqp_method)))
    out.write('}\n\n')
    out.write('_METHOD_NAME_MAP = {\n')
    for amqp_class in spec.findall('class'):
        for amqp_method in amqp_class.findall('method'):
            out.write(" (%s, %s): '%s.%s',\n" % (amqp_class.attrib['index'], amqp_method.attrib['index'], amqp_class.attrib['handler'].capitalize(), _fixup_method_name(amqp_class, amqp_method)))
    out.write('}\n')
|
def Insert(self, key, value, row_index):
    """Inserts new values at a specified offset.

    Args:
        key: string for header value.
        value: string for a data value.
        row_index: Offset into row for data.

    Raises:
        IndexError: If the offset is out of bounds.
    """
    # Negative offsets count from the end, as with list indexing.
    if row_index < 0:
        row_index += len(self)
    if not 0 <= row_index < len(self):
        raise IndexError('Index "%s" is out of bounds.' % row_index)
    # Rebuild the row, splicing the new column in at the requested offset.
    new_row = Row()
    for idx in self.header:
        if self.index(idx) == row_index:
            new_row[key] = value
        new_row[idx] = self[idx]
    self._keys = new_row.header
    self._values = new_row.values
    del new_row
    # Keep the internal lookup index in sync with the new layout.
    self._BuildIndex()
|
def write_dat(self):
    """Write ``system.Varout.vars`` to a ``.dat`` file.

    :return: True on success, False on I/O failure
    """
    # `Logger.warn` is a deprecated alias of `Logger.warning`.
    logger.warning('This function is deprecated and replaced by `write_np_dat`.')
    ret = False
    system = self.system
    # compute the total number of columns, excluding time
    if not system.Recorder.n:
        n_vars = system.dae.m + system.dae.n
        # post-computed power flows include:
        #   bus  - (Pi, Qi)
        #   line - (Pij, Pji, Qij, Qji, Iij_Real, Iij_Imag, Iji_real, Iji_Imag)
        if system.tds.config.compute_flows:
            n_vars += 2 * system.Bus.n + 8 * system.Line.n + 2 * system.Area.n_combination
        idx = list(range(n_vars))
    else:
        # only export the variables selected by the Recorder
        n_vars = len(system.Recorder.varout_idx)
        idx = system.Recorder.varout_idx
    # one left-aligned time column followed by fixed-precision values
    template = ['{:<8g}'] + ['{:0.10f}'] * n_vars
    template = ' '.join(template)
    # format the output in a string
    out = ''
    for t, line in zip(self.t, self.vars):
        values = [t] + list(line[idx])
        out += template.format(*values) + '\n'
    try:
        os.makedirs(os.path.abspath(os.path.dirname(system.files.dat)), exist_ok=True)
        with open(system.files.dat, self._mode) as f:
            f.write(out)
        ret = True
    except IOError:
        logger.error('I/O Error while writing the dat file.')
    return ret
|
def markdown_toclify(input_file, output_file=None, github=False, back_to_top=False, nolink=False, no_toc_header=False, spacer=0, placeholder=None, exclude_h=None, remove_dashes=False):
    """Function to add a table of contents to markdown files.

    Parameters
    ----------
    input_file : str
        Path to the markdown input file.
    output_file : str (default: None)
        Path to the markdown output file.
    github : bool (default: False)
        Uses GitHub TOC syntax if True.
    back_to_top : bool (default: False)
        Inserts back-to-top links below headings if True.
    nolink : bool (default: False)
        Creates the table of contents without internal links if True.
    no_toc_header : bool (default: False)
        Suppresses the Table of Contents header if True.
    spacer : int (default: 0)
        Inserts horizontal space (in pixels) after the table of contents.
    placeholder : str (default: None)
        Inserts the TOC at the placeholder string instead
        of inserting the TOC at the top of the document.
    exclude_h : list (default: None)
        Excludes header levels, e.g., if [2, 3], ignores header
        levels 2 and 3 in the TOC.
    remove_dashes : bool (default: False)
        Removes dashes from headline slugs.

    Returns
    -------
    cont : str
        Markdown contents including the TOC.
    """
    raw_contents = read_lines(input_file)
    # Strip artifacts left by a previous run so the TOC is not duplicated.
    cleaned_contents = remove_lines(raw_contents, remove=('[[back to top]', '<a class="mk-toclify"'))
    processed_contents, raw_headlines = tag_and_collect(cleaned_contents, id_tag=not github, back_links=back_to_top, exclude_h=exclude_h, remove_dashes=remove_dashes)
    leftjustified_headlines = positioning_headlines(raw_headlines)
    processed_headlines = create_toc(leftjustified_headlines, hyperlink=not nolink, top_link=not nolink and not github, no_toc_header=no_toc_header)
    # Without links there is no need for the anchor tags added above.
    if nolink:
        processed_contents = cleaned_contents
    cont = build_markdown(toc_headlines=processed_headlines, body=processed_contents, spacer=spacer, placeholder=placeholder)
    if output_file:
        output_markdown(cont, output_file)
    return cont
|
def email_action_view(self, id, action):
    """Perform action 'action' on UserEmail object 'id'"""
    # Retrieve the UserEmail; users may only manage their own addresses.
    user_email = self.db_manager.get_user_email_by_id(id=id)
    if not user_email or user_email.user_id != current_user.id:
        return self.unauthorized_view()

    if action == 'delete':
        # The primary UserEmail can not be deleted.
        if user_email.is_primary:
            return self.unauthorized_view()
        self.db_manager.delete_object(user_email)
        self.db_manager.commit()
    elif action == 'make-primary':
        # Demote whichever address is currently primary, then promote this one.
        for other in self.db_manager.find_user_emails(current_user):
            if other.is_primary:
                other.is_primary = False
                self.db_manager.save_object(other)
        user_email.is_primary = True
        self.db_manager.save_object(user_email)
        self.db_manager.commit()
    elif action == 'confirm':
        # Re-send the confirmation email for this address.
        self._send_confirm_email_email(user_email.user, user_email)
    else:
        return self.unauthorized_view()

    return redirect(url_for('user.manage_emails'))
|
def announce(self, discovery):
    """With the passed in Discovery class, attempt to announce to the host agent.

    Returns the HTTP response on success, or None when the request failed.
    """
    try:
        url = self.__discovery_url()
        logger.debug("making announce request to %s" % (url))
        response = None
        response = self.client.put(url,
                                   data=self.to_json(discovery),
                                   headers={"Content-Type": "application/json"},
                                   timeout=0.8)
        # Fixed: `status_code is 200` compared object identity and only
        # worked by accident of CPython's small-int caching; use equality.
        if response.status_code == 200:
            self.last_seen = datetime.now()
    except (requests.ConnectTimeout, requests.ConnectionError):
        logger.debug("announce", exc_info=True)
    finally:
        # NOTE(review): `return` inside `finally` also swallows exceptions
        # not caught above (e.g. ReadTimeout) and yields None in that case —
        # preserved here as it appears intentional best-effort behavior.
        return response
|
def get_bins(self):
    """Gets the bin list resulting from the search.

    return: (osid.resource.BinList) - the bin list
    raise:  IllegalState - list already retrieved
    *compliance: mandatory -- This method must be implemented.*
    """
    # Results are single-use: a second call must raise per the OSID contract.
    if self.retrieved:
        raise errors.IllegalState('List has already been retrieved.')
    self.retrieved = True
    return objects.BinList(self._results, runtime=self._runtime)
|
def init_notification(self, playingsong):
    '''Attach the cover image when sending the first desktop notification.'''
    logger.debug('init_notification')
    # Remember which song we started fetching art for; get_pic may block.
    old_title = playingsong['title']
    self.cover_file = tempfile.NamedTemporaryFile(suffix='.jpg', dir=self._tempdir)
    if not self.get_pic(playingsong, self.cover_file.name):
        return
    title = playingsong['title']
    if old_title != title:  # already switched to the next song
        return
    self.has_cover = True
    content = playingsong['artist'] + ' - ' + playingsong['albumtitle']
    # NOTE(review): the .decode('utf-8') calls imply Python 2 byte strings
    # in `playingsong` — confirm before porting to Python 3.
    send_notification(title.decode('utf-8'), content.decode('utf-8'), self.cover_file.name)
|
def render(self, **kwargs):
    """Plots the curve and the control points polygon.

    Keyword arguments:
        fig_save_as: file name to save the figure as (optional)
        display_plot: whether to display the plot (default True)
    """
    # Calling parent function
    super(VisCurve2D, self).render(**kwargs)
    # Initialize variables
    plot_data = []
    for plot in self._plots:
        pts = np.array(plot['ptsarr'], dtype=self.vconf.dtype)
        # Plot control points (dashed polygon with markers)
        if plot['type'] == 'ctrlpts' and self.vconf.display_ctrlpts:
            figure = graph_objs.Scatter(x=pts[:, 0], y=pts[:, 1], name=plot['name'], mode='lines+markers', line=dict(color=plot['color'], width=self.vconf.line_width, dash='dash'))
            plot_data.append(figure)
        # Plot evaluated points (solid curve)
        if plot['type'] == 'evalpts' and self.vconf.display_evalpts:
            figure = graph_objs.Scatter(x=pts[:, 0], y=pts[:, 1], name=plot['name'], mode='lines', line=dict(color=plot['color'], width=self.vconf.line_width))
            plot_data.append(figure)
        # Plot bounding box (dash-dot outline)
        if plot['type'] == 'bbox' and self.vconf.display_bbox:
            figure = graph_objs.Scatter(x=pts[:, 0], y=pts[:, 1], name=plot['name'], line=dict(color=plot['color'], width=self.vconf.line_width, dash='dashdot', ))
            plot_data.append(figure)
        # Plot extras (marker-only scatter; color carries (color, size))
        if plot['type'] == 'extras':
            figure = graph_objs.Scatter(x=pts[:, 0], y=pts[:, 1], name=plot['name'], mode='markers', marker=dict(color=plot['color'][0], size=plot['color'][1], line=dict(width=self.vconf.line_width)))
            plot_data.append(figure)
    # scaleanchor="x" keeps the aspect ratio square.
    plot_layout = dict(width=self.vconf.figure_size[0], height=self.vconf.figure_size[1], autosize=False, showlegend=self.vconf.display_legend, yaxis=dict(scaleanchor="x", showgrid=self.vconf.display_axes, showline=self.vconf.display_axes, zeroline=self.vconf.display_axes, showticklabels=self.vconf.display_axes, ), xaxis=dict(showgrid=self.vconf.display_axes, showline=self.vconf.display_axes, zeroline=self.vconf.display_axes, showticklabels=self.vconf.display_axes, ))
    # Generate the figure
    fig = graph_objs.Figure(data=plot_data, layout=plot_layout)
    # Process keyword arguments
    fig_filename = kwargs.get('fig_save_as', None)
    fig_display = kwargs.get('display_plot', True)
    # Prepare plot configuration
    plotfn_dict = {'show_link': False, 'filename': self.vconf.figure_filename, 'image': None if fig_display else self.vconf.figure_image_format, }
    if self.vconf.no_ipython:
        plotfn_dict_extra = {'image_filename': self.vconf.figure_image_filename if fig_filename is None else fig_filename, 'auto_open': fig_display, }
        # Python < 3.5 does not support starred expressions inside dicts
        plotfn_dict.update(plotfn_dict_extra)
    # Display the plot
    self.vconf.plotfn(fig, **plotfn_dict)
|
def _decode(data):
    """Decode the base-64 encoded string.

    :param data: base-64 encoded payload (str or bytes)
    :return: decoded data (bytes)
    """
    if not isinstance(data, bytes_types):
        # Coerce non-bytes input to bytes first (six.b handles py2/py3).
        data = six.b(str(data))
    return base64.b64decode(data.decode("utf-8"))
|
def price(self, from_=None, **kwargs):
    """Check pricing for a new outbound message.

    A convenient synonym for the "message" command with the "dummy"
    parameter set to true.

    :Example:

    message = client.messages.price(from_="447624800500", phones="999000001", text="Hello!", lists="1909100")

    :param str from_: One of the allowed Sender IDs (phone number or alphanumeric sender ID).
    :param str text: Message text. Required if templateId is not set.
    :param str templateId: Template used instead of message text. Required if text is not set.
    :param str sendingTime: Message sending time in unix timestamp format. Default is now.
        Optional (required with rrule set).
    :param str contacts: Contacts ids, separated by comma, message will be sent to.
    :param str lists: Lists ids, separated by comma, message will be sent to.
    :param str phones: Phone numbers, separated by comma, message will be sent to.
    :param int cutExtra: Should sending method cut extra characters which do not fit the
        supplied partsCount, or return a 400 Bad request response instead. Default is false.
    :param int partsCount: Maximum message parts count (TextMagic allows sending 1 to 6
        message parts). Default is 6.
    :param str referenceId: Custom message reference id usable in your application infrastructure.
    :param str rrule: iCal RRULE parameter to create recurrent scheduled messages.
        When used, sendingTime is mandatory as start point of sending.
    :param int dummy: If 1, just return message pricing. Message will not send.
    """
    # The API expects the reserved word "from"; `from_` avoids the keyword.
    if from_:
        kwargs["from"] = from_
    _, instance = self.request("GET", "%s/%s" % (self.uri, "price"), params=kwargs)
    return instance
|
def connect_callbacks(self, callbacks_bag):
    """Connect callbacks specified in callbacks_bag with callbacks
    defined in the ui definition.

    Return a list with the names of the callbacks that could not be
    connected, or None when everything was wired up.
    """
    notconnected = []
    for builderobj in self.objects.values():
        # Both command and binding hookups may report missing callbacks.
        for missing in (builderobj.connect_commands(callbacks_bag),
                        builderobj.connect_bindings(callbacks_bag)):
            if missing is not None:
                notconnected.extend(missing)
    if not notconnected:
        return None
    # De-duplicate before reporting.
    notconnected = list(set(notconnected))
    logger.warning('Missing callbacks for commands: {}'.format(notconnected))
    return notconnected
|
def get_data_pct(self, xpct, ypct):
    """Calculate new data size for the given axis ratios.

    See :meth:`get_limits`.

    Parameters
    ----------
    xpct, ypct : float
        Ratio for X and Y, respectively, where 1 is 100%.

    Returns
    -------
    x, y : int
        Scaled dimensions.
    """
    lo, hi = self.get_limits()
    span_x = abs(hi[0] - lo[0])
    span_y = abs(hi[1] - lo[1])
    return (int(float(xpct) * span_x), int(float(ypct) * span_y))
|
def add_metadata(self, observation, info, available_at=None):
    """Extract metadata from a pixel observation and add it to the info.

    Decodes diagnostic metadata embedded in the 'vision' part of the
    observation and writes lag/clock-skew gauges into *info* (mutated in
    place). Returns None early when there is nothing to decode.
    """
    observation = observation['vision']
    if observation is None:
        return
    if self.network is not None and not self.network.active():
        return
    elif self.metadata_decoder is None:
        return
    elif observation is None:
        # NOTE(review): dead branch -- observation was already checked for
        # None above and the function returned. Candidate for removal.
        return
    # should return a dict with now/probe_received_at keys
    with pyprofile.push('vnc_env.diagnostics.DiagnosticsInstance.add_metadata.decode'):
        metadata = self.metadata_decoder.decode(observation, available_at=available_at)
    if metadata is False:  # No metadata ready, though it doesn't mean parsing failed
        metadata = None
    elif metadata is None:
        # Parsing failed: log the transition edge only, not every frame.
        if self.could_read_metadata:
            self.could_read_metadata = False
            extra_logger.info('[%s] Stopped being able to read metadata (expected when environment resets)', self.label)
    elif not self.could_read_metadata:
        self.could_read_metadata = True
        extra_logger.info('[%s] Started being able to read metadata', self.label)
    if self.metadata_decoder.flag_synchronous and metadata is not None:
        info['diagnostics.image_remote_time'] = metadata['now']
    # NOTE(review): this assignment appears unused -- local_now is
    # re-assigned below (before the reward-lag section) without any read
    # in between. Candidate for removal; confirm.
    local_now = time.time()
    if self.network is None:
        # Assume the clock skew is zero. Should only be run on the
        # same machine as the VNC server, such as the universe
        # instance inside of the environmenth containers.
        real_clock_skew = self.zero_clock_skew
    else:
        # Note: this is a 2-length vector of (min, max), so anything added to
        # it is also going to be a 2-length vector.
        # Most of the diagnostics below are, but you have to look carefully.
        real_clock_skew = self.network.reversed_clock_skew()
    # Store real clock skew here
    info['stats.gauges.diagnostics.clock_skew'] = real_clock_skew
    if self.ignore_clock_skew:
        clock_skew = self.zero_clock_skew
    else:
        clock_skew = real_clock_skew
    if metadata is not None:
        # We'll generally update the observation timestamp infrequently
        if self.last_observation_timestamp == metadata['now']:
            delta = None
        else:
            # We just got a new timestamp in the observation!
            self.last_observation_timestamp = metadata['now']
            observation_now = metadata['now']
            delta = observation_now - metadata['available_at']
            # Subtract *local* time it was received from the *remote* time
            # displayed. Negate and reverse order to fix time ordering.
            info['stats.gauges.diagnostics.lag.observation'] = -(delta + clock_skew)[[1, 0]]
        # if self.network is None:
        #     # The rest of diagnostics need the network, so we're done here
        #     return
        probe_received_at = metadata['probe_received_at']
        if probe_received_at == 0 or self.disable_action_probes:
            # Happens when the env first starts
            self.probe_received_at = None
        elif self.probe_received_at is None:  # this also would work for the equality case
            self.probe_received_at = probe_received_at
        elif self.probe_received_at != probe_received_at and self.probe_sent_at is None:
            logger.info('[%s] Probe is marked as received at %s, but probe_sent_at is None. This is surprising. (HINT: do you have multiple universe instances talking to the same environment?)', self.label, probe_received_at)
        elif self.probe_received_at != probe_received_at:
            extra_logger.debug('[%s] Next probe received: old=%s new=%s', self.label, self.probe_received_at, probe_received_at)
            self.probe_received_at = probe_received_at
            # Subtract the *local* time we sent it from the *remote* time it was received
            self.action_latency_skewed = probe_received_at - self.probe_sent_at
            self.probe_sent_at = None
        if self.action_latency_skewed:
            action_lag = self.action_latency_skewed + clock_skew
            self.action_latency_skewed = None
        else:
            action_lag = None
        info['stats.gauges.diagnostics.lag.action'] = action_lag
    local_now = time.time()
    # Look at when the remote believed it parsed the score (not
    # all envs send this currently).
    # Also, if we received no new rewards, then this values is
    # None. This could indicate a high reward latency (bad,
    # uncommon), or that the agent is calling step faster than new
    # rewards are coming in (good, common).
    remote_score_now = info.get('rewarder.lag.observation.timestamp')
    if remote_score_now is not None:
        delta = remote_score_now - local_now
        info['stats.gauges.diagnostics.lag.reward'] = -(delta + clock_skew)[[1, 0]]
    # Look at when the remote send the message, so we know how
    # long it's taking for messages to get to us.
    rewarder_message_now = info.get('reward_buffer.remote_time')
    if rewarder_message_now:
        delta = rewarder_message_now - local_now
        info['stats.gauges.diagnostics.lag.rewarder_message'] = -(delta + clock_skew)[[1, 0]]
|
def rest_post(self, url, params=None, headers=None, auth=None, verify=True, cert=None):
    """Perform a POST request to *url* with optional authentication.

    (Docstring fixed: it previously said "PUT" but the code issues a POST.)

    :param url: target URL
    :param params: query-string parameters passed to requests.post
    :param headers: optional HTTP headers
    :param auth: optional requests auth object / (user, pass) tuple
    :param verify: TLS certificate verification flag or CA bundle path
    :param cert: optional client certificate
    :return: tuple of (response body text, HTTP status code)
    """
    res = requests.post(url, params=params, headers=headers, auth=auth, verify=verify, cert=cert)
    return res.text, res.status_code
|
def _filter(request, object_, tags=None, more=False, orderby='created'):
    """Filters Piece objects from self based on filters, search, and range.

    :param tags: List of tag ID buckets to filter (each bucket is OR-ed
        internally; buckets are AND-ed by repeated filtering)
    :type tags: list
    :param more: Returns more of the same filtered set of images based on
        the per-model "last_<model>" ids stored in the session
    :type more: bool
    :return: JsonResponse with the serialized, filtered objects
    """
    res = Result()
    models = QUERY_MODELS
    idDict = {}
    objDict = {}
    data = {}
    modelmap = {}
    # Page size: at most 75 objects per response.
    length = 75
    # -- Get all IDs for each model
    for m in models:
        modelmap[m.model_class()] = m.model
        if object_:
            idDict[m.model] = m.model_class().objects.filter(gallery=object_)
        else:
            idDict[m.model] = m.model_class().objects.all()
        if idDict[m.model] is None:
            continue
        if tags:
            for bucket in tags:
                searchQuery = ""
                o = None
                for item in bucket:
                    if item == 0:
                        # -- filter by tagless
                        # NOTE(review): the annotate() return value is
                        # discarded here -- QuerySet.annotate returns a new
                        # queryset, so this line is a no-op. The real
                        # annotation happens below where the filter is
                        # applied. Candidate for removal; confirm.
                        idDict[m.model].annotate(num_tags=Count('tags'))
                        if not o:
                            o = Q()
                        o |= Q(num_tags__lte=1)
                        break
                    elif isinstance(item, six.integer_types):
                        # -- filter by tag
                        if not o:
                            o = Q()
                        o |= Q(tags__id=item)
                    else:
                        # -- add to search string
                        searchQuery += item + ' '
                        if not HAYSTACK:
                            if not o:
                                o = Q()
                            # -- use a basic search
                            o |= Q(title__icontains=item)
                if HAYSTACK and searchQuery != "":
                    # -- once all tags have been filtered, filter by search
                    searchIDs = search(searchQuery, m.model_class())
                    if searchIDs:
                        if not o:
                            o = Q()
                        o |= Q(id__in=searchIDs)
                if o:
                    # -- apply the filters
                    idDict[m.model] = idDict[m.model].annotate(num_tags=Count('tags')).filter(o)
                else:
                    idDict[m.model] = idDict[m.model].none()
        # -- Get all ids of filtered objects, this will be a very fast query
        idDict[m.model] = list(idDict[m.model].order_by('-{}'.format(orderby)).values_list('id', flat=True))
        lastid = request.session.get('last_{}'.format(m.model), 0)
        if not idDict[m.model]:
            continue
        if not more:
            lastid = idDict[m.model][0]
        index = idDict[m.model].index(lastid)
        if more and lastid != 0:
            # Continue past the last object that was already delivered.
            index += 1
        idDict[m.model] = idDict[m.model][index:index + length]
        # -- perform the main query to retrieve the objects we want
        objDict[m.model] = m.model_class().objects.filter(id__in=idDict[m.model])
        objDict[m.model] = objDict[m.model].select_related('author').prefetch_related('tags').order_by('-{}'.format(orderby))
        objDict[m.model] = list(objDict[m.model])
    # -- combine and sort all objects by date
    # NOTE(review): objDict.values()[0] only works on Python 2 -- on
    # Python 3 dict.values() is a view and is not subscriptable
    # (list(objDict.values())[0] would be needed). Confirm target version.
    objects = _sortObjects(orderby, **objDict) if len(models) > 1 else objDict.values()[0]
    objects = objects[:length]
    # -- Find out last ids
    lastids = {}
    for obj in objects:
        lastids['last_{}'.format(modelmap[obj.__class__])] = obj.id
    for key, value in lastids.items():
        request.session[key] = value
    # -- serialize objects
    for i in objects:
        res.append(i.json())
    data['count'] = len(objects)
    if settings.DEBUG:
        data['queries'] = connection.queries
    res.value = data
    return JsonResponse(res.asDict())
|
def make_filter(**tests):
    """Create a filter from keyword arguments.

    Each keyword becomes one AttrTest(name, expected_value).
    """
    return Filter([AttrTest(attr, expected) for attr, expected in tests.items()])
|
def instance_of(klass, arg):
    """Require that a value has a particular Python type.

    Returns *arg* unchanged when it is an instance of *klass*; raises
    com.IbisTypeError otherwise.
    """
    if isinstance(arg, klass):
        return arg
    raise com.IbisTypeError('Given argument with type {} is not an instance of {}'.format(type(arg), klass))
|
def createOptimizer(self, model):
    """Create a new instance of the optimizer.

    Builds a plain SGD optimizer over all of *model*'s parameters using
    the lr / momentum / weight_decay hyper-parameters stored on self.
    """
    params = model.parameters()
    return torch.optim.SGD(
        params,
        lr=self.lr,
        momentum=self.momentum,
        weight_decay=self.weight_decay,
    )
|
def connection_made(self):
    """Protocols connection established handler.

    Logs that the BGP peer connection came up, tagging the log record
    with the neighbour's resource name/id for filtering.
    """
    extra = {
        'resource_name': self._neigh_conf.name,
        'resource_id': self._neigh_conf.id,
    }
    LOG.info('Connection to peer: %s established',
             self._neigh_conf.ip_address, extra=extra)
|
def ida_connect(host='localhost', port=18861, retry=10):
    """Connect to an instance of IDA running our server.py.

    :param host: The host to connect to
    :param port: The port to connect to
    :param retry: How many times to try after errors before giving up
    :return: the rpyc link on success
    :raises IDALinkError: if no connection could be made after *retry* tries
    """
    for attempt in range(retry):
        try:
            # Typo fixed: was 'Connectint'.
            LOG.debug('Connecting to %s:%d, try %d...', host, port, attempt + 1)
            link = rpyc_classic.connect(host, port)
            # Smoke-test the link before handing it out.
            link.eval('2 + 2')
        except socket.error:
            # Server may not be up yet; back off briefly and retry.
            time.sleep(1)
            continue
        else:
            LOG.debug('Connected to %s:%d', host, port)
            return link
    raise IDALinkError("Could not connect to %s:%d after %d tries" % (host, port, retry))
|
def get_image_path(definition):
    """Helper to get path of image from a definition in resource directory.

    :param definition: A definition (hazard, exposure).
    :type definition: dict

    :returns: The definition's image path, or the "not set" placeholder
        when no matching SVG exists.
    :rtype: str
    """
    filename = 'keyword-subcategory-%s.svg' % definition['key']
    candidate = resources_path('img', 'wizard', filename)
    return candidate if os.path.exists(candidate) else not_set_image_path
|
def __add_recent_file(self, fname):
    """Add to recent file list.

    Promotes *fname* to the front of the list (moving it if already
    present) and trims the list to the 'max_recent_files' option.
    None is ignored.
    """
    if fname is None:
        return
    if fname in self.recent_files:
        self.recent_files.remove(fname)
    self.recent_files.insert(0, fname)
    limit = self.get_option('max_recent_files')
    if len(self.recent_files) > limit:
        self.recent_files.pop()
|
def kernel_pixelsize_change(kernel, deltaPix_in, deltaPix_out):
    """Change the pixel size of a given kernel.

    :param kernel: 2d kernel array (square, odd-sized)
    :param deltaPix_in: pixel scale of the input kernel
    :param deltaPix_out: desired pixel scale of the output kernel
    :return: re-sampled and re-normalized kernel at the new pixel scale
    """
    n_in = len(kernel)
    n_out = int(round(n_in * deltaPix_in / deltaPix_out))
    if n_out % 2 == 0:
        # Keep an odd number of pixels so the kernel stays centred.
        n_out -= 1
    half_in = (n_in - 1) / 2 * deltaPix_in
    half_out = (n_out - 1) / 2 * deltaPix_out
    x_in = np.linspace(-half_in, half_in, n_in)
    x_out = np.linspace(-half_out, half_out, n_out)
    resized = image_util.re_size_array(x_in, x_in, kernel, x_out, x_out)
    return kernel_norm(resized)
|
def set_defaults(lvm_data):
    """dict: Sets all existing null string values to None.

    Mutates *lvm_data* in place and returns it for convenience.
    """
    for section in lvm_data:
        record = lvm_data[section]
        for key in record:
            if record[key] == '':
                record[key] = None
    return lvm_data
|
def sample_stats_prior_to_xarray(self):
    """Extract sample_stats_prior from prior.

    Converts the prior's sampler statistics into an xarray Dataset using
    the coords/dims configured on this converter.
    """
    stats = get_sample_stats(self.prior)
    return dict_to_dataset(stats, library=self.pystan,
                           coords=self.coords, dims=self.dims)
|
def id_name(label, namespace=None):
    """Given a name and a namespace, resolves and returns the name as
    namespace + '.' + name. If namespace is None, the current NAMESPACE
    is used.

    Returns the tuple (resolved_label, namespace).
    """
    if label.startswith(DOT):
        # Labels starting with DOT are global and stay unmangled.
        if namespace is None:
            namespace = GLOBAL_NAMESPACE
        return label, namespace
    if namespace is None:
        namespace = NAMESPACE
    # The mangled namespace.labelname label.
    return namespace + label, namespace
|
def sync():
    """Copy host -> device only if changed.

    :return: result of _exec_command() execution
    """
    command = [v.ADB_COMMAND_PREFIX, v.ADB_COMMAND_SHELL, v.ADB_COMMAND_SYNC]
    return _exec_command(command)
|
def random_mini_batches(X, Y, minibatch_size, seed=None):
    """Compute a list of minibatches from inputs X and targets Y.

    A datapoint is expected to be represented as a column in the data
    matrices X and Y. When Y is None, a dummy row of zeros is used so
    the (X, Y) pairing stays uniform. Columns are shuffled with the
    given seed before being chunked.
    """
    n_points = X.shape[1]
    if Y is None:
        Y = np.zeros((1, n_points))
    np.random.seed(seed)
    order = np.random.permutation(n_points)
    return [
        (X[:, order[start:start + minibatch_size]],
         Y[:, order[start:start + minibatch_size]])
        for start in range(0, n_points, minibatch_size)
    ]
|
def create_item_returning_id(self, api):
    """Create this item in the D4S2 service.

    :param api: D4S2Api object who communicates with D4S2 server.
    :return: str newly created id for this item
    """
    payload = api.create_item(self).json()
    return payload['id']
|
def files_mv(self, source, dest, **kwargs):
    """Moves files and directories within the MFS.

    .. code-block:: python

        >>> c.files_mv("/test/file", "/bla/file")

    Parameters
    ----------
    source : str
        Existing filepath within the MFS
    dest : str
        Destination to which the file will be moved in the MFS
    """
    return self._client.request('/files/mv', (source, dest), **kwargs)
|
def txt_line_iterator(txt_path):
    """Yield each line of *txt_path*, stripped of surrounding whitespace."""
    with tf.gfile.Open(txt_path) as handle:
        for raw_line in handle:
            yield raw_line.strip()
|
def get_label_at_address(self, address, offset=None):
    """Creates a label from the given memory address.

    If the address belongs to the module, the label is made relative to
    it's base address.

    @type  address: int
    @param address: Memory address.

    @type  offset: None or int
    @param offset: (Optional) Offset value.

    @rtype:  str
    @return: Label pointing to the given address.
    """
    # Add the offset to the address.
    if offset:
        address = address + offset
    # Make the label relative to the base address if no match is found.
    module = self.get_name()
    function = None
    # NOTE: from here on, the 'offset' parameter name is reused to hold
    # the offset *relative* to the chosen anchor (base / entrypoint /
    # symbol); the original argument is no longer needed.
    offset = address - self.get_base()
    # Make the label relative to the entrypoint if no other match is found.
    # Skip if the entry point is unknown.
    start = self.get_entry_point()
    if start and start <= address:
        function = "start"
        offset = address - start
    # Enumerate exported functions and debug symbols,
    # then find the closest match, if possible.
    try:
        symbol = self.get_symbol_at_address(address)
        if symbol:
            (SymbolName, SymbolAddress, SymbolSize) = symbol
            new_offset = address - SymbolAddress
            # Prefer the symbol only when it is at least as close as the
            # current anchor.
            if new_offset <= offset:
                function = SymbolName
                offset = new_offset
    except WindowsError:
        # Symbol enumeration can fail on the Win32 debug API; fall back
        # to the base/entrypoint-relative label.
        pass
    # Parse the label and return it.
    return _ModuleContainer.parse_label(module, function, offset)
|
def _isnull(expr):
    """Return a sequence or scalar according to the input indicating if
    the values are null.

    :param expr: sequence or scalar
    :return: sequence or scalar (implicitly None for any other input --
        presumably callers only pass SequenceExpr/Scalar; confirm)
    """
    if isinstance(expr, SequenceExpr):
        return IsNull(_input=expr, _data_type=types.boolean)
    if isinstance(expr, Scalar):
        return IsNull(_input=expr, _value_type=types.boolean)
|
def setup_tempdir(dir, models, wav, alphabet, lm_binary, trie, binaries):
    r'''Copy models, libs and binary to a directory (new one if dir is None).

    :param dir: target directory, or None to create a fresh temp dir
    :param models: model files (or a ZIP archive of them) to stage
    :param wav, alphabet, lm_binary, trie: extra files copied alongside
    :param binaries: local native-client tarball, or None to download
    :return: tuple of (directory used, list of staged model paths)
    '''
    if dir is None:
        dir = tempfile.mkdtemp(suffix='dsbench')
    sorted_models = all_files(models=models)
    if binaries is None:
        maybe_download_binaries(dir)
    else:
        print('Using local binaries: %s' % (binaries))
        shutil.copy2(binaries, dir)
    extract_native_client_tarball(dir)
    # Fixed for Python 3: map()/filter() return iterators there, so the
    # original len(missing_models) raised TypeError. Materialize as lists.
    filenames = [os.path.join(dir, os.path.basename(x)) for x in sorted_models]
    missing_models = [x for x in filenames if not os.path.isfile(x)]
    if len(missing_models) > 0:
        # If we have a ZIP file, directly extract it to the proper path
        if is_zip_file(models):
            print('Extracting %s to %s' % (models[0], dir))
            zipfile.ZipFile(models[0]).extractall(path=dir)
            print('Extracted %s.' % models[0])
        else:
            # If one model is missing, let's copy everything again. Be safe.
            for f in sorted_models:
                print('Copying %s to %s' % (f, dir))
                shutil.copy2(f, dir)
    for extra_file in [wav, alphabet, lm_binary, trie]:
        if extra_file and not os.path.isfile(os.path.join(dir, os.path.basename(extra_file))):
            print('Copying %s to %s' % (extra_file, dir))
            shutil.copy2(extra_file, dir)
    # NOTE(review): ssh_conn is a module-level global -- confirm it is
    # always defined before this function runs.
    if ssh_conn:
        copy_tree(dir)
    return dir, sorted_models
|
def _data_update(subjects, queue, run_flag):
    """Get data from background process and notify all subscribed
    observers with the new data.

    Polls *queue* while run_flag.running is set, forwarding each item to
    every non-disposed subject, then sleeps briefly between polls.
    """
    while run_flag.running:
        while not queue.empty():
            payload = queue.get()
            live = [s for s in subjects if not s.is_disposed]
            for subject in live:
                subject.on_next(payload)
        time.sleep(0.1)
|
def to_short_time_string(self) -> str:
    """Return the iso time string only (HH:MM, zero-padded)."""
    return "{:02}:{:02}".format(self.time.hour, self.time.minute)
|
def config(name, config):
    '''Ensure that the chronos job with the given name is present and is
    configured to match the given config values.

    :param name: The job name
    :param config: The configuration to apply (dict)
    :return: A standard Salt changes dictionary
    '''
    # setup return structure
    ret = {'name': name, 'changes': {}, 'result': False, 'comment': '', }
    # get existing config if job is present
    existing_config = None
    if __salt__['chronos.has_job'](name):
        existing_config = __salt__['chronos.job'](name)['job']
    # compare existing config with defined config
    if existing_config:
        update_config = copy.deepcopy(existing_config)
        salt.utils.configcomparer.compare_and_update_config(config, update_config, ret['changes'], )
    else:
        # the job is not configured -- we need to create it from scratch
        ret['changes']['job'] = {'new': config, 'old': None, }
        update_config = config
    if ret['changes']:
        # if the only change is in schedule, check to see if patterns are equivalent
        if 'schedule' in ret['changes'] and len(ret['changes']) == 1:
            if 'new' in ret['changes']['schedule'] and 'old' in ret['changes']['schedule']:
                new = ret['changes']['schedule']['new']
                log.debug('new schedule: %s', new)
                old = ret['changes']['schedule']['old']
                log.debug('old schedule: %s', old)
                if new and old:
                    # Chronos schedules look like R<n>/<start>/<period>;
                    # compare repeat count and period, ignoring the start
                    # timestamp (middle segment), which drifts on each run.
                    _new = new.split('/')
                    log.debug('_new schedule: %s', _new)
                    _old = old.split('/')
                    log.debug('_old schedule: %s', _old)
                    if len(_new) == 3 and len(_old) == 3:
                        log.debug('_new[0] == _old[0]: %s', six.text_type(_new[0]) == six.text_type(_old[0]))
                        log.debug('_new[2] == _old[2]: %s', six.text_type(_new[2]) == six.text_type(_old[2]))
                        if six.text_type(_new[0]) == six.text_type(_old[0]) and six.text_type(_new[2]) == six.text_type(_old[2]):
                            log.debug('schedules match--no need for changes')
                            ret['changes'] = {}
    # update the config if we registered any changes
    # NOTE(review): this debug line duplicates the one inside the schedule
    # comparison above and fires unconditionally -- it looks like a
    # copy/paste leftover and logs a misleading message. Confirm and remove.
    log.debug('schedules match--no need for changes')
    if ret['changes']:
        # if test report there will be an update
        if __opts__['test']:
            ret['result'] = None
            ret['comment'] = 'Chronos job {0} is set to be updated'.format(name)
            return ret
        update_result = __salt__['chronos.update_job'](name, update_config)
        if 'exception' in update_result:
            ret['result'] = False
            ret['comment'] = 'Failed to update job config for {0}: {1}'.format(name, update_result['exception'], )
            return ret
        else:
            ret['result'] = True
            ret['comment'] = 'Updated job config for {0}'.format(name)
            return ret
    ret['result'] = True
    ret['comment'] = 'Chronos job {0} configured correctly'.format(name)
    return ret
|
def create_for_collection_items(item_type, hint):
    """Helper method for collection items.

    Builds (but does not raise) the error describing a collection type
    that is missing a usable PEP484 contents hint.

    :param item_type: the offending collection type
    :param hint: the type hint that was found (and rejected)
    :return: a TypeInformationRequiredError ready to be raised
    """
    # this leads to infinite loops
    # try:
    #     prt_type = get_pretty_type_str(item_type)
    # except:
    #     prt_type = str(item_type)
    message = ("Cannot parse object of type {t} as a collection: this type has no valid "
               "PEP484 type hint about its contents: found {h}. Please use a standard "
               "PEP484 declaration such as Dict[str, Foo] or List[Foo]"
               "".format(t=str(item_type), h=hint))
    return TypeInformationRequiredError(message)
|
def stack(args):
    """%prog stack fastafile

    Create landscape plots that show the amounts of genic sequences, and
    repetitive sequences along the chromosomes.
    """
    p = OptionParser(stack.__doc__)
    p.add_option("--top", default=10, type="int", help="Draw the first N chromosomes [default: %default]")
    p.add_option("--stacks", default="Exons,Introns,DNA_transposons,Retrotransposons", help="Features to plot in stackplot [default: %default]")
    p.add_option("--switch", help="Change chr names based on two-column file [default: %default]")
    add_window_options(p)
    opts, args, iopts = p.set_image_options(args, figsize="8x8")
    if len(args) != 1:
        sys.exit(not p.print_help())
    fastafile, = args
    top = opts.top
    window, shift, subtract, merge = check_window_options(opts)
    switch = opts.switch
    if switch:
        # Two-column file mapping old -> new chromosome names.
        switch = DictFile(opts.switch)
    stacks = opts.stacks.split(",")
    bedfiles = get_beds(stacks)
    binfiles = get_binfiles(bedfiles, fastafile, shift, subtract=subtract, merge=merge)
    sizes = Sizes(fastafile)
    # Only the first `top` chromosomes are drawn.
    s = list(sizes.iter_sizes())[:top]
    maxl = max(x[1] for x in s)
    margin = .08
    inner = .02
    # y distance between tracks
    pf = fastafile.rsplit(".", 1)[0]
    fig = plt.figure(1, (iopts.w, iopts.h))
    root = fig.add_axes([0, 0, 1, 1])
    # Gauge
    ratio = draw_gauge(root, margin, maxl)
    # Per chromosome
    yinterval = (1 - 2 * margin) / (top + 1)
    xx = margin
    yy = 1 - margin
    for chr, clen in s:
        yy -= yinterval
        xlen = clen / ratio
        cc = chr
        if "_" in chr:
            # NOTE(review): split("_") assumes exactly one underscore in the
            # name; a second underscore raises ValueError here. Confirm
            # naming convention of the input FASTA.
            ca, cb = chr.split("_")
            cc = ca[0].upper() + cb
        if switch and cc in switch:
            cc = "\n".join((cc, "({0})".format(switch[cc])))
        root.add_patch(Rectangle((xx, yy), xlen, yinterval - inner, color=gray))
        ax = fig.add_axes([xx, yy, xlen, yinterval - inner])
        # NOTE(review): on Python 3 this is float division, so nbins becomes
        # a float (and the later += 1 keeps it float) -- looks written for
        # Python 2 integer division. Confirm target version / consider //.
        nbins = clen / shift
        if clen % shift:
            nbins += 1
        stackplot(ax, binfiles, nbins, palette, chr, window, shift)
        root.text(xx - .04, yy + .5 * (yinterval - inner), cc, ha="center", va="center")
        ax.set_xlim(0, nbins)
        ax.set_ylim(0, 1)
        ax.set_axis_off()
    # Legends
    yy -= yinterval
    xx = margin
    for b, p in zip(bedfiles, palette):
        b = b.rsplit(".", 1)[0].replace("_", " ")
        b = Registration.get(b, b)
        root.add_patch(Rectangle((xx, yy), inner, inner, color=p, lw=0))
        xx += 2 * inner
        root.text(xx, yy, b, size=13)
        # Advance the cursor proportionally to the label width.
        xx += len(b) * .012 + inner
    root.set_xlim(0, 1)
    root.set_ylim(0, 1)
    root.set_axis_off()
    image_name = pf + "." + iopts.format
    savefig(image_name, dpi=iopts.dpi, iopts=iopts)
|
def wait_for_single_device(self, timeout=None, interval=0.5):
    """Waits until a Myo is was paired **and** connected with the Hub and
    returns it. If the *timeout* is exceeded, returns None. This function
    will not return a Myo that is only paired but not connected.

    # Parameters
    timeout: The maximum time to wait for a device.
    interval: The interval at which the function should exit sleeping. We can
      not sleep endlessly, otherwise the main thread can not be exit, eg.
      through a KeyboardInterrupt.
    """
    deadline = TimeoutManager(timeout)
    with self._cond:
        # As long as there are no Myo's connected, wait until we
        # get notified about a change.
        while not deadline.check():
            # Check if we found a Myo that is connected.
            connected = next(
                (d for d in self._devices.values() if d.connected), None)
            if connected is not None:
                return connected
            self._cond.wait(deadline.remainder(interval))
    return None
|
def cigar_array(self):
    """Parse the CIGAR string into CIGARDatum entries.

    The parsed result is cached on self._cigar to speed things up a bit.
    """
    if not self._cigar:
        matches = re.findall('([0-9]+)([MIDNSHP=X]+)', self.entries.cigar)
        self._cigar = [CIGARDatum(int(length), op) for length, op in matches]
    return self._cigar
|
def get_sql_statement_with_environment(item, args=None):
    """Given a SQLStatement, string or module plus command line args or a
    dictionary, return a SqlStatement and final dictionary for variable
    resolution.

    Args:
      item: a SqlStatement, %%sql module, or string containing a query.
      args: a string of command line arguments or a dictionary of values.

    Returns:
      A SqlStatement for the query or module, plus a dictionary of variable
      values to use.
    """
    # NOTE(review): 'basestring' only exists on Python 2 -- confirm this
    # module targets py2 (or imports a compatibility shim elsewhere).
    if isinstance(item, basestring):
        item = _sql_statement.SqlStatement(item)
    elif not isinstance(item, _sql_statement.SqlStatement):
        # Assume it is a %%sql module; pick its default query.
        item = SqlModule.get_default_query_from_module(item)
        if not item:
            raise Exception('Expected a SQL statement or module but got %s' % str(item))
    env = {}
    if item.module:
        # Seed the environment with everything defined in the module.
        env.update(item.module.__dict__)
        parser = env.get(_utils._SQL_MODULE_ARGPARSE, None)
        if parser:
            args = SqlModule._get_sql_args(parser, args=args)
        else:
            # No parser declared by the module: ignore any supplied args.
            args = None
    if isinstance(args, dict):
        env.update(args)
    return item, env
|
def argument_parser(version=None):
    """Create the argument parser for ncbi-genome-download.

    :param version: version string reported by the -V/--version action.
    :return: a configured argparse.ArgumentParser.

    Fixes in help strings: 'Coose' -> 'Choose', 'comma-seperated' ->
    'comma-separated', 'relatons' -> 'relations', and missing separators
    between concatenated help-string fragments.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('group', default=NgdConfig.get_default('group'), help='The NCBI taxonomic group to download (default: %(default)s). ' 'A comma-separated list of taxonomic groups is also possible. For example: "bacteria,viral". ' 'Choose from: {choices}'.format(choices=NgdConfig.get_choices('group')))
    parser.add_argument('-s', '--section', dest='section', choices=NgdConfig.get_choices('section'), default=NgdConfig.get_default('section'), help='NCBI section to download (default: %(default)s)')
    parser.add_argument('-F', '--format', dest='file_format', default=NgdConfig.get_default('file_format'), help='Which format to download (default: %(default)s). ' 'A comma-separated list of formats is also possible. For example: "fasta,assembly-report". ' 'Choose from: {choices}'.format(choices=NgdConfig.get_choices('file_format')))
    parser.add_argument('-l', '--assembly-level', dest='assembly_level', default=NgdConfig.get_default('assembly_level'), help='Assembly level of genomes to download (default: %(default)s). ' 'A comma-separated list of assembly levels is also possible. For example: "complete,chromosome". ' 'Choose from: {choices}'.format(choices=NgdConfig.get_choices('assembly_level')))
    parser.add_argument('-g', '--genus', dest='genus', default=NgdConfig.get_default('genus'), help='Only download sequences of the provided genus. ' 'A comma-separated list of genera is also possible. For example: ' '"Streptomyces coelicolor,Escherichia coli". (default: %(default)s)')
    parser.add_argument('-T', '--species-taxid', dest='species_taxid', default=NgdConfig.get_default('species_taxid'), help='Only download sequences of the provided species NCBI taxonomy ID. ' 'A comma-separated list of species taxids is also possible. For example: "52342,12325". ' '(default: %(default)s)')
    parser.add_argument('-t', '--taxid', dest='taxid', default=NgdConfig.get_default('taxid'), help='Only download sequences of the provided NCBI taxonomy ID. ' 'A comma-separated list of taxids is also possible. For example: "9606,9685". ' '(default: %(default)s)')
    parser.add_argument('-A', '--assembly-accessions', dest='assembly_accessions', default=NgdConfig.get_default('assembly_accessions'), help='Only download sequences matching the provided NCBI assembly accession(s). ' 'A comma-separated list of accessions is possible, as well as a path to a filename ' 'containing one accession per line.')
    parser.add_argument('-R', '--refseq-category', dest='refseq_category', choices=NgdConfig.get_choices('refseq_category'), default=NgdConfig.get_default('refseq_category'), help='Only download sequences of the provided refseq category (default: %(default)s)')
    parser.add_argument('-o', '--output-folder', dest='output', default=NgdConfig.get_default('output'), help='Create output hierarchy in specified folder (default: %(default)s)')
    parser.add_argument('-H', '--human-readable', dest='human_readable', action='store_true', help='Create links in human-readable hierarchy (might fail on Windows)')
    parser.add_argument('-u', '--uri', dest='uri', default=NgdConfig.get_default('uri'), help='NCBI base URI to use (default: %(default)s)')
    parser.add_argument('-p', '--parallel', dest='parallel', type=int, metavar="N", default=NgdConfig.get_default('parallel'), help='Run %(metavar)s downloads in parallel (default: %(default)s)')
    parser.add_argument('-r', '--retries', dest='retries', type=int, metavar="N", default=0, help='Retry download %(metavar)s times when connection to NCBI fails (' 'default: %(default)s)')
    parser.add_argument('-m', '--metadata-table', type=str, help='Save tab-delimited file with genome metadata')
    parser.add_argument('-n', '--dry-run', dest='dry_run', action='store_true', help="Only check which files to download, don't download genome files.")
    parser.add_argument('-N', '--no-cache', dest='use_cache', action='store_false', help="Don't cache the assembly summary file in %s." % CACHE_DIR)
    parser.add_argument('-v', '--verbose', action='store_true', help='increase output verbosity')
    parser.add_argument('-d', '--debug', action='store_true', help='print debugging information')
    parser.add_argument('-V', '--version', action='version', version=version, help='print version information')
    parser.add_argument('-M', '--type-material', dest='type_material', default=NgdConfig.get_default('type_material'), help='Specifies the relation to type material for the assembly (default: %(default)s). ' '"any" will include assemblies with no relation to type material value defined, "all" will download only assemblies with a defined value. ' 'A comma-separated list of relations. For example: "reference,synonym". ' 'Choose from: {choices}.'.format(choices=NgdConfig.get_choices('type_material')))
    return parser
|
def output(stream):
    """Write the contents of the given stream to stdout.

    Reads in 1024-byte/char chunks until the stream is exhausted.
    """
    while True:
        chunk = stream.read(1024)
        if not chunk:
            break
        sys.stdout.write(chunk)
|
def children(self):
    """Returns list of children changesets.

    Negative revisions (e.g. the null changeset) are skipped.
    """
    valid = (c for c in self._ctx.children() if c.rev() >= 0)
    return [self.repository.get_changeset(c.rev()) for c in valid]
|
def entrypoint(args=None):
    """Main callable for "bonobo" entrypoint.

    Will load commands from "bonobo.commands" entrypoints, using stevedore.

    :param args: optional argv-style list; None means sys.argv[1:].
    :return: 0 on success.
    """
    mondrian.setup(excepthook=True)
    logger = logging.getLogger()
    logger.setLevel(settings.LOGGING_LEVEL.get())
    parser = argparse.ArgumentParser()
    parser.add_argument("--debug", "-D", action="store_true")
    subparsers = parser.add_subparsers(dest="command")
    subparsers.required = True
    # Maps command name -> callable(**parsed_args); filled by the
    # stevedore extension loader below.
    commands = {}

    def register_extension(ext):
        # Register one "bonobo.commands" entrypoint as a subcommand.
        nonlocal commands
        try:
            # NOTE: this local 'parser' (the subcommand parser) shadows the
            # outer ArgumentParser inside this closure.
            parser = subparsers.add_parser(ext.name)
            if isinstance(ext.plugin, type) and issubclass(ext.plugin, BaseCommand):
                # current way, class based.
                cmd = ext.plugin()
                cmd.add_arguments(parser)
                cmd.__name__ = ext.name
                commands[ext.name] = cmd.handle
            else:
                # old school, function based.
                commands[ext.name] = ext.plugin(parser)
        except Exception:
            # A broken plugin must not take down the whole CLI; log and
            # keep loading the others.
            logger.exception("Error while loading command {}.".format(ext.name))

    from stevedore import ExtensionManager
    mgr = ExtensionManager(namespace="bonobo.commands")
    mgr.map(register_extension)
    parsed_args = parser.parse_args(args).__dict__
    if parsed_args.pop("debug", False):
        settings.DEBUG.set(True)
        settings.LOGGING_LEVEL.set(logging.DEBUG)
        logger.setLevel(settings.LOGGING_LEVEL.get())
    logger.debug("Command: " + parsed_args["command"] + " Arguments: " + repr(parsed_args))
    # Get command handler, execute, rince.
    command = commands[parsed_args.pop("command")]
    command(**parsed_args)
    return 0
|
def db_exists(name, user=None, host=None, port=None, maintenance_db=None, password=None, runas=None):
    '''Checks if a database exists on the Postgres server.

    CLI Example:

    .. code-block:: bash

        salt '*' postgres.db_exists 'dbname'
    '''
    return name in db_list(user=user, host=host, port=port,
                           maintenance_db=maintenance_db,
                           password=password, runas=runas)
|
def pandoc_process(app, what, name, obj, options, lines):
    """Convert docstrings written in Markdown into reStructuredText using pandoc."""
    if not lines:
        return None
    # sphinx.ext.autodoc works with unicode and pypandoc.convert_text always
    # returns unicode, so no explicit encoding handling is needed here.
    converted = pypandoc.convert_text(SEP.join(lines), 'rst',
                                      format=app.config.mkdsupport_use_parser)
    # 'lines' must be mutated in place: Sphinx keeps a reference to the list.
    lines[:] = converted.split(SEP)
|
def read(self, filename):
    """Read detector from a file, must be HDF5 format.

    Reads a Detector object from an HDF5 file, usually created by eqcorrscan.

    :type filename: str
    :param filename: Filename to read the detector from.
    :returns: self, populated from the file contents.
    """
    # Use a context manager so the file handle is released even when a
    # dataset or attribute is missing (the original leaked the handle).
    with h5py.File(filename, "r") as f:

        def _dataset_list(key):
            # Each group stores 'length' datasets named '<key>_0', '<key>_1', ...
            group = f[key]
            return [group[key + '_' + str(i)].value
                    for i in range(group.attrs['length'])]

        self.data = _dataset_list('data')
        self.u = _dataset_list('u')
        self.sigma = _dataset_list('sigma')
        self.v = _dataset_list('v')
        # Station/channel pairs are stored as b'STA.CHN' byte strings.
        self.stachans = [tuple(stachan.decode('ascii').split('.'))
                         for stachan in f['stachans'].value]
        attrs = f['data'].attrs
        self.dimension = attrs['dimension']
        self.filt_order = attrs['filt_order']
        self.highcut = attrs['highcut']
        self.lowcut = attrs['lowcut']
        self.multiplex = bool(attrs['multiplex'])
        self.sampling_rate = attrs['sampling_rate']
        # 'name' may come back as str or bytes depending on h5py/HDF5 version.
        name = attrs['name']
        self.name = name if isinstance(name, str) else name.decode('ascii')
    return self
|
def init_app(self, app):  # type: (Flask) -> None
    """Init the Flask-MQTT addon.

    Reads all ``MQTT_*`` settings from the Flask application config,
    configures the underlying paho-mqtt client, optionally sets TLS and a
    last-will message, and finally connects to the broker.
    """
    self.client_id = app.config.get("MQTT_CLIENT_ID", "")
    # ``unicode`` only exists on Python 2; paho wants a bytes client id there.
    if isinstance(self.client_id, unicode):
        self.client._client_id = self.client_id.encode('utf-8')
    else:
        self.client._client_id = self.client_id
    self.client._transport = app.config.get("MQTT_TRANSPORT", "tcp").lower()
    self.client._protocol = app.config.get("MQTT_PROTOCOL_VERSION", MQTTv311)
    self.client.on_connect = self._handle_connect
    self.client.on_disconnect = self._handle_disconnect
    self.username = app.config.get("MQTT_USERNAME")
    self.password = app.config.get("MQTT_PASSWORD")
    self.broker_url = app.config.get("MQTT_BROKER_URL", "localhost")
    self.broker_port = app.config.get("MQTT_BROKER_PORT", 1883)
    self.tls_enabled = app.config.get("MQTT_TLS_ENABLED", False)
    self.keepalive = app.config.get("MQTT_KEEPALIVE", 60)
    self.last_will_topic = app.config.get("MQTT_LAST_WILL_TOPIC")
    self.last_will_message = app.config.get("MQTT_LAST_WILL_MESSAGE")
    self.last_will_qos = app.config.get("MQTT_LAST_WILL_QOS", 0)
    self.last_will_retain = app.config.get("MQTT_LAST_WILL_RETAIN", False)
    if self.tls_enabled:
        # MQTT_TLS_CA_CERTS is mandatory once TLS is enabled (KeyError if absent).
        self.tls_ca_certs = app.config["MQTT_TLS_CA_CERTS"]
        self.tls_certfile = app.config.get("MQTT_TLS_CERTFILE")
        self.tls_keyfile = app.config.get("MQTT_TLS_KEYFILE")
        self.tls_cert_reqs = app.config.get("MQTT_TLS_CERT_REQS", ssl.CERT_REQUIRED)
        self.tls_version = app.config.get("MQTT_TLS_VERSION", ssl.PROTOCOL_TLSv1)
        self.tls_ciphers = app.config.get("MQTT_TLS_CIPHERS")
        self.tls_insecure = app.config.get("MQTT_TLS_INSECURE", False)
    # set last will message (must happen before connecting)
    if self.last_will_topic is not None:
        self.client.will_set(self.last_will_topic, self.last_will_message,
                             self.last_will_qos, self.last_will_retain, )
    self._connect()
|
def append(self, resultFile, resultElem, all_columns=False):
    """Append the result for one run. Needs to be called before collect_data()."""
    # Pair every run tag with the file it came from.
    for run_tag in _get_run_tags_from_xml(resultElem):
        self._xml_results.append((run_tag, resultFile))
    extracted = RunSetResult._extract_attributes_from_result(resultFile, resultElem)
    for attrib, values in extracted.items():
        self.attributes[attrib].extend(values)
    # Columns are established by the first appended result only.
    if not self.columns:
        self.columns = RunSetResult._extract_existing_columns_from_result(
            resultFile, resultElem, all_columns)
|
def get_instance(self, payload):
    """Build an instance of ThisMonthInstance

    :param dict payload: Payload response from the API

    :returns: twilio.rest.api.v2010.account.usage.record.this_month.ThisMonthInstance
    :rtype: twilio.rest.api.v2010.account.usage.record.this_month.ThisMonthInstance
    """
    account_sid = self._solution['account_sid']
    return ThisMonthInstance(self._version, payload, account_sid=account_sid)
|
def MAP_ADD(self, instr):
    # Pop the key (TOS) and value (TOS1) pushed for a dict-comprehension
    # entry and push them back as a single (key, value) tuple.  The opcode
    # needs no AST node of its own (the trailing 'NOP' marker in the
    # original was a no-op string statement).
    stack = self.ast_stack
    key = stack.pop()
    value = stack.pop()
    stack.append((key, value))
| |
def _to_dict ( self ) :
"""Return a json dictionary representing this model ."""
|
_dict = { }
if hasattr ( self , 'concepts' ) and self . concepts is not None :
_dict [ 'concepts' ] = self . concepts . _to_dict ( )
if hasattr ( self , 'emotion' ) and self . emotion is not None :
_dict [ 'emotion' ] = self . emotion . _to_dict ( )
if hasattr ( self , 'entities' ) and self . entities is not None :
_dict [ 'entities' ] = self . entities . _to_dict ( )
if hasattr ( self , 'keywords' ) and self . keywords is not None :
_dict [ 'keywords' ] = self . keywords . _to_dict ( )
if hasattr ( self , 'metadata' ) and self . metadata is not None :
_dict [ 'metadata' ] = self . metadata . _to_dict ( )
if hasattr ( self , 'relations' ) and self . relations is not None :
_dict [ 'relations' ] = self . relations . _to_dict ( )
if hasattr ( self , 'semantic_roles' ) and self . semantic_roles is not None :
_dict [ 'semantic_roles' ] = self . semantic_roles . _to_dict ( )
if hasattr ( self , 'sentiment' ) and self . sentiment is not None :
_dict [ 'sentiment' ] = self . sentiment . _to_dict ( )
if hasattr ( self , 'categories' ) and self . categories is not None :
_dict [ 'categories' ] = self . categories . _to_dict ( )
if hasattr ( self , 'syntax' ) and self . syntax is not None :
_dict [ 'syntax' ] = self . syntax . _to_dict ( )
return _dict
|
def main():
    "Process CLI arguments and call appropriate functions."
    try:
        args = docopt.docopt(__doc__, version=__about__.__version__)
    except docopt.DocoptExit:
        # Anything beyond the bare program name means bad syntax.
        if len(sys.argv) > 1:
            print(f"{Fore.RED}Invalid command syntax, " f"check help:{Fore.RESET}\n")
        print(__doc__)
        sys.exit(1)
    # With no selector flags given, print every representation.
    show_everything = not (args["--int-width"] or args["--int-height"] or args["--decimal"])
    width = float(args["WIDTH"])
    height = float(args["HEIGHT"])
    ints = as_int(width, height)
    ratio = as_float(width, height)
    if args["--ndigits"]:
        ratio = round(ratio, int(args["--ndigits"]))
    pieces = []
    if args["--int-width"] or show_everything:
        pieces.append(f"{Fore.BLUE}{ints[0]!s}")
    if args["--int-height"] or show_everything:
        pieces.append(f"{Fore.BLUE}{ints[1]!s}")
    if args["--decimal"] or show_everything:
        pieces.append(f"{Fore.MAGENTA}{ratio!s}")
    print(" ".join(pieces))
|
def is_single_file_metadata_valid(file_metadata, project_member_id, filename):
    """Check if metadata fields like project member id, description, tags, md5
    and creation date are valid for a single file.

    :param file_metadata: This field is metadata of file.
    :param project_member_id: This field is the project member id corresponding
        to the file metadata provided.
    :param filename: This field is the filename corresponding to the file
        metadata provided.
    """
    def _invalid(message):
        # All errors share the same (prefix, id, infix, filename, message)
        # argument tuple as the original implementation.
        raise ValueError('Error: for project member id: ', project_member_id,
                         ' and filename: ', filename, message)

    if project_member_id is not None:
        if not project_member_id.isdigit() or len(project_member_id) != 8:
            _invalid(' project member id must be of 8 digits from 0 to 9')
    if 'description' not in file_metadata:
        _invalid(' "description" is a required field of the metadata')
    if not isinstance(file_metadata['description'], str):
        _invalid(' "description" must be a string')
    if 'tags' not in file_metadata:
        _invalid(' "tags" is a required field of the metadata')
    if not isinstance(file_metadata['tags'], list):
        _invalid(' "tags" must be an array of strings')
    if 'creation_date' in file_metadata:
        if not validate_date(file_metadata['creation_date'], project_member_id, filename):
            _invalid(' Dates must be in ISO 8601 format')
    if 'md5' in file_metadata:
        if not re.match(r'[a-f0-9]{32}$', file_metadata['md5'], flags=re.IGNORECASE):
            _invalid(' Invalid MD5 specified')
    return True
|
def public_keys(self):
    """Return a list of SSH public keys (in textual format)."""
    cached = self.public_keys_cache
    if cached:
        return cached
    # Lazily populate the cache from a fresh connection on first use.
    connection = self.conn_factory()
    self.public_keys_cache = connection.export_public_keys(self.identities)
    return self.public_keys_cache
|
def attention_lm_moe_small():
    """Cheap model for single-gpu training.

    on lm1b_32k:
        ~312M params
        1.6 steps/sec on [GeForce GTX TITAN X]
    After 50K steps on 8 GPUs (synchronous):
        eval_log_ppl_per_token = 3.31

    Returns:
        an hparams object.
    """
    hparams = attention_lm_moe_base()
    # Shrink the base configuration down to single-GPU size.
    overrides = {
        'num_hidden_layers': 4,
        'hidden_size': 512,
        'filter_size': 2048,
        'moe_num_experts': 128,
        'moe_layers': "2",
    }
    for key, value in overrides.items():
        setattr(hparams, key, value)
    return hparams
|
def get_hist(self, observable: Any, **kwargs: Any) -> Any:
    """Get the histogram that may be stored in some object.

    This histogram is used to project from.

    Note:
        The output object could just be the raw ROOT histogram.

    Note:
        This function is just a basic placeholder and likely should be
        overridden.

    Args:
        observable: The input object. It could be a histogram or something
            more complex.
        kwargs: Additional arguments passed to the projection function.
    Return:
        ROOT.TH1 or ROOT.THnBase histogram which should be projected. By
        default, it returns the observable (input object).
    """
    # Fix: ``**kwargs: Dict[str, Any]`` annotated each *value* as a dict;
    # the conventional annotation for arbitrary keyword values is ``Any``.
    return observable
|
def lookup(cls, name):
    """Try to find field C{name}.

    @return: Field descriptions, see C{matching.ConditionParser} for details.
    """
    try:
        field = cls.FIELDS[name]
    except KeyError:
        # Unknown predefined field - it may be a custom attribute.
        field = TorrentProxy.add_manifold_attribute(name)
    if field:
        return {"matcher": field._matcher}
    return None
|
def _add_to_quick_menu(self, key, wf):
    """Appends menu entries to dashboard quickmenu according
    to :attr:`zengine.settings.QUICK_MENU`

    Args:
        key: workflow name
        wf: workflow menu entry
    """
    if key not in settings.QUICK_MENU:
        return
    self.output['quick_menu'].append(wf)
|
def ls(name, path):
    """List files in a path of a virtual folder.

    NAME: Name of a virtual folder.
    PATH: Path inside vfolder.
    """
    with Session() as session:
        try:
            print_wait('Retrieving list of files in "{}"...'.format(path))
            result = session.VFolder(name).list_files(path)
            if 'error_msg' in result and result['error_msg']:
                print_fail(result['error_msg'])
                return
            files = json.loads(result['files'])
            table = []
            headers = ['file name', 'size', 'modified', 'mode']
            for file in files:
                # mtime comes back as a unix timestamp; render it human-readably.
                mdt = datetime.fromtimestamp(file['mtime'])
                mtime = mdt.strftime('%b %d %Y %H:%M:%S')
                row = [file['filename'], file['size'], mtime, file['mode']]
                table.append(row)
            # Fixed typo in the user-facing message ('Retrived.' -> 'Retrieved.').
            print_done('Retrieved.')
            print(tabulate(table, headers=headers))
        except Exception as e:
            # Best-effort CLI command: report the error instead of crashing.
            print_error(e)
|
def get_page(self, page_id):
    """Get short page info and body html code.

    :param page_id: identifier passed to the ``/getpage/`` API endpoint.
    :returns: a ``TildaPage`` built from the API response, or an empty list
        when the request fails with ``NetworkError``.
    """
    try:
        result = self._request('/getpage/', {'pageid': page_id})
        return TildaPage(**result)
    except NetworkError:
        # NOTE(review): returning [] gives callers an inconsistent return
        # type (TildaPage on success, list on failure) - confirm whether
        # None or re-raising would be more appropriate before changing.
        return []
|
def usearch61_smallmem_cluster(intermediate_fasta, percent_id=0.97, minlen=64, rev=False, output_dir=".", remove_usearch_logs=False, wordlength=8, usearch61_maxrejects=32, usearch61_maxaccepts=1, sizeorder=False, HALT_EXEC=False, output_uc_filepath=None, log_name="smallmem_clustered.log", sizeout=False, consout_filepath=None):
    """Performs usearch61 de novo clustering via the cluster_smallmem option.

    Only supposed to be used with length-sorted data (and performs length
    sorting automatically); reverse strand matching is controlled by ``rev``.

    intermediate_fasta: fasta filepath to be clustered with usearch61
    percent_id: percentage id to cluster at
    minlen: minimum sequence length
    rev: will enable reverse strand matching if True
    output_dir: directory to output log, OTU mapping, and intermediate files
    remove_usearch_logs: if True, usearch log files are not written
    wordlength: word length to use for initial high probability sequence matches
    usearch61_maxrejects: 'default' or an int value specifying max rejects
    usearch61_maxaccepts: number of accepts allowed by usearch61
    sizeorder: enables usearch61 --sizeorder behaviour
    HALT_EXEC: application controller option to halt execution
    output_uc_filepath: path to write clusters (.uc) file
    log_name: filename of the usearch61 generated log file
    sizeout: if True, will save abundance data in output fasta labels
    consout_filepath: set to save the clustered consensus fasta used for
        chimera checking
    """
    log_filepath = join(output_dir, log_name)
    params = {
        '--minseqlength': minlen,
        '--cluster_smallmem': intermediate_fasta,
        '--id': percent_id,
        '--uc': output_uc_filepath,
        '--wordlength': wordlength,
        '--maxrejects': usearch61_maxrejects,
        '--maxaccepts': usearch61_maxaccepts,
        '--usersort': True,
        '--strand': 'both' if rev else 'plus',
    }
    if sizeorder:
        params['--sizeorder'] = True
    if not remove_usearch_logs:
        params['--log'] = log_filepath
    if sizeout:
        params['--sizeout'] = True
    if consout_filepath:
        params['--consout'] = consout_filepath
    app = Usearch61(params, WorkingDir=output_dir, HALT_EXEC=HALT_EXEC)
    return output_uc_filepath, app()
|
def apply_trapping(self, outlets):
    """Apply trapping based on algorithm described by Y. Masson [1].

    It is applied as a post-process and runs the percolation algorithm in
    reverse, assessing the occupancy of pore neighbors.  With trapping, only
    clusters that do not connect to a sink can grow and merge; the moment a
    sink-connected neighbor is touched, the trapped cluster stops growing,
    as this is the point of trapping in forward invasion time.

    Cluster labels: initially all invaded pores are -1, outlets/sinks are -2,
    and trapped clusters that grow during the reversed walk are numbered
    from 0 upwards.  Logger output reports the invasion sequence, pore index
    and the rule (C:1..C:5) applied at each step.

    Ref:
        [1] Masson, Y., 2016. A fast two-step algorithm for invasion
        percolation with trapping. Computers & Geosciences, 90, pp. 41-48

    Parameters
    ----------
    outlets : list or array of pore indices for defending fluid to escape
        through

    Returns
    -------
    Creates a pore array called 'pore.clusters' in the Algorithm dictionary
    (any positive number is a trapped cluster) and two boolean arrays, Np
    and Nt long, called '<element>.trapped'.
    """
    # First see if network is fully invaded
    net = self.project.network
    invaded_ps = self['pore.invasion_sequence'] > -1
    # Fix: 'not' instead of the original bitwise '~' (a footgun on plain
    # Python bools; equivalent for the np.bool_ scalar np.all returns).
    if not np.all(invaded_ps):
        # Put defending phase into clusters
        clusters = net.find_clusters2(~invaded_ps)
        # Identify clusters that are connected to an outlet and set to -2
        # -1 is the invaded fluid
        # -2 is the defender fluid able to escape
        # All others now trapped clusters which grow as invasion is reversed
        # Fix: np.unique instead of sp.unique (the scipy alias of the numpy
        # function was deprecated/removed; the rest of this method uses np).
        out_clusters = np.unique(clusters[outlets])
        for c in out_clusters:
            if c >= 0:
                clusters[clusters == c] = -2
    else:
        # Fully invaded: go from the end with only outlets marked as sinks.
        clusters = np.ones(net.Np, dtype=int) * -1
        clusters[outlets] = -2
    # Turn into a (sequence, pore-index) list and reverse-sort by sequence.
    inv_seq = np.vstack((self['pore.invasion_sequence'].astype(int),
                         np.arange(0, net.Np, dtype=int))).T
    inv_seq = inv_seq[inv_seq[:, 0].argsort()][::-1]
    next_cluster_num = np.max(clusters) + 1
    # For all the steps after the inlets are set up to break-through,
    # reverse the sequence and assess the neighbors' cluster state.
    stopped_clusters = np.zeros(net.Np, dtype=bool)
    all_neighbors = net.find_neighbor_pores(net.pores(), flatten=False,
                                            include_input=True)
    for un_seq, pore in inv_seq:
        if pore not in outlets and un_seq > 0:  # Skip inlets and outlets
            nc = clusters[all_neighbors[pore]]
            # Unique neighboring cluster labels (ignoring invaded fluid -1)
            unique_ns = np.unique(nc[nc != -1])
            seq_pore = "S:" + str(un_seq) + " P:" + str(pore)
            if np.all(nc == -1):
                # This is the start of a new trapped cluster
                clusters[pore] = next_cluster_num
                next_cluster_num += 1
                msg = (seq_pore + " C:1 new cluster number: " +
                       str(clusters[pore]))
                logger.info(msg)
            elif len(unique_ns) == 1:
                # Grow the only connected neighboring cluster
                if not stopped_clusters[unique_ns[0]]:
                    clusters[pore] = unique_ns[0]
                    msg = (seq_pore + " C:2 joins cluster number: " +
                           str(clusters[pore]))
                    logger.info(msg)
                else:
                    clusters[pore] = -2
            elif -2 in unique_ns:
                # We have reached a sink neighbor, stop growing cluster
                msg = (seq_pore + " C:3 joins sink cluster")
                logger.info(msg)
                clusters[pore] = -2
                # Stop growth and merging
                stopped_clusters[unique_ns[unique_ns > -1]] = True
            else:
                # We might be able to do some merging
                # Check if any stopped clusters are neighbors
                if np.any(stopped_clusters[unique_ns]):
                    msg = (seq_pore + " C:4 joins sink cluster")
                    logger.info(msg)
                    clusters[pore] = -2
                    # Stop growing all neighboring clusters
                    stopped_clusters[unique_ns] = True
                else:
                    # Merge multiple un-stopped trapped clusters
                    new_num = unique_ns[0]
                    clusters[pore] = new_num
                    for c in unique_ns:
                        clusters[clusters == c] = new_num
                        msg = (seq_pore + " C:5 merge clusters: " + str(c) +
                               " into " + str(new_num))
                        logger.info(msg)
    # And now return clusters
    self['pore.clusters'] = clusters
    logger.info("Number of trapped clusters" +
                str(np.sum(np.unique(clusters) >= 0)))
    self['pore.trapped'] = self['pore.clusters'] > -1
    trapped_ts = net.find_neighbor_throats(self['pore.trapped'])
    self['throat.trapped'] = np.zeros([net.Nt], dtype=bool)
    self['throat.trapped'][trapped_ts] = True
    self['pore.invasion_sequence'][self['pore.trapped']] = -1
    self['throat.invasion_sequence'][self['throat.trapped']] = -1
|
def _phi ( p ) : # this function is faster than using scipy . stats . norm . isf ( p )
# but the permissity of the license isn ' t explicitly listed .
# using scipy . stats . norm . isf ( p ) is an acceptable alternative
"""Modified from the author ' s original perl code ( original comments follow below )
by dfield @ yahoo - inc . com . May 3 , 2004.
Lower tail quantile for standard normal distribution function .
This function returns an approximation of the inverse cumulative
standard normal distribution function . I . e . , given P , it returns
an approximation to the X satisfying P = Pr { Z < = X } where Z is a
random variable from the standard normal distribution .
The algorithm uses a minimax approximation by rational functions
and the result has a relative error whose absolute value is less
than 1.15e - 9.
Author : Peter John Acklam
Time - stamp : 2000-07-19 18:26:14
E - mail : pjacklam @ online . no
WWW URL : http : / / home . online . no / ~ pjacklam"""
|
if p <= 0 or p >= 1 : # The original perl code exits here , we ' ll throw an exception instead
raise ValueError ( "Argument to ltqnorm %f must be in open interval (0,1)" % p )
# Coefficients in rational approximations .
a = ( - 3.969683028665376e+01 , 2.209460984245205e+02 , - 2.759285104469687e+02 , 1.383577518672690e+02 , - 3.066479806614716e+01 , 2.506628277459239e+00 )
b = ( - 5.447609879822406e+01 , 1.615858368580409e+02 , - 1.556989798598866e+02 , 6.680131188771972e+01 , - 1.328068155288572e+01 )
c = ( - 7.784894002430293e-03 , - 3.223964580411365e-01 , - 2.400758277161838e+00 , - 2.549732539343734e+00 , 4.374664141464968e+00 , 2.938163982698783e+00 )
d = ( 7.784695709041462e-03 , 3.224671290700398e-01 , 2.445134137142996e+00 , 3.754408661907416e+00 )
# Define break - points .
plow = 0.02425
phigh = 1 - plow
# Rational approximation for lower region :
if p < plow :
q = math . sqrt ( - 2 * math . log ( p ) )
return - ( ( ( ( ( c [ 0 ] * q + c [ 1 ] ) * q + c [ 2 ] ) * q + c [ 3 ] ) * q + c [ 4 ] ) * q + c [ 5 ] ) / ( ( ( ( d [ 0 ] * q + d [ 1 ] ) * q + d [ 2 ] ) * q + d [ 3 ] ) * q + 1 )
# Rational approximation for upper region :
if phigh < p :
q = math . sqrt ( - 2 * math . log ( 1 - p ) )
return ( ( ( ( ( c [ 0 ] * q + c [ 1 ] ) * q + c [ 2 ] ) * q + c [ 3 ] ) * q + c [ 4 ] ) * q + c [ 5 ] ) / ( ( ( ( d [ 0 ] * q + d [ 1 ] ) * q + d [ 2 ] ) * q + d [ 3 ] ) * q + 1 )
# Rational approximation for central region :
q = p - 0.5
r = q * q
return - ( ( ( ( ( a [ 0 ] * r + a [ 1 ] ) * r + a [ 2 ] ) * r + a [ 3 ] ) * r + a [ 4 ] ) * r + a [ 5 ] ) * q / ( ( ( ( ( b [ 0 ] * r + b [ 1 ] ) * r + b [ 2 ] ) * r + b [ 3 ] ) * r + b [ 4 ] ) * r + 1 )
|
def set_system_conf(self, key=None, value=None, d=None):
    """Sets a java system property, either as a ('key', 'value') pair or
    using a dictionary {'key': 'value', ...}

    :param key: string
    :param value: string
    :param d: dictionary
    :return: None
    """
    if isinstance(d, dict):
        self._system.update(d)
        return
    if isinstance(key, str) and isinstance(value, str):
        self._system[key] = value
        return
    raise TypeError("key, value must be strings")
|
def hexstr_if_str(to_type, hexstr_or_primitive):
    """Convert to a type, assuming that strings can be only hexstr (not unicode text).

    @param to_type is a function that takes the arguments (primitive, hexstr=hexstr,
        text=text), eg ~to_bytes, to_text, to_hex, to_int, etc
    @param hexstr_or_primitive in bytes, str, or int.
    """
    if not isinstance(hexstr_or_primitive, str):
        # Non-strings are passed through as the primitive value.
        return to_type(hexstr_or_primitive, hexstr=None)
    hexstr = hexstr_or_primitive
    # A non-empty payload after stripping '0x' must be valid hex.
    if remove_0x_prefix(hexstr) and not is_hex(hexstr):
        raise ValueError("when sending a str, it must be a hex string. Got: {0!r}".format(hexstr_or_primitive, ))
    return to_type(None, hexstr=hexstr)
|
def read_ecmwf_macc(filename, latitude, longitude, utc_time_range=None):
    """Read data from ECMWF MACC reanalysis netCDF4 file.

    Parameters
    ----------
    filename : string
        full path to netCDF4 data file.
    latitude : float
        latitude in degrees
    longitude : float
        longitude in degrees
    utc_time_range : sequence of datetime.datetime
        pair of start and stop naive or UTC date-times

    Returns
    -------
    data : pandas.DataFrame
        dataframe for specified range of UTC date-times
    """
    ecmwf_macc = ECMWF_MACC(filename)
    try:
        # Grid indices of the point nearest the requested coordinates.
        ilat, ilon = ecmwf_macc.get_nearest_indices(latitude, longitude)
        nctime = ecmwf_macc.data['time']
        if utc_time_range:
            # select='before'/'after' widen the window so the requested
            # range is fully covered; +1 because slice stops are exclusive.
            start_idx = netCDF4.date2index(utc_time_range[0], nctime, select='before')
            stop_idx = netCDF4.date2index(utc_time_range[-1], nctime, select='after')
            time_slice = slice(start_idx, stop_idx + 1)
        else:
            time_slice = slice(0, ecmwf_macc.time_size)
        times = netCDF4.num2date(nctime[time_slice], nctime.units)
        df = {k: ecmwf_macc.data[k][time_slice, ilat, ilon] for k in ecmwf_macc.keys}
        if ECMWF_MACC.TCWV in df:
            # convert total column water vapor in kg/m^2 at (1-atm, 25-degC) to
            # precipitable water in cm
            df['precipitable_water'] = df[ECMWF_MACC.TCWV] / 10.0
    finally:
        # Always release the netCDF file handle, even on failure.
        ecmwf_macc.data.close()
    return pd.DataFrame(df, index=times.astype('datetime64[s]'))
|
def metric_update(self, project, metric_name, filter_, description):
    """API call: update a metric resource.

    :type project: str
    :param project: ID of the project containing the metric.

    :type metric_name: str
    :param metric_name: the name of the metric

    :type filter_: str
    :param filter_: the advanced logs filter expression defining the
        entries exported by the metric.

    :type description: str
    :param description: description of the metric.

    :rtype: dict
    :returns: The metric object returned from the API (converted from a
        protobuf to a dictionary).
    """
    target = "projects/%s/metrics/%s" % (project, metric_name)
    desired = LogMetric(name=target, filter=filter_, description=description)
    updated = self._gapic_api.update_log_metric(target, desired)
    # The LogMetric message type does not have an ``Any`` field, so
    # ``MessageToDict`` can safely be used.
    return MessageToDict(updated)
|
def create_record_ptr(self, record, data, ttl=60):
    """Create a reverse (PTR) record.

    :param record: the public ip address of the device for which you would
        like to manage reverse DNS.
    :param data: the record's value
    :param integer ttl: the TTL or time-to-live value (default: 60)
    """
    return self.record.createObject(
        self._generate_create_dict(record, 'PTR', data, ttl))
|
def _locate_repo_files ( repo , rewrite = False ) :
'''Find what file a repo is called in .
Helper function for add _ repo ( ) and del _ repo ( )
repo
url of the repo to locate ( persistent ) .
rewrite
Whether to remove matching repository settings during this process .
Returns a list of absolute paths .'''
|
ret_val = [ ]
files = [ ]
conf_dirs = [ '/etc/xbps.d/' , '/usr/share/xbps.d/' ]
name_glob = '*.conf'
# Matches a line where first printing is " repository " and there is an equals
# sign before the repo , an optional forwardslash at the end of the repo name ,
# and it ' s possible for there to be a comment after repository = repo
regex = re . compile ( r'\s*repository\s*=\s*' + repo + r'/?\s*(#.*)?$' )
for cur_dir in conf_dirs :
files . extend ( glob . glob ( cur_dir + name_glob ) )
for filename in files :
write_buff = [ ]
with salt . utils . files . fopen ( filename , 'r' ) as cur_file :
for line in cur_file :
if regex . match ( salt . utils . stringutils . to_unicode ( line ) ) :
ret_val . append ( filename )
else :
write_buff . append ( line )
if rewrite and filename in ret_val :
if write_buff :
with salt . utils . files . fopen ( filename , 'w' ) as rewrite_file :
rewrite_file . writelines ( write_buff )
else : # Prune empty files
os . remove ( filename )
return ret_val
|
def execute_migration(self, migration_file_relative):
    """Recognize the migration type and execute either
    :method:`execute_python_migration` or :method:`execute_native_migration`.

    :param migration_file_relative: migration path relative to the
        configured ``migrations_dir``.
    :raises ValueError: when the migration type is not recognized.  (The
        original used ``assert False``, which is silently stripped when
        running under ``python -O``.)
    """
    migration_file = os.path.join(self.db_config['migrations_dir'], migration_file_relative)
    m_type = self.repository.migration_type(migration_file)
    if m_type == 'native':
        return self.execute_native_migration(migration_file)
    if m_type == 'py':
        # NOTE(review): ``imp`` is deprecated and removed in Python 3.12;
        # importlib.util is the modern replacement - confirm the supported
        # interpreter range before switching.
        module = imp.load_source('migration_module', migration_file)
        return self.execute_python_migration(migration_file, module)
    raise ValueError('Unknown migration type %s' % migration_file)
|
def main():
    """Setup.py entry point."""
    import codecs
    # Read the long description up front instead of inline in the call.
    long_description = codecs.open(
        os.path.join(HERE, 'README.rst'), 'r', 'utf8').read()
    setuptools.setup(
        name='wcwidth',
        version='0.1.7',
        description=("Measures number of Terminal column cells "
                     "of wide-character codes"),
        long_description=long_description,
        author='Jeff Quast',
        author_email='contact@jeffquast.com',
        license='MIT',
        packages=['wcwidth', 'wcwidth.tests'],
        url='https://github.com/jquast/wcwidth',
        include_package_data=True,
        test_suite='wcwidth.tests',
        zip_safe=True,
        classifiers=[
            'Intended Audience :: Developers',
            'Natural Language :: English',
            'Development Status :: 3 - Alpha',
            'Environment :: Console',
            'License :: OSI Approved :: MIT License',
            'Operating System :: POSIX',
            'Programming Language :: Python :: 2.7',
            'Programming Language :: Python :: 3.4',
            'Programming Language :: Python :: 3.5',
            'Topic :: Software Development :: Libraries',
            'Topic :: Software Development :: Localization',
            'Topic :: Software Development :: Internationalization',
            'Topic :: Terminals',
        ],
        keywords=['terminal', 'emulator', 'wcwidth', 'wcswidth', 'cjk',
                  'combining', 'xterm', 'console', ],
        cmdclass={'update': SetupUpdate},
    )
|
def cart_add ( self , items , CartId = None , HMAC = None , ** kwargs ) :
    """CartAdd: add items to an existing Amazon cart.

    :param items:
        A dictionary describing one item to add, or a list of such
        dictionaries.  At least one item is required (an empty cart
        cannot be created).
        example: [{'offer_id': 'rt2ofih3f389nwiuhf8934z87o3f4h',
                   'quantity': 1}]
    :param CartId: Id of Cart
    :param HMAC: HMAC of Cart, see CartCreate for more info
    :return:
        An :class:`~.AmazonCart`.
    :raises CartException: when CartId/HMAC are missing or more than 10
        items are supplied at once.
    """

    if not CartId or not HMAC :
        raise CartException ( 'CartId and HMAC required for CartAdd call' )
    # Accept a single item dict as a convenience; normalise to a list.
    item_list = [ items ] if isinstance ( items , dict ) else items
    if len ( item_list ) > 10 :
        raise CartException ( "You can't add more than 10 items at once" )
    # The API expects flattened, indexed parameter names per item.
    for idx , entry in enumerate ( item_list ) :
        kwargs [ 'Item.{0}.OfferListingId' . format ( idx ) ] = entry [ 'offer_id' ]
        kwargs [ 'Item.{0}.Quantity' . format ( idx ) ] = entry [ 'quantity' ]
    raw_response = self . api . CartAdd ( CartId = CartId , HMAC = HMAC , ** kwargs )
    updated_cart = AmazonCart ( objectify . fromstring ( raw_response ) )
    self . _check_for_cart_error ( updated_cart )
    return updated_cart
|
def _identify_heterogeneity_blocks_hmm ( in_file , params , work_dir , somatic_info ) :
    """Use a HMM to identify blocks of heterogeneity to use for calculating allele frequencies.

    The goal is to subset the genome to a more reasonable section that contains potential
    loss of heterogeneity or other allele frequency adjustment based on selection.

    :param in_file: input file passed through to the shared block finder.
    :param params: configuration dict; uses ``params["hetblock"]["allowed_misses"]``
        and ``params["hetblock"]["min_alleles"]``.
    :param work_dir: working directory for outputs.
    :param somatic_info: somatic sample information object.
    """

    def _segment_by_hmm ( chrom , freqs , coords ) :
        # Yields (start, end) coordinate pairs of heterozygote runs that are
        # long enough (>= min_alleles) and not broken by too many misses.
        cur_coords = [ ]
        # Fix: previously ``num_misses`` was unbound if the very first
        # predicted state was a miss, causing an UnboundLocalError.
        num_misses = 0
        for j , state in enumerate ( _predict_states ( freqs ) ) :
            if state == 0 : # heterozygote region
                if len ( cur_coords ) == 0 :
                    # Starting a fresh block resets the miss counter.
                    num_misses = 0
                cur_coords . append ( coords [ j ] )
            else :
                num_misses += 1
                if num_misses > params [ "hetblock" ] [ "allowed_misses" ] :
                    # Too many misses: emit the block if it is long enough,
                    # then start over.
                    if len ( cur_coords ) >= params [ "hetblock" ] [ "min_alleles" ] :
                        yield min ( cur_coords ) , max ( cur_coords )
                    cur_coords = [ ]
        # Flush any trailing block at the end of the chromosome.
        if len ( cur_coords ) >= params [ "hetblock" ] [ "min_alleles" ] :
            yield min ( cur_coords ) , max ( cur_coords )
    return _identify_heterogeneity_blocks_shared ( in_file , _segment_by_hmm , params , work_dir , somatic_info )
|
def get_os ( detailed = False ) :
    """Retrieve local operating system environment characteristics.

    Args:
        detailed (bool): when True, return platform-specific details in
            addition to the OS type.

    Returns:
        dict: key/value pairs describing os information; always contains
        'os_type'.  Returns None when ``detailed`` is True on an
        unsupported platform.
    """

    os_type = None
    try :
        os_type = platform . system ( )
        if os_type == 'Linux' :
            os_detail = platform . uname ( )
            # platform.linux_distribution() was deprecated in 3.5 and
            # removed in Python 3.8; fall back to None when unavailable
            # instead of crashing.
            dist_fn = getattr ( platform , 'linux_distribution' , None )
            distribution = dist_fn ( ) if dist_fn is not None else None
            HOME = os . environ [ 'HOME' ]
            username = os . getenv ( 'USER' )
        elif os_type == 'Windows' :
            username = os . getenv ( 'username' )
            HOME = 'C:\\Users\\' + username
        elif os_type == 'Java' :
            logger . warning ( 'Unsupported OS. No information' )
    except OSError :
        # Bare ``raise`` preserves the original traceback.
        raise
    except Exception as e :
        logger . exception ( '%s: problem determining local os environment %s' % ( inspect . stack ( ) [ 0 ] [ 3 ] , str ( e ) ) )
    if detailed and os_type == 'Linux' :
        return { 'os_type' : os_type , 'os_detail' : os_detail , 'linux_distribution' : distribution , 'HOME' : HOME }
    elif detailed and os_type == 'Windows' :
        # NOTE(review): this returns the ``platform`` module object itself,
        # preserved from the original -- looks unintentional; verify callers.
        return { 'os_type' : os_type , 'platform' : platform , 'HOME' : HOME }
    elif not detailed :
        return { 'os_type' : os_type }
|
def activate ( self , experiment_key , user_id , attributes = None ) :
    """Bucket the visitor and send an impression event to Optimizely.

    Args:
        experiment_key: Experiment which needs to be activated.
        user_id: ID for user.
        attributes: Dict representing user attributes and values which need to be recorded.

    Returns:
        Variation key representing the variation the user will be bucketed in.
        None if user is not in experiment or if experiment is not Running.
    """

    # Guard clauses: bail out early on an unusable datafile or bad inputs.
    if not self . is_valid :
        self . logger . error ( enums . Errors . INVALID_DATAFILE . format ( 'activate' ) )
        return None
    if not validator . is_non_empty_string ( experiment_key ) :
        self . logger . error ( enums . Errors . INVALID_INPUT_ERROR . format ( 'experiment_key' ) )
        return None
    if not isinstance ( user_id , string_types ) :
        self . logger . error ( enums . Errors . INVALID_INPUT_ERROR . format ( 'user_id' ) )
        return None
    chosen_key = self . get_variation ( experiment_key , user_id , attributes )
    if not chosen_key :
        # User is not part of the experiment (or it is not running).
        self . logger . info ( 'Not activating user "%s".' % user_id )
        return None
    # Resolve full experiment/variation objects for the impression event.
    experiment = self . config . get_experiment_from_key ( experiment_key )
    variation = self . config . get_variation_from_key ( experiment_key , chosen_key )
    self . logger . info ( 'Activating user "%s" in experiment "%s".' % ( user_id , experiment . key ) )
    self . _send_impression_event ( experiment , variation , user_id , attributes )
    return variation . key
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.