signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def _Completion ( self , match ) : # pylint : disable = C6114
r"""Replaces double square brackets with variable length completion .
Completion cannot be mixed with regexp matching or ' \ ' characters
i . e . ' [ [ ( \ n ) ] ] would become ( \ ( n ) ? ) ? . '
Args :
match : A regex Match ( ) object .
Returns :
String of the format ' ( a ( b ( c ( d ) ? ) ? ) ? ) ? ' ."""
|
# Strip the outer ' [ [ ' & ' ] ] ' and replace with ( ) ? regexp pattern .
word = str ( match . group ( ) ) [ 2 : - 2 ]
return '(' + ( '(' ) . join ( word ) + ')?' * len ( word )
|
def set_dep(self, *deps) -> "Model":
    """Register the parent models this model depends on.

    :param deps: The parent models: objects or meta dicts.
    :return: self
    """
    resolved = []
    for dep in deps:
        # Accept either a ready-made meta dict or a model carrying one.
        resolved.append(dep if isinstance(dep, dict) else dep.meta)
    self.meta["dependencies"] = resolved
    return self
|
def on_initialize_simulants(self, pop_data: SimulantData):
    """Called by the simulation whenever new simulants are added.

    This component is responsible for creating and filling four columns
    in the population state table:

    'age'
        The age of the simulant in fractional years.
    'sex'
        The sex of the simulant. One of {'Male', 'Female'}.
    'alive'
        Whether or not the simulant is alive. One of {'alive', 'dead'}.
    'entrance_time'
        The time that the simulant entered the simulation. The 'birthday'
        for simulants that enter as newborns. A `pandas.Timestamp`.

    Parameters
    ----------
    pop_data
        A record containing the index of the new simulants, the start of
        the time step the simulants are added on, the width of the time
        step, and the age boundaries for the simulants to generate.
    """
    age_start = self.config.population.age_start
    age_end = self.config.population.age_end
    if age_start == age_end:
        # Degenerate age range: spread ages over one creation window,
        # converted to fractional years (365-day year).
        age_window = pop_data.creation_window / pd.Timedelta(days=365)
    else:
        age_window = age_end - age_start
    # One uniform draw per simulant, scaled into the age window.
    age_draw = self.age_randomness.get_draw(pop_data.index)
    age = age_start + age_draw * age_window
    if self.with_common_random_numbers:
        # NOTE(review): entrance_time/age are created and registered before
        # the remaining columns — presumably these are the CRN key columns
        # that must exist before further draws; confirm against register().
        population = pd.DataFrame({'entrance_time': pop_data.creation_time, 'age': age.values}, index=pop_data.index)
        self.register(population)
        population['sex'] = self.sex_randomness.choice(pop_data.index, ['Male', 'Female'])
        population['alive'] = 'alive'
    else:
        # No CRN: all four columns can be built in one shot.
        population = pd.DataFrame({'age': age.values, 'sex': self.sex_randomness.choice(pop_data.index, ['Male', 'Female']), 'alive': pd.Series('alive', index=pop_data.index), 'entrance_time': pop_data.creation_time}, index=pop_data.index)
    self.population_view.update(population)
|
def configfield_ref_role(name, rawtext, text, lineno, inliner, options=None, content=None):
    """Process a role referencing Task configuration field nodes.

    These nodes are created by the ``lsst-config-fields``,
    ``lsst-task-config-subtasks``, and ``lsst-task-config-subtasks``
    directives.

    Parameters
    ----------
    name
        The role name used in the document.
    rawtext
        The entire markup snippet, with role.
    text
        The text marked with the role.
    lineno
        The line number where ``rawtext`` appears in the input.
    inliner
        The inliner instance that called us.
    options
        Directive options for customization.
    content
        The directive content for customization.

    Returns
    -------
    nodes : `list`
        List of nodes to insert into the document.
    messages : `list`
        List of system messages.

    See also
    --------
    `format_configfield_id`
    `pending_configfield_xref`
    `process_pending_configfield_xref_nodes`
    """
    # Resolution happens later; emit a pending xref placeholder node now.
    xref_node = pending_configfield_xref(rawsource=text)
    return [xref_node], []
|
def sendResult(self, future):
    """Ship a terminated future back to its parent worker."""
    reply = copy.copy(future)
    # Strip fields that are meaningless (or unpicklable) outside this
    # process before serialising.
    reply.callable = reply.args = reply.kargs = reply.greenlet = None
    if not reply.sendResultBack:
        # The parent did not ask for the value, so don't pay to ship it.
        reply.resultValue = None
    self._sendReply(reply.id.worker, pickle.dumps(reply, pickle.HIGHEST_PROTOCOL))
|
def get_all_clusters(resource_root, view=None):
    """Fetch every cluster known to the API.

    @param resource_root: The root Resource object.
    @param view: Optional view name, forwarded as a query parameter.
    @return: A list of ApiCluster objects.
    """
    params = dict(view=view) if view else None
    return call(resource_root.get, CLUSTERS_PATH, ApiCluster, True, params=params)
|
def tornadopath2openapi(urlspec, method):
    """Convert a Tornado URLSpec into an OpenAPI-compliant path template.

    :param urlspec: routing entry whose regex path is converted
    :type urlspec: URLSpec
    :param method: handler HTTP method, used to recover argument names
    :type method: function
    """
    if sys.version_info >= (3, 3):
        handler_args = list(inspect.signature(method).parameters.keys())[1:]
    else:
        # Tornado coroutines hide the real signature behind a wrapper.
        if getattr(method, '__tornado_coroutine__', False):
            method = method.__wrapped__
        handler_args = inspect.getargspec(method).args[1:]
    placeholders = tuple('{{{}}}'.format(arg) for arg in handler_args)
    try:
        template = urlspec.matcher._path
    except AttributeError:
        # tornado < 4.5 kept the pattern directly on the spec
        template = urlspec._path
    path = template % placeholders
    if path.count('/') > 1:
        # Drop trailing optional-slash regex artifacts.
        path = path.rstrip('/?*')
    return path
|
def time(host=None, port=None, db=None, password=None):
    '''Return the current server UNIX time in seconds

    CLI Example:

    .. code-block:: bash

        salt '*' redis.time'''
    conn = _connect(host, port, db, password)
    # redis TIME returns (seconds, microseconds); only seconds are wanted.
    return conn.time()[0]
|
def _to_DOM ( self ) :
"""Dumps object data to a fully traversable DOM representation of the
object .
: returns : a ` ` xml . etree . Element ` ` object"""
|
root_node = ET . Element ( "ozone" )
reference_time_node = ET . SubElement ( root_node , "reference_time" )
reference_time_node . text = str ( self . _reference_time )
reception_time_node = ET . SubElement ( root_node , "reception_time" )
reception_time_node . text = str ( self . _reception_time )
interval_node = ET . SubElement ( root_node , "interval" )
interval_node . text = str ( self . _interval )
value_node = ET . SubElement ( root_node , "value" )
value_node . text = str ( self . du_value )
root_node . append ( self . _location . _to_DOM ( ) )
return root_node
|
def update_sentry_logging(logging_dict: DictStrAny, sentry_dsn: Optional[str], *loggers: str, level: Union[str, int] = None, **kwargs: Any) -> None:
    r"""Enable Sentry logging if a Sentry DSN is passed.

    .. note::
        Sentry logging requires the `raven
        <http://pypi.python.org/pypi/raven>`_ library to be installed.

    **Usage**::

        from logging.config import dictConfig

        LOGGING = default_logging_dict()
        SENTRY_DSN = '...'

        update_sentry_logging(LOGGING, SENTRY_DSN)
        dictConfig(LOGGING)

    To push data to Sentry via ``aiohttp.client`` (eliminating sync calls
    from an ``aiohttp.web`` app), pass ``transport=AioHttpTransport`` from
    ``raven_aiohttp``.

    :param logging_dict: Logging dict.
    :param sentry_dsn:
        Sentry DSN value. If ``None`` do not update logging dict at all.
    :param \*loggers:
        Use Sentry logging for each logger in the sequence. If the sequence
        is empty use Sentry logging for each available logger.
    :param level: Log level for the Sentry handler (default ``'WARNING'``).
    :param \**kwargs: Additional kwargs to be passed to ``SentryHandler``.
    """
    if not sentry_dsn:
        # No Sentry DSN, nothing to do.
        return
    # Register the Sentry handler.
    handler = dict(kwargs)
    handler['class'] = 'raven.handlers.logging.SentryHandler'
    handler['dsn'] = sentry_dsn
    handler['level'] = level or 'WARNING'
    logging_dict['handlers']['sentry'] = handler
    # Empty loggers sequence means: attach to every configured logger.
    target_loggers = loggers or tuple(logging_dict['loggers'])
    for logger_name in target_loggers:
        config = logging_dict['loggers'].get(logger_name)
        if not config:
            # Ignore missing loggers.
            continue
        if config.pop('ignore_sentry', False):
            # Logger explicitly opted out via its config.
            continue
        # Handlers list should exist; append 'sentry' and freeze as a tuple.
        config['handlers'] = tuple(config.setdefault('handlers', [])) + ('sentry',)
|
def loadCurve(data, groups, thresholds, absvals, fs, xlabels):
    """Re-create the live progress plot from a completed test's data set.

    Averages across reps. The number of thresholds must match the size of
    the channel dimension.
    """
    pw = ProgressWidget(groups, (xlabels[0], xlabels[-1]))
    # Mean spike count per rep for every trace.
    spike_counts = []
    for itrace in range(data.shape[0]):
        total = 0
        for ichan in range(data.shape[2]):
            flattened = data[itrace, :, ichan, :].flatten()
            total += len(spikestats.spike_times(flattened, thresholds[ichan], fs, absvals[ichan]))
        spike_counts.append(total / (data.shape[1] * data.shape[2]))
    # Replay the counts onto the widget in (group, xlabel) order.
    counts = iter(spike_counts)
    for g in groups:
        for x in xlabels:
            pw.setPoint(x, g, next(counts))
    return pw
|
def _resize(self, ratio_x, ratio_y, resampling):
    """Return a copy of this raster scaled by the given x/y ratios."""
    target_width = int(np.ceil(self.width * ratio_x))
    target_height = int(np.ceil(self.height * ratio_y))
    # Shrinking the pixel grid means growing the per-pixel ground size.
    target_affine = self.affine * Affine.scale(1 / ratio_x, 1 / ratio_y)
    if not self.not_loaded():
        return self._reproject(target_width, target_height, target_affine, resampling=resampling)
    # Data not in memory yet: let the reader decimate while windowing.
    full_window = rasterio.windows.Window(0, 0, self.width, self.height)
    return self.get_window(full_window, xsize=target_width, ysize=target_height, resampling=resampling)
|
def view_sbo(self):
    """Print the slackbuilds.org information page for this package.

    Renders the repository header, package metadata (url, description,
    sources, requirements) and the menu of single-key commands.
    """
    sbo_url = self.sbo_url.replace("/slackbuilds/", "/repository/")
    # br1/br2 wrap each hotkey letter in parentheses when colors are off,
    # so the key stays distinguishable; fix_sp compensates alignment.
    br1, br2, fix_sp = "", "", " "
    if self.meta.use_colors in ["off", "OFF"]:
        br1 = "("
        br2 = ")"
        fix_sp = ""
    print("")
    # new line at start
    self.msg.template(78)
    print("| {0}{1}SlackBuilds Repository{2}".format(" " * 28, self.grey, self.endc))
    self.msg.template(78)
    # Breadcrumb: slackware version > repository section > package name.
    print("| {0} > {1} > {2}{3}{4}".format(slack_ver(), sbo_url.split("/")[-3].title(), self.cyan, self.name, self.endc))
    self.msg.template(78)
    print("| {0}Package url{1}: {2}".format(self.green, self.endc, sbo_url))
    self.msg.template(78)
    print("| {0}Description: {1}{2}".format(self.green, self.endc, self.sbo_desc))
    print("| {0}SlackBuild: {1}{2}".format(self.green, self.endc, self.sbo_dwn.split("/")[-1]))
    print("| {0}Sources: {1}{2}".format(self.green, self.endc, (", ".join([src.split("/")[-1] for src in self.source_dwn]))))
    print("| {0}Requirements: {1}{2}".format(self.yellow, self.endc, ", ".join(self.sbo_req)))
    self.msg.template(78)
    # Menu of available single-key actions.
    print("| {0}R{1}{2}EADME View the README file".format(self.red, self.endc, br2))
    print("| {0}S{1}{2}lackBuild View the .SlackBuild " "file".format(self.red, self.endc, br2))
    print("| In{0}{1}f{2}{3}o{4} View the .info " "file".format(br1, self.red, self.endc, br2, fix_sp))
    # doinst.sh entry only when the package actually ships one.
    if "doinst.sh" in self.sbo_files.split():
        print("| D{0}{1}o{2}{3}inst.sh{4} View the doinst.sh " "file".format(br1, self.red, self.endc, br2, fix_sp))
    print("| {0}D{1}{2}ownload Download this package".format(self.red, self.endc, br2))
    print("| {0}B{1}{2}uild Download and build".format(self.red, self.endc, br2))
    print("| {0}I{1}{2}nstall Download/Build/Install".format(self.red, self.endc, br2))
    print("| {0}C{1}{2}lear Clear screen".format(self.red, self.endc, br2))
    print("| {0}Q{1}{2}uit Quit".format(self.red, self.endc, br2))
    self.msg.template(78)
|
def as_flow(cls, obj):
    """Convert ``obj`` into a Flow. Accepts a filepath, dict, or Flow object.

    :param obj: a Flow instance (returned unchanged), a pickle filepath,
        or a mapping accepted by ``from_dict``.
    :return: a Flow instance.
    :raises TypeError: if ``obj`` is none of the above.
    """
    # Bug fix: collections.Mapping was removed in Python 3.10; the ABC
    # lives in collections.abc (available since Python 3.3).
    from collections.abc import Mapping
    if isinstance(obj, cls):
        return obj
    if is_string(obj):
        return cls.pickle_load(obj)
    elif isinstance(obj, Mapping):
        return cls.from_dict(obj)
    else:
        raise TypeError("Don't know how to convert type %s into a Flow" % type(obj))
|
def shutdown(self):
    """Broadcast a shutdown message to the other workers."""
    if not self.OPEN:
        return
    self.OPEN = False
    scoop.SHUTDOWN_REQUESTED = True
    self.socket.send(b"SHUTDOWN")
    self.socket.close()
    self.infoSocket.close()
    # Give the message a moment to flush before the process exits.
    time.sleep(0.3)
|
def generate_data_for_edit_page(self):
    """Build the dictionary representation shown on the edit page.

    Uses the custom edit form's representation when one is configured,
    otherwise falls back to the default simple representation.

    :return: dict (empty when editing is disabled)
    """
    if not self.can_edit:
        return {}
    form = self.edit_form
    return form.to_dict() if form else self.generate_simple_data_page()
|
def _get_sdict ( self , env ) :
"""Returns a dictionary mapping all of the source suffixes of all
src _ builders of this Builder to the underlying Builder that
should be called first .
This dictionary is used for each target specified , so we save a
lot of extra computation by memoizing it for each construction
environment .
Note that this is re - computed each time , not cached , because there
might be changes to one of our source Builders ( or one of their
source Builders , and so on , and so on . . . ) that we can ' t " see . "
The underlying methods we call cache their computed values ,
though , so we hope repeatedly aggregating them into a dictionary
like this won ' t be too big a hit . We may need to look for a
better way to do this if performance data show this has turned
into a significant bottleneck ."""
|
sdict = { }
for bld in self . get_src_builders ( env ) :
for suf in bld . src_suffixes ( env ) :
sdict [ suf ] = bld
return sdict
|
def deactivate_user(self, user):
    """Deactivate a user and drop their active sessions.

    :param user: A :class:`invenio_accounts.models.User` instance.
    :returns: Whatever the parent datastore's ``deactivate_user`` returns.
    """
    deactivated = super(SessionAwareSQLAlchemyUserDatastore, self).deactivate_user(user)
    if deactivated:
        # A deactivated account must not keep any live sessions around.
        delete_user_sessions(user)
    return deactivated
|
def setspan(self, *args):
    """Reset the span to the given children, erasing all existing data.

    Arguments:
        * args: Instances of :class:`Word`, :class:`Morpheme` or
          :class:`Phoneme`
    """
    self.data = []
    for element in args:
        self.append(element)
|
def SetEncoding(sval):
    """Set the module-level encoding according to the text passed.

    :param sval: text specification for the desired model
    :raises InvalidSelection: if ``sval`` names no known encoding
    """
    global encoding
    # Case-insensitive lookup table of the supported models.
    choices = {
        "additive": Encoding.Additive,
        "dominant": Encoding.Dominant,
        "recessive": Encoding.Recessive,
        "genotype": Encoding.Genotype,
        "raw": Encoding.Raw,
    }
    key = sval.lower()
    if key not in choices:
        raise InvalidSelection("Invalid encoding, %s, selected" % (sval))
    encoding = choices[key]
|
def limit(self, limit):
    """Set an absolute limit on the number of images to return.

    Pass ``None`` to return as many results as needed; the default is 50
    posts.
    """
    merged = join_params(self.parameters, {"limit": limit})
    # Return a fresh query object rather than mutating this one.
    return self.__class__(**merged)
|
def step(self, provided_inputs):
    """Run the simulation for one cycle.

    :param provided_inputs: a dictionary mapping WireVectors (or their
        names) to their values for this step,
        e.g. ``{wire: 3, "wire_name": 17}``
    """
    # validate_inputs: every supplied value must fit in its wire's bitwidth.
    for wire, value in provided_inputs.items():
        wire = self.block.get_wirevector_by_name(wire) if isinstance(wire, str) else wire
        if value > wire.bitmask or value < 0:
            raise PyrtlError("Wire {} has value {} which cannot be represented" " using its bitwidth".format(wire, value))
    # building the simulation data: provided inputs plus the current
    # register and memory state.
    ins = {self._to_name(wire): value for wire, value in provided_inputs.items()}
    ins.update(self.regs)
    ins.update(self.mems)
    # propagate through logic
    self.regs, self.outs, mem_writes = self.sim_func(ins)
    # Apply this cycle's memory writes.
    for mem, addr, value in mem_writes:
        self.mems[mem][addr] = value
    # for tracer compatibility: context holds this step's outputs plus its
    # inputs (which also carry the old register values).
    self.context = self.outs.copy()
    self.context.update(ins)
    # also gets old register values
    if self.tracer is not None:
        self.tracer.add_fast_step(self)
    # check the rtl assertions
    check_rtl_assertions(self)
|
def __deserialize_model(self, data, klass):
    """Deserialize a list or dict into a model instance.

    :param data: dict, list.
    :param klass: class literal.
    :return: model object.
    """
    if not klass.swagger_types:
        # Nothing declared to deserialize; hand the raw data back.
        return data
    init_kwargs = {}
    for attr, attr_type in iteritems(klass.swagger_types):
        key = klass.attribute_map[attr]
        # Only pull attributes that are actually present in the payload.
        if data is not None and key in data and isinstance(data, (list, dict)):
            init_kwargs[attr] = self.__deserialize(data[key], attr_type)
    return klass(**init_kwargs)
|
def sed_inplace(filename, pattern, repl):
    '''Pure-Python equivalent of in-place `sed` substitution: e.g.,
    `sed -i -e 's/'${pattern}'/'${repl}'"${filename}"`.'''
    # Compile once; the pattern is applied to every line.
    compiled = re.compile(pattern)
    # Open the scratch file in text mode explicitly: NamedTemporaryFile
    # defaults to binary ("w+b"), which would impose needless encoding
    # constraints here.
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as out_file:
        with open(filename) as in_file:
            out_file.writelines(compiled.sub(repl, line) for line in in_file)
    # Preserve the original file's attributes (e.g. permissions), then
    # swap the munged copy into place.
    shutil.copystat(filename, out_file.name)
    shutil.move(out_file.name, filename)
|
def run_until(self, endtime, timeunit='minutes', save=True):
    """Run the case until the specified end time.

    :param endtime: simulation time to run until
    :param timeunit: unit of ``endtime`` (default ``'minutes'``)
    :param save: when exactly ``True``, save the case afterwards
    """
    self.case.solver.Integrator.rununtil(endtime, timeunit)
    # Preserve strict identity check: only the literal True triggers a save.
    if save is True:
        self.case.save()
|
def status(self):
    """Return a single status label for this invoice.

    An invoice can satisfy several of the status flags at once; this
    reports the highest-priority one (paid > forgiven > closed > open).
    """
    if self.paid:
        return self.STATUS_PAID
    elif self.forgiven:
        return self.STATUS_FORGIVEN
    elif self.closed:
        return self.STATUS_CLOSED
    return self.STATUS_OPEN
|
def item_from_event(self, domain_event):
    """Construct a sequenced item from the given domain event."""
    return self.construct_sequenced_item(self.construct_item_args(domain_event))
|
def get_data_id_by_slug(self, slug):
    """Find the data object ID for the given slug.

    This queries the Resolwe API and therefore requires network access.
    """
    base = os.environ.get('RESOLWE_HOST_URL')
    query = urllib.parse.urljoin(base, '/api/data?slug={}&fields=id'.format(slug))
    with urllib.request.urlopen(query, timeout=60) as response:
        results = json.loads(response.read().decode('utf-8'))
    # Exactly one match is required; anything else is an error.
    if not results:
        raise ValueError('Data not found for slug {}'.format(slug))
    if len(results) > 1:
        raise ValueError('More than one data object returned for slug {}'.format(slug))
    return results[0]['id']
|
def loadInstance(self):
    """Load the plugin from the proxy information created from the
    registry file.

    Puts the plugin's package root at the front of ``sys.path`` and then
    imports the package. On failure, a stub ``Plugin`` carrying the error
    replaces the instance and the problem is logged.
    """
    if self._loaded:
        return
    self._loaded = True
    module_path = self.modulePath()
    package = projex.packageFromPath(module_path)
    path = os.path.normpath(projex.packageRootPath(module_path))
    # Ensure this package root wins any name collisions on sys.path.
    if path in sys.path:
        sys.path.remove(path)
    sys.path.insert(0, path)
    try:
        __import__(package)
    except Exception as e:  # bug fix: "except Exception, e" is Python-2-only syntax
        err = Plugin(self.name(), self.version())
        err.setError(e)
        err.setFilepath(module_path)
        self._instance = err
        self.setError(e)
        msg = "%s.plugin('%s') errored loading instance from %s"
        opts = (self.proxyClass().__name__, self.name(), module_path)
        logger.warning(msg % opts)
        logger.error(e)
|
def attach_remote(self, id, name, **kwargs):
    """Create a remote instance of a widget.

    Arguments:
        - id (str): widget id (prefixed with the owning client's id)
        - name (str): widget type name

    Keyword Arguments:
        - any further arguments you wish to pass to the widget constructor
    """
    owner = id.split(".")[0]
    # Dispatch through a proxy bound to the owning client's link (if any).
    link = getattr(self.clients[owner], "link", None)
    widget = self.make_widget(id, name, dispatcher=ProxyDispatcher(self, link=link), **kwargs)
    self.store_widget(widget)
    self.log_debug("Attached widget: %s" % id)
|
def fields(self):
    """Return the feature's field names (a dict keys view)."""
    # A wrapped feature nests its attributes one level deeper.
    source = self._dict.get('feature', self._dict)
    self._attributes = source['attributes']
    return self._attributes.keys()
|
def setup_figure(self, figure):
    """Apply every themeable's figure-level changes.

    Called once with a figure object before any plotting has completed.
    Subclasses that override this method should make sure the base class
    method is called.
    """
    for themeable in self.themeables.values():
        themeable.setup_figure(figure)
|
def delete_resource_subscription(self, device_id=None, resource_path=None, fix_path=True):
    """Unsubscribe from device and/or resource_path updates.

    If device_id or resource_path is None, or this method is called
    without arguments, all subscriptions are removed. Calling it with
    only device_id removes subscriptions for all resources on the given
    device.

    :param device_id: device to unsubscribe events from. If not provided,
        all registered devices will be unsubscribed.
    :param resource_path: resource_path to unsubscribe events from. If not
        provided, all resource paths will be unsubscribed.
    :param fix_path: remove leading / in resource path to ensure API works.
    :return: void
    """
    devices = [device_id] if device_id else list(self._queues.keys())
    for dev in devices:
        if resource_path:
            paths = [resource_path]
        else:
            # Bug fix: compute each device's own paths instead of pooling
            # every device's paths together, which made the queue deletion
            # below raise KeyError for paths belonging to another device.
            paths = list(self._queues[dev].keys())
        for path in paths:
            # Fix the path, if required: the API expects no leading slash.
            fixed_path = path[1:] if fix_path and path.startswith("/") else path
            # Make request to API, ignoring the result.
            # Bug fix: pass the device currently being processed, not the
            # original argument (which is None in the all-devices case).
            self._delete_subscription(dev, fixed_path)
            # Remove the queue entry; tolerate already-removed paths.
            self._queues[dev].pop(path, None)
    return
|
def _create_api_call(self, method, _url, kwargs):
    """Create an APICall object, record it in the history, and return it.

    :param method: str of the html method ['GET', 'POST', 'PUT', 'DELETE']
    :param _url: str of the sub url of the api call (ex. g/device/list)
    :param kwargs: dict of additional arguments
        NOTE(review): `kwargs` is accepted but never used in this body —
        confirm whether it should be forwarded to ApiCall.
    :return: ApiCall
    """
    api_call = self.ApiCall(name='%s.%s' % (_url, method), label='ID_%s' % self._count, base_uri=self.base_uri, timeout=self.timeout, headers=self.headers, cookies=self.cookies, proxies=self.proxies, accepted_return=self.accepted_return or 'json')
    if self.max_history:
        self._count += 1
        # count of _calls
        if len(self) > self.max_history:
            # NOTE(review): _calls is indexed below with 'ID_<n>' string
            # keys, yet pop(0) pops the literal key 0 — for a plain dict
            # this raises KeyError; confirm what container _calls is.
            self._calls.pop(0)
        self._calls['ID_%s' % self._count] = api_call
    return api_call
|
def new_gp_object(typename):
    """Allocate a GPhoto2 object of the given type and return a pointer to it.

    Creates an indirect (double) pointer, invokes the type's matching
    constructor function, then dereferences once.

    :param typename: Name of the type to create.
    :return: A pointer to the specified data type.
    """
    indirect = backend.ffi.new("{0}**".format(typename))
    backend.CONSTRUCTORS[typename](indirect)
    return indirect[0]
|
def image_url(self, pixel_size=None):
    """Return the URL of the user icon at the requested pixel size.

    Without ``pixel_size`` (or when that size is unavailable) the
    full-size image URL is returned; ``None`` if there is no profile.
    """
    if "profile" not in self._raw:
        return None
    profile = self._raw["profile"]
    if pixel_size:
        sized_key = "image_%s" % pixel_size
        if sized_key in profile:
            return profile[sized_key]
    # Fall back to the full-size image.
    return profile[self._DEFAULT_IMAGE_KEY]
|
def jobs_insert_load(self, source, table_name, append=False, overwrite=False, create=False, source_format='CSV', field_delimiter=',', allow_jagged_rows=False, allow_quoted_newlines=False, encoding='UTF-8', ignore_unknown_values=False, max_bad_records=0, quote='"', skip_leading_rows=0):
    """Issues a request to load data from GCS to a BQ table.

    Args:
      source: the URL of the source bucket(s). Can include wildcards, and
          can be a single string argument or a list.
      table_name: a tuple representing the full name of the destination table.
      append: if True append onto existing table contents.
      overwrite: if True overwrite existing table contents.
      create: if True, create the table if it doesn't exist.
      source_format: the format of the data; default 'CSV'. Other options
          are DATASTORE_BACKUP or NEWLINE_DELIMITED_JSON.
      field_delimiter: The separator for fields in a CSV file. BigQuery
          converts the string to ISO-8859-1 encoding, and then uses the
          first byte of the encoded string to split the data as raw binary
          (default ',').
      allow_jagged_rows: If True, accept rows in CSV files that are missing
          trailing optional columns; the missing values are treated as
          nulls (default False).
      allow_quoted_newlines: If True, allow quoted data sections in CSV
          files that contain newline characters (default False).
      encoding: The character encoding of the data, either 'UTF-8' (the
          default) or 'ISO-8859-1'.
      ignore_unknown_values: If True, accept rows that contain values that
          do not match the schema; the unknown values are ignored (default
          False).
      max_bad_records: The maximum number of bad records that are allowed
          (and ignored) before returning an 'invalid' error in the Job
          result (default 0).
      quote: The value used to quote data sections in a CSV file; default
          '"'. If your data does not contain quoted sections, set the
          property value to an empty string. If your data contains quoted
          newline characters, you must also enable allow_quoted_newlines.
      skip_leading_rows: A number of rows at the top of a CSV file to skip
          (default 0).

    Returns:
      A parsed result object.

    Raises:
      Exception if there is an error performing the operation.
    """
    url = Api._ENDPOINT + (Api._JOBS_PATH % (table_name.project_id, ''))
    # NOTE(review): `basestring` only exists on Python 2 (or via a compat
    # shim) — confirm this module provides it when run on Python 3.
    if isinstance(source, basestring):
        source = [source]
    # append wins over overwrite when both flags are set.
    write_disposition = 'WRITE_EMPTY'
    if overwrite:
        write_disposition = 'WRITE_TRUNCATE'
    if append:
        write_disposition = 'WRITE_APPEND'
    data = {'kind': 'bigquery#job', 'configuration': {'load': {'sourceUris': source, 'destinationTable': {'projectId': table_name.project_id, 'datasetId': table_name.dataset_id, 'tableId': table_name.table_id}, 'createDisposition': 'CREATE_IF_NEEDED' if create else 'CREATE_NEVER', 'writeDisposition': write_disposition, 'sourceFormat': source_format, 'ignoreUnknownValues': ignore_unknown_values, 'maxBadRecords': max_bad_records, }}}
    if source_format == 'CSV':
        # CSV-specific tuning options only apply to CSV loads.
        load_config = data['configuration']['load']
        load_config.update({'fieldDelimiter': field_delimiter, 'allowJaggedRows': allow_jagged_rows, 'allowQuotedNewlines': allow_quoted_newlines, 'quote': quote, 'encoding': encoding, 'skipLeadingRows': skip_leading_rows})
    return datalab.utils.Http.request(url, data=data, credentials=self._credentials)
|
def synchronise_device_state(self, device_state, authentication_headers):
    """Synchronise the component states with AVS.

    Component state must be synchronised with AVS after establishing the
    downchannel stream in order to create a persistent connection with AVS.

    Note that currently this function is paying lip-service to
    synchronising the device state: the device state is hard-coded.
    """
    payload = {'context': device_state, 'event': {'header': {'namespace': 'System', 'name': 'SynchronizeState', 'messageId': ''}, 'payload': {}}}
    # AVS expects the event metadata as one part of a multipart body.
    multipart_data = MultipartEncoder(fields=[('metadata', ('metadata', json.dumps(payload), 'application/json', {'Content-Disposition': "form-data; name='metadata'"})), ], boundary='boundary')
    headers = {**authentication_headers, 'Content-Type': multipart_data.content_type}
    # NOTE(review): the AVS /events endpoint is documented as POST, and a
    # request body is being sent here — 'GET' looks wrong; confirm.
    stream_id = self.connection.request('GET', '/v20160207/events', body=multipart_data, headers=headers, )
    response = self.connection.get_response(stream_id)
    # Only 204 (No Content) or 200 (OK) are treated as success.
    assert response.status in [http.client.NO_CONTENT, http.client.OK]
|
def register(action):
    """Register an action (or an iterable of actions) for later queries.

    Action registration is used to support generating lists of permitted
    actions from a permission set and an object pattern; only registered
    actions will be returned by such queries.
    """
    if isinstance(action, str):
        # Promote a bare name to an Action and re-register.
        Action.register(Action(action))
    elif isinstance(action, Action):
        Action.registered.add(action)
    else:
        # Assume an iterable of actions and register each one.
        for item in action:
            Action.register(item)
|
def load_model():
    """Load the n-gram language model for mathematics (ARPA format) that
    ships with hwrt.

    Returns
    -------
    A NgramLanguageModel object
    """
    logging.info("Load language model...")
    archive = pkg_resources.resource_filename('hwrt', 'misc/ngram.arpa.tar.bz2')
    scratch = tempfile.mkdtemp()
    # NOTE: extractall is acceptable here because the archive is a trusted,
    # package-shipped resource; it would be unsafe on untrusted tars.
    with tarfile.open(archive, 'r:bz2') as tar:
        tar.extractall(path=scratch)
    with open(os.path.join(scratch, 'ngram.arpa')) as handle:
        arpa_text = handle.read()
    model = NgramLanguageModel()
    model.load_from_arpa_str(arpa_text)
    return model
|
def nullify(function):
    """Decorator: return None in place of an empty-list result.

    Any other return value — including other empty containers — passes
    through unchanged.
    """
    import functools  # local import keeps the block self-contained

    @functools.wraps(function)  # fix: preserve the wrapped function's metadata
    def wrapper(*args, **kwargs):
        value = function(*args, **kwargs)
        # isinstance (not `type(value) == list`) is the idiomatic check and
        # also covers list subclasses.
        if isinstance(value, list) and not value:
            return None
        return value
    return wrapper
|
def _all_possible_partitionings ( elements , sizes ) :
'''Helper function for Game . all _ possible _ hands ( ) . Given a set of elements
and the sizes of partitions , yields all possible partitionings of the
elements into partitions of the provided sizes .
: param set elements : a set of elements to partition .
: param list sizes : a list of sizes for the partitions . The sum of the
sizes should equal the length of the set of elements .
: yields : a tuple of tuples , each inner tuple corresponding to a partition .'''
|
try : # get the size of the current partition
size = sizes [ 0 ]
except IndexError : # base case : no more sizes left
yield ( )
return
# don ' t include the current size in the recursive calls
sizes = sizes [ 1 : ]
# iterate over all possible partitions of the current size
for partition in itertools . combinations ( elements , size ) : # recursive case : pass down the remaining elements and the remaining sizes
for other_partitions in _all_possible_partitionings ( elements . difference ( partition ) , sizes ) : # put results together and yield up
yield ( partition , ) + other_partitions
|
def get_module_files(src_directory, blacklist, list_all=False):
    """Return all available Python module files in a package directory
    and its subpackages.

    :type src_directory: str
    :param src_directory: path of the directory corresponding to the package
    :type blacklist: list or tuple
    :param blacklist: iterable of files or directories to ignore
    :type list_all: bool
    :param list_all: get files from all paths, including ones without
        __init__.py
    :rtype: list
    :return: the list of all available Python module files in the package
        and its subpackages
    """
    collected = []
    for directory, dirnames, filenames in os.walk(src_directory):
        if directory in blacklist:
            continue
        _handle_blacklist(blacklist, dirnames, filenames)
        # Without __init__.py this is not a package: prune the walk here.
        if not list_all and "__init__.py" not in filenames:
            dirnames[:] = ()
            continue
        collected.extend(os.path.join(directory, name)
                         for name in filenames if _is_python_file(name))
    return collected
|
def _send ( self , email_message ) :
"""A helper method that does the actual sending ."""
|
if not email_message . recipients ( ) :
return False
from_email = email_message . from_email
recipients = email_message . recipients ( )
try :
self . connection . messages . create ( to = recipients , from_ = from_email , body = email_message . body )
except Exception :
if not self . fail_silently :
raise
return False
return True
|
def register(name, _callable=None):
    """A decorator used to register a custom check.

    :param name: name of check
    :type: str
    :param _callable: check class or a function which returns a check instance
    :return: _callable or a decorator
    """
    def decorator(obj):
        registered_checks[name] = obj
        return obj
    # Called directly with a class/function: register it immediately.
    if _callable:
        return decorator(_callable)
    # Used as @register("name"): hand back the decorator.
    return decorator
|
def find_template_companion(template, extension='', check=True):
    """Returns the first found template companion file"""
    # NOTE(review): despite the summary, this is a generator and may yield
    # several candidate paths; it walks upward from the template's directory.
    if check and not os.path.isfile(template):
        # Template path may be '<stdin>' (click); signal "no companion".
        yield ''
        return
    # May be '<stdin>' (click)
    template = os.path.abspath(template)
    template_dirname = os.path.dirname(template)
    # Base name split on dots, e.g. 'app.conf.j2' -> ['app', 'conf', 'j2'].
    template_basename = os.path.basename(template).split('.')
    current_path = template_dirname
    # Stop the upward walk at the parent of the deepest directory shared
    # with the current working directory.
    stop_path = os.path.commonprefix((os.getcwd(), current_path))
    stop_path = os.path.dirname(stop_path)
    # Candidate files must start with the first name component plus a dot.
    token = template_basename[0] + '.'
    while True:
        for file in sorted(os.listdir(current_path)):
            if not file.startswith(token):
                continue
            if not file.endswith(extension):
                continue
            file_parts = file.split('.')
            # Accept a file whose dotted parts (minus its extension) match a
            # prefix of the template's dotted name parts.
            for i in range(1, len(template_basename)):
                if template_basename[:-i] != file_parts[:-1]:
                    continue
                if current_path == template_dirname:
                    if file_parts == template_basename:
                        continue
                # Do not accept template itself
                yield os.path.join(current_path, file)
        if current_path == stop_path:
            break
        # cd ..
        current_path = os.path.split(current_path)[0]
|
def check_response(response):
    """Check an API response; raise an exception if the server returned an error.

    :param response: a requests-style response with ``status_code`` and
        ``text`` attributes.
    :raises ServerError: on a non-2xx status code or a non-JSON body.
    :raises EmptyResponse: when the body decodes to an empty value.
    :raises UnsupportedFormat: when the server rejected the file type.
    :raises UnkownError: for any other failure reported by the server.
    """
    # Bug fix: the previous bound ('> 300') wrongly treated status 300 as
    # success; only 2xx codes are successful.
    if response.status_code < 200 or response.status_code >= 300:
        raise ServerError('API requests returned with error: %s' % response.status_code)
    try:
        response_text = loads(response.text)
    except ValueError:
        raise ServerError('The API did not returned a JSON string.')
    if not response_text:
        raise EmptyResponse()
    if 'failure' in response_text:
        # 'Falscher Dateityp' is the server's German message for a bad file type.
        if response_text['failure'] == 'Falscher Dateityp':
            raise UnsupportedFormat('Please look at picflash.org ' 'witch formats are supported')
        else:
            raise UnkownError(response_text['failure'])
|
def plot3d(self, elevation=20, azimuth=30, cmap='RdBu_r', show=True, fname=None):
    """Plot the raw data on a 3d sphere.

    This routines becomes slow for large grids because it is based on
    matplotlib3d.

    Usage
    -----
    x.plot3d([elevation, azimuth, show, fname])

    Parameters
    ----------
    elevation : float, optional, default = 20
        elev parameter for the 3d projection.
    azimuth : float, optional, default = 30
        azim parameter for the 3d projection.
    cmap : str, optional, default = 'RdBu_r'
        Name of the color map to use.
    show : bool, optional, default = True
        If True, plot the image to the screen.
    fname : str, optional, default = None
        If present, save the image to the specified file.

    Returns
    -------
    (fig, ax3d) : the matplotlib figure and its 3d axes.
    """
    # Importing Axes3D registers the '3d' projection with matplotlib.
    from mpl_toolkits.mplot3d import Axes3D
    nlat, nlon = self.nlat, self.nlon
    cmap = _plt.get_cmap(cmap)
    # Complex grids are plotted by magnitude; other kinds are unsupported.
    if self.kind == 'real':
        data = self.data
    elif self.kind == 'complex':
        data = _np.abs(self.data)
    else:
        raise ValueError('Grid has to be either real or complex, not {}'.format(self.kind))
    lats = self.lats()
    lons = self.lons()
    # Close the latitude band at the poles so the surface wraps fully.
    if self.grid == 'DH':
        # add south pole
        lats_circular = _np.append(lats, [-90.])
    elif self.grid == 'GLQ':
        # add north and south pole
        lats_circular = _np.hstack(([90.], lats, [-90.]))
    # Repeat the first longitude so the mesh closes on itself.
    lons_circular = _np.append(lons, [lons[0]])
    nlats_circular = len(lats_circular)
    nlons_circular = len(lons_circular)
    sshape = nlats_circular, nlons_circular
    # make uv sphere and store all points
    u = _np.radians(lons_circular)
    v = _np.radians(90. - lats_circular)
    x = _np.sin(v)[:, None] * _np.cos(u)[None, :]
    y = _np.sin(v)[:, None] * _np.sin(u)[None, :]
    z = _np.cos(v)[:, None] * _np.ones_like(lons_circular)[None, :]
    points = _np.vstack((x.flatten(), y.flatten(), z.flatten()))
    # fill data for all points. 0 lon has to be repeated (circular mesh)
    # and the south pole has to be added in the DH grid
    if self.grid == 'DH':
        magn_point = _np.zeros((nlat + 1, nlon + 1))
        magn_point[:-1, :-1] = data
        # pole value approximated by the mean of the last row -- not exact!
        magn_point[-1, :] = _np.mean(data[-1])
        magn_point[:-1, -1] = data[:, 0]
    if self.grid == 'GLQ':
        magn_point = _np.zeros((nlat + 2, nlon + 1))
        magn_point[1:-1, :-1] = data
        # pole values approximated by row means -- not exact!
        magn_point[0, :] = _np.mean(data[0])
        magn_point[-1, :] = _np.mean(data[-1])
        magn_point[1:-1, -1] = data[:, 0]
    # compute face color, which is the average of all neighbour points
    magn_face = 1. / 4. * (magn_point[1:, 1:] + magn_point[:-1, 1:] + magn_point[1:, :-1] + magn_point[:-1, :-1])
    magnmax_face = _np.max(_np.abs(magn_face))
    magnmax_point = _np.max(_np.abs(magn_point))
    # compute colours and displace the points
    norm = _plt.Normalize(-magnmax_face / 2., magnmax_face / 2., clip=True)
    colors = cmap(norm(magn_face.flatten()))
    colors = colors.reshape(nlats_circular - 1, nlons_circular - 1, 4)
    # radially displace each vertex in proportion to its (normalized) value
    points *= (1. + magn_point.flatten() / magnmax_point / 2.)
    x = points[0].reshape(sshape)
    y = points[1].reshape(sshape)
    z = points[2].reshape(sshape)
    # plot 3d radiation pattern
    fig = _plt.figure()
    ax3d = fig.add_subplot(1, 1, 1, projection='3d')
    ax3d.plot_surface(x, y, z, rstride=1, cstride=1, facecolors=colors)
    ax3d.set(xlim=(-1., 1.), ylim=(-1., 1.), zlim=(-1., 1.), xticks=[-1, 1], yticks=[-1, 1], zticks=[-1, 1])
    ax3d.set_axis_off()
    ax3d.view_init(elev=elevation, azim=azimuth)
    # show or save output
    fig.tight_layout(pad=0.5)
    if show:
        fig.show()
    if fname is not None:
        fig.savefig(fname)
    return fig, ax3d
|
def pretty_descriptor(self):
    """Render the class or interface header: access flags, name, parent
    class, and any interfaces it implements."""
    flags = " ".join(self.pretty_access_flags())
    # Interfaces already carry their keyword in the flags; classes do not.
    if not self.is_interface():
        flags += " class"
    name = self.pretty_this()
    parent = self.pretty_super()
    ifaces = ",".join(self.pretty_interfaces())
    if ifaces:
        return "%s %s extends %s implements %s" % (flags, name, parent, ifaces)
    return "%s %s extends %s" % (flags, name, parent)
|
def stats(txt, color=False):
    """Print *txt*, optionally wrapped in the OKBLUE terminal colour."""
    if color:
        txt = "%s%s%s" % (config.Col.OKBLUE, txt, config.Col.ENDC)
    print(txt)
|
def _transition_stage ( self , step , total_steps , brightness = None ) :
"""Get a transition stage at a specific step .
: param step : The current step .
: param total _ steps : The total number of steps .
: param brightness : The brightness to transition to ( 0.0-1.0 ) .
: return : The stage at the specific step ."""
|
if brightness is not None :
self . _assert_is_brightness ( brightness )
brightness = self . _interpolate ( self . brightness , brightness , step , total_steps )
return { 'brightness' : brightness }
|
def rename_notes_folder(self, title, folderid):
    """Rename a folder

    :param title: New title of the folder
    :param folderid: The UUID of the folder to rename
    :raises DeviantartError: if the client is not using the
        authorization-code grant type.
    """
    # Bug fix: the original used `is not "authorization_code"`, which tests
    # object identity, not string equality; use != for the comparison.
    if self.standard_grant_type != "authorization_code":
        raise DeviantartError("Authentication through Authorization Code (Grant Type) is required in order to connect to this endpoint.")
    response = self._req('/notes/folders/rename/{}'.format(folderid), post_data={'title': title})
    return response
|
def save_report(self, file_path):
    """Generate the coveralls report and write it to *file_path*.

    Coverage-gathering failures are logged rather than raised; in that
    case no file is written.
    """
    try:
        content = self.create_report()
    except coverage.CoverageException as exc:
        log.error('Failure to gather coverage:', exc_info=exc)
        return
    with open(file_path, 'w') as out:
        out.write(content)
|
def encrypt_folder(path, sender, recipients):
    """This helper function should zip the contents of a folder and encrypt it as
    a zip-file. Recipients are responsible for opening the zip-file.

    :param path: directory whose contents are zipped recursively.
    :param sender: the sender's key (str or crypto.UserLock).
    :param recipients: iterable of recipient keys (str or crypto.UserLock).
    :return: the encrypted container bytes (``crypted.contents``).
    :raises OSError: if *path* is not an existing directory.
    """
    # Validate key types up front so failures happen before any zipping work.
    for recipient_key in recipients:
        crypto.assert_type_and_length('recipient_key', recipient_key, (str, crypto.UserLock))
    crypto.assert_type_and_length("sender_key", sender, crypto.UserLock)
    if (not os.path.exists(path)) or (not os.path.isdir(path)):
        raise OSError("Specified path is not a valid directory: {}".format(path))
    # Build the zip archive entirely in memory.
    buf = io.BytesIO()
    zipf = zipfile.ZipFile(buf, mode="w", compression=zipfile.ZIP_DEFLATED)
    for root, folders, files in os.walk(path):
        for fn in files:
            fp = os.path.join(root, fn)
            # NOTE(review): archive member names keep the path exactly as
            # walked (possibly absolute) -- confirm this layout is intended.
            zipf.write(fp)
    zipf.close()
    zip_contents = buf.getvalue()
    # Name the encrypted file after the folder, with a .zip suffix.
    _, filename = os.path.split(path)
    filename += ".zip"
    crypted = crypto.MiniLockFile.new(filename, zip_contents, sender, recipients)
    return crypted.contents
|
def LoadExclusions(self, snps):
    """Load locus exclusions.

    :param snps: Can either be a list of rsids or a single-element list
        naming a file containing rsids.
    :return: None

    If snps names a file, the file must only contain RSIDs separated
    by whitespace (tabs, spaces and return characters).
    """
    if len(snps) == 1 and os.path.isfile(snps[0]):
        # Bug fix: open the filename (snps[0]), not the list itself, and
        # close the handle deterministically with a context manager.
        with open(snps[0]) as f:
            snp_names = f.read().strip().split()
    else:
        snp_names = snps
    for snp in snp_names:
        # Skip blank / whitespace-only entries.
        if len(snp.strip()) > 0:
            self.ignored_rs.append(snp)
|
def checkout(repo, ref):
    """Check out *ref* in the given repository.

    :param repo: a GitPython ``Repo`` instance -- TODO confirm.
    :param ref: branch name, tag, or SHA to check out.
    """
    # Delete local branch if it exists, remote branch will be tracked
    # automatically. This prevents stale local branches from causing problems.
    # It also avoids problems with appending origin/ to refs as that doesn't
    # work with tags, SHAs, and upstreams not called origin.
    if ref in repo.branches:
        # eg delete master but leave origin/master
        log.warn("Removing local branch {b} for repo {r}".format(b=ref, r=repo))
        # Can't delete currently checked out branch, so make sure head is
        # detached before deleting.
        repo.head.reset(index=True, working_tree=True)
        repo.git.checkout(repo.head.commit.hexsha)
        repo.delete_head(ref, '--force')
    log.info("Checkout out repo {repo} to ref {ref}".format(repo=repo, ref=ref))
    # Hard-reset before and after the checkout to discard local modifications.
    repo.head.reset(index=True, working_tree=True)
    repo.git.checkout(ref)
    repo.head.reset(index=True, working_tree=True)
    sha = repo.head.commit.hexsha
    log.info("Current SHA for repo {repo} is {sha}".format(repo=repo, sha=sha))
|
def load(items, default_section=_DEFAULT_SECTION):
    """Load configuration from a mixed list of sources.

    :param items: list of sources; entries recognised as config files are
        parsed as INI, everything else is loaded by name.
    :param default_section: section used when an INI file omits one.
    :return: the merged settings.
    """
    settings = []
    assert isinstance(items, list), 'items必须为list'
    logger.debug(items)
    for entry in items:
        # INI-style entries and named entries are loaded differently.
        loader = load_from_ini if _is_conf(entry) else None
        if loader is not None:
            settings.append(loader(entry, default_section))
        else:
            settings.append(load_from_name(entry))
    logger.debug(settings)
    return merge(settings)
|
def get_wcs(self, data_x, data_y):
    """Return (ra_deg, dec_deg) for the (data_x, data_y) position
    based on any WCS associated with the loaded image."""
    image = self.fitsimage.get_image()
    return image.pixtoradec(data_x, data_y)
|
def buildconfig_update(orig, new, remove_nonexistent_keys=False):
    """Performs update of given `orig` BuildConfig with values from `new` BuildConfig.
    Both BuildConfigs have to be represented as `dict`s.

    This function:
    - adds all key/value pairs to `orig` from `new` that are missing
    - replaces values in `orig` for keys that are in both
    - removes key/value pairs from `orig` for keys that are not in `new`,
      but only in dicts nested inside `strategy` key
      (see https://github.com/projectatomic/osbs-client/pull/273#issuecomment-148038314)
    """
    if isinstance(orig, dict) and isinstance(new, dict):
        clean_triggers(orig, new)
        if remove_nonexistent_keys:
            # Drop keys present in orig but absent from new.
            missing = set(orig.keys()) - set(new.keys())
            for k in missing:
                orig.pop(k)
        for k, v in new.items():
            if k == 'strategy':
                # Removal is enabled for the 'strategy' subtree.
                # NOTE(review): the flag also stays True for keys iterated
                # after 'strategy' in this loop -- confirm that's intended.
                remove_nonexistent_keys = True
            if isinstance(orig.get(k), dict) and isinstance(v, dict):
                # Recurse to merge nested dicts in place.
                buildconfig_update(orig[k], v, remove_nonexistent_keys)
            else:
                orig[k] = v
|
def weights(self, other):
    """Compute weights, given a scale or time-frequency representation.

    :param other: A time-frequency representation, or a scale
    :return: a numpy array of weights
    """
    # EAFP: first treat `other` as a scale; if that path raises
    # AttributeError, fall back to the scale of its frequency dimension.
    try:
        return self._wdata(other)
    except AttributeError:
        freq_dim = other.dimensions[-1]
        return self._wdata(freq_dim.scale)
|
def export_process_to_csv(bpmn_diagram, directory, filename):
    """Root method of CSV export functionality.

    :param bpmn_diagram: an instance of BpmnDiagramGraph class,
    :param directory: a string object, which is a path of output directory,
    :param filename: a string object, which is a name of output file.
    :raises bpmn_exception.BpmnPythonError: if the diagram does not have
        exactly one start event.
    """
    # Work on copies so the export does not mutate the diagram's nodes.
    nodes = copy.deepcopy(bpmn_diagram.get_nodes())
    start_nodes = []
    export_elements = []
    # Start events are the nodes with no incoming flow.
    for node in nodes:
        incoming_list = node[1].get(consts.Consts.incoming_flow)
        if len(incoming_list) == 0:
            start_nodes.append(node)
    if len(start_nodes) != 1:
        raise bpmn_exception.BpmnPythonError("Exporting to CSV format accepts only one start event")
    nodes_classification = utils.BpmnImportUtils.generate_nodes_clasification(bpmn_diagram)
    start_node = start_nodes.pop()
    BpmnDiagramGraphCsvExport.export_node(bpmn_diagram, export_elements, start_node, nodes_classification)
    # Create the output directory if needed (ignore 'already exists').
    try:
        os.makedirs(directory)
    except OSError as exception:
        if exception.errno != errno.EEXIST:
            raise
    # NOTE(review): plain concatenation assumes `directory` ends with a path
    # separator -- os.path.join would be safer; confirm callers' convention.
    file_object = open(directory + filename, "w")
    file_object.write("Order,Activity,Condition,Who,Subprocess,Terminated\n")
    BpmnDiagramGraphCsvExport.write_export_node_to_file(file_object, export_elements)
    file_object.close()
|
def _calc_mem_info ( self , unit , meminfo , memory ) :
"""Parse / proc / meminfo , grab the memory capacity and used size
then return ; Memory size ' total _ mem ' , Used _ mem , percentage
of used memory , and units of mem ( KiB , MiB , GiB ) ."""
|
if memory :
total_mem_kib = meminfo [ "MemTotal:" ]
used_mem_kib = ( total_mem_kib - meminfo [ "MemFree:" ] - ( meminfo [ "Buffers:" ] + meminfo [ "Cached:" ] + ( meminfo [ "SReclaimable:" ] - meminfo [ "Shmem:" ] ) ) )
else :
total_mem_kib = meminfo [ "SwapTotal:" ]
used_mem_kib = total_mem_kib - meminfo [ "SwapFree:" ]
used_percent = 100 * used_mem_kib / total_mem_kib
unit = "B" if unit == "dynamic" else unit
( total , total_unit ) = self . py3 . format_units ( total_mem_kib * 1024 , unit )
( used , used_unit ) = self . py3 . format_units ( used_mem_kib * 1024 , unit )
return total , total_unit , used , used_unit , used_percent
|
def _resolve_path_load(self, cdx, is_original, failed_files):
    """Load specific record based on filename, offset and length
    fields in the cdx.

    If original=True, use the orig.* fields for the cdx.

    Resolve the filename to full path using specified path resolvers.

    If failed_files list provided, keep track of failed resolve attempts.

    :raises ArchiveLoadFailed: when the file already failed earlier in this
        request, or when no resolver yields a loadable path.
    """
    # Pick the plain or the 'orig.'-prefixed triple of location fields.
    if is_original:
        (filename, offset, length) = (cdx['orig.filename'], cdx['orig.offset'], cdx['orig.length'])
    else:
        (filename, offset, length) = (cdx['filename'], cdx['offset'], cdx.get('length', '-'))
    # optimization: if same file already failed this request,
    # don't try again
    if failed_files is not None and filename in failed_files:
        raise ArchiveLoadFailed('Skipping Already Failed: ' + filename)
    # NOTE(review): any_found is set but never read afterwards.
    any_found = False
    last_exc = None
    last_traceback = None
    # Try each resolver in turn; a resolver may return one path or several.
    for resolver in self.path_resolvers:
        possible_paths = resolver(filename, cdx)
        if not possible_paths:
            continue
        if isinstance(possible_paths, six.string_types):
            possible_paths = [possible_paths]
        for path in possible_paths:
            any_found = True
            try:
                # First successful load wins.
                return (self.record_loader.load(path, offset, length, no_record_parse=self.no_record_parse))
            except Exception as ue:
                # Remember the most recent failure for the re-raise below.
                last_exc = ue
                import sys
                last_traceback = sys.exc_info()[2]
    # Unsuccessful if reached here
    if failed_files is not None:
        failed_files.append(filename)
    if last_exc:
        # msg = str(last_exc.__class__.__name__)
        msg = str(last_exc)
    else:
        msg = 'Archive File Not Found'
    # raise ArchiveLoadFailed(msg, filename), None, last_traceback
    # Re-raise with the original traceback (Python 2/3 compatible).
    six.reraise(ArchiveLoadFailed, ArchiveLoadFailed(filename + ': ' + msg), last_traceback)
|
def __write_record(self, record_type, data):
    """Write single physical record.

    A record is a fixed-size header (masked crc32c, payload length,
    record type) followed by the payload bytes.
    """
    length = len(data)
    # The CRC covers the record-type byte followed by the payload.
    crc = crc32c.crc_update(crc32c.CRC_INIT, [record_type])
    crc = crc32c.crc_update(crc, data)
    crc = crc32c.crc_finalize(crc)
    self.__writer.write(struct.pack(_HEADER_FORMAT, _mask_crc(crc), length, record_type))
    self.__writer.write(data)
    # Track the stream offset so callers can compute record positions.
    self.__position += _HEADER_LENGTH + length
|
def todb(table, dbo, tablename, schema=None, commit=True, create=False, drop=False, constraints=True, metadata=None, dialect=None, sample=1000):
    """Load data into an existing database table via a DB-API 2.0
    connection or cursor. Note that the database table will be truncated,
    i.e., all existing rows will be deleted prior to inserting the new data.
    E.g.::

        >>> import petl as etl
        >>> table = [['foo', 'bar'],
        ...          ['a', 1],
        ...          ['b', 2],
        ...          ['c', 2]]
        >>> # using sqlite3
        ... import sqlite3
        >>> connection = sqlite3.connect('example.db')
        >>> # assuming table "foobar" already exists in the database
        ... etl.todb(table, connection, 'foobar')
        >>> # using psycopg2
        >>> import psycopg2
        >>> connection = psycopg2.connect('dbname=example user=postgres')
        >>> # assuming table "foobar" already exists in the database
        ... etl.todb(table, connection, 'foobar')
        >>> # using pymysql
        >>> import pymysql
        >>> connection = pymysql.connect(password='moonpie', database='thangs')
        >>> # tell MySQL to use standard quote character
        ... connection.cursor().execute('SET SQL_MODE=ANSI_QUOTES')
        >>> # load data, assuming table "foobar" already exists in the database
        ... etl.todb(table, connection, 'foobar')

    N.B., for MySQL the statement ``SET SQL_MODE=ANSI_QUOTES`` is required to
    ensure MySQL uses SQL-92 standard quote characters.

    A cursor can also be provided instead of a connection, e.g.::

        >>> import psycopg2
        >>> connection = psycopg2.connect('dbname=example user=postgres')
        >>> cursor = connection.cursor()
        >>> etl.todb(table, cursor, 'foobar')

    The parameter `dbo` may also be an SQLAlchemy engine, session or
    connection object.

    The parameter `dbo` may also be a string, in which case it is interpreted
    as the name of a file containing an :mod:`sqlite3` database.

    If ``create=True`` this function will attempt to automatically create a
    database table before loading the data. This functionality requires
    `SQLAlchemy <http://www.sqlalchemy.org/>`_ to be installed.

    **Keyword arguments:**

    table : table container
        Table data to load
    dbo : database object
        DB-API 2.0 connection, callable returning a DB-API 2.0 cursor, or
        SQLAlchemy connection, engine or session
    tablename : string
        Name of the table in the database
    schema : string
        Name of the database schema to find the table in
    commit : bool
        If True commit the changes
    create : bool
        If True attempt to create the table before loading, inferring types
        from a sample of the data (requires SQLAlchemy)
    drop : bool
        If True attempt to drop the table before recreating (only relevant if
        create=True)
    constraints : bool
        If True use length and nullable constraints (only relevant if
        create=True)
    metadata : sqlalchemy.MetaData
        Custom table metadata (only relevant if create=True)
    dialect : string
        One of {'access', 'sybase', 'sqlite', 'informix', 'firebird', 'mysql',
        'oracle', 'maxdb', 'postgresql', 'mssql'} (only relevant if
        create=True)
    sample : int
        Number of rows to sample when inferring types etc. Set to 0 to use the
        whole table (only relevant if create=True)

    .. note::

        This function is in principle compatible with any DB-API 2.0
        compliant database driver. However, at the time of writing some DB-API
        2.0 implementations, including cx_Oracle and MySQL's
        Connector/Python, are not compatible with this function, because they
        only accept a list argument to the cursor.executemany() function
        called internally by :mod:`petl`. This can be worked around by
        proxying the cursor objects, e.g.::

            >>> import cx_Oracle
            >>> connection = cx_Oracle.Connection(...)
            >>> class CursorProxy(object):
            ...     def __init__(self, cursor):
            ...         self._cursor = cursor
            ...     def executemany(self, statement, parameters, **kwargs):
            ...         # convert parameters to a list
            ...         parameters = list(parameters)
            ...         # pass through to proxied cursor
            ...         return self._cursor.executemany(statement, parameters, **kwargs)
            ...     def __getattr__(self, item):
            ...         return getattr(self._cursor, item)
            >>> def get_cursor():
            ...     return CursorProxy(connection.cursor())
            >>> import petl as etl
            >>> etl.todb(tbl, get_cursor, ...)

        Note however that this does imply loading the entire table into
        memory as a list prior to inserting into the database.

    """
    needs_closing = False
    # convenience for working with sqlite3
    if isinstance(dbo, string_types):
        import sqlite3
        dbo = sqlite3.connect(dbo)
        needs_closing = True
    try:
        # optionally (re)create the target table before loading
        if create:
            if drop:
                drop_table(dbo, tablename, schema=schema, commit=commit)
            create_table(table, dbo, tablename, schema=schema, commit=commit, constraints=constraints, metadata=metadata, dialect=dialect, sample=sample)
        # truncate the target table, then insert the data
        _todb(table, dbo, tablename, schema=schema, commit=commit, truncate=True)
    finally:
        # close the connection only if we opened it ourselves (sqlite3 path)
        if needs_closing:
            dbo.close()
|
def readmarheader(filename):
    """Read a header from a MarResearch .image file.

    :param filename: path of the .image file.
    :return: dict of parsed header fields (sizes, beam position, wavelength,
        sample-detector distance, date, etc.).
    """
    with open(filename, 'rb') as f:
        # np.fromstring on binary data is deprecated; np.frombuffer is the
        # supported equivalent (and avoids a copy).
        intheader = np.frombuffer(f.read(10 * 4), np.int32)
        floatheader = np.frombuffer(f.read(15 * 4), '<f4')
        strheader = f.read(24)
        f.read(4)
        # skip the remaining 29 x 16-byte header strings (unused)
        f.read(16 * 29)
    # NOTE(review): strheader is bytes on Python 3 -- confirm
    # dateutil.parser.parse accepts it on the targeted versions.
    return {'Xsize': intheader[0], 'Ysize': intheader[1], 'MeasTime': intheader[8], 'BeamPosX': floatheader[7], 'BeamPosY': floatheader[8], 'Wavelength': floatheader[9], 'Dist': floatheader[10], '__Origin__': 'MarResearch .image', 'recordlength': intheader[2], 'highintensitypixels': intheader[4], 'highintensityrecords': intheader[5], 'Date': dateutil.parser.parse(strheader), 'Detector': 'MARCCD', '__particle__': 'photon'}
|
def projected_inverse(L):
    """Supernodal multifrontal projected inverse. The routine computes the projected inverse

    .. math::
         Y = P(L^{-T}L^{-1})

    where :math:`L` is a Cholesky factor. On exit, the argument :math:`L` contains the
    projected inverse :math:`Y`.

    :param L: :py:class:`cspmatrix` (factor)
    """
    assert isinstance(L, cspmatrix) and L.is_factor is True, "L must be a cspmatrix factor"
    # Symbolic factorization data: supernode partition, elimination-tree
    # child lists and relative index maps -- see the cspmatrix docs.
    n = L.symb.n
    snpost = L.symb.snpost
    snptr = L.symb.snptr
    chptr = L.symb.chptr
    chidx = L.symb.chidx
    relptr = L.symb.relptr
    relidx = L.symb.relidx
    blkptr = L.symb.blkptr
    blkval = L.blkval
    # Stack of "update matrices" handed from parents to children while the
    # supernodes are visited in reverse postorder (root first).
    stack = []
    for k in reversed(list(snpost)):
        nn = snptr[k + 1] - snptr[k]  # |Nk|
        na = relptr[k + 1] - relptr[k]  # |Ak|
        nj = na + nn
        # invert factor of D_{Nk,Nk}
        lapack.trtri(blkval, offsetA=blkptr[k], ldA=nj, n=nn)
        # zero-out strict upper triangular part of {Nj,Nj} block (just in case!)
        for i in range(1, nn):
            blas.scal(0.0, blkval, offset=blkptr[k] + nj * i, n=i)
        # compute inv(D_{Nk,Nk}) (store in 1,1 block of F)
        F = matrix(0.0, (nj, nj))
        blas.syrk(blkval, F, trans='T', offsetA=blkptr[k], ldA=nj, n=nn, k=nn)
        # if supernode k is not a root node:
        if na > 0:
            # copy "update matrix" to 2,2 block of F
            Vk = stack.pop()
            lapack.lacpy(Vk, F, ldB=nj, offsetB=nn * nj + nn, m=na, n=na, uplo='L')
            # compute S_{Ak,Nk} = -Vk*L_{Ak,Nk}; store in 2,1 block of F
            blas.symm(Vk, blkval, F, m=na, n=nn, offsetB=blkptr[k] + nn, ldB=nj, offsetC=nn, ldC=nj, alpha=-1.0)
            # compute S_nn = inv(D_{Nk,Nk}) - S_{Ak,Nk}'*L_{Ak,Nk}; store in 1,1 block of F
            blas.gemm(F, blkval, F, transA='T', m=nn, n=nn, k=na, offsetA=nn, alpha=-1.0, beta=1.0, offsetB=blkptr[k] + nn, ldB=nj)
        # extract update matrices if supernode k has any children
        for ii in range(chptr[k], chptr[k + 1]):
            i = chidx[ii]
            stack.append(frontal_get_update(F, relidx, relptr, i))
        # copy S_{Jk,Nk} (i.e., 1,1 and 2,1 blocks of F) to blkval
        lapack.lacpy(F, blkval, m=nj, n=nn, offsetB=blkptr[k], ldB=nj, uplo='L')
    # The matrix now stores the projected inverse, not a factor.
    L._is_factor = False
    return
|
def parse_data_line(self, sline):
    """Parse one data line and register the raw result.

    :param sline: a split data line to parse
    :returns: 0 on success, or -1 when the line has the wrong number of items
    """
    # A value count different from the header count is an error.
    if len(sline) != len(self._columns):
        self.err("One data line has the wrong number of items")
        return -1
    rawdict = dict(zip(self._columns, sline))
    # Sample id.
    resid = rawdict.pop('Sample name')
    # Combine the date and time fields into a single DateTime value.
    rawdict['DateTime'] = self.csvDate2BikaDate(rawdict.pop('Date'), rawdict.pop('Time'))
    # Rename 'Remark' -> 'Remarks'.
    rawdict['Remarks'] = rawdict.pop('Remark')
    # Rename 'Error/Warning' -> 'Error', warning when non-empty.
    error = rawdict.pop('Error/Warning')
    if error:
        self.warn('Analysis warn', numline=self._numline)
    rawdict['Error'] = error
    rawdict['DefaultResult'] = 'Concentration'
    self._addRawResult(resid, {rawdict['Parameter'].replace(' ', ''): rawdict}, False)
    return 0
|
def open_hist(self):
    """Open the HIST file located in self.outdir.

    Returns :class:`HistFile` object, None if file could not be found or
    file is not readable.
    """
    if not self.hist_path:
        # A task that reached S_OK should have produced a HIST file.
        if self.status == self.S_OK:
            logger.critical("%s reached S_OK but didn't produce a HIST file in %s" % (self, self.outdir))
        return None
    # Open the HIST file
    from abipy.dynamics.hist import HistFile
    try:
        return HistFile(self.hist_path)
    except Exception as exc:
        # Read failures are logged, not raised, so callers just get None.
        logger.critical("Exception while reading HIST file at %s:\n%s" % (self.hist_path, str(exc)))
        return None
|
def execute_job(self, job_request):
    """Processes and runs the action requests contained in the job and returns a `JobResponse`.

    :param job_request: The job request
    :type job_request: dict
    :return: A `JobResponse` object
    :rtype: JobResponse
    """
    # Run the Job's Actions
    job_response = JobResponse()
    job_switches = RequestSwitchSet(job_request['context']['switches'])
    for i, raw_action_request in enumerate(job_request['actions']):
        # Enrich each raw action dict with job-level context and control data.
        action_request = EnrichedActionRequest(action=raw_action_request['action'], body=raw_action_request.get('body', None), switches=job_switches, context=job_request['context'], control=job_request['control'], client=job_request['client'], async_event_loop=job_request['async_event_loop'], run_coroutine=job_request['run_coroutine'], )
        action_in_class_map = action_request.action in self.action_class_map
        if action_in_class_map or action_request.action in ('status', 'introspect'):
            # Get action to run: either a registered action class, or one of
            # the built-in 'introspect' / 'status' actions.
            if action_in_class_map:
                action = self.action_class_map[action_request.action](self.settings)
            elif action_request.action == 'introspect':
                from pysoa.server.action.introspection import IntrospectionAction
                action = IntrospectionAction(server=self)
            else:
                # Lazily build and cache the default status action class.
                if not self._default_status_action_class:
                    from pysoa.server.action.status import make_default_status_action_class
                    self._default_status_action_class = make_default_status_action_class(self.__class__)
                action = self._default_status_action_class(self.settings)
            # Wrap it in middleware
            wrapper = self.make_middleware_stack([m.action for m in self.middleware], action, )
            # Execute the middleware stack
            try:
                action_response = wrapper(action_request)
            except ActionError as e:
                # Error: an error was thrown while running the Action (or Action middleware)
                # NOTE(review): non-ActionError exceptions propagate to the caller.
                action_response = ActionResponse(action=action_request.action, errors=e.errors, )
        else:
            # Error: Action not found.
            action_response = ActionResponse(action=action_request.action, errors=[Error(code=ERROR_CODE_UNKNOWN, message='The action "{}" was not found on this server.'.format(action_request.action), field='action', )], )
        job_response.actions.append(action_response)
        if (action_response.errors and not job_request['control'].get('continue_on_error', False)):
            # Quit running Actions if an error occurred and continue_on_error is False
            break
    return job_response
|
def get_user_cumulate(self, begin_date, end_date):
    """Fetch cumulative user statistics.

    See http://mp.weixin.qq.com/wiki/3/ecfed6e1a0a03b5f35e5efac98e864b7.html

    :param begin_date: start date
    :param end_date: end date
    :return: list of statistics records
    """
    payload = {
        'begin_date': self._to_date_str(begin_date),
        'end_date': self._to_date_str(end_date),
    }
    # The API wraps the records in a 'list' field; unwrap it here.
    return self._post(
        'getusercumulate',
        data=payload,
        result_processor=lambda result: result['list'],
    )
|
def get_area_url(location, distance):
    """Generate a URL for downloading OSM data within a region.

    A bounding box is built whose edges touch a circle of ``distance``
    kilometres in radius centred on ``location``. Note that the box is
    neither a square nor bounded within the circle: projecting a sphere
    onto a flat box makes the north and south edges differ slightly in
    length, but for general use the difference is negligible.

    Args:
        location (Point): Centre of the region
        distance (int): Boundary distance in kilometres

    Returns:
        str: URL that can be used to fetch the OSM data within ``distance``
        of ``location``
    """
    # Walk to the four compass points to find the box extremes.
    compass_points = [location.destination(bearing, distance)
                      for bearing in (0, 90, 180, 270)]
    lats = [point.latitude for point in compass_points]
    lons = [point.longitude for point in compass_points]
    # OSM bbox order: west, south, east, north.
    bbox = '%s,%s,%s,%s' % (min(lons), min(lats), max(lons), max(lats))
    return 'http://api.openstreetmap.org/api/0.5/map?bbox=' + bbox
|
def plot(self, axis, ith_plot, total_plots, limits):
    """Plot the histogram as a whole over all groups.
    Do not plot as individual groups like other plot types.

    Note: ``ith_plot`` and ``limits`` are unused here; presumably they are
    part of a shared plot-type interface -- TODO confirm against siblings.
    """
    # Print a summary table of groups and their event counts.
    print(self.plot_type_str.upper() + " plot")
    print("%5s %9s %s" % ("id", " #points", "group"))
    for idx, group in enumerate(self.groups):
        print("%5s %9s %s" % (idx + 1, len(self.groups[group]), group))
    print('')
    datasets = []
    colors = []
    # Track the overall x (time) range across all groups.
    minx = np.inf
    maxx = -np.inf
    for idx, group in enumerate(self.groups):
        # Convert each log event's datetime into matplotlib date numbers.
        x = date2num([logevent.datetime for logevent in self.groups[group]])
        minx = min(minx, min(x))
        maxx = max(maxx, max(x))
        datasets.append(x)
        color, marker = self.color_map(group)
        colors.append(color)
    if total_plots > 1:
        # if more than one plot, move histogram to twin axis on the right
        twin_axis = axis.twinx()
        twin_axis.set_ylabel(self.ylabel)
        axis.set_zorder(twin_axis.get_zorder() + 1)
        # put ax ahead of ax2
        axis.patch.set_visible(False)
        # hide the 'canvas'
        axis = twin_axis
    # One bucket per self.bucketsize seconds over the spanned range
    # (date2num values are in days, hence the 24*60*60 conversion).
    n_bins = max(1, int((maxx - minx) * 24. * 60. * 60. / self.bucketsize))
    if n_bins > 1000:
        # warning for too many buckets
        print("warning: %i buckets, will take a while to render. "
              "consider increasing --bucketsize." % n_bins)
    # NOTE(review): label is a map object; Py3 matplotlib may expect a
    # list here -- confirm rendering of legend labels.
    n, bins, artists = axis.hist(datasets, bins=n_bins, align='mid',
                                 log=self.logscale,
                                 histtype="barstacked" if self.barstacked else "bar",
                                 color=colors, edgecolor="none", linewidth=0,
                                 alpha=0.8, picker=True,
                                 label=map(str, self.groups.keys()))
    # scale current y-axis to match min and max values
    axis.set_ylim(np.min(n), np.max(n))
    # add meta-data for picking
    if len(self.groups) > 1:
        for g, group in enumerate(self.groups.keys()):
            for i in range(len(artists[g])):
                artists[g][i]._mt_plot_type = self
                artists[g][i]._mt_group = group
                artists[g][i]._mt_n = n[g][i]
                if self.barstacked:
                    # barstacked counts are cumulative; subtract the layer below
                    artists[g][i]._mt_n -= (n[g - 1][i] if g > 0 else 0)
                artists[g][i]._mt_bin = bins[i]
    else:
        for i in range(len(artists)):
            artists[i]._mt_plot_type = self
            # NOTE(review): 'group' here is the last value left over from
            # the loop above (the single group) -- confirm intended.
            artists[i]._mt_group = group
            artists[i]._mt_n = n[i]
            artists[i]._mt_bin = bins[i]
    return artists
|
def download_links(self, dir_path):
    """Download web pages or images from search result links.

    Args:
        dir_path (str):
            Path of directory to save downloads of :class:`api.results`.links
    """
    links = self.links
    if not path.exists(dir_path):
        makedirs(dir_path)
    for i, url in enumerate(links):
        # Offset the file index by the search start offset, if any.
        if 'start' in self.cseargs:
            i += int(self.cseargs['start'])
        ext = self.cseargs['fileType']
        ext = '.html' if ext == '' else '.' + ext
        file_name = self.cseargs['q'].replace(' ', '_') + '_' + str(i) + ext
        file_path = path.join(dir_path, file_name)
        # Stream the download and always release the connection; the
        # original leaked the response object (never closed), which with
        # stream=True keeps the socket open even on non-200 responses.
        r = requests.get(url, stream=True)
        try:
            if r.status_code == 200:
                with open(file_path, 'wb') as f:
                    # Decompress gzip/deflate content transparently.
                    r.raw.decode_content = True
                    shutil.copyfileobj(r.raw, f)
        finally:
            r.close()
|
def event_types(self):
    """Return the ``type`` attribute of every event of the selected rater.

    Raises
    ------
    IndexError
        When there is no selected rater.
    """
    try:
        found = self.rater.find('events')
    except AttributeError:
        # self.rater is None (or lacks find) when no rater is selected.
        raise IndexError('You need to have at least one rater')
    return [event.get('type') for event in found]
|
def interpolate_tuple(startcolor, goalcolor, steps):
    """Linearly mix two RGB colors over a specified number of steps.

    Args:
        startcolor: (r, g, b) tuple of 0-255 ints for the first color.
        goalcolor: (r, g, b) tuple of 0-255 ints for the last color.
        steps: number of interpolation steps; ``steps + 1`` colors are
            returned, both endpoints included.

    Returns:
        List of uppercase hex color strings, e.g. ``['#000000', '#FFFFFF']``.
    """
    r0, g0, b0 = startcolor[0], startcolor[1], startcolor[2]
    diff_r = goalcolor[0] - r0
    diff_g = goalcolor[1] - g0
    diff_b = goalcolor[2] - b0
    colors = []
    for i in range(steps + 1):
        # Floor division keeps channel values integral, matching the
        # original's ``//`` arithmetic.
        red = r0 + (diff_r * i // steps)
        green = g0 + (diff_g * i // steps)
        blue = b0 + (diff_b * i // steps)
        # The original used the Python 2-only ``string.replace`` /
        # ``string.upper`` module functions (AttributeError on Python 3);
        # %02X produces the same zero-padded uppercase hex digits.
        colors.append("#%02X%02X%02X" % (red, green, blue))
    return colors
|
def index_to_time_seg(time_seg_idx, slide_step):
    """Convert a time-slot index into its time-of-day string.

    :param time_seg_idx: zero-based index of the time slot
    :param slide_step: width of one slot, in minutes
    :return: the slot's start time rendered by ``time_util.minutes_to_time_str``
    """
    minutes = time_seg_idx * slide_step
    # The slot must start within a single day.
    assert (minutes < const.MINUTES_IN_A_DAY)
    return time_util.minutes_to_time_str(minutes)
|
def onBatchCreated(self, three_pc_batch: ThreePcBatch):
    """A batch of requests has been created and has been applied, but not
    yet committed to ledger and state.

    :param three_pc_batch: batch metadata carrying the ledger id, the state
        root after batch creation, the pre-prepare time and the primaries.
    :return:
    """
    ledger_id = three_pc_batch.ledger_id
    if ledger_id == POOL_LEDGER_ID:
        # Pool-ledger batches are only forwarded when pool management is
        # transaction-based.
        if isinstance(self.poolManager, TxnPoolManager):
            self.get_req_handler(POOL_LEDGER_ID).onBatchCreated(three_pc_batch.state_root, three_pc_batch.pp_time)
    elif self.get_req_handler(ledger_id):
        self.get_req_handler(ledger_id).onBatchCreated(three_pc_batch.state_root, three_pc_batch.pp_time)
    else:
        logger.debug('{} did not know how to handle for ledger {}'.format(self, ledger_id))
    # Record which primaries apply to this batch (pool batches may change
    # the primaries; other ledgers inherit the last known set).
    if ledger_id == POOL_LEDGER_ID:
        three_pc_batch.primaries = self.future_primaries_handler.post_batch_applied(three_pc_batch)
    elif not three_pc_batch.primaries:
        three_pc_batch.primaries = self.future_primaries_handler.get_last_primaries() or self.primaries
    self.audit_handler.post_batch_applied(three_pc_batch)
    self.execute_hook(NodeHooks.POST_BATCH_CREATED, ledger_id, three_pc_batch.state_root)
|
def frame_iv(algorithm, sequence_number):
    """Build the deterministic IV for a body frame.

    :param algorithm: Algorithm for which to build IV
    :type algorithm: aws_encryption_sdk.identifiers.Algorithm
    :param int sequence_number: Frame sequence number
    :returns: Generated IV
    :rtype: bytes
    :raises ActionNotAllowedError: if sequence number is out of bounds
    """
    if not 1 <= sequence_number <= MAX_FRAME_COUNT:
        raise ActionNotAllowedError(
            "Invalid frame sequence number: {actual}\nMust be between 1 and {max}".format(
                actual=sequence_number, max=MAX_FRAME_COUNT
            )
        )
    # IV = zero padding up to the algorithm's IV length, followed by the
    # big-endian 32-bit sequence number.
    return b"\x00" * (algorithm.iv_len - 4) + struct.pack(">I", sequence_number)
|
def create_or_update_cluster(config_file, override_min_workers, override_max_workers, no_restart, restart_only, yes, override_cluster_name):
    """Create or update an autoscaling Ray cluster from a config file.

    :param config_file: path to the YAML cluster configuration.
    :param override_min_workers: if not None, overrides ``min_workers``.
    :param override_max_workers: if not None, overrides ``max_workers``.
    :param no_restart: passed through to head-node provisioning.
    :param restart_only: passed through to head-node provisioning.
    :param yes: skip interactive confirmation.
    :param override_cluster_name: if not None, overrides ``cluster_name``.
    """
    # Close the file deterministically (the original leaked the handle) and
    # use safe_load: the config is plain data, and yaml.load without an
    # explicit Loader is deprecated and unsafe on untrusted input.
    with open(config_file) as f:
        config = yaml.safe_load(f)
    if override_min_workers is not None:
        config["min_workers"] = override_min_workers
    if override_max_workers is not None:
        config["max_workers"] = override_max_workers
    if override_cluster_name is not None:
        config["cluster_name"] = override_cluster_name
    config = _bootstrap_config(config)
    get_or_create_head_node(config, config_file, no_restart, restart_only, yes, override_cluster_name)
|
def write_fix_accuracy(self, accuracy=None):
    """Write the GPS fix accuracy header::

        writer.write_fix_accuracy()
        # -> HFFXA500

        writer.write_fix_accuracy(25)
        # -> HFFXA025

    :param accuracy: the estimated GPS fix accuracy in meters (optional)
    """
    # Default to 500 m when no estimate is supplied.
    value = 500 if accuracy is None else int(accuracy)
    if not 0 < value < 1000:
        raise ValueError('Invalid fix accuracy')
    self.write_fr_header('FXA', '%03d' % value)
|
def construct_pipeline_block_lambda(env='', generated=None, previous_env=None, region='us-east-1', region_subnets=None, settings=None, pipeline_data=None):
    """Create the Pipeline JSON from template.

    This handles the common repeatable patterns in a pipeline, such as
    judgement, infrastructure, tagger and qe.

    Args:
        env (str): Deploy environment name, e.g. dev, stage, prod.
        generated (gogoutils.Generator): Gogo Application name generator.
        previous_env (str): The previous deploy environment to use as
            Trigger.
        region (str): AWS Region to deploy to.
        settings (dict): Environment settings from configurations.
        region_subnets (dict): Subnets for a Region, e.g.
            {'us-west-2': ['us-west-2a', 'us-west-2b', 'us-west-2c']}.
        pipeline_data (dict): Pipeline settings; this function reads the
            'promote_restrict', 'owner_email' and 'lambda' keys.

    Returns:
        dict: Pipeline JSON template rendered with configurations.
    """
    LOG.info('%s block for [%s].', env, region)
    # Production environments get their own template variant.
    if env.startswith('prod'):
        template_name = 'pipeline/pipeline_{}_lambda.json.j2'.format(env)
    else:
        template_name = 'pipeline/pipeline_stages_lambda.json.j2'
    LOG.debug('%s info:\n%s', env, pformat(settings))
    gen_app_name = generated.app_name()
    user_data = generate_encoded_user_data(env=env, region=region, generated=generated, group_name=generated.project,)
    # Use different variable to keep template simple
    instance_security_groups = sorted(DEFAULT_EC2_SECURITYGROUPS[env])
    instance_security_groups.append(gen_app_name)
    instance_security_groups.extend(settings['security_group']['instance_extras'])
    instance_security_groups = remove_duplicate_sg(instance_security_groups)
    LOG.info('Instance security groups to attach: %s', instance_security_groups)
    # Deep copy so template-specific additions do not mutate caller settings.
    data = copy.deepcopy(settings)
    data['app'].update({
        'appname': gen_app_name,
        'repo_name': generated.repo,
        'group_name': generated.project,
        'environment': env,
        'region': region,
        'az_dict': json.dumps(region_subnets),
        'previous_env': previous_env,
        'encoded_user_data': user_data,
        'instance_security_groups': json.dumps(instance_security_groups),
        'promote_restrict': pipeline_data['promote_restrict'],
        'owner_email': pipeline_data['owner_email'],
        'function_name': pipeline_data['lambda']['handler']})
    LOG.debug('Block data:\n%s', pformat(data))
    pipeline_json = get_template(template_file=template_name, data=data, formats=generated)
    return pipeline_json
|
def _model_unpickle ( cls , data ) :
"""Unpickle a model by retrieving it from the database ."""
|
auto_field_value = data [ 'pk' ]
try :
obj = cls . objects . get ( pk = auto_field_value )
except Exception as e :
if isinstance ( e , OperationalError ) : # Attempt reconnect , we ' ve probably hit ;
# OperationalError ( 2006 , ' MySQL server has gone away ' )
logger . debug ( "Caught OperationalError, closing database connection." , exc_info = e )
from django . db import connection
connection . close ( )
obj = cls . objects . get ( pk = auto_field_value )
else :
raise
return obj
|
def info_file(self):
    """Collect source URLs from the package's .info file and append each
    URL's basename to ``self.sbo_sources``."""
    for url in SBoGrep(self.prgnam).source().split():
        # Keep only the filename component of the download URL.
        self.sbo_sources.append(url.split("/")[-1])
|
def route(self):
    '''The relative :class:`.Route` served by this :class:`Router`.'''
    ancestor = self._parent
    # Only prepend the parent's route when the parent route is a leaf.
    if not ancestor or not ancestor._route.is_leaf:
        return self._route
    return ancestor.route + self._route
|
def wrap_paginated(self, data, renderer_context):
    """Convert paginated data to JSON API with meta.

    Expects ``data`` to have DRF's paginated response shape (count / next /
    previous / results); raises ``WrapperNotApplicable`` otherwise.
    """
    pagination_keys = ['count', 'next', 'previous', 'results']
    for key in pagination_keys:
        if not (data and key in data):
            raise WrapperNotApplicable('Not paginated results')
    view = renderer_context.get("view", None)
    model = self.model_from_obj(view)
    resource_type = self.model_to_resource_type(model)
    try:
        # On newer DRF, keep the serializer attached to the results so the
        # default wrapper can introspect fields; older DRF (no ReturnList)
        # falls back to the plain list.
        from rest_framework.utils.serializer_helpers import ReturnList
        results = ReturnList(data["results"], serializer=data.serializer.fields["results"],)
    except ImportError:
        results = data["results"]
    # Use default wrapper for results
    wrapper = self.wrap_default(results, renderer_context)
    # Add pagination metadata
    pagination = self.dict_class()
    pagination['previous'] = data['previous']
    pagination['next'] = data['next']
    pagination['count'] = data['count']
    # Merge under meta.pagination.<resource_type> without clobbering any
    # existing meta content.
    wrapper.setdefault('meta', self.dict_class())
    wrapper['meta'].setdefault('pagination', self.dict_class())
    wrapper['meta']['pagination'].setdefault(resource_type, self.dict_class()).update(pagination)
    return wrapper
|
def get_logged_user(self, **kwargs):
    """Get the logged-in user, creating a new one if it does not exist.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass a ``callback`` function to be invoked
    when the response arrives.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>> thread = api.get_logged_user(callback=callback_function)

    :param callback function: The callback function for asynchronous
        request. (optional)
    :return: UserSingleton
        If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the synchronous and the callback-driven paths delegate to the
    # same *_with_http_info call; it returns the data (sync) or the
    # request thread (async) itself.
    return self.get_logged_user_with_http_info(**kwargs)
|
def as_dict(self):
    """Turn this attribute filter object into a plain python dictionary."""
    result = {}
    for name, value in inspect.getmembers(self):
        # Skip dunder members and bound methods; keep only boolean flags
        # and nested filters of the same type.
        if name.startswith('__') or inspect.ismethod(value):
            continue
        if isinstance(value, bool):
            result[name] = value
        elif isinstance(value, self.__class__):
            result[name] = value.as_dict()
    return result
|
def authorize_url(self, scope='', **kwargs):
    """Return the URL to redirect the user to for user consent."""
    self._check_configuration("site", "authorization_url", "redirect_uri", "client_id")
    if isinstance(scope, (list, tuple, set, frozenset)):
        # Multiple scopes are joined with the provider-specific separator.
        self._check_configuration("scope_sep")
        scope = self.scope_sep.join(scope)
    params = dict(redirect_uri=self.redirect_uri, client_id=self.client_id, scope=scope)
    # Any extra keyword arguments become additional query parameters.
    params.update(kwargs)
    return "%s%s?%s" % (self.site, quote(self.authorization_url), urlencode(params))
|
def _data2rec(schema, rec_data):
    '''Split a whitespace-separated record string and cast each field.

    Example::

        schema = OrderedDict(prio=int, weight=int, port=to_port, name=str)
        _data2rec(schema, '10 20 25 myawesome.nl')
        # -> {'prio': 10, 'weight': 20, 'port': 25, 'name': 'myawesome.nl'}

    A single-entry schema yields the bare casted value instead of a dict.
    Extra chunks beyond the schema length are merged (without separators)
    into the final field, so digest-style fields may contain spaces.

    :raises ValueError: when the record cannot be cast against the schema.
    '''
    try:
        fields = rec_data.split(' ')
        # spaces in digest fields are allowed
        assert len(fields) >= len(schema)
        if len(fields) > len(schema):
            last = len(schema) - 1
            fields = fields[:last] + [''.join(fields[last:])]
        if len(schema) == 1:
            return _cast(fields[0], next(iter(schema.values())))
        return {
            field_name: _cast(field_value, field_cast)
            for (field_name, field_cast), field_value in zip(schema.items(), fields)
        }
    except (AssertionError, AttributeError, TypeError, ValueError) as e:
        raise ValueError('Unable to cast "{0}" as "{2}": {1}'.format(rec_data, e, ' '.join(schema.keys())))
|
def find_element_by_class(self, class_, update=False) -> Elements:
    '''Find an element by its class.

    Args:
        class_: The class of the element to be found.
        update: If the interface has changed, this option should be True.

    Returns:
        The element, if it was found.

    Raises:
        NoSuchElementException: If the element wasn't found.

    Usage:
        element = driver.find_element_by_class('foo')
    '''
    # Delegate to the generic locator with the class strategy.
    locator = By.CLASS
    return self.find_element(by=locator, value=class_, update=update)
|
def questions(self):
    """Yield every question object contained in this collection.

    :return: generator over all questions in the collection
    :rtype: Question.Iterable
    """
    self._make_soup()
    # noinspection PyTypeChecker
    for question in self._page_get_questions(self.soup):
        yield question
    i = 2
    while True:
        # Fetch subsequent pages until the page parser signals exhaustion
        # by yielding 0.
        soup = BeautifulSoup(self._session.get(self.url[:-1] + '?page=' + str(i)).text)
        for question in self._page_get_questions(soup):
            if question == 0:
                return
            yield question
        i += 1
|
def _find_calls ( self , ast_tree , called_module , called_func ) :
'''scan the abstract source tree looking for possible ways to call the called _ module
and called _ func
since - - 7-2-12 - - Jay
example - -
# import the module a couple ways :
import pout
from pout import v
from pout import v as voom
import pout as poom
# this function would return : [ ' pout . v ' , ' v ' , ' voom ' , ' poom . v ' ]
module finder might be useful someday
link - - http : / / docs . python . org / library / modulefinder . html
link - - http : / / stackoverflow . com / questions / 2572582 / return - a - list - of - imported - python - modules - used - in - a - script
ast _ tree - - _ ast . * instance - - the internal ast object that is being checked , returned from compile ( )
with ast . PyCF _ ONLY _ AST flag
called _ module - - string - - we are checking the ast for imports of this module
called _ func - - string - - we are checking the ast for aliases of this function
return - - set - - the list of possible calls the ast _ tree could make to call the called _ func'''
|
s = set ( )
# always add the default call , the set will make sure there are no dupes . . .
s . add ( "{}.{}" . format ( called_module , called_func ) )
if hasattr ( ast_tree , 'name' ) :
if ast_tree . name == called_func : # the function is defined in this module
s . add ( called_func )
if hasattr ( ast_tree , 'body' ) : # further down the rabbit hole we go
if isinstance ( ast_tree . body , Iterable ) :
for ast_body in ast_tree . body :
s . update ( self . _find_calls ( ast_body , called_module , called_func ) )
elif hasattr ( ast_tree , 'names' ) : # base case
if hasattr ( ast_tree , 'module' ) : # we are in a from . . . import . . . statement
if ast_tree . module == called_module :
for ast_name in ast_tree . names :
if ast_name . name == called_func :
s . add ( unicode ( ast_name . asname if ast_name . asname is not None else ast_name . name ) )
else : # we are in a import . . . statement
for ast_name in ast_tree . names :
if hasattr ( ast_name , 'name' ) and ( ast_name . name == called_module ) :
call = "{}.{}" . format ( ast_name . asname if ast_name . asname is not None else ast_name . name , called_func )
s . add ( call )
return s
|
def create_full_tear_sheet(factor_data, long_short=True, group_neutral=False, by_group=False):
    """Creates a full tear sheet for analysis and evaluating a single
    return-predicting (alpha) factor.

    Parameters
    ----------
    factor_data : pd.DataFrame - MultiIndex
        A MultiIndex DataFrame indexed by date (level 0) and asset (level 1),
        containing the values for a single alpha factor, forward returns for
        each period, the factor quantile/bin that factor value belongs to, and
        (optionally) the group the asset belongs to.
        - See full explanation in utils.get_clean_factor_and_forward_returns
    long_short : bool
        Should this computation happen on a long short portfolio?
        - See tears.create_returns_tear_sheet for details on how this flag
          affects returns analysis
    group_neutral : bool
        Should this computation happen on a group neutral portfolio?
        - See tears.create_returns_tear_sheet for details on how this flag
          affects returns analysis
        - See tears.create_information_tear_sheet for details on how this
          flag affects information analysis
    by_group : bool
        If True, display graphs separately for each group.
    """
    # Summary statistics first, then the three sub-tear-sheets; each is
    # rendered inline (set_context=False) rather than as its own context.
    plotting.plot_quantile_statistics_table(factor_data)
    create_returns_tear_sheet(factor_data, long_short, group_neutral, by_group, set_context=False)
    create_information_tear_sheet(factor_data, group_neutral, by_group, set_context=False)
    create_turnover_tear_sheet(factor_data, set_context=False)
|
def delete_project(self):
    """Delete the current project without deleting the files in the directory."""
    if self.current_active_project:
        self.switch_to_plugin()
        path = self.current_active_project.root_path
        buttons = QMessageBox.Yes | QMessageBox.No
        # Bug fix: the message hard-coded "(unknown)" even though the
        # project name was supplied via .format(filename=...); use the
        # {filename} placeholder so the real name is shown.
        answer = QMessageBox.warning(
            self, _("Delete"),
            _("Do you really want to delete <b>{filename}</b>?<br><br>"
              "<b>Note:</b> This action will only delete the project. "
              "Its files are going to be preserved on disk.").format(
                  filename=osp.basename(path)),
            buttons)
        if answer == QMessageBox.Yes:
            try:
                self.close_project()
                # Only the project metadata directory is removed; user
                # files stay on disk.
                shutil.rmtree(osp.join(path, '.spyproject'))
            except EnvironmentError as error:
                QMessageBox.critical(
                    self, _("Project Explorer"),
                    _("<b>Unable to delete <i>{varpath}</i></b>"
                      "<br><br>The error message was:<br>{error}").format(
                          varpath=path, error=to_text_string(error)))
|
def _configure_iam_role(config):
    """Set up a GCP service account with IAM roles.

    Creates a GCP service account (if one does not already exist) and binds
    IAM roles which allow it to control storage/compute services.
    Specifically, the head node needs an IAM role that allows it to create
    further GCE instances and store items in Google Cloud Storage.

    TODO: Allow the name/id of the service account to be configured
    """
    email = SERVICE_ACCOUNT_EMAIL_TEMPLATE.format(
        account_id=DEFAULT_SERVICE_ACCOUNT_ID,
        project_id=config["provider"]["project_id"])
    account = _get_service_account(email, config)
    if account is None:
        logger.info("_configure_iam_role: "
                    "Creating new service account {}".format(DEFAULT_SERVICE_ACCOUNT_ID))
        account = _create_service_account(DEFAULT_SERVICE_ACCOUNT_ID, DEFAULT_SERVICE_ACCOUNT_CONFIG, config)
    assert account is not None, "Failed to create service account"
    _add_iam_policy_binding(account, DEFAULT_SERVICE_ACCOUNT_ROLES)
    # NOTE: The amount of access is determined by the scope + IAM
    # role of the service account. Even if the cloud-platform scope
    # gives (scope) access to the whole cloud-platform, the service
    # account is limited by the IAM rights specified above.
    config["head_node"]["serviceAccounts"] = [{
        "email": account["email"],
        "scopes": ["https://www.googleapis.com/auth/cloud-platform"]}]
    return config
|
def _htdigest(username, password, **kwargs):
    '''Provide authentication via Apache-style htdigest files.'''
    realm = kwargs.get('realm', None)
    if not realm:
        log.error('salt.auth.file: A ^realm must be defined in '
                  'external_auth:file for htdigest filetype')
        return False
    from passlib.apache import HtdigestFile
    pwfile = HtdigestFile(kwargs['filename'])
    # passlib below version 1.6 uses 'verify' function instead of 'check_password'
    is_old_passlib = salt.utils.versions.version_cmp(kwargs['passlib_version'], '1.6') < 0
    check = pwfile.verify if is_old_passlib else pwfile.check_password
    return check(username, realm, password)
|
def gps_inject_data_send(self, target_system, target_component, len, data, force_mavlink1=False):
    '''data for injecting into the onboard GPS (used for DGPS)

    target_system    : System ID (uint8_t)
    target_component : Component ID (uint8_t)
    len              : data length (uint8_t)
    data             : raw data (110 is enough for 12 satellites of RTCMv2) (uint8_t)
    '''
    # Encode once, then hand the packed message to the transport layer.
    # (``len`` shadows the builtin, but it is part of the generated API.)
    message = self.gps_inject_data_encode(target_system, target_component, len, data)
    return self.send(message, force_mavlink1=force_mavlink1)
|
def LOS_CrossProj(VType, Ds, us, kPIns, kPOuts, kRMins, Lplot='In', proj='All', multi=False):
    """Compute the parameters to plot the poloidal projection of the LOS"""
    # --- input validation -------------------------------------------------
    # VType selects the vessel geometry: toroidal ('tor') or linear ('lin').
    assert type(VType) is str and VType.lower() in ['tor', 'lin']
    assert Lplot.lower() in ['tot', 'in']
    assert type(proj) is str
    proj = proj.lower()
    assert proj in ['cross', 'hor', 'all', '3d']
    # Ds (origins) and us (direction vectors) are (3, nL) arrays.
    assert Ds.ndim == 2 and Ds.shape == us.shape
    nL = Ds.shape[1]
    # Starting parameter along each LOS: entry point for 'in', origin for 'tot'.
    k0 = kPIns if Lplot.lower() == 'in' else np.zeros((nL,))
    if VType.lower() == 'tor' and proj in ['cross', 'all']:
        # Angle between each LOS and the horizontal plane; used to decide
        # how many sample points the projected (curved) polyline needs --
        # presumably more points near 45 deg where the curve bends most;
        # TODO confirm intent of this heuristic.
        CrossProjAng = np.arccos(np.sqrt(us[0, :] ** 2 + us[1, :] ** 2) / np.sqrt(np.sum(us ** 2, axis=0)))
        nkp = np.ceil(25. * (1 - (CrossProjAng / (np.pi / 4) - 1) ** 2) + 2)
        ks = np.max([kRMins, kPIns], axis=0) if Lplot.lower() == 'in' else kRMins
        pts0 = []
        if multi:
            # One (2, npts) array per LOS; NaN placeholders for LOS that
            # never exit (kPOuts is NaN).
            for ii in range(0, nL):
                if np.isnan(kPOuts[ii]):
                    pts0.append(np.array([[np.nan, np.nan], [np.nan, np.nan]]))
                else:
                    k = np.linspace(k0[ii], kPOuts[ii], nkp[ii], endpoint=True)
                    # Ensure the min-radius point is among the samples.
                    k = np.unique(np.append(k, ks[ii]))
                    pp = Ds[:, ii:ii + 1] + k[np.newaxis, :] * us[:, ii:ii + 1]
                    # Project (x, y, z) to the (R, Z) poloidal plane.
                    pts0.append(np.array([np.hypot(pp[0, :], pp[1, :]), pp[2, :]]))
        else:
            for ii in range(0, nL):
                if np.isnan(kPOuts[ii]):
                    pts0.append(np.array([[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]]))
                else:
                    k = np.linspace(k0[ii], kPOuts[ii], nkp[ii], endpoint=True)
                    # Trailing NaN separates consecutive LOS in the single
                    # concatenated polyline.
                    k = np.append(np.unique(np.append(k, ks[ii])), np.nan)
                    pts0.append(Ds[:, ii:ii + 1] + k[np.newaxis, :] * us[:, ii:ii + 1])
            pts0 = np.concatenate(tuple(pts0), axis=1)
            pts0 = np.array([np.hypot(pts0[0, :], pts0[1, :]), pts0[2, :]])
    if not (VType.lower() == 'tor' and proj == 'cross'):
        # For all other projections a straight 2-point segment per LOS suffices.
        pts = []
        if multi:
            for ii in range(0, nL):
                if np.isnan(kPOuts[ii]):
                    pts.append(np.array([[np.nan, np.nan], [np.nan, np.nan], [np.nan, np.nan]]))
                else:
                    k = np.array([k0[ii], kPOuts[ii]])
                    pts.append(Ds[:, ii:ii + 1] + k[np.newaxis, :] * us[:, ii:ii + 1])
        else:
            for ii in range(0, nL):
                if np.isnan(kPOuts[ii]):
                    pts.append(np.array([[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]]))
                else:
                    k = np.array([k0[ii], kPOuts[ii], np.nan])
                    pts.append(Ds[:, ii:ii + 1] + k[np.newaxis, :] * us[:, ii:ii + 1])
            pts = np.concatenate(tuple(pts), axis=1)
    # --- select the coordinate pairs for the requested projection ---------
    if proj == 'hor':
        # Horizontal projection: keep (x, y).
        pts = [pp[:2, :] for pp in pts] if multi else pts[:2, :]
    elif proj == 'cross':
        if VType.lower() == 'tor':
            pts = pts0
        else:
            # Linear vessel: cross-section is (y, z).
            pts = [pp[1:, :] for pp in pts] if multi else pts[1:, :]
    elif proj == 'all':
        if multi:
            if VType.lower() == 'tor':
                pts = [(p0, pp[:2, :]) for (p0, pp) in zip(*[pts0, pts])]
            else:
                pts = (pts[1:, :], pts[:2, :])
        else:
            pts = (pts0, pts[:2, :]) if VType.lower() == 'tor' else (pts[1:, :], pts[:2, :])
    return pts
|
def should_execute(self, workload):
    """If we have been suspended by i3bar, only execute those modules that
    set the keep_alive flag to a truthy value. See the docs on the
    suspend_signal_handler method of the io module for more information."""
    if not self._suspended.is_set():
        return True
    inner = unwrap_workload(workload)
    # Suspended: run only workloads that declare a truthy keep_alive flag.
    if not hasattr(inner, 'keep_alive'):
        return False
    return getattr(inner, 'keep_alive')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.