signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def getYadisXRD(xrd_tree):
    """Return the XRD element that should contain the Yadis services.

    When several XRD elements are present, the last one in document
    order is the one that applies.
    """
    matches = xrd_tree.findall(xrd_tag)
    if not matches:
        raise XRDSError('No XRD present in tree')
    # The final XRD element is the authoritative one.
    return matches[-1]
def hist(self, dimension=None, num_bins=20, bin_range=None, adjoin=True, index=0, **kwargs):
    """Computes and adjoins histogram along specified dimension(s).

    Defaults to first value dimension if present otherwise falls
    back to first key dimension.

    Args:
        dimension: Dimension(s) to compute histogram on
        num_bins (int, optional): Number of bins
        bin_range (tuple, optional): Lower and upper bounds of bins
        adjoin (bool, optional): Whether to adjoin histogram
        index (int, optional): Index of layer to apply hist to

    Returns:
        AdjointLayout of element and histogram or just the histogram
    """
    is_valid_index = isinstance(index, int) and 0 <= index < len(self)
    is_valid_label = index in [el.label for el in self]
    if not (is_valid_index or is_valid_label):
        raise TypeError("Please supply a suitable index or label for the histogram data")
    hists = self.get(index).hist(
        adjoin=False, dimension=dimension, bin_range=bin_range,
        num_bins=num_bins, **kwargs)
    if not isinstance(hists, Layout):
        hists = [hists]
    if not isinstance(dimension, list):
        dimension = ['Default']
    if adjoin:
        # Fold every histogram onto this object with the adjoin operator.
        result = self
        for h in hists:
            result = result << h
        result.main_layer = index
        return result
    if len(dimension) > 1:
        return hists
    return hists[0]
def assert_free(self, host, port=None):
    """Assert that the given addr is free.

    Every attempt to connect must fail within the timeout, otherwise a
    PortNotFree exception is raised.  Accepts either separate host/port
    arguments or a single addr tuple as `host`.  A server bind address
    such as '::' is mapped to a connectable localhost address on
    Windows via ``client_host``.
    """
    if port is None and isinstance(host, abc.Sequence):
        # An addr tuple was passed in `host`; unpack the first two fields.
        host, port = host[:2]
    if platform.system() == 'Windows':
        host = client_host(host)
    addr_infos = socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
    # Try every resolved address; each connection attempt must fail.
    for info in addr_infos:
        self._connect(*info)
def anim_to_html(anim, fps=None, embed_frames=True, default_mode='loop'):
    """Generate HTML representation of the animation.

    :param anim: matplotlib animation object (its figure is closed here)
    :param fps: frames per second; derived from ``anim._interval`` when None
    :param embed_frames: whether to embed the frames in the HTML output
    :param default_mode: playback mode passed to ``HTMLWriter``
    :returns: the HTML string; cached on ``anim._html_representation``
    """
    if fps is None and hasattr(anim, '_interval'):
        # Convert interval in ms to frames per second
        fps = 1000. / anim._interval
    plt.close(anim._fig)
    if hasattr(anim, "_html_representation"):
        # Already rendered once; reuse the cached HTML.
        return anim._html_representation
    # tempfile can't be used here: we need a filename, and this
    # fails on windows.  Instead, we use a custom filename generator
    # (with tempfile.NamedTemporaryFile(suffix='.html') as f:)
    with _NameOnlyTemporaryFile(suffix='.html') as f:
        anim.save(f.name, writer=HTMLWriter(fps=fps,
                                            embed_frames=embed_frames,
                                            default_mode=default_mode))
        # Fix: close the file handle explicitly instead of leaking it
        # via a bare open(...).read().
        with open(f.name) as html_file:
            html = html_file.read()
    anim._html_representation = html
    return html
def create_widget(self):
    """Create the underlying widget."""
    d = self.declaration
    # Vertical scrolling uses the stock ScrollView; any other
    # orientation gets the horizontal variant.
    widget_cls = ScrollView if d.orientation == 'vertical' else HorizontalScrollView
    self.widget = widget_cls(self.get_context(), None, d.style)
def value(self):
    """Fetch a random weighted choice."""
    picked = weighted_choice(self._responses)
    if not isinstance(picked, tuple):
        # Plain choice: just return its string form.
        return str(picked)
    # Tuple choice: join the stringified elements and trim whitespace.
    return ''.join(str(part) for part in picked).strip()
def mt_coherence(df, xi, xj, tbp, kspec, nf, p, **kwargs):
    """Construct the coherence spectrum from the yk's and the
    weights of the usual multitaper spectrum estimation.

    Note this code uses the real(4) multitaper code.

    :param df: float; sampling rate of time series
    :param xi: numpy.ndarray; data for first series
    :param xj: numpy.ndarray; data for second series
    :param tbp: float; the time-bandwidth product
    :param kspec: integer; number of tapers to use
    :param nf: integer; number of freq points in spectrum
    :param p: float; confidence for null hypothesis test, e.g. .95

    Optional input:
    :param iadapt: integer 0 - adaptive, 1 - constant weights;
        default adapt = 1

    Optional outputs are returned as a dictionary, with keys as specified
    below and values as numpy.ndarrays.  To activate an output set the
    corresponding kwarg in the argument list, e.g.
    ``mt_coherence(df, xi, xj, tbp, kspec, nf, p, freq=True, cohe=True)``

    :param freq: the frequency bins
    :param cohe: coherence of the two series (0 - 1)
    :param phase: the phase at each frequency
    :param speci: spectrum of first series
    :param specj: spectrum of second series
    :param conf: p confidence value for each freq.
    :param cohe_ci: 95% bounds on coherence (not larger than 1)
    :param phase_ci: 95% bounds on phase estimates

    If confidence intervals are requested, then both phase and
    cohe variables need to be requested as well.
    """
    npts = len(xi)
    if len(xj) != npts:
        # NOTE(review): "Inpurt" is a typo, but the message is runtime
        # text and is kept verbatim here.
        raise Exception("Inpurt ndarrays have mismatching length")
    # Helper describing the float32, Fortran-order memory layout
    # expected by the compiled library.
    mt = _MtspecType('float32')
    # convert type of input arguments if necessary
    xi = np.require(xi, dtype=mt.float, requirements=[mt.order])
    xj = np.require(xj, dtype=mt.float, requirements=[mt.order])
    # fill up optional arguments, if not given set them None
    args = []
    for key in ('freq', 'cohe', 'phase', 'speci', 'specj', 'conf',
                'cohe_ci', 'phase_ci', 'iadapt'):
        kwargs.setdefault(key, None)
        if key in ('cohe_ci', 'phase_ci') and kwargs[key]:
            # Confidence intervals need an (nf, 2) buffer: lower and
            # upper bound per frequency.
            kwargs[key] = mt.empty(nf, 2)
            args.append(mt.p(kwargs[key]))
        elif key == 'iadapt' and kwargs[key]:
            # iadapt is a scalar input flag, passed by reference.
            args.append(C.byref(C.c_int(kwargs[key])))
        elif kwargs[key]:
            # Any other requested output gets a length-nf buffer.
            kwargs[key] = mt.empty(nf)
            args.append(mt.p(kwargs[key]))
        else:
            # Not requested: pass None so the library skips it.
            args.append(kwargs[key])
    # Call the compiled routine.  Scalars go by reference, arrays as
    # pointers; the buffers collected in `args` are filled in place, in
    # the exact order built above.
    mtspeclib.mt_cohe_(C.byref(C.c_int(int(npts))),
                       C.byref(C.c_float(float(df))),
                       mt.p(xi), mt.p(xj),
                       C.byref(C.c_float(float(tbp))),
                       C.byref(C.c_int(int(kspec))),
                       C.byref(C.c_int(int(nf))),
                       C.byref(C.c_float(float(p))),
                       *args)
    # remove None values from dictionary
    return dict([(k, v) for k, v in kwargs.items() if v is not None])
def make_sure_path_exists(path):
    """Ensure that a directory exists.

    :param path: A directory path.
    :returns: True when the directory exists or was created,
        False on any other OS error.
    """
    logger.debug('Making sure path exists: {}'.format(path))
    try:
        os.makedirs(path)
    except OSError as error:
        # An already-existing directory is fine; anything else fails.
        if error.errno != errno.EEXIST:
            return False
    else:
        logger.debug('Created directory at: {}'.format(path))
    return True
def _write_consensus_strings(self, output):
    '''Writes the taxonomy of each leaf to a file.  If the leaf has no
    taxonomy, a taxonomy string will be created using the annotations
    provided to the ancestor nodes of that leaf (meaning, it will be
    decorated).

    Parameters
    ----------
    output : string
        File to which the taxonomy strings for each leaf in the tree will
        be written in Greengenes format, e.g.
            637960147    mcrA; Euryarchaeota_mcrA; Methanomicrobia
            637699780    mcrA; Euryarchaeota_mcrA; Methanomicrobia
    '''
    logging.info("Writing decorated taxonomy to file: %s" % (output))
    with open(output, 'w') as out:
        for tip in self.tree.leaf_nodes():
            tax_name = tip.taxon.label.replace(" ", "_")
            # FIX: test membership with the same underscored key used
            # for the lookup below.  Previously the raw label was
            # tested, so labels containing spaces whose underscored
            # form was the taxonomy key would raise KeyError.
            if tax_name in self.taxonomy:
                tax_string = '; '.join(self.taxonomy[tax_name])
            else:
                # No direct taxonomy: reconstruct one from annotated
                # ancestor node labels ("decorate" the leaf).
                ancestor_list = []
                for ancestor in tip.ancestor_iter():
                    if ancestor.label:
                        split_node_name = ancestor.label.split(':')
                        if len(split_node_name) == 2:
                            # "support:taxonomy" style label.
                            ancestor_list += list(reversed(split_node_name[1].split('; ')))
                        elif len(split_node_name) == 1:
                            try:
                                # Pure support values carry no taxonomy.
                                float(split_node_name[0])
                            except ValueError:
                                ancestor_list += list(reversed(split_node_name[0].split('; ')))
                        else:
                            raise Exception("Malformed node name: %s" % ancestor.label)
                # Ancestors were collected tip-to-root; flip to
                # root-to-tip order for the taxonomy string.
                tax_list = list(reversed(ancestor_list))
                if len(tax_list) < 1:
                    logging.warning("No taxonomy found for species %s!" % (tax_name))
                    tax_string = "Unknown"
                else:
                    tax_string = '; '.join(tax_list)
            output_line = "%s\t%s\n" % (tax_name, tax_string)
            out.write(output_line)
def get_persons(self):
    """Return a list of names (strings) of the persons being chatted with."""
    # Each contact record under data["to"]["data"] carries a "name" field.
    return [contact["name"] for contact in self.data["to"]["data"]]
def main(self, config_filename, regex):
    """Run the constants-generation pipeline end to end.

    :param str config_filename: The config filename.
    :param str regex: The regular expression for columns which we want to use.
    :rtype: int
    """
    self._read_configuration_file(config_filename)
    if self._constants_filename:
        self._io.title('Constants')
        self.connect()
        # Collect old and current column metadata, reconcile the two
        # sets, then persist the merged result.  The call order is
        # significant: each step consumes state set by the previous one.
        self._get_old_columns()
        self._get_columns()
        self._enhance_columns()
        self._merge_columns()
        self._write_columns()
        # Derive labels for columns matching `regex`, fill in constant
        # values, and emit the generated constants class.
        self._get_labels(regex)
        self._fill_constants()
        self.__write_constant_class()
        self.disconnect()
        self.__log_number_of_constants()
    else:
        self._io.log_verbose('Constants not enabled')
    # Exit code 0 in both the enabled and disabled cases.
    return 0
def _unwrap_result ( action , result ) :
"""Unwrap a request response and return only the response data .
: param str action : The action name
: param result : The result of the action
: type : result : list or dict
: rtype : dict | None""" | if not result :
return
elif action in { 'DeleteItem' , 'PutItem' , 'UpdateItem' } :
return _unwrap_delete_put_update_item ( result )
elif action == 'GetItem' :
return _unwrap_get_item ( result )
elif action == 'Query' or action == 'Scan' :
return _unwrap_query_scan ( result )
elif action == 'CreateTable' :
return _unwrap_create_table ( result )
elif action == 'DescribeTable' :
return _unwrap_describe_table ( result )
return result |
def search(self):
    r"""Call the Bugzilla endpoint that will do the search.  It will take
    the information used in other methods on the Search object and
    build up the query string.  If no bugs are found then an empty list
    is returned.

    >>> bugs = bugzilla.search_for\
    ...        .keywords("checkin-needed")\
    ...        .include_fields("flags")\
    ...        .search()
    """
    params = {}
    params.update(self._time_frame.items())
    if self._includefields:
        params['include_fields'] = list(self._includefields)
    if self._bug_numbers:
        # Explicit bug numbers: fetch each bug individually instead of
        # running a query-style search.
        bugs = []
        for bug in self._bug_numbers:
            result = self._bugsy.request('bug/%s' % bug, params=params)
            bugs.append(Bug(self._bugsy, **result['bugs'][0]))
        return bugs
    else:
        # Build up the query string from whichever filters were set.
        if self._component:
            params['component'] = list(self._component)
        if self._product:
            params['product'] = list(self._product)
        if self._keywords:
            params['keywords'] = list(self._keywords)
        if self._assigned:
            params['assigned_to'] = list(self._assigned)
        if self._summaries:
            params['short_desc_type'] = 'allwordssubstr'
            params['short_desc'] = list(self._summaries)
        if self._whiteboard:
            # NOTE(review): this branch sets short_desc_type rather than
            # a whiteboard-specific type parameter — looks suspicious
            # but is preserved as-is; confirm against the Bugzilla
            # quicksearch/query API.
            params['short_desc_type'] = 'allwordssubstr'
            params['whiteboard'] = list(self._whiteboard)
        if self._change_history['fields']:
            params['chfield'] = self._change_history['fields']
        if self._change_history.get('value', None):
            params['chfieldvalue'] = self._change_history['value']
        try:
            results = self._bugsy.request('bug', params=params)
        except Exception as e:
            # NOTE(review): assumes the raised exception carries
            # .msg/.code attributes (bugsy request errors); other
            # exception types would raise AttributeError here.
            raise SearchException(e.msg, e.code)
        return [Bug(self._bugsy, **bug) for bug in results['bugs']]
def set_alert_destination(self, ip=None, acknowledge_required=None,
                          acknowledge_timeout=None, retries=None,
                          destination=0, channel=None):
    """Configure one or more parameters of an alert destination

    If any parameter is 'None' (default), that parameter is left unchanged.
    Otherwise, all given parameters are set by this command.

    :param ip: IP address of the destination.  It is currently expected
               that the calling code will handle any name lookup and
               present this data as IP address.
    :param acknowledge_required: Whether or not the target should expect
                                 an acknowledgement from this alert target.
    :param acknowledge_timeout: Time to wait for acknowledgement if enabled
    :param retries: How many times to attempt transmit of an alert.
    :param destination: Destination index, defaults to 0.
    :param channel: The channel to configure the alert on.  Defaults to
                    current
    """
    if channel is None:
        channel = self.get_network_channel()
    if ip is not None:
        # Build the LAN configuration payload: channel, parameter 19
        # (presumably the destination-address parameter — confirm
        # against the IPMI LAN configuration table), destination index.
        destdata = bytearray((channel, 19, destination))
        try:
            # IPv4 path; inet_aton raises socket.error for non-IPv4 input.
            parsedip = socket.inet_aton(ip)
            destdata.extend((0, 0))
            destdata.extend(parsedip)
            # Pad out to the fixed parameter length.
            destdata.extend(b'\x00\x00\x00\x00\x00\x00')
        except socket.error:
            # Not a dotted-quad address; treat as IPv6.
            if self._supports_standard_ipv6:
                parsedip = socket.inet_pton(socket.AF_INET6, ip)
                # High bit marks the address field as IPv6.
                destdata.append(0b10000000)
                destdata.extend(parsedip)
            else:
                # No standard IPv6 support: delegate to the OEM handler
                # and skip the generic raw command below.
                destdata = None
                self.oem_init()
                self._oem.set_alert_ipv6_destination(ip, destination, channel)
        if destdata:
            # Set LAN configuration parameter (netfn 0xc, command 1).
            self.xraw_command(netfn=0xc, command=1, data=destdata)
    if (acknowledge_required is not None or retries is not None
            or acknowledge_timeout is not None):
        # Read-modify-write of destination type (parameter 18) so that
        # fields the caller did not specify are preserved.
        currtype = self.xraw_command(netfn=0xc, command=2,
                                     data=(channel, 18, destination, 0))
        if currtype['data'][0] != b'\x11':
            # First byte is presumably the parameter revision; only
            # 0x11 is handled — confirm against the IPMI specification.
            raise exc.PyghmiException("Unknown parameter format")
        currtype = bytearray(currtype['data'][1:])
        if acknowledge_required is not None:
            if acknowledge_required:
                # Set the acknowledge bit (top bit of byte 1).
                currtype[1] |= 0b10000000
            else:
                # Clear the acknowledge bit, keeping the low 7 bits.
                currtype[1] &= 0b1111111
        if acknowledge_timeout is not None:
            currtype[2] = acknowledge_timeout
        if retries is not None:
            currtype[3] = retries
        destreq = bytearray((channel, 18))
        destreq.extend(currtype)
        self.xraw_command(netfn=0xc, command=1, data=destreq)
    if not ip == '0.0.0.0':
        # 0.0.0.0 is used to clear a destination; only ensure an alert
        # policy exists for real destinations.
        self._assure_alert_policy(channel, destination)
def normalize_example(self, example, hparams):
    """Assumes that example contains both inputs and targets.

    Pads/truncates every feature in `example` to the constant length
    returned by self.max_length(hparams) so batches have fixed shape.
    """
    length = self.max_length(hparams)

    def _to_constant_shape(tensor):
        # Truncate to `length`, then right-pad with zeros to exactly
        # `length` elements.
        tensor = tensor[:length]
        tensor = tf.pad(tensor, [(0, length - tf.shape(tensor)[0])])
        return tf.reshape(tensor, [length])

    if self.has_inputs:
        example['inputs'] = _to_constant_shape(example['inputs'])
        example['targets'] = _to_constant_shape(example['targets'])
    elif 'inputs' in example:
        # Problem has no inputs of its own but the example does:
        # fold the inputs into the targets sequence.
        if self.packed_length:
            raise ValueError('cannot concatenate packed examples on the fly.')
        inputs = example.pop('inputs')[:-1]
        # Remove EOS token.
        targets = tf.concat([inputs, example['targets']], 0)
        example['targets'] = _to_constant_shape(targets)
    else:
        example['targets'] = _to_constant_shape(example['targets'])
    if self.packed_length:
        # Packed datasets carry segmentation/position features; pad
        # them too, or synthesize them when absent.
        if self.has_inputs:
            if 'inputs_segmentation' in example:
                example['inputs_segmentation'] = _to_constant_shape(example['inputs_segmentation'])
                example['inputs_position'] = _to_constant_shape(example['inputs_position'])
            else:
                # Segment id 1 for non-padding positions, 0 for padding;
                # positions are indices within that single segment.
                example['inputs_segmentation'] = tf.to_int64(tf.not_equal(example['inputs'], 0))
                example['inputs_position'] = (example['inputs_segmentation'] * tf.range(length, dtype=tf.int64))
        if 'targets_segmentation' in example:
            example['targets_segmentation'] = _to_constant_shape(example['targets_segmentation'])
            example['targets_position'] = _to_constant_shape(example['targets_position'])
        else:
            example['targets_segmentation'] = tf.to_int64(tf.not_equal(example['targets'], 0))
            example['targets_position'] = (example['targets_segmentation'] * tf.range(length, dtype=tf.int64))
    return example
def style(self, style: _AttrValueType) -> None:
    """Set the style attribute of this node.

    A string argument is parsed into the underlying
    ``CSSStyleDeclaration``; ``None`` clears the declaration; a
    ``CSSStyleDeclaration`` instance is adopted directly (or copied
    when it is already owned by another node).
    """
    if style is None or isinstance(style, str):
        # Parse string input; None is treated as an empty declaration.
        self.__style._parse_str(style if style is not None else '')
    elif isinstance(style, CSSStyleDeclaration):
        self.__style._owner = None
        if style._owner is not None:
            # Declaration belongs to another node: copy its contents
            # into a fresh declaration owned by us.
            new_style = CSSStyleDeclaration(owner=self)
            new_style.update(style)
            self.__style = new_style
        else:
            # always making new decl may be better
            style._owner = self
            self.__style = style
    else:
        raise TypeError('Invalid type for style: {}'.format(type(style)))
def extend_hit(self, hit_id, number, duration_hours=None):
    """Extend an existing HIT and return an updated description.

    Adds ``number`` extra assignments and, when ``duration_hours`` is
    supplied, also pushes the HIT's expiration out accordingly.
    """
    self.create_additional_assignments_for_hit(hit_id, number)
    if duration_hours is not None:
        self.update_expiration_for_hit(hit_id, duration_hours)
    # Re-fetch so the caller sees the post-update state.
    return self.get_hit(hit_id)
def wait_run_in_executor(func, *args, **kwargs):
    """Run blocking code in a different thread and wait for the result.

    :param func: Run this function in a different thread
    :param args: Parameters of the function
    :param kwargs: Keyword parameters of the function
    :returns: Return the result of the function
    """
    event_loop = asyncio.get_event_loop()
    task = event_loop.run_in_executor(None, functools.partial(func, *args, **kwargs))
    # Cooperatively block until the executor future has completed.
    yield from asyncio.wait([task])
    # Re-raises any exception raised inside `func`.
    return task.result()
def _initialize_repo_cache():
    """Initialize the repository cache used for scraping.

    Retrieves a list of repositories with their provider and last
    scraping time from Elasticsearch.  The result can be used to check
    which repos need to be scraped (e.g. after a specific amount of
    time).
    """
    LOGGER.info("Initializing repository cache")
    # TODO(fschmidt): Maybe we can use this list as cache for the whole
    # scraper-webhook part.  This way, we could reduce the amount of
    # operations needed for GitHub and ElasticSearch.
    return {
        hit.repo_name: hit.to_dict(skip_empty=False)
        for hit in GitRepo.search().query("match_all").scan()
    }
def combinations(seq, k):
    """Return k-length subsequences of elements from the input iterable.

    This version uses Numpy/Scipy and should be preferred over
    itertools: it avoids the creation of all intermediate Python
    objects.

    Examples
    >>> import numpy as np
    >>> from itertools import combinations as iter_comb
    >>> x = np.arange(3)
    >>> c1 = combinations(x, 2)
    >>> print(c1)
    [[0 1]
     [0 2]
     [1 2]]
    >>> c2 = np.array(tuple(iter_comb(x, 2)))
    >>> print(c2)
    [[0 1]
     [0 2]
     [1 2]]
    """
    from itertools import chain, combinations as iter_combinations
    from scipy.special import comb
    # Number of k-subsets, computed exactly to size the output buffer.
    n_rows = comb(len(seq), k, exact=True)
    flat = chain.from_iterable(iter_combinations(seq, k))
    # Materialize straight into a flat int array, then view as rows.
    out = np.fromiter(flat, int, count=n_rows * k)
    return out.reshape(-1, k)
def publishApp(self, app_info, map_info=None, fsInfo=None):
    """Publishes apps to AGOL/Portal

    Args:
        app_info (list): A list of JSON configuration apps to publish.
        map_info (list): Defaults to ``None``.
        fsInfo (list): Defaults to ``None``.
    Returns:
        dict: A dictionary of results objects.
    """
    if self.securityhandler is None:
        # No credentials: bail out early (returns None).
        print("Security handler required")
        return
    appDet = None
    try:
        app_results = []
        if isinstance(app_info, list):
            # Publish each app configuration individually.
            for appDet in app_info:
                app_results.append(self._publishAppLogic(appDet=appDet, map_info=map_info, fsInfo=fsInfo))
        else:
            # Single app configuration.
            app_results.append(self._publishAppLogic(appDet=app_info, map_info=map_info, fsInfo=fsInfo))
        return app_results
    except (common.ArcRestHelperError) as e:
        # Helper errors are already well-formed; re-raise untouched.
        raise e
    except Exception as e:
        # Wrap unexpected errors with file/line context from trace().
        line, filename, synerror = trace()
        raise common.ArcRestHelperError({"function": "publishApp", "line": line, "filename": filename, "synerror": synerror, })
    finally:
        # Drop local references and nudge the garbage collector
        # (project-wide convention in this codebase).
        appDet = None
        del appDet
        gc.collect()
def recent(category=None, pages=1, sort=None, order=None):
    """Return most recently added torrents.

    Results can be categorized and sorted, and may span multiple pages.
    """
    search = Search()
    search.recent(category, pages, sort, order)
    return search
def corners(self, order='C'):
    """Return the corner points as a single array.

    Parameters
    ----------
    order : {'C', 'F'}, optional
        Ordering of the axes in which the corners appear in the
        output.  ``'C'`` means that the first axis varies slowest and
        the last one fastest, vice versa in ``'F'`` ordering.

    Returns
    -------
    corners : `numpy.ndarray`
        Array containing the corner coordinates.  The size of the
        array is ``2^m x ndim``, where ``m`` is the number of
        non-degenerate axes, i.e. the corners are stored as rows.

    Examples
    --------
    >>> intv = IntervalProd([-1, 2, 0], [-0.5, 3, 0.5])
    >>> intv.corners()
    array([[-1. ,  2. ,  0. ],
           [-1. ,  2. ,  0.5],
           [-1. ,  3. ,  0. ],
           [-1. ,  3. ,  0.5],
           [-0.5,  2. ,  0. ],
           [-0.5,  2. ,  0.5],
           [-0.5,  3. ,  0. ],
           [-0.5,  3. ,  0.5]])
    """
    from odl.discr.grid import RectGrid
    coord_vecs = [0] * self.ndim
    # Degenerate axes contribute a single fixed coordinate...
    for axis in np.where(~self.nondegen_byaxis)[0]:
        coord_vecs[axis] = self.min_pt[axis]
    # ...while non-degenerate axes contribute both interval endpoints.
    for axis in np.where(self.nondegen_byaxis)[0]:
        coord_vecs[axis] = (self.min_pt[axis], self.max_pt[axis])
    # The corner set is exactly the point set of this min/max grid.
    return RectGrid(*coord_vecs).points(order=order)
def _bnd(self, xloc, left, right, cache):
    """Distribution bounds.

    Example:
        >>> print(chaospy.Uniform().range([-2, 0, 2, 4]))
        [[0. 0. 0. 0.]
         [1. 1. 1. 1.]]
        >>> print(chaospy.Add(chaospy.Uniform(), 2).range([-2, 0, 2, 4]))
        [[2. 2. 2. 2.]
         [3. 3. 3. 3.]]
        >>> print(chaospy.Add(2, chaospy.Uniform()).range([-2, 0, 2, 4]))
        [[2. 2. 2. 2.]
         [3. 3. 3. 3.]]
        >>> print(chaospy.Add(1, 1).range([-2, 0, 2, 4]))
        [[2. 2. 2. 2.]
         [2. 2. 2. 2.]]
    """
    # Resolve cached forward evaluations, if any.
    left = evaluation.get_forward_cache(left, cache)
    right = evaluation.get_forward_cache(right, cache)
    if isinstance(left, Dist):
        if isinstance(right, Dist):
            # Two stochastic operands cannot be bounded here.
            raise evaluation.DependencyError("under-defined distribution {} or {}".format(left, right))
    elif not isinstance(right, Dist):
        # Both operands are constants: the bound degenerates to the
        # single point left+right.
        return left + right, left + right
    else:
        # Normalize so `left` is the distribution and `right` the constant.
        left, right = right, left
    right = numpy.asfarray(right)
    if len(right.shape) == 3:
        # `right` carries two constant layers; evaluate the shifted
        # bounds for each layer and take the elementwise envelope.
        xloc_ = (xloc.T - right[0].T).T
        # cache.copy() here so the first evaluation does not pollute
        # the cache used by the second.
        lower, upper = evaluation.evaluate_bound(left, xloc_, cache=cache.copy())
        lower0, upper0 = (lower.T + right[0].T).T, (upper.T + right[0].T).T
        xloc_ = (xloc.T - right[1].T).T
        lower, upper = evaluation.evaluate_bound(left, xloc_, cache=cache)
        lower1, upper1 = (lower.T + right[1].T).T, (upper.T + right[1].T).T
        lower = numpy.min([lower0, lower1], 0)
        upper = numpy.max([upper0, upper1], 0)
    else:
        # Shift the query locations by the constant, bound the
        # distribution, then shift the bounds back.
        xloc_ = (xloc.T - right.T).T
        lower, upper = evaluation.evaluate_bound(left, xloc_, cache=cache.copy())
        lower, upper = (lower.T + right.T).T, (upper.T + right.T).T
    assert lower.shape == xloc.shape
    assert upper.shape == xloc.shape
    return lower, upper
def aDiffCytoscape ( df , aging_genes , target , species = "caenorhabditis elegans" , limit = None , cutoff = 0.4 , taxon = None , host = cytoscape_host , port = cytoscape_port ) :
"""Plots tables from aDiff / cuffdiff into cytoscape using String protein queries .
Uses top changed genes as well as first neighbours and difusion fo generate subnetworks .
: param df : df as outputed by aDiff for differential gene expression
: param aging _ genes : ENS gene ids to be labeled with a diagonal
: param species : species for string app query . eg . " caenorhabditis elegans " , " drosophila melanogaster " , " mus musculus " , " homo sapiens "
: param limit : limit for string app query . Number of extra genes to recover . If None , limit = N ( query _ genes ) * . 25
: param cuttoff : confidence cuttoff for sting app query . Default = 0.4
: param taxon : taxon id for string app query . For the species shown above , taxon id will be automatically identified
: param cytoscape _ host : host address for cytoscape
: param cytoscape _ port : cytoscape port
: param target : target destination for saving files without prefix . eg . " / beegfs / group _ bit / home / JBoucas / test / N2 _ vs _ daf2"
: returns : nothing""" | # # # # # TEMPORARY FIX - STRING APP NOT ACCEPTING QUERIES ABOVE 2000 GENES # # # #
df = df . sort_values ( by = [ "q_value" ] , ascending = True )
df . reset_index ( inplace = True , drop = True )
tmp = df [ : 1999 ]
df = tmp . copy ( )
# # # # # END OF TEMPORARY FIX # # # # #
query_genes = df [ "ensembl_gene_id" ] . tolist ( )
df [ "NormInt" ] = df [ "value_1" ] * df [ "value_2" ]
df [ "NormInt" ] = df [ "NormInt" ] . apply ( lambda x : np . log10 ( np . sqrt ( x ) ) )
if not limit :
limit = int ( len ( query_genes ) * .25 )
# Annotate aging evindence
def CheckEvidence ( x , aging_genes = aging_genes ) :
if x in aging_genes :
res = "aging_gene"
else :
res = "no"
return res
df [ "evidence" ] = df [ "ensembl_gene_id" ] . apply ( lambda x : CheckEvidence ( x ) )
# fix infinit values
def FixInfs ( x ) :
if str ( x ) in [ "-inf" , "inf" ] :
res = np . nan
else :
res = x
return res
df [ "NormInt" ] = df [ "NormInt" ] . apply ( lambda x : FixInfs ( x ) )
df [ "log2(fold_change)" ] = df [ "log2(fold_change)" ] . apply ( lambda x : FixInfs ( x ) )
taxons = { "caenorhabditis elegans" : "6239" , "drosophila melanogaster" : "7227" , "mus musculus" : "10090" , "homo sapiens" : "9606" }
if not taxon :
taxon = taxons [ species ]
# destroy any existing network still present in cytoscape
response = cytoscape ( "network" , "list" , host = host , port = port )
if "networks" in response . keys ( ) :
response = response [ "networks" ]
# print response
if len ( response ) > 0 :
for r in response :
rr = cytoscape ( "network" , "destroy" , { "network" : "SUID:" + str ( r ) } , host = host , port = port )
# String protein query
query_genes = [ str ( s ) for s in query_genes ]
response = cytoscape ( "string" , "protein query" , { "query" : "," . join ( query_genes ) , "cutoff" : str ( cutoff ) , "species" : species , "limit" : str ( limit ) , "taxonID" : taxon } , host = host , port = port )
print ( "giving some time to cytoscape.." )
sys . stdout . flush ( )
sleep ( 10 )
# apply new layout
response = cytoscape ( "layout" , "force-directed" , { "defaultSpringCoefficient" : ".000004" , "defaultSpringLength" : "5" } , host = host , port = port )
# redefine defaults for node visualization
response = loadTableData ( df [ [ "ensembl_gene_id" , "log2(fold_change)" , "NormInt" , "evidence" ] ] . dropna ( ) , df_key = "ensembl_gene_id" , table_key_column = "query term" , host = host , port = port )
defaults_dic = { "NODE_SHAPE" : "ellipse" , "NODE_SIZE" : 60 , "NODE_FILL_COLOR" : "#AAAAAA" , "EDGE_TRANSPARENCY" : 120 }
defaults_list = simple_defaults ( defaults_dic )
# apply mappings - blue / white / red - from - 4 to + 4 log2FC
NODE_LABEL = mapVisualProperty ( "NODE_LABEL" , "passthrough" , "display name" , host = host , port = port )
create_styles ( "dataStyle" , defaults_list , [ NODE_LABEL ] , host = host , port = port )
response = cytoscape ( "vizmap" , "apply" , { "styles" : "dataStyle" } , host = host , port = port )
cmap = matplotlib . cm . get_cmap ( "bwr" )
norm = matplotlib . colors . Normalize ( vmin = - 4 , vmax = 4 )
min_color = matplotlib . colors . rgb2hex ( cmap ( norm ( - 4 ) ) )
center_color = matplotlib . colors . rgb2hex ( cmap ( norm ( 0 ) ) )
max_color = matplotlib . colors . rgb2hex ( cmap ( norm ( 4 ) ) )
NODE_FILL_COLOR = mapVisualProperty ( 'NODE_FILL_COLOR' , 'continuous' , 'log2(fold_change)' , lower = [ - 4 , min_color ] , center = [ 0.0 , center_color ] , upper = [ 4 , max_color ] , host = host , port = port )
# apply diamond shape and increase node size to nodes with aging evidence
NODE_SHAPE = mapVisualProperty ( 'NODE_SHAPE' , 'discrete' , 'evidence' , discrete = [ [ "aging_gene" , "no" ] , [ "DIAMOND" , "ellipse" ] ] , host = host , port = port )
NODE_SIZE = mapVisualProperty ( 'NODE_SIZE' , 'discrete' , 'evidence' , discrete = [ [ "aging_gene" , "no" ] , [ "100.0" , "60.0" ] ] , host = host , port = port )
update_style ( "dataStyle" , mappings = [ NODE_SIZE , NODE_SHAPE , NODE_FILL_COLOR ] , host = host , port = port )
response = cytoscape ( "vizmap" , "apply" , { "styles" : "dataStyle" } , host = host , port = port )
# apply mappings - reds - to Normalized expression ( as in MA plots ) to border color and border size
NormIntDf = getTableColumns ( 'node' , [ 'NormInt' ] , host = host , port = port )
if 'NormInt' in NormIntDf . columns . tolist ( ) :
min_NormInt = min ( NormIntDf . dropna ( ) [ 'NormInt' ] . tolist ( ) )
max_NormInt = max ( NormIntDf . dropna ( ) [ 'NormInt' ] . tolist ( ) )
cent_NormInt = np . mean ( [ min_NormInt , max_NormInt ] )
cmap = matplotlib . cm . get_cmap ( "Reds" )
norm = matplotlib . colors . Normalize ( vmin = min_NormInt , vmax = max_NormInt )
min_color = matplotlib . colors . rgb2hex ( cmap ( norm ( np . mean ( [ min_NormInt , max_NormInt ] ) ) ) )
center_color = matplotlib . colors . rgb2hex ( cmap ( norm ( cent_NormInt ) ) )
max_color = matplotlib . colors . rgb2hex ( cmap ( norm ( max_NormInt ) ) )
NODE_BORDER_PAINT = mapVisualProperty ( 'NODE_BORDER_PAINT' , 'continuous' , 'NormInt' , lower = [ min_NormInt , min_color ] , center = [ np . mean ( [ min_NormInt , max_NormInt ] ) , center_color ] , upper = [ max_NormInt , max_color ] , host = host , port = port )
update_style ( "dataStyle" , mappings = [ NODE_BORDER_PAINT ] , host = host , port = port )
response = cytoscape ( "vizmap" , "apply" , { "styles" : "dataStyle" } , host = host , port = port )
NODE_BORDER_WIDTH = mapVisualProperty ( 'NODE_BORDER_WIDTH' , 'continuous' , 'NormInt' , lower = [ min_NormInt , 2 ] , center = [ np . mean ( [ min_NormInt , max_NormInt ] ) , 4 ] , upper = [ max_NormInt , 8 ] , host = host , port = port )
update_style ( "dataStyle" , mappings = [ NODE_BORDER_WIDTH ] , host = host , port = port )
response = cytoscape ( "vizmap" , "apply" , { "styles" : "dataStyle" } , host = host , port = port )
response = cytoscape ( "network" , "rename" , { "name" : 'main String network' } , host = host , port = port )
# create network with edges only
response = cytoscape ( "network" , "select" , { "edgeList" : "all" , "extendEdges" : "true" } , host = host , port = port )
response = cytoscape ( "network" , "create" , { "source" : "current" , "nodeList" : "selected" } , host = host , port = port )
response = cytoscape ( "network" , "rename" , { "name" : 'main String network (edges only)' } , host = host , port = port )
# top 10 changed genes > first neighbours
response = cytoscape ( "network" , "set current" , { "network" : "main String network (edges only)" } , host = host , port = port )
log2fcDf = getTableColumns ( 'node' , [ 'log2(fold_change)' ] , host = host , port = port )
if 'log2(fold_change)' in log2fcDf . columns . tolist ( ) :
log2fcDf [ 'log2(fold_change)' ] = log2fcDf [ 'log2(fold_change)' ] . apply ( lambda x : abs ( x ) )
log2fcDf = log2fcDf . sort_values ( by = [ 'log2(fold_change)' ] , ascending = False )
top_nodes = log2fcDf . index . tolist ( ) [ : int ( len ( log2fcDf ) * .10 ) ]
response = cytoscape ( "network" , "select" , { "nodeList" : "name:" + "," . join ( top_nodes ) } , host = host , port = port )
response = cytoscape ( "network" , "select" , { "firstNeighbors" : "" , "direction" : "any" , "network" : "current" } , host = host , port = port )
response = cytoscape ( "network" , "create" , { "source" : "current" , "nodeList" : "selected" } , host = host , port = port )
response = cytoscape ( "network" , "select" , { "edgeList" : "all" , "extendEdges" : "true" } , host = host , port = port )
response = cytoscape ( "network" , "delete" , { "nodeList" : "unselected" } , host = host , port = port )
response = cytoscape ( "network" , "deselect" , { "edgeList" : "all" , "nodeList" : "all" } , host = host , port = port )
response = cytoscape ( "layout" , "force-directed" , host = host , port = port )
response = cytoscape ( "network" , "rename" , { "name" : 'top ' + str ( int ( len ( log2fcDf ) * .10 ) ) + ' changed firstNeighbors' } , host = host , port = port )
# top 10 changed genes difusion
response = cytoscape ( "network" , "set current" , { "network" : "main String network (edges only)" } , host = host , port = port )
response = cytoscape ( "network" , "deselect" , { "edgeList" : "all" , "nodeList" : "all" } , host = host , port = port )
response = cytoscape ( "network" , "select" , { "nodeList" : "name:" + "," . join ( top_nodes ) } , host = host , port = port )
response = cytoscape ( "diffusion" , "diffuse" , host = host , port = port )
response = cytoscape ( "network" , "create" , { "source" : "current" , "nodeList" : "selected" } , host = host , port = port )
response = cytoscape ( "network" , "select" , { "edgeList" : "all" , "extendEdges" : "true" } , host = host , port = port )
response = cytoscape ( "network" , "delete" , { "nodeList" : "unselected" } , host = host , port = port )
response = cytoscape ( "network" , "deselect" , { "edgeList" : "all" , "nodeList" : "all" } , host = host , port = port )
response = cytoscape ( "layout" , "force-directed" , host = host , port = port )
response = cytoscape ( "network" , "rename" , { "name" : 'top ' + str ( int ( len ( log2fcDf ) * .10 ) ) + ' changed diffusion' } , host = host , port = port )
def MAKETMP ( ) :
( fd , f ) = tempfile . mkstemp ( )
f = "/tmp/" + f . split ( "/" ) [ - 1 ]
return f
cys = MAKETMP ( )
cyjs = MAKETMP ( )
main_png = MAKETMP ( )
main_pdf = MAKETMP ( )
edg_png = MAKETMP ( )
edg_pdf = MAKETMP ( )
neig_png = MAKETMP ( )
neig_pdf = MAKETMP ( )
dif_png = MAKETMP ( )
dif_pdf = MAKETMP ( )
response = cytoscape ( "session" , "save as" , { "file" : cys } , host = host , port = port )
response = cytoscape ( "network" , "export" , { "options" : 'CYJS' , "OutputFile" : cyjs } , host = host , port = port )
response = cytoscape ( "network" , "set current" , { "network" : "main String network" } , host = host , port = port )
response = cytoscape ( "network" , "deselect" , { "edgeList" : "all" , "nodeList" : "all" } , host = host , port = port )
sleep ( 5 )
response = cytoscape ( "view" , "export" , { "options" : "PNG" , "OutputFile" : main_png } , host = host , port = port )
response = cytoscape ( "view" , "export" , { "options" : "PDF" , "OutputFile" : main_pdf } , host = host , port = port )
response = cytoscape ( "network" , "set current" , { "network" : "main String network (edges only)" } , host = host , port = port )
response = cytoscape ( "network" , "deselect" , { "edgeList" : "all" , "nodeList" : "all" } , host = host , port = port )
sleep ( 5 )
response = cytoscape ( "view" , "export" , { "options" : "PNG" , "OutputFile" : edg_png } , host = host , port = port )
response = cytoscape ( "view" , "export" , { "options" : "PDF" , "OutputFile" : edg_pdf } , host = host , port = port )
try :
response = cytoscape ( "network" , "set current" , { "network" : 'top ' + str ( int ( len ( log2fcDf ) * .10 ) ) + ' changed firstNeighbors' } , host = host , port = port )
response = cytoscape ( "network" , "deselect" , { "edgeList" : "all" , "nodeList" : "all" } , host = host , port = port )
sleep ( 5 )
response = cytoscape ( "view" , "export" , { "options" : "PNG" , "OutputFile" : neig_png } , host = host , port = port )
response = cytoscape ( "view" , "export" , { "options" : "PDF" , "OutputFile" : neig_pdf } , host = host , port = port )
except :
print ( "No " + "changed firstNeighbors" )
sys . stdout . flush ( )
try :
response = cytoscape ( "network" , "set current" , { "network" : 'top ' + str ( int ( len ( log2fcDf ) * .10 ) ) + ' changed diffusion' } , host = host , port = port )
response = cytoscape ( "network" , "deselect" , { "edgeList" : "all" , "nodeList" : "all" } , host = host , port = port )
sleep ( 5 )
response = cytoscape ( "view" , "export" , { "options" : "PNG" , "OutputFile" : dif_png } , host = host , port = port )
response = cytoscape ( "view" , "export" , { "options" : "PDF" , "OutputFile" : dif_pdf } , host = host , port = port )
except :
print ( "No " + "changed diffusion" )
sys . stdout . flush ( )
ssh = paramiko . SSHClient ( )
ssh . set_missing_host_key_policy ( paramiko . AutoAddPolicy ( ) )
ssh . connect ( host )
ftp_client = ssh . open_sftp ( )
for f , extension , local in zip ( [ cys , cyjs , main_png , main_pdf , edg_png , edg_pdf , neig_png , neig_pdf , dif_png , dif_pdf ] , [ ".cys" , ".cyjs" , ".png" , ".pdf" , ".png" , ".pdf" , ".png" , ".pdf" , ".png" , ".pdf" ] , [ target + ".cys" , target + ".cyjs" , target + ".main.png" , target + ".main.pdf" , target + ".main.edges.png" , target + ".main.edges.pdf" , target + ".topFirstNeighbors.png" , target + ".topFirstNeighbors.pdf" , target + ".topDiffusion.png" , target + ".topDiffusion.pdf" ] ) :
try :
ftp_client . get ( f + extension , local )
ssh_stdin , ssh_stdout , ssh_stderr = ssh . exec_command ( "rm " + f + extension )
except :
print ( "No " + local )
sys . stdout . flush ( ) |
def image_shift(xshift=0, yshift=0, axes="gca"):
    """Shift the first image of *axes* by (xshift, yshift) data units.

    Parameters
    ----------
    xshift, yshift : number
        Offsets added to the image extent along x and y.
    axes : matplotlib axes instance or the string "gca"
        Axes holding the image; "gca" uses the current axes.
    """
    if axes == "gca":
        axes = _pylab.gca()
    # get_extent() may return an immutable tuple (depending on the
    # matplotlib version); copy to a list so the in-place offsets below
    # cannot raise TypeError.
    e = list(axes.images[0].get_extent())
    e[0] = e[0] + xshift
    e[1] = e[1] + xshift
    e[2] = e[2] + yshift
    e[3] = e[3] + yshift
    axes.images[0].set_extent(e)
    _pylab.draw()
def row_coordinates(self, X):
    """Returns the row principal coordinates.

    The row principal coordinates are obtained by projecting `X` on the
    right eigenvectors.
    """
    utils.validation.check_is_fitted(self, 's_')
    # Remember the original index if a DataFrame was supplied, so the
    # result keeps the caller's row labels.
    row_index = X.index if isinstance(X, pd.DataFrame) else None
    if self.copy:
        X = np.copy(X)
    # Apply the same scaling that was used during fitting, if any.
    if hasattr(self, 'scaler_'):
        X = self.scaler_.transform(X)
    projected = X.dot(self.V_.T)
    return pd.DataFrame(data=projected, index=row_index)
def hosted_numbers(self):
    """:returns: Version hosted_numbers of preview
    :rtype: twilio.rest.preview.hosted_numbers.HostedNumbers
    """
    # Lazily construct the version wrapper on first access and cache it.
    cached = self._hosted_numbers
    if cached is None:
        cached = HostedNumbers(self)
        self._hosted_numbers = cached
    return cached
def pesach_dow(self):
    """Return the first day of week for Pesach."""
    # Julian day number of Nisan 15 (first day of Pesach) this Hebrew year.
    pesach = HebrewDate(self.hdate.year, Months.Nisan, 15)
    # Map the JDN onto a 1-7 day-of-week value.
    return (conv.hdate_to_jdn(pesach) + 1) % 7 + 1
def run(self):
    """Handle one HTTP connection as a cogen coroutine.

    Loops over requests on a (possibly keep-alive) connection: parses the
    request line and headers into a WSGI environ, invokes the WSGI app,
    and streams the response back.  Every blocking operation is yielded
    to the cogen scheduler.
    """
    self.close_connection = False
    try:
        # One iteration per HTTP request on this connection.
        while True:
            # Reset per-request response state.
            self.started_response = False
            self.status = ""
            self.outheaders = []
            self.sent_headers = False
            self.chunked_write = False
            self.write_buffer = StringIO.StringIO()
            self.content_length = None
            # Copy the class environ into self.
            ENVIRON = self.environ = self.connection_environ.copy()
            self.environ.update(self.server_environ)
            request_line = yield self.connfh.readline()
            if request_line == "\r\n":
                # RFC 2616 sec 4.1: "...it should ignore the CRLF", but only
                # tolerate a handful of stray blank lines before giving up.
                tolerance = 5
                while tolerance and request_line == "\r\n":
                    request_line = yield self.connfh.readline()
                    tolerance -= 1
                if not tolerance:
                    return
            method, path, req_protocol = request_line.strip().split(" ", 2)
            ENVIRON["REQUEST_METHOD"] = method
            ENVIRON["CONTENT_LENGTH"] = ''
            scheme, location, path, params, qs, frag = urlparse(path)
            if frag:
                yield self.simple_response("400 Bad Request", "Illegal #fragment in Request-URI.")
                return
            if scheme:
                ENVIRON["wsgi.url_scheme"] = scheme
            if params:
                path = path + ";" + params
            ENVIRON["SCRIPT_NAME"] = ""
            # Unquote the path+params (e.g. "/this%20path" -> "this path").
            # http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
            # But note that "...a URI must be separated into its components
            # before the escaped characters within those components can be
            # safely decoded." http://www.ietf.org/rfc/rfc2396.txt, sec 2.4.2
            atoms = [unquote(x) for x in quoted_slash.split(path)]
            path = "%2F".join(atoms)
            ENVIRON["PATH_INFO"] = path
            # Note that, like wsgiref and most other WSGI servers,
            # we unquote the path but not the query string.
            ENVIRON["QUERY_STRING"] = qs
            # Compare request and server HTTP protocol versions, in case our
            # server does not support the requested protocol.  Limit our
            # output to min(req, server).  RFC 2616 10.5.6 says we should
            # only return 505 if the _major_ version is different; e.g. a
            # 1.0 request against a 1.1 server still answers "HTTP/1.1"
            # while restricting itself to the 1.0 feature set.
            rp = int(req_protocol[5]), int(req_protocol[7])
            server_protocol = ENVIRON["ACTUAL_SERVER_PROTOCOL"]
            sp = int(server_protocol[5]), int(server_protocol[7])
            if sp[0] != rp[0]:
                yield self.simple_response("505 HTTP Version Not Supported")
                return
            # Bah. "SERVER_PROTOCOL" is actually the REQUEST protocol.
            ENVIRON["SERVER_PROTOCOL"] = req_protocol
            self.response_protocol = "HTTP/%s.%s" % min(rp, sp)
            # If the Request-URI was an absoluteURI, use its location atom.
            if location:
                ENVIRON["SERVER_NAME"] = location
            # then all the http headers
            try:
                while True:
                    line = yield self.connfh.readline()
                    if line == '\r\n':
                        # Normal end of headers
                        break
                    if line[0] in ' \t':
                        # It's a continuation line: value only, keep prior k.
                        v = line.strip()
                    else:
                        k, v = line.split(":", 1)
                        k, v = k.strip().upper(), v.strip()
                        envname = "HTTP_" + k.replace("-", "_")
                    if k in comma_separated_headers:
                        # Fold repeated headers into one comma-joined value.
                        existing = ENVIRON.get(envname)
                        if existing:
                            v = ", ".join((existing, v))
                    ENVIRON[envname] = v
                # Content-Type/Length get their CGI names, not HTTP_ names.
                ct = ENVIRON.pop("HTTP_CONTENT_TYPE", None)
                if ct:
                    ENVIRON["CONTENT_TYPE"] = ct
                cl = ENVIRON.pop("HTTP_CONTENT_LENGTH", None)
                if cl:
                    ENVIRON["CONTENT_LENGTH"] = cl
            except ValueError, ex:
                # Malformed header line (no ":" separator, etc.).
                yield self.simple_response("400 Bad Request", repr(ex.args))
                return
            creds = ENVIRON.get("HTTP_AUTHORIZATION", "").split(" ", 1)
            ENVIRON["AUTH_TYPE"] = creds[0]
            if creds[0].lower() == 'basic':
                user, pw = base64.decodestring(creds[1]).split(":", 1)
                ENVIRON["REMOTE_USER"] = user
            # Persistent connection support
            if req_protocol == "HTTP/1.1":
                # HTTP/1.1 is keep-alive unless the client asks to close.
                if ENVIRON.get("HTTP_CONNECTION", "") == "close":
                    self.close_connection = True
            else:
                # HTTP/1.0 closes unless the client explicitly keeps alive.
                if ENVIRON.get("HTTP_CONNECTION", "").lower() != "keep-alive":
                    self.close_connection = True
            # Transfer-Encoding support
            te = None
            if self.response_protocol == "HTTP/1.1":
                te = ENVIRON.get("HTTP_TRANSFER_ENCODING")
                if te:
                    te = [x.strip().lower() for x in te.split(",") if x.strip()]
            if te:
                # reject transfer encodings for now
                yield self.simple_response("501 Unimplemented")
                self.close_connection = True
                return
            # Expose cogen-specific hooks in the environ so the app can run
            # asynchronous operations through this coroutine.
            ENV_COGEN_PROXY = ENVIRON['cogen.wsgi'] = async.COGENProxy(content_length=int(ENVIRON.get('CONTENT_LENGTH', None) or 0) or None, read_count=0, operation=None, result=None, exception=None)
            ENVIRON['cogen.http_connection'] = self
            ENVIRON['cogen.core'] = async.COGENOperationWrapper(ENV_COGEN_PROXY, core)
            ENVIRON['cogen.call'] = async.COGENCallWrapper(ENV_COGEN_PROXY)
            ENVIRON['cogen.input'] = async.COGENOperationWrapper(ENV_COGEN_PROXY, self.connfh)
            ENVIRON['cogen.yield'] = async.COGENSimpleWrapper(ENV_COGEN_PROXY)
            response = self.wsgi_app(ENVIRON, self.start_response)
            try:
                if isinstance(response, WSGIFileWrapper):
                    # Static-file fast path: set tcp_cork to pack the header
                    # with the file data.
                    if hasattr(socket, "TCP_CORK"):
                        self.conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_CORK, 1)
                    assert self.started_response, "App returned the wsgi.file_wrapper but didn't call start_response."
                    assert not self.sent_headers
                    self.sent_headers = True
                    yield sockets.SendAll(self.conn, self.render_headers() + self.write_buffer.getvalue())
                    offset = response.filelike.tell()
                    if self.chunked_write:
                        # A file sent chunked is one big chunk: emit its size.
                        fsize = os.fstat(response.filelike.fileno()).st_size
                        yield sockets.SendAll(self.conn, hex(int(fsize - offset)) + "\r\n")
                    yield self.conn.sendfile(response.filelike, blocksize=response.blocksize, offset=offset, length=self.content_length, timeout=self.sendfile_timeout)
                    if self.chunked_write:
                        yield sockets.SendAll(self.conn, "\r\n")
                    # also, tcp_cork will make the file data sent on packet
                    # boundaries, which is a good thing
                    if hasattr(socket, "TCP_CORK"):
                        self.conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_CORK, 0)
                else:
                    for chunk in response:
                        if chunk:
                            assert self.started_response, "App sended a value but hasn't called start_response."
                            if not self.sent_headers:
                                # First body chunk: prepend status + headers.
                                self.sent_headers = True
                                headers = [self.render_headers(), self.write_buffer.getvalue()]
                            else:
                                headers = []
                            if self.chunked_write:
                                buf = [hex(len(chunk))[2:], "\r\n", chunk, "\r\n"]
                                if headers:
                                    headers.extend(buf)
                                    yield sockets.SendAll(self.conn, "".join(headers))
                                else:
                                    yield sockets.SendAll(self.conn, "".join(buf))
                            else:
                                if headers:
                                    headers.append(chunk)
                                    yield sockets.SendAll(self.conn, "".join(headers))
                                else:
                                    yield sockets.SendAll(self.conn, chunk)
                        else:
                            # Empty chunk: flush headers if pending and run
                            # any async operation the app queued via the
                            # cogen proxy.
                            if self.started_response:
                                if not self.sent_headers:
                                    self.sent_headers = True
                                    yield sockets.SendAll(self.conn, self.render_headers() + self.write_buffer.getvalue())
                            if ENV_COGEN_PROXY.operation:
                                op = ENV_COGEN_PROXY.operation
                                ENV_COGEN_PROXY.operation = None
                                try:
                                    ENV_COGEN_PROXY.exception = None
                                    ENV_COGEN_PROXY.result = yield op
                                except:
                                    # Hand the failure back to the app via
                                    # the proxy instead of unwinding here.
                                    ENV_COGEN_PROXY.exception = sys.exc_info()
                                    ENV_COGEN_PROXY.result = ENV_COGEN_PROXY.exception[1]
                                del op
            finally:
                if hasattr(response, 'close'):
                    response.close()
            if self.started_response:
                if not self.sent_headers:
                    # Headerless response body was empty; still send headers.
                    self.sent_headers = True
                    yield sockets.SendAll(self.conn, self.render_headers() + self.write_buffer.getvalue())
            else:
                import warnings
                warnings.warn("App was consumed and hasn't called start_response")
            if self.chunked_write:
                # Terminating zero-length chunk.
                yield sockets.SendAll(self.conn, "0\r\n\r\n")
            if self.close_connection:
                return
            # TODO: consume any unread data
    except (socket.error, OSError, pywinerror), e:
        errno = e.args[0]
        if errno not in useless_socket_errors:
            yield self.simple_response("500 Internal Server Error", format_exc())
        return
    except (OperationTimeout, ConnectionClosed, SocketError):
        return
    except (KeyboardInterrupt, SystemExit, GeneratorExit, MemoryError):
        raise
    except:
        if not self.started_response:
            yield self.simple_response("500 Internal Server Error", format_exc())
        else:
            print "*" * 60
            traceback.print_exc()
            print "*" * 60
        sys.exc_clear()
    finally:
        self.conn.close()
        ENVIRON = self.environ = None
def run(self):
    """Set up the process environment in preparation for running an Ansible
    module.  This monkey-patches the Ansible libraries in various places to
    prevent it from trying to kill the process on completion, and to
    prevent it from reading sys.stdin.

    :returns:
        Module result dictionary.
    """
    self.setup()
    if self.detach:
        self.econtext.detach()
    try:
        result = self._run()
    finally:
        # Undo the monkey-patches whether or not the module succeeded.
        self.revert()
    return result
def vehicle_registration_code(self, locale: Optional[str] = None) -> str:
    """Get vehicle registration code of country.

    :param locale: Registration code for locale (country).
    :return: Vehicle registration code.
    """
    # No (or empty) locale: pick any known registration code at random.
    if not locale:
        return self.random.choice(VR_CODES)
    return VRC_BY_LOCALES[locale]
def get_data(self, href=None):
    """Gets data from an insight with data links such as captions.

    'href' the relative href to the data.  May not be None.
    Returns the content of the data as a string.
    If the response status is not a success (200-202), throws an
    APIException.
    """
    # Argument error checking.
    assert href is not None
    response = self.get(href)
    # Only 200, 201 and 202 count as success here.
    if not (200 <= response.status <= 202):
        raise APIException(response.status, response.json)
    return response.json
def label_peri_signals(self, time_signals, label_names=None, units=None, data_units=None, copy=True, pre_signal=100.0, post_signal=1000.0, **kwargs):
    """Creates a labeled spike data structure.

    `time_signals` is a list of lists (or matrix) containing a timestamp
    in the first column (or first element of each element) and labels
    that are to be applied to the data in the remaining columns/elements.

    This function will leave out and duplicate spikes to manage
    overlapping time signals.  If you want to get spikes relative to a
    time signal with flexible limits, use `label_by_time`, which will not
    add or remove spikes, but only shift spikes according to the adjacent
    time signals.

    :param label_names: extra label names appended to the existing labels
        (``None`` means no extras; replaces the original mutable ``[]``
        default, which is shared between calls)
    :param units: time units for the returned spike times
    :param data_units: time units of ``time_signals[0]``
    :param copy: if True return a new container, else modify ``self``
    :param pre_signal: window extent before each signal timestamp
    :param post_signal: window extent after each signal timestamp
    """
    # Fix: mutable default argument ([]) replaced by a None sentinel.
    if label_names is None:
        label_names = []
    if self.data_format == 'empty':
        return SpikeContainer(None, units=self.units, copy_from=self)
    time_signals[0] = convert_time(time_signals[0], from_units=data_units, to_units=units)
    # This matrix is treated as read only below.
    spike_times = self.spike_times.convert(0, units).matrix.copy()
    new_matrix = []
    for t in range(len(time_signals[0])):
        # Select spikes in the [signal - pre_signal, signal + post_signal) window.
        condition = (spike_times[:, 0] >= time_signals[0][t] - pre_signal) * (spike_times[:, 0] < time_signals[0][t] + post_signal)
        new_spikes = (spike_times[condition, 0] - time_signals[0][t])[:, np.newaxis]
        old_labels = spike_times[condition, 1:]
        new_labels = [[time_signals[_i][t] for _i in range(1, time_signals.shape[0])]] * np.sum(condition)
        if np.sum(condition) > 0:
            new_matrix.append(np.concatenate([new_spikes, old_labels, new_labels], axis=1))
    if len(new_matrix) == 0:
        return SpikeContainer(None, copy_from=self)
    new_matrix = np.concatenate(new_matrix, 0)
    # Build the labeled matrix once; it is identical in both the copy and
    # in-place branches (the original duplicated these four lines).
    new_spike_times = LabeledMatrix(new_matrix, self.spike_times.labels + label_names)
    new_spike_times.labels[0].units = units
    new_spike_times.labels[0].min = pre_signal
    new_spike_times.labels[0].max = post_signal
    if copy:
        return SpikeContainer(new_spike_times, copy_from=self)
    self.set_spike_times(new_spike_times)
    return self
def set_limits(self, low=None, high=None):
    """Adjusts the limits on the rows retrieved.  We use low/high to set
    these, as it makes it more Pythonic to read and write.  When the API
    query is created, they are converted to the appropriate offset and
    limit values.

    Any limits passed in here are applied relative to the existing
    constraints.  So low is added to the current low value and both will
    be clamped to any existing high value.
    """
    if high is not None:
        candidate_high = self.low_mark + high
        # Never loosen an already-set upper bound.
        self.high_mark = candidate_high if self.high_mark is None else min(self.high_mark, candidate_high)
    if low is not None:
        candidate_low = self.low_mark + low
        # Clamp the new lower bound to the (possibly just-updated) upper bound.
        self.low_mark = candidate_low if self.high_mark is None else min(self.high_mark, candidate_low)
def disable_servicegroup_passive_host_checks(self, servicegroup):
    """Disable passive host checks for a servicegroup

    Format of the line that triggers function call::
        DISABLE_SERVICEGROUP_PASSIVE_HOST_CHECKS;<servicegroup_name>

    :param servicegroup: servicegroup to disable
    :type servicegroup: alignak.objects.servicegroup.Servicegroup
    :return: None
    """
    known_services = self.daemon.services
    for srv_id in servicegroup.get_services():
        # Ignore services this daemon does not know about.
        if srv_id not in known_services:
            continue
        host = self.daemon.hosts[known_services[srv_id].host]
        self.disable_passive_host_checks(host)
def paintEvent(self, event):
    """Paints the widget based on its values.

    :param event | <QPaintEvent>
    """
    with XPainter(self) as painter:
        total = self.maximum() - self.minimum()
        current = self.value()
        pix_w = self.pixmapSize().width()
        pix_h = self.pixmapSize().height()
        # Evenly distribute the pixmaps across the usable widget width.
        spacing = (self.width() - 4 - (pix_w * total - 1)) / (total - 1)
        filled = self.fullPixmap().scaled(pix_w, pix_h)
        blank = self.emptyPixmap().scaled(pix_w, pix_h)
        x = 2
        y = (self.height() - pix_h) / 2
        for step in range(total):
            # Steps below the current value are drawn "full".
            painter.drawPixmap(x, y, filled if step < current else blank)
            x += pix_w + spacing
def instruction_path(cls, project, instruction):
    """Return a fully-qualified instruction string.

    Expands the ``projects/{project}/instructions/{instruction}`` resource
    path template with the given identifiers.
    """
    return google.api_core.path_template.expand(
        "projects/{project}/instructions/{instruction}",
        project=project,
        instruction=instruction,
    )
def compare_config(self, target, init=True, indent_level=0):
    """This method will return all the necessary commands to get from the
    config we are in to the target config.

    Args:
        * **target** (:class:`~pyFG.forticonfig.FortiConfig`) - Target config.
        * **init** (bool) - This tells the method if this is the first call to the method or if we are inside the recursion. You can ignore this parameter.
        * **indent_level** (int) - This tells the method how deep you are in the recursion. You can ignore it.

    Returns:
        A string containing all the necessary commands to reach the target config.
    """
    # The top-level call uses absolute config paths; recursive calls on
    # sub-blocks use paths relative to the parent block.
    if init:
        fwd = self.full_path_fwd
        bwd = self.full_path_bwd
    else:
        fwd = self.rel_path_fwd
        bwd = self.rel_path_bwd
    indent = 4 * indent_level * ' '
    # Only the outermost call wraps the commands in the vdom/global context.
    if indent_level == 0 and self.vdom is not None:
        if self.vdom == 'global':
            pre = 'conf global\n'
        else:
            pre = 'conf vdom\n edit %s\n' % self.vdom
        post = 'end'
    else:
        pre = ''
        post = ''
    pre_block = '%s%s' % (indent, fwd)
    post_block = '%s%s' % (indent, bwd)
    my_params = self.parameters.keys()
    ot_params = target.parameters.keys()
    text = ''
    # Parameters missing from the target get unset; differing values get set.
    for param in my_params:
        if param not in ot_params:
            text += ' %sunset %s\n' % (indent, param)
        else:
            # We ignore quotes when comparing values
            if str(self.get_param(param)).replace('"', '') != str(target.get_param(param)).replace('"', ''):
                text += ' %sset %s %s\n' % (indent, param, target.get_param(param))
    # Parameters only present in the target are new and must be set.
    for param in ot_params:
        if param not in my_params:
            text += ' %sset %s %s\n' % (indent, param, target.get_param(param))
    my_blocks = self.sub_blocks.keys()
    ot_blocks = target.sub_blocks.keys()
    # Sub-blocks: delete extras, recurse into shared ones.
    for block_name in my_blocks:
        if block_name not in ot_blocks:
            text += " %sdelete %s\n" % (indent, block_name)
        else:
            text += self[block_name].compare_config(target[block_name], False, indent_level + 1)
    # Sub-blocks only in the target are emitted wholesale.
    for block_name in ot_blocks:
        if block_name not in my_blocks:
            text += target[block_name].to_text(True, indent_level + 1, True)
    if text == '':
        return ''
    else:
        return '%s%s%s%s%s' % (pre, pre_block, text, post_block, post)
def draw(self):
    """Draws the Text in the window.

    Renders the text image, an optional focus outline, and a blinking
    cursor whose phase is driven by this widget's clock.
    """
    if not self.visible:
        return
    # If this input text has focus, draw an outline around the text image
    if self.focus:
        pygame.draw.rect(self.window, self.focusColor, self.focusedImageRect, 1)
    # Blit in the image of text (set earlier in _updateImage)
    self.window.blit(self.textImage, self.loc)
    # If this field has focus, see if it is time to blink the cursor
    if self.focus:
        self.cursorMsCounter = self.cursorMsCounter + self.clock.get_time()
        if self.cursorMsCounter >= self.cursorSwitchMs:
            # Toggle blink phase, keeping the remainder for a steady rhythm.
            self.cursorMsCounter = self.cursorMsCounter % self.cursorSwitchMs
            self.cursorVisible = not self.cursorVisible
        if self.cursorVisible:
            # Pixel offset of the cursor = rendered width of the text
            # before the cursor position.
            cursorOffset = self.font.size(self.text[:self.cursorPosition])[0]
            if self.cursorPosition > 0:
                # Try to get between characters
                cursorOffset = cursorOffset - 1
            if cursorOffset < self.width:
                # if the loc is within the text area, draw it
                self.cursorLoc[0] = self.loc[0] + cursorOffset
                self.window.blit(self.cursorSurface, self.cursorLoc)
    self.clock.tick()
def get_bit_series(self, bits=None):
    """Get the `StateTimeSeries` for each bit of this `StateVector`.

    Parameters
    ----------
    bits : `list`, optional
        a list of bit indices or bit names, defaults to all bits

    Returns
    -------
    bitseries : `StateTimeSeriesDict`
        a `dict` of `StateTimeSeries`, one for each given bit
    """
    if bits is None:
        # Default to every defined (non-empty) bit.
        bits = [bit for bit in self.bits if bit not in {None, ''}]
    # Resolve each requested bit (name or index) to its bit position.
    resolved = []
    for bit in bits:
        try:
            resolved.append((self.bits.index(bit), bit))
        except (IndexError, ValueError) as exc:
            exc.args = ('Bit %r not found in StateVector' % bit,)
            raise
    self._bitseries = StateTimeSeriesDict()
    for position, bit in resolved:
        # Extract the single-bit boolean series at this position.
        self._bitseries[bit] = StateTimeSeries(
            self.value >> position & 1, name=bit, epoch=self.x0.value,
            channel=self.channel, sample_rate=self.sample_rate)
    return self._bitseries
def mutate(self, p_mutate):
    """Check each element for mutation, swapping "0" for "1" and vice-versa."""
    # Each bit independently flips with probability p_mutate.
    self.dna = ''.join(
        ('1' if bit == '0' else '0') if random.random() < p_mutate else bit
        for bit in self.dna
    )
def all_files(file_or_directory):
    'return all files under file_or_directory.'
    # A plain file (or non-existent path) is returned as a singleton list.
    if not os.path.isdir(file_or_directory):
        return [file_or_directory]
    found = []
    for dirname, _subdirs, filenames in os.walk(file_or_directory):
        found.extend(os.path.join(dirname, name) for name in filenames)
    return found
def main():
    """Parse command-line arguments and dispatch to the sub-command handler,
    printing help when no sub-command was selected."""
    parser = create_parser()
    parsed_args = parser.parse_args()
    # Sub-command parsers attach their callback as the `handler` attribute.
    if not hasattr(parsed_args, 'handler'):
        parser.print_help()
    else:
        parsed_args.handler(parsed_args)
def cp(src, dst):
    """Copy a file or directory.

    If source is a directory, this recursively copies the directory and
    its contents.  If the destination is a directory, then this creates a
    copy of the source in the destination directory with the same
    basename.

    If the destination already exists, this will attempt to overwrite it.

    Arguments:
        src (string): path to the source file or directory.
        dst (string): path to the destination file or directory.

    Raises:
        IOError: if source does not exist.
    """
    if isfile(src):
        shutil.copy(src, dst)
    elif isdir(src):
        # Remove an existing destination directory before copying.
        if isdir(dst):
            rm(dst)
        shutil.copytree(src, dst)
    else:
        raise IOError("Source '{0}' not found".format(src))
def service_changed(self, event):
    """Called by Pelix when a service event occurs."""
    kind = event.get_kind()
    svc_ref = event.get_service_reference()
    if kind in (pelix.ServiceEvent.REGISTERED, pelix.ServiceEvent.MODIFIED):
        # A service matches our filter
        self.set_shell(svc_ref)
        return
    with self._lock:
        # Service is not matching our filter anymore
        self.clear_shell()
        # Request for a new binding
        self.search_shell()
def loss(logits, labels):
    """Calculates the loss from the logits and the labels.

    Args:
        logits: Logits tensor, float - [batch_size, NUM_CLASSES].
        labels: Labels tensor, int32 - [batch_size].

    Returns:
        loss: Loss tensor of type float.
    """
    labels64 = tf.to_int64(labels)
    per_example_loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=logits, labels=labels64, name='xentropy')
    # Average the per-example cross entropy over the batch.
    return tf.reduce_mean(per_example_loss, name='xentropy_mean')
def calc_qjoints_v1(self):
    """Apply the routing equation to every river segment.

    Basic equation:
        Q[j+1, t+1] = c1 * Q[j, t+1] + c2 * Q[j, t] + c3 * Q[j+1, t]

    Required derived parameters: |NmbSegments|, |C1|, |C2|, |C3|.
    Updated state sequence: |QJoints|.

    With c1 = c3 = 0 and c2 = 1 discharge values are simply shifted one
    junction downstream per time step (zero damping); with c1 = c3 = 0.5
    and c2 = 0 maximum damping is applied.
    """
    der = self.parameters.derived.fastaccess
    new = self.sequences.states.fastaccess_new
    old = self.sequences.states.fastaccess_old
    for seg in range(der.nmbsegments):
        new.qjoints[seg + 1] = (
            der.c1 * new.qjoints[seg]
            + der.c2 * old.qjoints[seg]
            + der.c3 * old.qjoints[seg + 1])
def finalize_sv(orig_vcf, data, items):
    """Finalize structural variants, adding effects and splitting if needed."""
    paired = vcfutils.get_paired(items)
    if paired:
        # For paired/somatic, attach combined calls to the tumor sample only.
        if paired.tumor_name == dd.get_sample_name(data):
            sample_vcf = orig_vcf
        else:
            sample_vcf = None
    else:
        out_file = "%s-%s.vcf.gz" % (utils.splitext_plus(orig_vcf)[0], dd.get_sample_name(data))
        sample_vcf = vcfutils.select_sample(orig_vcf, dd.get_sample_name(data), out_file, data["config"])
    if not sample_vcf:
        return None
    effects_vcf, _ = effects.add_to_vcf(sample_vcf, data, "snpeff")
    # Fall back to the un-annotated VCF if effect annotation produced nothing.
    return effects_vcf or sample_vcf
def find_data_files(self, package, src_dir):
    """Return filenames for package's data files in 'src_dir'."""
    patterns = self._get_platform_patterns(self.package_data, package, src_dir)
    # Expand every glob pattern and keep only paths that are real files.
    matches = itertools.chain.from_iterable(glob(pattern) for pattern in patterns)
    globbed_files = [path for path in matches if os.path.isfile(path)]
    # Manifest-declared files come first, then the glob matches.
    candidates = itertools.chain(self.manifest_files.get(package, []), globbed_files)
    return self.exclude_data_files(package, src_dir, candidates)
def _toggle_monitoring(self, action, no_ssh=False):
    """Enable or disable monitoring on a machine.

    :param action: Can be either "enable" or "disable"
    """
    body = json.dumps({
        'action': action,
        'name': self.name,
        'no_ssh': no_ssh,
        'public_ips': self.info['public_ips'],
        'dns_name': self.info['extra'].get('dns_name', 'n/a'),
    })
    url = self.mist_client.uri + "/clouds/" + self.cloud.id + "/machines/" + self.id + "/monitoring"
    self.request(url, data=body).post()
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    """See :meth:`superclass method
    <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
    for spec of input and result values.
    """
    assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
               for stddev_type in stddev_types)
    C = self.COEFFS[imt]
    mean = (self._get_magnitude_scaling(C, rup.mag) +
            self._get_distance_scaling(C, rup.mag, dists.rhypo))
    # Fix: membership must be tested against a tuple of IMT names -- the
    # original `imt.name in "SA PGA"` was a substring test that would also
    # match names such as "A" or "PG".  For acceleration measures, convert
    # from cm/s^2 to the natural log of the fraction of g.
    if imt.name in ("SA", "PGA"):
        mean = np.log(np.exp(mean) / (100.0 * g))
    stddevs = self._compute_std(C, stddev_types, len(dists.rhypo))
    return mean, stddevs
def com(self, center1_x, center1_y, center2_x, center2_y, Fm):
    """:return: center of mass"""
    # Weighted average of the two centres with mass ratio Fm : 1.
    total_weight = Fm + 1.
    x_cm = (Fm * center1_x + center2_x) / total_weight
    y_cm = (Fm * center1_y + center2_y) / total_weight
    return x_cm, y_cm
def update_comment(self, comment_id, body):
    """Update a specific comment. This can be used to edit the content of an
    existing comment."""
    req = ET.Element('request')
    ET.SubElement(req, 'comment_id').text = str(int(comment_id))
    comment_el = ET.SubElement(req, 'comment')
    ET.SubElement(comment_el, 'body').text = str(body)
    return self._request('/msg/update_comment', req)
def get_activation_key(self, user):
    """Generate the activation key which will be emailed to the user."""
    # the signed value is just the username; the salt scopes the signature
    username = user.get_username()
    return signing.dumps(obj=username, salt=REGISTRATION_SALT)
def fix_location_tag(dom):
    """
    Repair the <mods:location> tag (the XSLT template returns things related to
    paper books, not electronic documents).

    Replaces a physical-location style element with a
    holdingSimple/electronicLocator structure pointing at the best URL
    found inside the original element. ``dom`` is modified in place.
    """
    location = dom.match("mods:mods", "mods:location", )
    # if no location tag found, there is nothing to be fixed
    if not location:
        return
    location = first(location)
    # fix only <mods:location> containing <mods:physicalLocation> tags
    if not location.find("mods:physicalLocation"):
        return
    # prefer the URL explicitly flagged as the primary display URL
    url = location.find("mods:url", {"usage": "primary display"})
    # parse URL
    if url:
        url = first(url).getContent()
    else:
        # fall back to the longest non-empty <mods:url>
        # NOTE(review): ``filter`` returns an iterator on Python 3, making
        # ``if not urls`` always False -- this assumes Python 2 list
        # semantics; confirm the supported interpreter version
        urls = filter(lambda x: x.getContent(), location.find("mods:url"))
        if not urls:
            return
        url_tag = max(urls, key=lambda x: len(x.getContent()))
        url = url_tag.getContent()
    # replace the code with new tag
    replacer = dhtmlparser.parseString("""
<mods:location>
<mods:holdingSimple>
<mods:copyInformation>
<mods:electronicLocator>""" + url + """</mods:electronicLocator>
</mods:copyInformation>
</mods:holdingSimple>
</mods:location>
""")
    location.replaceWith(first(replacer.find("mods:location")))
    # restore parent links after the structural change
    dhtmlparser.makeDoubleLinked(dom)
def valid(name, maxlength=None):
    '''Return the lowercase name if this name adheres to requirements, None
    otherwise.

    The requirements are:
    * only alphanumeric characters or dashes
    * no number at the start
    * no double dashes or dashes at the start or end of the name
    * no empty string
    * no string bigger than maxlength
    '''
    if not name:
        return None
    if maxlength is not None and len(name) > maxlength:
        return None
    name = name.lower()
    # dashes may only appear singly, inside the name
    if name.startswith('-') or name.endswith('-') or '--' in name:
        return None
    if name[0] in string.digits:
        return None
    allowed = set(string.digits + string.ascii_lowercase + '-')
    if any(ch not in allowed for ch in name):
        return None
    return name
def dict_intersection(dict1, dict2, combine=False, combine_op=op.add):
    r"""Intersect two dictionaries by key.

    Args:
        dict1 (dict):
        dict2 (dict):
        combine (bool): keep a key only when both values are equal if False,
            otherwise combine the two values with ``combine_op``
            (default=False)
        combine_op (func): (default=op.add)

    Returns:
        dict: the intersection dictionary (an ``OrderedDict`` when ``dict1``
        is one and ``combine`` is False, preserving ``dict1``'s key order)
    """
    shared_keys = set(dict1.keys()).intersection(set(dict2.keys()))
    if combine:
        # TODO: deprecate this
        return {k: combine_op(dict1[k], dict2[k]) for k in shared_keys}
    # keep only keys whose values agree; maintain order if possible
    if isinstance(dict1, OrderedDict):
        ordered_keys = [k for k in dict1.keys() if k in shared_keys]
        return OrderedDict((k, dict1[k]) for k in ordered_keys
                           if dict1[k] == dict2[k])
    return dict((k, dict1[k]) for k in shared_keys if dict1[k] == dict2[k])
def get_touch_dict(self, ind=None, out=bool):
    """Get a dictionary of Cls_Name struct with indices of Rays touching

    Only includes Struct object with compute=True
    (as returned by self.lStruct__computeInOut_computeInOut)
    Also return the associated colors
    If ind is not None, the indices for each Struct are split between:
        - indok:  rays touching Struct and in ind
        - indout: rays touching Struct but not in ind

    ``out`` selects the returned index format: boolean masks (default)
    or integer index arrays when ``out == int``.
    """
    if self.config is None:
        msg = "Config must be set in order to get touch dict !"
        raise Exception(msg)
    dElt = {}
    # normalise the channel selection to a boolean mask; boolean form is
    # required for the &/~ combinations below (the method's own ``out``
    # argument only controls the output format, not this call)
    ind = self._check_indch(ind, out=bool)
    for ss in self.lStruct_computeInOut:
        # key identifying the structure: "<ClassName>_<Name>"
        kn = "%s_%s" % (ss.__class__.__name__, ss.Id.Name)
        indtouch = self.select(touch=kn, out=bool)
        if np.any(indtouch):
            indok = indtouch & ind      # touching and selected
            indout = indtouch & ~ind    # touching but not selected
            if np.any(indok) or np.any(indout):
                if out == int:
                    # convert boolean masks to integer index arrays
                    indok = indok.nonzero()[0]
                    indout = indout.nonzero()[0]
                dElt[kn] = {'indok': indok, 'indout': indout,
                            'col': ss.get_color()}
    return dElt
def build_damage_array(data, damage_dt):
    """
    :param data: an array of shape (A, L, 1, D) or (A, L, 2, D)
    :param damage_dt: a damage composite data type loss_type -> states
    :returns: a composite array of length A and dtype damage_dt
    """
    A, L, MS, D = data.shape
    dmg = numpy.zeros(A, damage_dt)
    for a in range(A):
        for li, loss_type in enumerate(damage_dt.names):
            has_stdv = any(field.endswith('_stdv')
                           for field in damage_dt[loss_type].names)
            if MS == 1 or not has_stdv:
                # only the mean values are present
                dmg[loss_type][a] = tuple(data[a, li, 0])
            else:
                # both mean and stddev rows: data[a, li].T has shape (D, 2)
                dmg[loss_type][a] = tuple(numpy.concatenate(data[a, li].T))
    return dmg
def warn(message, category=None, stacklevel=1, emitstacklevel=1):
    """Issue a warning, or maybe ignore it or raise an exception.

    Duplicate of the standard library warn function except it takes the
    following argument:

    `emitstacklevel`: default to 1, number of stackframe to consider when
    matching the module that emits this warning.
    """
    # ### Get category ###
    # a Warning instance passed as the message implies its own category
    if isinstance(message, Warning):
        category = message.__class__
    # Check category argument
    if category is None:
        category = UserWarning
    if not (isinstance(category, type) and issubclass(category, Warning)):
        raise TypeError("category must be a Warning subclass, "
                        "not '{:s}'".format(type(category).__name__))
    # Get context information: the caller's frame, used for the reported
    # filename and line number; fall back to sys's dict if the stack is
    # shallower than requested
    try:
        frame = _get_stack_frame(stacklevel)
    except ValueError:
        globals = sys.__dict__
        lineno = 1
    else:
        globals = frame.f_globals
        lineno = frame.f_lineno
    # the frame of the module considered to *emit* the warning, which may
    # differ from the reporting frame above
    try:
        eframe = _get_stack_frame(emitstacklevel)
    except ValueError:
        eglobals = sys.__dict__
    else:
        eglobals = eframe.f_globals
    if '__name__' in eglobals:
        emodule = eglobals['__name__']
    else:
        emodule = "<string>"
    # ### Get module name ###
    if '__name__' in globals:
        module = globals['__name__']
    else:
        module = "<string>"
    # ### Get filename ###
    filename = globals.get('__file__')
    if filename:
        fnl = filename.lower()
        if fnl.endswith(".pyc"):
            # point at the source file, not the compiled bytecode
            filename = filename[:-1]
    else:
        if module == "__main__":
            try:
                filename = sys.argv[0]
            except AttributeError:
                # embedded interpreters don't have sys.argv, see bug #839151
                filename = '__main__'
        if not filename:
            filename = module
    # per-module registry, used by the filters for "once"-style suppression
    registry = globals.setdefault("__warningregistry__", {})
    warn_explicit(message, category, filename, lineno, module, registry,
                  globals, emit_module=emodule)
def find_file(path, saltenv='base', **kwargs):
    '''
    Search the environment for the relative path

    Scans every configured azurefs container matching ``saltenv`` and
    returns a dict with ``path``/``rel`` (and best-effort ``stat``) for
    the first container where the file exists; empty strings otherwise.
    '''
    fnd = {'path': '', 'rel': ''}
    for container in __opts__.get('azurefs', []):
        # only containers configured for the requested environment
        if container.get('saltenv', 'base') != saltenv:
            continue
        full = os.path.join(_get_container_path(container), path)
        if os.path.isfile(full) and not salt.fileserver.is_file_ignored(__opts__, path):
            fnd['path'] = full
            fnd['rel'] = path
            try:
                # Converting the stat result to a list, the elements of the
                # list correspond to the following stat_result params:
                # 0 => st_mode=33188
                # 1 => st_ino=10227377
                # 2 => st_dev=65026
                # 3 => st_nlink=1
                # 4 => st_uid=1000
                # 5 => st_gid=1000
                # 6 => st_size=1056233
                # 7 => st_atime=1468284229
                # 8 => st_mtime=1456338235
                # 9 => st_ctime=1456338235
                fnd['stat'] = list(os.stat(full))
            except Exception:
                # stat is best-effort; tolerate races with file removal
                pass
            # first matching container wins
            return fnd
    return fnd
def execute_request(self, url, http_method, query_params, post_data):
    """Makes a request to the specified url endpoint with the
    specified http method, params and post data.

    Args:
        url (string): The url to the API without query params.
            Example: "https://api.housecanary.com/v2/property/value"
        http_method (string): The http method to use for the request.
        query_params (dict): Dictionary of query params to add to the request.
        post_data: Json post data to send in the body of the request.

    Returns:
        The result of calling this instance's OutputGenerator process_response
        method on the requests.Response object.
        If no OutputGenerator is specified for this instance, returns the
        requests.Response.
    """
    response = requests.request(
        http_method,
        url,
        params=query_params,
        auth=self._auth,
        json=post_data,
        headers={'User-Agent': USER_AGENT},
    )
    generator = self._output_generator
    if isinstance(generator, str) and generator.lower() == "json":
        # shortcut for just getting json back
        return response.json()
    if generator is not None:
        return generator.process_response(response)
    return response
def upload_to_cache_server(fpath):
    """Uploads .torrent file to a cache server.

    Returns upload file URL.

    :param fpath: path to the .torrent file to upload
    :raises RemoteUploadError: when requests is unavailable or the upload fails
    :rtype: str
    """
    url_base = 'http://torrage.info'
    url_upload = '%s/autoupload.php' % url_base
    url_download = '%s/torrent.php?h=' % url_base
    file_field = 'torrent'
    # BUGFIX: the original caught (ImportError, requests.RequestException) in a
    # single clause -- if the import itself failed, evaluating the except tuple
    # raised NameError on `requests`. Handle the import separately.
    try:
        import requests
    except ImportError as e:
        raise RemoteUploadError('Unable to upload to %s: %s' % (url_upload, e))
    try:
        # context manager closes the file handle (the original leaked it)
        with open(fpath, 'rb') as torrent_file:
            response = requests.post(
                url_upload,
                files={file_field: torrent_file},
                timeout=REMOTE_TIMEOUT)
            response.raise_for_status()
            info_cache = response.text
        return url_download + info_cache
    except requests.RequestException as e:
        # Now trace is lost. `raise from` to consider.
        raise RemoteUploadError('Unable to upload to %s: %s' % (url_upload, e))
def clear_option_value(self, opt_name):
    """Clear the stored option value (so the default will be used)

    :param opt_name: option name
    :type opt_name: str
    :raises ValueError: if ``opt_name`` is not a known option
    """
    if not self.has_option(opt_name):
        # BUGFIX: error message previously read "Unknow option name"
        raise ValueError("Unknown option name (%s)" % opt_name)
    self._options[opt_name].clear()
def fromcols(selection, n_sessions, eqdata, **kwargs):
    """Generate features from selected columns of a dataframe.

    Parameters
    ----------
    selection : list or tuple of str
        Columns to be used as features.
    n_sessions : int
        Number of sessions over which to create features.
    eqdata : DataFrame
        Data from which to generate feature set. Must contain
        as columns the values from which the features are to
        be generated.
    constfeat : bool, optional
        Whether or not the returned features will have the constant
        feature.

    Returns
    -------
    features : DataFrame
    """
    _constfeat = kwargs.get('constfeat', True)
    _outcols = ['Constant'] if _constfeat else []
    _n_rows = len(eqdata.index)
    # one output column per (source column, session offset) pair, with
    # offsets ranging from -(n_sessions - 1) up to 0
    for _col in selection:
        _outcols += map(partial(_concat, strval=' ' + _col),
                        range(-n_sessions + 1, 1))
    # the first n_sessions - 1 rows lack enough history and are dropped
    _features = pd.DataFrame(index=eqdata.index[n_sessions - 1:],
                             columns=_outcols, dtype=np.float64)
    _offset = 0
    if _constfeat:
        _features.iloc[:, 0] = 1.
        _offset += 1
    for _col in selection:
        _values = eqdata.loc[:, _col].values
        # column (_offset + i) holds the value lagged by n_sessions - 1 - i
        for i in range(n_sessions):
            _features.iloc[:, _offset + i] = _values[i:_n_rows - n_sessions + i + 1]
        _offset += n_sessions
    return _features
def collect_plugins(self, modules=None):
    """Collects all the plugins from `modules`.

    If modules is None, collects the plugins from the loaded modules.

    All plugins are passed through the module filters, if there are any,
    and returned as a list.
    """
    if modules is None:
        modules = self.get_loaded_modules()
    else:
        modules = util.return_list(modules)
    plugins = []
    for module in modules:
        # (object, name) pairs for every truthy member except __builtins__
        module_plugins = [(item[1], item[0])
                          for item in inspect.getmembers(module)
                          if item[1] and item[0] != '__builtins__']
        # BUGFIX: zip(*[]) raises when a module has no usable members
        if not module_plugins:
            continue
        module_plugins, names = zip(*module_plugins)
        module_plugins = self._filter_modules(module_plugins, names)
        plugins.extend(module_plugins)
    return plugins
def set_ssl_logging(self, enable=False, func=_ssl_logging_cb):
    u'''Enable or disable SSL logging

    :param True | False enable: Enable or disable SSL logging
    :param func: Callback function for logging
    '''
    # install the callback when enabling, clear it (0) when disabling
    callback = func if enable else 0
    SSL_CTX_set_info_callback(self._ctx, callback)
def overlay_gateway_access_lists_ipv6_out_ipv6_acl_out_name ( self , ** kwargs ) :
"""Auto Generated Code""" | config = ET . Element ( "config" )
overlay_gateway = ET . SubElement ( config , "overlay-gateway" , xmlns = "urn:brocade.com:mgmt:brocade-tunnels" )
name_key = ET . SubElement ( overlay_gateway , "name" )
name_key . text = kwargs . pop ( 'name' )
access_lists = ET . SubElement ( overlay_gateway , "access-lists" )
ipv6 = ET . SubElement ( access_lists , "ipv6" )
out = ET . SubElement ( ipv6 , "out" )
ipv6_acl_out_name = ET . SubElement ( out , "ipv6-acl-out-name" )
ipv6_acl_out_name . text = kwargs . pop ( 'ipv6_acl_out_name' )
callback = kwargs . pop ( 'callback' , self . _callback )
return callback ( config ) |
def lookup_hash_prefix(self, cues):
    """Lookup hash prefixes by cue (first 4 bytes of hash)

    Returns a list of (value, negative_cache_expired) tuples, one per
    matching stored prefix.
    """
    # one "?" placeholder per cue; GROUP BY 1 collapses duplicate values,
    # MAX(...) reports whether any negative-cache entry for it has expired
    q = '''SELECT value, MAX(negative_expires_at < current_timestamp) AS negative_cache_expired
        FROM hash_prefix WHERE cue IN ({}) GROUP BY 1
    '''
    output = []
    with self.get_cursor() as dbc:
        dbc.execute(q.format(','.join(['?'] * len(cues))),
                    [sqlite3.Binary(cue) for cue in cues])
        for h in dbc.fetchall():
            value, negative_cache_expired = h
            # sqlite may hand back buffer/memoryview objects; normalise to bytes
            output.append((bytes(value), negative_cache_expired))
    return output
def required_attributes(element, *attributes):
    """Check element for required attributes. Raise ``NotValidXmlException`` on error.

    :param element: ElementTree element
    :param attributes: list of attribute names to check
    :raises NotValidXmlException: if some attribute is missing
    """
    # idiom fix: replaced a reduce() fold (which also needs functools.reduce
    # on Python 3) with the equivalent, clearer all()
    if not all(attr in element.attrib for attr in attributes):
        raise NotValidXmlException(msg_err_missing_attributes(element.tag, *attributes))
def search(self):
    """Search for a url by returning the value from the first callback that
    returns a non-None value."""
    for cb in SearchUrl.search_callbacks:
        # the original wrapped this call in a try/except that only
        # re-raised -- dead code removed; exceptions still propagate
        # to the caller exactly as before
        v = cb(self)
        if v is not None:
            return v
def create_contact(self, *args, **kwargs):
    """Creates a contact"""
    # start from sensible defaults, then let caller kwargs override them
    data = {'view_all_tickets': False, 'description': 'Freshdesk Contact'}
    data.update(kwargs)
    response = self._api._post('contacts', data=json.dumps(data))
    return Contact(**response)
def validate(self, payload, required=None, strict=None):
    '''Validates a given JSON payload according to the rules defined for all
    the fields/keys in the sub-class.

    :param dict payload: deserialized JSON object.
    :param bool required: if every field/key is required and must be
                          present in the payload.
    :param bool strict: if :py:meth:`validate` should detect and report any
                        fields/keys that are present in the payload but not
                        defined in the sub-class.
    :returns: a tuple of two items. First item is a :class:`bool`
              indicating if the payload was successfully validated and the
              second item is ``None``. If the payload was not valid, then
              the second item is a :py:class:`dict` of errors.
    '''
    # replace datatypes.Function.func if not already replaced
    self._replace_string_args()
    # fall back to instance-level defaults when not given explicitly
    required = required if required is not None else self.required
    strict = strict if strict is not None else self.strict
    errors = PayloadErrors()
    # working copy of declared field names; checked keys are removed so
    # that what remains afterwards is "declared but absent from payload"
    fields = copy.deepcopy(list(self._fields))
    for key, value in iteritems(payload):
        if key not in self._fields:
            # unknown key: only an error in strict mode
            if strict:
                errors[key].append(self.strict_error)
        else:
            # declared key: run its rule against the supplied value
            getattr(self, key).test(key, value, payload=payload, errors=errors[key])
            # Remove the key that has been checked
            fields.remove(key)
    for field in fields:
        rule = getattr(self, field)
        if rule.required is None:
            # NOTE(review): this rebinds ``required`` to itself, so a
            # per-rule value from a *previous* iteration leaks into later
            # fields whose rule.required is None -- likely intended to
            # fall back to the method-level default; confirm.
            required = required
        else:
            required = rule.required
        if required:
            errors[field].append(self.required_error)
        elif isinstance(rule, Function):
            # optional Function rules still run against the missing value
            rule.test(field, payload.get(field, None), payload=payload,
                      errors=errors[field])
    return (False, errors.to_dict()) if errors.has_errors() else (True, None)
def similarity(w1, w2, threshold=0.5):
    """Compare two strings' words and return the similarity ratio when it
    exceeds the threshold, or 0 otherwise.

    NOTE: if the result looks more like junk, increase the threshold value.
    """
    score = SM(None, str(w1).lower(), str(w2).lower()).ratio()
    if score > threshold:
        return score
    return 0
def poll(self, force_rescan=False):
    """A generator producing ((path, offset), line) tuples with lines seen
    since the last time poll() was called. Will not block. Checks for
    new/deleted/rotated files every `interval` seconds, but will check
    every time if `force_rescan` is True. (default False)

    NOTE: Python 2 only (relies on dict.iteritems and generator.next).
    """
    # Check for new, deleted, and rotated files.
    if force_rescan or time.time() > self._last_scan + self._interval:
        self._rescan(skip_to_end=False)
        self._last_scan = time.time()
    # one line generator per tracked file
    filereaders = {}
    for path, tailedfile in self._tailedfiles.iteritems():
        filereaders[path] = tailedfile.readlines()
    # One line is read from each file in turn, in an attempt to read
    # from all files evenly. They'll be in an undefined order because
    # of using a dict for filereaders, but that's not a problem
    # because some entropy here is desirable for evenness.
    while len(filereaders) > 0:
        # .keys() returns a list copy on Python 2, so deleting exhausted
        # entries inside the loop is safe
        for path in filereaders.keys():
            lines = filereaders[path]
            try:
                line, offset = lines.next()
            except StopIteration:
                # Reached the end of this file.
                del filereaders[path]
                break
            yield (path, offset), line
def collect(self):
    """Yields metrics from the collectors in the registry."""
    # snapshot under the lock so iteration is safe against concurrent
    # register/unregister calls
    with self._lock:
        registered = copy.copy(self._collector_to_names)
    for collector in registered:
        for metric in collector.collect():
            yield metric
def _find_image_bounding_boxes ( filenames , image_to_bboxes ) :
"""Find the bounding boxes for a given image file .
Args :
filenames : list of strings ; each string is a path to an image file .
image _ to _ bboxes : dictionary mapping image file names to a list of
bounding boxes . This list contains 0 + bounding boxes .
Returns :
List of bounding boxes for each image . Note that each entry in this
list might contain from 0 + entries corresponding to the number of bounding
box annotations for the image .""" | num_image_bbox = 0
bboxes = [ ]
for f in filenames :
basename = os . path . basename ( f )
if basename in image_to_bboxes :
bboxes . append ( image_to_bboxes [ basename ] )
num_image_bbox += 1
else :
bboxes . append ( [ ] )
print ( 'Found %d images with bboxes out of %d images' % ( num_image_bbox , len ( filenames ) ) )
return bboxes |
async def parse_get_cred_def_response(get_cred_def_response: str) -> (str, str):
    """
    Parse a GET_CRED_DEF response to get Credential Definition in the format
    compatible with Anoncreds API.

    :param get_cred_def_response: response of GET_CRED_DEF request.
    :return: Credential Definition Id and Credential Definition json.
        id: string - identifier of credential definition
        schemaId: string - identifier of stored in ledger schema
        type: string - type of the credential definition. CL is the only supported type now.
        tag: string - allows to distinct between credential definitions for the same issuer and schema
        value: Dictionary with Credential Definition's data: {
            primary: primary credential public key,
            Optional<revocation>: revocation credential public key
        }
        ver: Version of the Credential Definition json
    """
    logger = logging.getLogger(__name__)
    logger.debug("parse_get_cred_def_response: >>> get_cred_def_response: %r",
                 get_cred_def_response)
    # lazily create the ctypes completion callback once and cache it on the
    # function object so every call shares the same callback
    if not hasattr(parse_get_cred_def_response, "cb"):
        logger.debug("parse_get_cred_def_response: Creating callback")
        parse_get_cred_def_response.cb = create_cb(
            CFUNCTYPE(None, c_int32, c_int32, c_char_p, c_char_p))
    c_get_cred_def_response = c_char_p(get_cred_def_response.encode('utf-8'))
    (cred_def_id, cred_def_json) = await do_call(
        'indy_parse_get_cred_def_response',
        c_get_cred_def_response,
        parse_get_cred_def_response.cb)
    # values come back as C byte strings; decode to str before returning
    res = (cred_def_id.decode(), cred_def_json.decode())
    logger.debug("parse_get_cred_def_response: <<< res: %r", res)
    return res
def logsumexp(tensor: torch.Tensor, dim: int = -1, keepdim: bool = False) -> torch.Tensor:
    """A numerically stable computation of logsumexp. This is mathematically
    equivalent to `tensor.exp().sum(dim, keep=keepdim).log()`. This function
    is typically used for summing log probabilities.

    Parameters
    ----------
    tensor : torch.FloatTensor, required.
        A tensor of arbitrary size.
    dim : int, optional (default = -1)
        The dimension of the tensor to apply the logsumexp to.
    keepdim : bool, optional (default = False)
        Whether to retain a dimension of size one at the dimension we
        reduce over.
    """
    # subtract the per-slice maximum before exponentiating to avoid overflow
    max_score, _ = tensor.max(dim, keepdim=keepdim)
    offset = max_score if keepdim else max_score.unsqueeze(dim)
    stable_vec = tensor - offset
    return max_score + stable_vec.exp().sum(dim, keepdim=keepdim).log()
def create_from_row(cls, table_row):
    """Build and return a `FileHandle` from an `astropy.table.row.Row`"""
    kwargs = {name: table_row[name] for name in table_row.colnames}
    try:
        return cls(**kwargs)
    except KeyError:
        # mirror the original behaviour: dump the kwargs for debugging and
        # fall through (implicitly returning None)
        print(kwargs)
def _pull_player_data(self):
    """Pull and aggregate all player information.

    Pull the player's HTML stats page and parse unique properties, such as
    the player's height, weight, and name. Next, combine all stats for all
    seasons plus the player's career stats into a single object which can
    easily be iterated upon.

    Returns
    -------
    dictionary
        Returns a dictionary of the player's combined stats where each key
        is a string of the season and the value is the season's associated
        stats.
    """
    page = self._retrieve_html_page()
    if not page:
        # page retrieval failed; nothing to parse (implicitly returns None)
        return
    self._parse_player_information(page)
    combined_stats = self._combine_all_stats(page)
    # remember the list of seasons for later attribute lookups
    setattr(self, '_season', list(combined_stats.keys()))
    return combined_stats
def on(self, method, path=None, headers=None, text=None, json=None):
    '''Sends response to matching parameters one time and removes it from
    list of expectations

    :type method: str
    :param method: request method: ``'GET'``, ``'POST'``, etc.
        can be some custom string
    :type path: str
    :param path: request path including query parameters
    :type headers: dict
    :param headers: dictionary of headers to expect. If omitted any
        headers will do
    :type text: str
    :param text: request text to expect. If omitted any text will match
    :type json: dict
    :param json: request json to expect. If omitted any json will match,
        if present text param will be ignored
    :rtype: Rule
    :returns: newly created expectation rule
    '''
    new_rule = Rule(method, path, headers, text, json)
    return self._add_rule_to(new_rule, self._rules)
async def input(dev: Device, input, output):
    """Get and change outputs."""
    # all inputs currently available on the device
    inputs = await dev.get_inputs()
    if input:
        # activate the named input, optionally scoped to one output zone
        click.echo("Activating %s" % input)
        try:
            input = next((x for x in inputs if x.title == input))
        except StopIteration:
            click.echo("Unable to find input %s" % input)
            return
        zone = None
        if output:
            zone = await dev.get_zone(output)
            # the chosen input must actually be routable to that zone
            if zone.uri not in input.outputs:
                click.echo("Input %s not valid for zone %s" % (input.title, output))
                return
        await input.activate(zone)
    else:
        # no input requested: list every input (active ones in bold)
        # along with the outputs each can feed
        click.echo("Inputs:")
        for input in inputs:
            act = False
            if input.active:
                act = True
            click.echo(" * " + click.style(str(input), bold=act))
            for out in input.outputs:
                click.echo(" - %s" % out)
def move_entry(self, entry=None, group=None):
    """Move an entry to another group.

    A v1Group group and a v1Entry entry are needed.

    :param entry: the v1Entry to move (must belong to this database)
    :param group: the destination v1Group (must belong to this database)
    :returns: True on success
    :raises KPError: on missing/invalid arguments or unknown entry/group
    """
    # isinstance (rather than exact type comparison) also accepts
    # subclasses -- a backward-compatible generalization
    if (entry is None or group is None or
            not isinstance(entry, v1Entry) or not isinstance(group, v1Group)):
        raise KPError("Need an entry and a group.")
    if entry not in self.entries:
        raise KPError("No entry found.")
    if group not in self.groups:
        raise KPError("No group found.")
    # detach from the old group, attach to the new one
    entry.group.entries.remove(entry)
    group.entries.append(entry)
    entry.group_id = group.id_
    entry.group = group
    return True
def resource_from_etree(self, etree, resource_class):
    """Construct a Resource from an etree.

    Parameters:
        etree - the etree to parse
        resource_class - class of Resource object to create

    The parsing is properly namespace aware but we search just
    for the elements wanted and leave everything else alone. Will
    raise an error if there are multiple <loc> or multiple <lastmod>
    elements. Otherwise, provided there is a <loc> element then will
    go ahead and extract as much as possible.

    All errors raised are SitemapParseError with messages intended
    to help debug problematic sitemap XML.
    """
    loc_elements = etree.findall('{' + SITEMAP_NS + "}loc")
    if (len(loc_elements) > 1):
        raise SitemapParseError("Multiple <loc> elements while parsing <url> in sitemap")
    elif (len(loc_elements) == 0):
        raise SitemapParseError("Missing <loc> element while parsing <url> in sitemap")
    else:
        loc = loc_elements[0].text
        if (loc is None or loc == ''):
            raise SitemapParseError("Bad <loc> element with no content while parsing <url> in sitemap")
    # must at least have a URI, make this object
    resource = resource_class(uri=loc)
    # and hopefully a lastmod datetime (but none is OK)
    lastmod_elements = etree.findall('{' + SITEMAP_NS + "}lastmod")
    if (len(lastmod_elements) > 1):
        raise SitemapParseError("Multiple <lastmod> elements while parsing <url> in sitemap")
    elif (len(lastmod_elements) == 1):
        resource.lastmod = lastmod_elements[0].text
    # proceed to look for other resource attributes in an rs:md element
    md_elements = etree.findall('{' + RS_NS + "}md")
    if (len(md_elements) > 1):
        # BUGFIX: the format string and the values tuple were previously
        # passed as two separate arguments to the exception, so the
        # message was never %-formatted
        raise SitemapParseError(
            "Found multiple (%d) <rs:md> elements for %s" % (len(md_elements), loc))
    elif (len(md_elements) == 1):  # have one element, look at attributes
        md = self.md_from_etree(md_elements[0], context=loc)
        # simple attributes that map directly to Resource object attributes
        for att in ('capability', 'change', 'length', 'path', 'mime_type'):
            if (att in md):
                setattr(resource, att, md[att])
        # The ResourceSync beta spec lists md5, sha-1 and sha-256 fixity
        # digest types. Parse and warn of errors ignored.
        if ('hash' in md):
            try:
                resource.hash = md['hash']
            except ValueError as e:
                self.logger.warning("%s in <rs:md> for %s" % (str(e), loc))
    # look for rs:ln elements (optional)
    ln_elements = etree.findall('{' + RS_NS + "}ln")
    if (len(ln_elements) > 0):
        resource.ln = []
        for ln_element in ln_elements:
            resource.ln.append(self.ln_from_etree(ln_element, loc))
    return (resource)
def get_to_purge_archived_resources(user, table):
    """List the entries to be purged from the database."""
    # only super admins may inspect the purge queue
    if user.is_not_super_admin():
        raise dci_exc.Unauthorized()
    archived = get_archived_resources(table)
    return flask.jsonify({
        table.name: archived,
        '_meta': {'count': len(archived)},
    })
def verify_from_file(self, data_path, sig_path=None, keyrings=None, homedir=None):
    '''`data_path`  <string> The path to the data to verify.
    `sig_path`   <string> The signature file, if detached from the data.
    `keyrings`   <list of string> Additional keyrings to search in.
    `homedir`    <string> Override the configured homedir.
    '''
    cmd = ['gpg', '--homedir', homedir or self.homedir]
    cmd.extend(self._get_keyrings_cl(keyrings))
    cmd.append('--verify')
    if sig_path:
        # detached signature: pass signature file then data file
        cmd.extend([sig_path, data_path])
    else:
        cmd.append(data_path)
    proc = subprocess.Popen(cmd, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    if proc.returncode:
        # gpg reports verification problems on stderr
        raise GpgBinaryError(stderr)
    return True
def get_child_value(parent, name, allow_missing=0):
    """return the value of the child element with name in the parent Element"""
    if parent.HasElement(name):
        return XmlHelper.as_value(parent.GetElement(name))
    if allow_missing:
        # caller tolerates a missing child: signal with NaN
        return np.nan
    raise Exception('failed to find child element %s in parent' % name)
def resolve_from_dictionary(dictionary, key_list, default_value=None):
    """Take value from a given key list from dictionary.

    Example: given dictionary d, key_list = ['foo', 'bar'],
    it will try to resolve d['foo']['bar']. If not possible,
    return default_value.

    :param dictionary: A dictionary to resolve.
    :type dictionary: dict

    :param key_list: A list of keys to resolve.
    :type key_list: list[str], str

    :param default_value: Any arbitrary default value to return.

    :return: intended value, if fails, return default_value.

    .. versionadded:: 4.0
    """
    try:
        current_value = dictionary
        # a bare key is treated as a single-element path
        key_list = key_list if isinstance(key_list, list) else [key_list]
        for key in key_list:
            current_value = current_value[key]
        return current_value
    except (KeyError, IndexError, TypeError):
        # robustness fix: previously only KeyError was caught, so a path
        # passing through a non-subscriptable value (TypeError) or a bad
        # sequence index (IndexError) raised instead of returning the
        # documented default
        return default_value
def _pushdate_urls(cls, pushdate, branch, target_platform):
    """Multiple entries exist per push date. Iterate over all until a
    working entry is found.

    Yields candidate task URLs, sorted by namespace name.
    """
    url_base = cls.URL_BASE + '/namespaces/gecko.v2.mozilla-' + branch + '.pushdate.' + pushdate
    try:
        # query the index for child namespaces (empty JSON body)
        base = HTTP_SESSION.post(url_base, json={})
        base.raise_for_status()
    except requests.exceptions.RequestException as exc:
        raise FetcherException(exc)
    # android builds live under the "mobile" product
    product = 'mobile' if 'android' in target_platform else 'firefox'
    json = base.json()
    for namespace in sorted(json['namespaces'], key=lambda x: x['name']):
        yield cls.URL_BASE + '/task/' + namespace['namespace'] + '.' + product + '.' + target_platform
def _init_metadata(self):
    """Initialise metadata for this mixin.

    Delegates to both parent record types' initialisers and then to the
    normal superclass chain.
    """
    TextAnswerFormRecord._init_metadata(self)
    FilesAnswerFormRecord._init_metadata(self)
    super(AnswerTextAndFilesMixin, self)._init_metadata()
def read(self, ulBuffer, pDst, unBytes):
    """reads up to unBytes from buffer into *pDst, returning number of bytes read in *punRead"""
    read_fn = self.function_table.read
    bytes_read = c_uint32()
    # the C function fills bytes_read through the pointer
    result = read_fn(ulBuffer, pDst, unBytes, byref(bytes_read))
    return result, bytes_read.value
def _Parse ( self ) :
    """Extracts attributes and extents from the volume.

    Reads the TSK volume system part backing this volume, records its
    address and description as volume attributes, then appends a single
    extent covering the partition (sector counts converted to bytes).
    """
    tsk_vs_part = self . _file_entry . GetTSKVsPart ( )
    # Partition address, when available, becomes the 'address' attribute.
    tsk_addr = getattr ( tsk_vs_part , 'addr' , None )
    if tsk_addr is not None :
        address = volume_system . VolumeAttribute ( 'address' , tsk_addr )
        self . _AddAttribute ( address )
    tsk_desc = getattr ( tsk_vs_part , 'desc' , None )
    if tsk_desc is not None : # pytsk3 returns an UTF - 8 encoded byte string .
        try :
            tsk_desc = tsk_desc . decode ( 'utf8' )
            self . _AddAttribute ( volume_system . VolumeAttribute ( 'description' , tsk_desc ) )
        # A description that is not valid UTF-8 is silently omitted.
        except UnicodeError :
            pass
    # Convert sector-based partition bounds into a byte-based extent.
    start_sector = tsk_partition . TSKVsPartGetStartSector ( tsk_vs_part )
    number_of_sectors = tsk_partition . TSKVsPartGetNumberOfSectors ( tsk_vs_part )
    volume_extent = volume_system . VolumeExtent ( start_sector * self . _bytes_per_sector , number_of_sectors * self . _bytes_per_sector )
    self . _extents . append ( volume_extent )
def _use_widgets ( objs ) :
    '''Whether a collection of Bokeh objects contains any Widget

    Args:
        objs (seq[Model or Document]) :

    Returns:
        bool

    '''
    from . . models . widgets import Widget

    def is_widget ( obj ) :
        return isinstance ( obj , Widget )

    return _any ( objs , is_widget )
def replace ( self , expression , replacements ) :
    """All purpose method to reduce an expression by applying
    successive replacement rules.

    `expression` is either a SymPy expression
    or a key in `scipy_data_fitting.Model.expressions`.

    `replacements` can be any of the following,
    or a list of any combination of the following:

    - A replacement tuple as in `scipy_data_fitting.Model.replacements`.
    - The name of a replacement in `scipy_data_fitting.Model.replacements`.
    - The name of a replacement group in `scipy_data_fitting.Model.replacement_groups`.

    Raises `KeyError` when a string names neither a replacement
    nor a replacement group.

    Examples:

        #!python
        >>> model.replace(x + y, (x, z))
        z + y

        >>> model.replace('expression', (x, z))
        >>> model.replace('expression', 'replacement')
        >>> model.replace('expression', ['replacement_1', 'replacement_2'])
        >>> model.replace('expression', ['replacement', 'group'])
    """
    # When expression is a string, get the expression from self.expressions.
    if isinstance ( expression , str ) :
        expression = self . expressions [ expression ]
    # Allow for replacements to be empty.
    if not replacements :
        return expression
    # A string names either a single replacement or a replacement group.
    if isinstance ( replacements , str ) :
        if replacements in self . replacements :
            return self . replace ( expression , self . replacements [ replacements ] )
        if replacements in self . replacement_groups :
            return self . replace ( expression , self . replacement_groups [ replacements ] )
        # Previously an unknown name fell through to the list branch and
        # was iterated character by character; fail loudly instead.
        raise KeyError ( 'Unknown replacement or replacement group: %r' % replacements )
    # A list may mix tuples, replacement names and group names in any
    # combination (as documented); apply each entry in order.
    if isinstance ( replacements , list ) :
        return functools . reduce ( self . replace , replacements , expression )
    # Otherwise replacements is a single replacement tuple.
    return expression . replace ( * replacements )
def _create_threads ( self ) :
    """Create the job instances for this runner.

    Builds a JobCreator from the current configuration, job observers and
    logger, and stores the jobs it produces on ``self.jobs``.
    """
    self . jobs = JobCreator ( self . config , self . observers . jobs , self . logger ) . job_factory ( )
def intuition ( args ) :
    '''Main simulation wrapper.

    Load the configuration, run the engine and return the analyze.

    # NOTE(review): args is expected to provide at least the 'context',
    # 'session' and 'bot' keys - confirm against callers.
    '''
    # Use the provided context builder to fill :
    # - config : General behavior
    # - strategy : Modules properties
    # - market : The universe we will trade on
    with setup . Context ( args [ 'context' ] ) as context : # Backtest or live engine .
        # Registers configuration and setups data client
        simulation = Simulation ( )
        # Intuition building blocks
        modules = context [ 'config' ] [ 'modules' ]
        # Prepare benchmark , timezone , trading calendar
        simulation . configure_environment ( context [ 'config' ] [ 'index' ] [ - 1 ] , context [ 'market' ] . benchmark , context [ 'market' ] . timezone )
        # Wire together modules and initialize them
        simulation . build ( args [ 'session' ] , modules , context [ 'strategy' ] )
        # Build data generator
        # NOTE How can I use several sources ?
        data = { 'universe' : context [ 'market' ] , 'index' : context [ 'config' ] [ 'index' ] }
        # Add user settings
        data . update ( context [ 'strategy' ] [ 'data' ] )
        # Load backtest and / or live module ( s ) , resolved dynamically by name
        if 'backtest' in modules :
            data [ 'backtest' ] = utils . intuition_module ( modules [ 'backtest' ] )
        if 'live' in modules :
            data [ 'live' ] = utils . intuition_module ( modules [ 'live' ] )
        # Run the simulation and return an intuition . core . analyzes object
        return simulation ( datafeed . HybridDataFactory ( ** data ) , args [ 'bot' ] )
def start ( self ) :
    """Start the connection to a transport.

    Initializes the topics, then launches a background thread running
    the queue-polling loop.
    """
    self . _init_topics ( )
    threading . Thread ( target = self . _poll_queue ) . start ( )
def persistent_object_context_changed ( self ) :
    """Override from PersistentObject.

    When a persistent object context is present, subscribes to it to
    resolve the source object by uuid and each property that looks like
    a typed object reference ('data_item', 'display_item', 'data_source',
    'graphic', 'structure'). When no context is present, invokes the
    unregistered callbacks for the source and each such reference.
    """
    super ( ) . persistent_object_context_changed ( )
    def source_registered ( source ) :
        # Keep a direct reference to the resolved source object.
        self . __source = source
    def source_unregistered ( source = None ) :
        # NOTE(review): the stored source reference is deliberately not
        # cleared here - confirm this is intended.
        pass
    def reference_registered ( property_name , reference ) :
        self . __referenced_objects [ property_name ] = reference
    def reference_unregistered ( property_name , reference = None ) :
        # NOTE(review): the stored reference is not removed from
        # __referenced_objects - confirm this is intended.
        pass
    if self . persistent_object_context :
        # Resolve the source by uuid; callbacks fire as it is
        # registered / unregistered with the context.
        self . persistent_object_context . subscribe ( self . source_uuid , source_registered , source_unregistered )
        # Subscribe for each property holding a typed reference dict with a uuid.
        for property_name , value in self . __properties . items ( ) :
            if isinstance ( value , dict ) and value . get ( "type" ) in { "data_item" , "display_item" , "data_source" , "graphic" , "structure" } and "uuid" in value :
                self . persistent_object_context . subscribe ( uuid . UUID ( value [ "uuid" ] ) , functools . partial ( reference_registered , property_name ) , functools . partial ( reference_unregistered , property_name ) )
    else :
        # No context: treat the source and every tracked reference as unregistered.
        source_unregistered ( )
        for property_name , value in self . __properties . items ( ) :
            if isinstance ( value , dict ) and value . get ( "type" ) in { "data_item" , "display_item" , "data_source" , "graphic" , "structure" } and "uuid" in value :
                reference_unregistered ( property_name )
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.