signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def getResourceFileList(self, pid):
    """Return a generator over the file listing of a resource.

    :param pid: The HydroShare ID of the resource whose files are to be listed.
    :raises: HydroShareArgumentException if any parameters are invalid.
    :raises: HydroShareNotAuthorized if the user may not perform the action.
    :raises: HydroShareNotFound if the resource was not found.
    :raises: HydroShareHTTPException on an unexpected HTTP response code.
    :return: A generator that yields one dict per resource file, mirroring the
        JSON objects returned by the REST end point; each dict carries keys
        such as 'url', 'size' and 'content_type'.
    """
    file_list_url = "%s/resource/%s/files/" % (self.url_base, pid)
    return resultsListGenerator(self, file_list_url)
|
def parse_datetime(s):
    # source: http://stackoverflow.com/questions/2211362/how-to-parse-xsddatetime-format
    """Parse an ``xsd:dateTime`` string into a naive :class:`datetime`.

    :param s: a string such as ``2020-01-02T03:04:05.123456+01:00``.
    :return: a naive ``datetime`` built from the date/time fields of ``s``
        (any timezone designator is validated by the pattern but ignored),
        or ``None`` if ``s`` does not match the format or encodes an
        impossible calendar date (e.g. February 30th).
    """
    m = re.match(r"""^
        (?P<year>-?[0-9]{4}) - (?P<month>[0-9]{2}) - (?P<day>[0-9]{2})
        T (?P<hour>[0-9]{2}) : (?P<minute>[0-9]{2}) : (?P<second>[0-9]{2})
        (?P<microsecond>\.[0-9]{1,6})?
        (?P<tz>
          Z | (?P<tz_hr>[-+][0-9]{2}) : (?P<tz_min>[0-9]{2})
        )?
        $""", s, re.X)
    if m is None:
        return None
    values = m.groupdict()
    # Normalize fractional seconds to a full 6-digit microsecond count.
    if values["microsecond"] is None:
        values["microsecond"] = 0
    else:
        # Strip the leading '.' and right-pad with zeros to 6 digits.
        values["microsecond"] = values["microsecond"][1:]
        values["microsecond"] += "0" * (6 - len(values["microsecond"]))
    # Drop the timezone capture groups; convert everything else to int.
    values = dict((k, int(v)) for k, v in values.items() if not k.startswith("tz"))
    try:
        return datetime(**values)
    except ValueError:
        # Matched the pattern but is not a real date/time.
        return None
|
def copy(self):
    """Return a copy of this ChemicalEntity.

    A fresh empty instance is created with the same dimensions, and each
    attribute/field/relation store plus the maps are shallow-copied
    entry-by-entry (each stored value's own ``copy()`` is used).
    """
    cls = type(self)
    duplicate = super(cls, cls).empty(**self.dimensions)
    # Copy every per-entity store; values provide their own copy().
    # NOTE: the dunder names are accessed directly (not via getattr) so that
    # Python's class-private name mangling is preserved.
    duplicate.__attributes__ = {key: value.copy() for key, value in self.__attributes__.items()}
    duplicate.__fields__ = {key: value.copy() for key, value in self.__fields__.items()}
    duplicate.__relations__ = {key: value.copy() for key, value in self.__relations__.items()}
    duplicate.maps = {key: mapping.copy() for key, mapping in self.maps.items()}
    duplicate.dimensions = self.dimensions.copy()
    return duplicate
|
def signature_validate(signature, error=None):
    "is signature a valid sequence of zero or more complete types."
    # Resolve the caller-supplied error slot (or create a private one).
    error, my_error = _get_error(error)
    # libdbus returns nonzero for a valid signature.
    valid = bool(dbus.dbus_signature_validate(signature.encode(), error._dbobj))
    my_error.raise_if_set()
    return valid
|
def bothify(self, text='## ??', letters=string.ascii_letters):
    """Replace all placeholders with random numbers and letters.

    Number placeholders (``#``) are filled first, then letter
    placeholders (``?``) drawn from *letters*.

    :param text: string to be parsed
    :returns: string with all numerical and letter placeholders filled in
    """
    numbered = self.numerify(text)
    return self.lexify(numbered, letters=letters)
|
def order(self, order):
    '''Adds an Order to this query.

    Args:
        see :py:class:`Order <datastore.query.Order>` constructor

    Returns self for JS-like method chaining::

        query.order('+age').order('-home')
    '''
    # Coerce plain specs (e.g. '+age') into Order objects.
    if not isinstance(order, Order):
        order = Order(order)
    # ensure order gets attr values the same way the rest of the query does.
    order.object_getattr = self.object_getattr
    self.orders.append(order)
    return self
|
def list_services(profile=None, api_key=None):
    '''List services belonging to this account

    CLI Example:

        salt myminion pagerduty.list_services my-pagerduty-account
    '''
    # Resolve the account profile from minion config, then delegate.
    profile_opts = __salt__['config.option'](profile)
    return salt.utils.pagerduty.list_items(
        'services', 'name', profile_opts, api_key, opts=__opts__)
|
def _maybe_coerce_values(self, values):
    """Unbox to an extension array.

    This will unbox an ExtensionArray stored in an Index or Series.
    ExtensionArrays pass through. No dtype coercion is done.

    Parameters
        values : Index, Series, ExtensionArray

    Returns
        ExtensionArray
    """
    if isinstance(values, (ABCIndexClass, ABCSeries)):
        return values._values
    return values
|
def user_get(alias=None, userids=None, **kwargs):
    '''Retrieve users according to the given parameters.

    .. versionadded:: 2016.3.0

    :param alias: user alias
    :param userids: return only users with the given IDs
    :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring)
    :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring)
    :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring)
    :return: Array with details of matching users, False on failure or if no user found.

    CLI Example:

    .. code-block:: bash

        salt '*' zabbix.user_get james
    '''
    conn_args = _login(**kwargs)
    ret = {}
    try:
        if conn_args:
            method = 'user.get'
            params = {"output": "extend", "filter": {}}
            # At least one selector is required; otherwise the API would
            # return every user.
            if not userids and not alias:
                return {'result': False, 'comment': 'Please submit alias or userids parameter to retrieve users.'}
            if alias:
                params['filter'].setdefault('alias', alias)
            if userids:
                params.setdefault('userids', userids)
            # Fold any extra caller kwargs into the API params.
            params = _params_extend(params, **kwargs)
            ret = _query(method, params, conn_args['url'], conn_args['auth'])
            # Empty 'result' (no user found) is reported as False.
            return ret['result'] if ret['result'] else False
        else:
            # No connection args: fall through to the KeyError handler below,
            # which returns the (still empty) ret dict.
            raise KeyError
    except KeyError:
        return ret
|
def set_extra_selections(self, key, extra_selections):
    """Set extra selections for a key.

    Draw orders are assigned so that current_cell and current_line stay in
    the background and do not cover other decorations.

    NOTE: This will remove previous decorations added to the same key.

    Args:
        key (str): name of the extra selections group.
        extra_selections (list of sourcecode.api.TextDecoration).
    """
    # Keys without a dedicated draw order fall back to the 'on_top' order.
    order = DRAW_ORDERS.get(key)
    order = DRAW_ORDERS.get('on_top') if order is None else order
    for decoration in extra_selections:
        decoration.draw_order = order
    # Replace whatever was previously registered under this key.
    self.clear_extra_selections(key)
    self.extra_selections_dict[key] = extra_selections
|
def get_discord_leaderboard(self, guild):
    """Fetch the leaderboard for a Discord guild from the TrainerDex API.

    Expects: `int` - Discord Guild ID
    Returns: `trainerdex.DiscordLeaderboard`
    Raises requests.HTTPError (via raise_for_status) on a failed request.
    """
    # GET <api_url>/leaderboard/discord/<guild>/ with this client's headers.
    r = requests.get(api_url + 'leaderboard/discord/' + str(guild) + '/', headers=self.headers)
    # NOTE(review): prints the request status to stdout — presumably debug
    # output; consider replacing with logging.
    print(request_status(r))
    r.raise_for_status()
    return DiscordLeaderboard(r.json())
|
def is_running(self):
    """Property method that returns a bool specifying if the process is
    currently running. This is true while the state is active, idle or
    initializing.

    :rtype: bool
    """
    running_states = (self.STATE_ACTIVE, self.STATE_IDLE, self.STATE_INITIALIZING)
    return self._state in running_states
|
def OnDeleteRows(self, event):
    """Deletes rows from all tables of the grid"""
    bbox = self.grid.selection.get_bbox()
    if bbox is None or bbox[1][0] is None:
        # No row selection: delete a single row at the cursor position
        del_point = self.grid.actions.cursor[0]
        no_rows = 1
    else:
        # Delete all rows covered by the selection bounding box,
        # starting from its upper edge
        del_point = bbox[0][0]
        no_rows = self._get_no_rowscols(bbox)[0]
    # Group the deletion into a single undoable operation
    with undo.group(_("Delete rows")):
        self.grid.actions.delete_rows(del_point, no_rows)
        self.grid.GetTable().ResetView()
        # Update the default sized cell sizes
        self.grid.actions.zoom()
    event.Skip()
|
def acquire_proxy(self, host, port, use_ssl=False, host_key=None, tunnel=True):
    '''Check out a connection, routed through the configured proxy.

    This function is the same as acquire but with extra arguments
    concerning proxies.

    Coroutine.
    '''
    # Hosts excluded by the host filter bypass the proxy entirely.
    if self._host_filter and not self._host_filter.test(host):
        connection = yield from super().acquire(host, port, use_ssl, host_key)
        return connection
    # Pool connections by the *target* host, even though the socket is
    # opened to the proxy address.
    host_key = host_key or (host, port, use_ssl)
    proxy_host, proxy_port = self._proxy_address
    connection = yield from super().acquire(proxy_host, proxy_port, self._proxy_ssl, host_key=host_key)
    connection.proxied = True
    _logger.debug('Request for proxy connection.')
    if connection.closed():
        # Fresh (or dropped) connection: connect, optionally CONNECT-tunnel,
        # and optionally wrap in TLS towards the target host.
        _logger.debug('Connecting to proxy.')
        yield from connection.connect()
        if tunnel:
            yield from self._establish_tunnel(connection, (host, port))
        if use_ssl:
            ssl_connection = yield from connection.start_tls(self._ssl_context)
            ssl_connection.proxied = True
            ssl_connection.tunneled = True
            # Remember the underlying proxy connection for release/bookkeeping.
            self._connection_map[ssl_connection] = connection
            connection.wrapped_connection = ssl_connection
            return ssl_connection
    # Reused connection that was previously TLS-wrapped: hand back the wrapper.
    if connection.wrapped_connection:
        ssl_connection = connection.wrapped_connection
        self._connection_map[ssl_connection] = connection
        return ssl_connection
    else:
        return connection
|
def multi(self):
    """Begin a transactional block of the pipeline after WATCH commands
    have been issued. End the transactional block with `execute`.

    Raises RedisError when MULTI is nested or when non-WATCH commands
    were already queued.
    """
    if self.explicit_transaction:
        raise RedisError("Cannot issue nested calls to MULTI")
    if self.commands:
        raise RedisError("Commands without an initial WATCH have already been issued")
    self.explicit_transaction = True
|
def _find_combo_data ( widget , value ) :
"""Returns the index in a combo box where itemData = = value
Raises a ValueError if data is not found"""
|
# Here we check that the result is True , because some classes may overload
# = = and return other kinds of objects whether true or false .
for idx in range ( widget . count ( ) ) :
if widget . itemData ( idx ) is value or ( widget . itemData ( idx ) == value ) is True :
return idx
else :
raise ValueError ( "%s not found in combo box" % ( value , ) )
|
def _read(self, directory, filename, session, path, name, extension, spatial, spatialReferenceID, replaceParamFile):
    """Precipitation Read from File Method.

    Parses the precipitation file at ``path`` into EVENT chunks, turns each
    chunk into GsshaPy objects, and registers this file object with the
    given database ``session``.
    NOTE(review): directory, filename, name, spatial, spatialReferenceID and
    replaceParamFile are accepted for interface uniformity but unused here.
    """
    # Set file extension property
    self.fileExtension = extension
    # Dictionary of keywords/cards and parse function names
    KEYWORDS = ('EVENT',)
    # Parse file into chunks associated with keywords/cards
    with open(path, 'r') as f:
        chunks = pt.chunk(KEYWORDS, f)
    # Parse chunks associated with each key
    for key, chunkList in iteritems(chunks):
        # Parse each chunk in the chunk list
        for chunk in chunkList:
            result = gak.eventChunk(key, chunk)
            self._createGsshaPyObjects(result)
    # Add this PrecipFile to the database session
    session.add(self)
|
def get_remaining_time(program):
    '''Get the remaining time in seconds of a program that is currently on.

    :param program: dict with naive-local-time 'start_time' and 'end_time'
        datetime values (naive local time assumed — matches datetime.now()).
    :return: whole seconds until the program ends; 0 if it already ended;
        None if start/end times are missing.
    '''
    now = datetime.datetime.now()
    program_start = program.get('start_time')
    program_end = program.get('end_time')
    if not program_start or not program_end:
        _LOGGER.error('Could not determine program start and/or end times.')
        _LOGGER.debug('Program data: %s', program)
        return
    if now > program_end:
        _LOGGER.error('The provided program has already ended.')
        _LOGGER.debug('Program data: %s', program)
        return 0
    # BUGFIX: previously computed elapsed time (now - program_start); the
    # remaining time is the span from now until the program's end. Use
    # total_seconds() so spans of a day or more are not truncated.
    remaining = program_end - now
    return int(remaining.total_seconds())
|
def make_handlers(base_url, server_processes):
    """Get tornado handlers for registered server_processes.

    For every process, two routes are registered under base_url/<name>:
    the proxy handler itself and a trailing-slash redirect.
    """
    handlers = []
    for proc in server_processes:
        proxy_handler = _make_serverproxy_handler(
            proc.name, proc.command, proc.environment,
            proc.timeout, proc.absolute_url, proc.port,
        )
        handlers.append((ujoin(base_url, proc.name, r'(.*)'), proxy_handler, dict(state={})))
        handlers.append((ujoin(base_url, proc.name), AddSlashHandler))
    return handlers
|
def plot(data: Dict[str, np.array], fields: List[str] = None, *args, **kwargs):
    """Plot simulation data against time.

    :data: A dictionary of arrays; must contain 't' and a 'labels' mapping.
    :fields: A list of variables you want to plot (e.g. ['x', 'y', 'c']);
        defaults to ['x', 'y', 'm', 'c'].
    """
    if plt is None:
        # matplotlib is unavailable — quietly do nothing.
        return
    if fields is None:
        fields = ['x', 'y', 'm', 'c']
    plotted_labels = []
    plotted_lines = []
    for name in fields:
        # Skip fields with an empty dimension (nothing to draw).
        if min(data[name].shape) > 0:
            new_lines = plt.plot(data['t'], data[name], *args, **kwargs)
            plotted_lines.extend(new_lines)
            plotted_labels.extend(data['labels'][name])
    plt.legend(plotted_lines, plotted_labels, ncol=2, loc='best')
    plt.xlabel('t, sec')
    plt.grid()
|
def format_unencoded(self, tokensource, outfile):
    """The formatting process uses several nested generators; which of
    them are used is determined by the user's options.

    Each generator should take at least one argument, ``inner``,
    and wrap the pieces of text generated by this.

    Always yield 2-tuples: (code, text). If "code" is 1, the text
    is part of the original tokensource being highlighted, if it's
    0, the text is some piece of wrapping. This makes it possible to
    use several different wrappers that process the original source
    linewise, e.g. line number generators.
    """
    # Base pass: tokens -> per-line (code, text) pairs.
    source = self._format_lines(tokensource)
    if self.hl_lines:
        source = self._highlight_lines(source)
    if not self.nowrap:
        # Wrapper order matters: inner wrappers first, table/full last.
        if self.linenos == 2:
            # inline line numbers
            source = self._wrap_inlinelinenos(source)
        if self.lineanchors:
            source = self._wrap_lineanchors(source)
        if self.linespans:
            source = self._wrap_linespans(source)
        source = self.wrap(source, outfile)
        if self.linenos == 1:
            # line numbers in a separate table column
            source = self._wrap_tablelinenos(source)
        if self.full:
            source = self._wrap_full(source, outfile)
    # Emit everything; the 'code' flag (t) is not needed here.
    for t, piece in source:
        outfile.write(piece)
|
def drawMeanOneLognormal(N, sigma=1.0, seed=0):
    '''Generate arrays of mean one lognormal draws. The sigma input can be a
    number or list-like. If a number, output is a length N array of draws from
    the lognormal distribution with standard deviation sigma. If a list,
    output is a length T list whose t-th entry is a length N array of draws
    from the lognormal with standard deviation sigma[t].

    Parameters
        N : int
            Number of draws in each row.
        sigma : float, int or [float]
            One or more standard deviations. Number of elements T in sigma
            determines number of rows of output.
        seed : int
            Seed for random number generator.

    Returns:
        draws : np.array or [np.array]
            T-length list of arrays of mean one lognormal draws each of size
            N, or a single array of size N (if sigma is a scalar).
    '''
    # Set up the RNG
    RNG = np.random.RandomState(seed)
    # BUGFIX/generalization: also accept integer scalars (e.g. sigma=1),
    # which previously fell into the list branch and crashed.
    if isinstance(sigma, (int, float)):
        # mu = -sigma^2/2 makes E[exp(mu + sigma*Z)] = 1 (mean-one draws).
        mu = -0.5 * sigma ** 2
        draws = RNG.lognormal(mean=mu, sigma=sigma, size=N)
    else:
        # One length-N array per entry of sigma.
        draws = []
        for sig in sigma:
            mu = -0.5 * (sig ** 2)
            draws.append(RNG.lognormal(mean=mu, sigma=sig, size=N))
    return draws
|
def get_objective_query_session_for_objective_bank(self, objective_bank_id=None):
    """Gets the OsidSession associated with the objective query service for
    the given objective bank.

    arg: objectiveBankId (osid.id.Id): the Id of the objective bank
    return: (osid.learning.ObjectiveQuerySession) - an ObjectiveQuerySession
    raise: NotFound - objectiveBankId not found
    raise: NullArgument - objectiveBankId is null
    raise: OperationFailed - unable to complete request
    raise: Unimplemented - supports_objective_query() or
        supports_visible_federation() is false
    compliance: optional - This method must be implemented if
        supports_objective_query() and supports_visible_federation()
        are true.
    """
    if not objective_bank_id:
        raise NullArgument
    if not self.supports_objective_query():
        raise Unimplemented()
    # The sessions module may be absent in partial installations.
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed()
    # Instantiation failures surface as OperationFailed per the osid spec.
    try:
        query_session = sessions.ObjectiveQuerySession(objective_bank_id, runtime=self._runtime)
    except AttributeError:
        raise OperationFailed()
    return query_session
|
def get_morph_files(directory):
    '''Get a list of all morphology files in a directory.

    Returns:
        list with all files with extensions '.swc', 'h5' or '.asc'
        (case insensitive)
    '''
    candidates = (os.path.join(directory, entry) for entry in os.listdir(directory))
    return [path for path in candidates if _is_morphology_file(path)]
|
def optional_args(proxy=None):
    '''Return the connection optional args.

    .. note::
        Sensible data will not be returned.

    .. versionadded:: 2017.7.0

    CLI Example - select all devices connecting via port 1234:

    .. code-block:: bash

        salt -G 'optional_args:port:1234' test.ping
    '''
    opt_args = _get_device_grain('optional_args', proxy=proxy) or {}
    # Strip credentials/secrets before exposing the args as a grain.
    if opt_args and _FORBIDDEN_OPT_ARGS:
        for forbidden in _FORBIDDEN_OPT_ARGS:
            opt_args.pop(forbidden, None)
    return {'optional_args': opt_args}
|
def touch(self, name, data):
    """Create a 'file' analog called 'name' and store 'data' in the d_data
    dictionary of the owning node under key 'name'.

    The 'name' can contain a path specifier; in that case the tree is
    navigated to the parent directory first, and the current directory is
    restored before returning.
    """
    origin = self.cwd()
    parent, slash, leaf = name.rpartition('/')
    if slash:
        # Navigate into the parent directory; the leaf is the file name.
        self.cd(parent)
        name = leaf
    self.snode_current.d_data[name] = data
    self.cd(origin)
    return True
|
def sum_of_gaussian_factory(N):
    """Return a model class of the sum of N Gaussians and a constant background.

    The returned class subclasses Fittable1DModel and carries 3*N + 1
    parameters: amplitude_i, center_i, stddev_i for each component i,
    plus a 'background' parameter (default 0.0).
    """
    name = "SumNGauss%d" % N
    attr = {}
    # parameters: three per Gaussian component
    for i in range(N):
        key = "amplitude_%d" % i
        attr[key] = Parameter(key)
        key = "center_%d" % i
        attr[key] = Parameter(key)
        key = "stddev_%d" % i
        attr[key] = Parameter(key)
    attr['background'] = Parameter('background', default=0.0)

    def fit_eval(self, x, *args):
        # args layout: (amp_0, cen_0, std_0, ..., amp_{N-1}, cen_{N-1},
        # std_{N-1}, background); x*0 broadcasts background to x's shape.
        result = x * 0 + args[-1]
        for i in range(N):
            result += args[3 * i] * np.exp(-0.5 * (x - args[3 * i + 1]) ** 2 / args[3 * i + 2] ** 2)
        return result
    attr['evaluate'] = fit_eval

    def deriv(self, x, *args):
        # Row j holds d(model)/d(param_j); the last row (background) stays
        # at the np.ones initialization since d(model)/d(background) == 1.
        d_result = np.ones(((3 * N + 1), len(x)))
        for i in range(N):
            # d/d(amplitude): the bare Gaussian
            d_result[3 * i] = (np.exp(-0.5 / args[3 * i + 2] ** 2 * (x - args[3 * i + 1]) ** 2))
            # d/d(center)
            d_result[3 * i + 1] = args[3 * i] * d_result[3 * i] * (x - args[3 * i + 1]) / args[3 * i + 2] ** 2
            # d/d(stddev)
            d_result[3 * i + 2] = args[3 * i] * d_result[3 * i] * (x - args[3 * i + 1]) ** 2 / args[3 * i + 2] ** 3
        return d_result
    attr['fit_deriv'] = deriv

    # Build the model class dynamically.
    klass = type(name, (Fittable1DModel,), attr)
    return klass
|
def _check_sdp_from_eigen ( w , tol = None ) :
"""Checks if some of the eigenvalues given are negative , up to a tolerance
level , with a default value of the tolerance depending on the eigenvalues .
Parameters
w : array - like , shape = ( n _ eigenvalues , )
Eigenvalues to check for non semidefinite positiveness .
tol : positive ` float ` , optional
Negative eigenvalues above - tol are considered zero . If
tol is None , and eps is the epsilon value for datatype of w , then tol
is set to w . max ( ) * len ( w ) * eps .
See Also
np . linalg . matrix _ rank for more details on the choice of tolerance ( the same
strategy is applied here )"""
|
if tol is None :
tol = w . max ( ) * len ( w ) * np . finfo ( w . dtype ) . eps
if tol < 0 :
raise ValueError ( "tol should be positive." )
if any ( w < - tol ) :
raise ValueError ( "Matrix is not positive semidefinite (PSD)." )
|
def login(self):
    """Logs the user in, returns the result.

    Returns
        bool - Whether or not the user logged in successfully (the
        username appearing in the post-login page content).
    """
    # Request index to obtain initial cookies and look more human.
    index_page = self.getPage("http://www.neopets.com")
    login_form = index_page.form(action="/login.phtml")
    login_form.update({'username': self.username, 'password': self.password})
    result_page = login_form.submit()
    logging.getLogger("neolib.user").info("Login check", {'pg': result_page})
    return self.username in result_page.content
|
def human_xor_00(X, y, model_generator, method_name):
    """XOR (false/false)

    This tests how well a feature attribution method agrees with human
    intuition for an eXclusive OR operation combined with linear effects.
    This metric deals specifically with the question of credit allocation
    for the following function when all three inputs are true:
    if fever: +2 points
    if cough: +2 points
    if fever or cough but not both: +6 points

    transform = "identity"
    sort_order = 3
    """
    # NOTE(review): `y` is accepted for interface uniformity with sibling
    # metrics but unused; the trailing (False, False) fix fever/cough for
    # this variant of the shared helper.
    return _human_xor(X, model_generator, method_name, False, False)
|
def map(self, layers=None, interactive=True, zoom=None, lat=None, lng=None, size=(800, 400), ax=None):
    """Produce a CARTO map visualizing data layers.

    Args:
        layers (list, optional): Zero or more of Layer, BaseMap or
            QueryLayer objects (a single layer may be passed bare).
            At most 8 layers, at most one BaseMap, at most one layer
            with a time column.
        interactive (bool, optional): Defaults to ``True`` to show an
            interactive slippy map. Setting to ``False`` creates a
            static map.
        zoom (int, optional): Zoom level of map (usually 0-19). Must be
            used in conjunction with ``lng`` and ``lat``. Defaults to a
            view that has all data layers in view.
        lat (float, optional): Latitude of the map center. Must be used
            in conjunction with ``zoom`` and ``lng``.
        lng (float, optional): Longitude of the map center. Must be used
            in conjunction with ``zoom`` and ``lat``.
        size (tuple, optional): Pixel dimensions ``(width, height)``.
            Defaults to ``(800, 400)``.
        ax: matplotlib axis on which to draw the image. Only used when
            ``interactive`` is ``False``.

    Returns:
        IPython.display.HTML or matplotlib Axes: Interactive maps are
        rendered as HTML in an `iframe`, while static maps are returned
        as matplotlib Axes objects or IPython Image.
    """
    # TODO: add layers preprocessing method like
    # layers = process_layers(layers)
    # that uses up to layer limit value error
    # --- normalize `layers` to a mutable list ---
    if layers is None:
        layers = []
    elif not isinstance(layers, collections.Iterable):
        layers = [layers]
    else:
        layers = list(layers)
    if len(layers) > 8:
        raise ValueError('Map can have at most 8 layers')
    # zoom/lat/lng must be given all together or not at all
    nullity = [zoom is None, lat is None, lng is None]
    if any(nullity) and not all(nullity):
        raise ValueError('Zoom, lat, and lng must all or none be provided')
    # When no layers are passed, set default zoom
    if ((len(layers) == 0 and zoom is None) or (len(layers) == 1 and layers[0].is_basemap)):
        [zoom, lat, lng] = [1, 0, 0]
    has_zoom = zoom is not None
    # Check for a time layer, if it exists move it to the front
    time_layers = [idx for idx, layer in enumerate(layers) if not layer.is_basemap and layer.time]
    time_layer = layers[time_layers[0]] if len(time_layers) > 0 else None
    if len(time_layers) > 1:
        raise ValueError('Map can at most take 1 Layer with time ' 'column/field')
    if time_layer:
        if not interactive:
            raise ValueError('Map cannot display a static image with a ' 'time column')
        # move the time layer to the end of the layer list
        layers.append(layers.pop(time_layers[0]))
    base_layers = [idx for idx, layer in enumerate(layers) if layer.is_basemap]
    # Check basemaps, add one if none exist
    if len(base_layers) > 1:
        raise ValueError('Map can at most take one BaseMap layer')
    elif len(base_layers) == 1:
        # move baselayer to first position
        layers.insert(0, layers.pop(base_layers[0]))
    # add labels layer if requested
    if layers[0].is_basic() and layers[0].labels == 'front':
        layers.append(BaseMap(layers[0].source, labels='front', only_labels=True))
        layers[0].labels = None
    elif not base_layers:
        # default basemap is dark with labels in back
        # labels will be changed if all geoms are non-point
        layers.insert(0, BaseMap())
    geoms = set()
    # Setup layers
    for idx, layer in enumerate(layers):
        if not layer.is_basemap:
            # get schema of style columns
            if layer.style_cols:
                resp = self.sql_client.send(utils.minify_sql(('SELECT {cols}', 'FROM ({query}) AS _wrap', 'LIMIT 0', )).format(cols=','.join(layer.style_cols), comma=',' if layer.style_cols else '', query=layer.orig_query), **DEFAULT_SQL_ARGS)
                self._debug_print(layer_fields=resp)
                for stylecol, coltype in utils.dict_items(resp['fields']):
                    layer.style_cols[stylecol] = coltype['type']
            layer.geom_type = self._geom_type(layer)
            if not base_layers:
                geoms.add(layer.geom_type)
            # update local style schema to help build proper defaults
            layer._setup(layers, idx)
    # set labels on top if there are no point geometries and a basemap
    # is not specified
    if not base_layers and 'point' not in geoms:
        layers[0] = BaseMap(labels='front')
    # If basemap labels are on front, add labels layer
    basemap = layers[0]
    if basemap.is_basic() and basemap.labels == 'front':
        layers.append(BaseMap(basemap.source, labels=basemap.labels, only_labels=True))
    nb_layers = non_basemap_layers(layers)
    if time_layer and len(nb_layers) > 1:
        raise ValueError('Maps with a time element can only consist of a ' 'time layer and a basemap. This constraint will ' 'be removed in the future.')
    # Build the named-map options: one cartocss/sql pair per data layer
    options = {'basemap_url': basemap.url}
    for idx, layer in enumerate(nb_layers):
        self._check_query(layer.query, style_cols=layer.style_cols)
        options['cartocss_' + str(idx)] = layer.cartocss
        options['sql_' + str(idx)] = layer.query
    params = {'config': json.dumps(options), 'anti_cache': random.random(), }
    if has_zoom:
        params.update({'zoom': zoom, 'lat': lat, 'lon': lng})
        options.update({'zoom': zoom, 'lat': lat, 'lng': lng})
    else:
        # no explicit view: fit the bounds of all data layers
        bounds = self._get_bounds(nb_layers)
        options.update(bounds)
        bbox = '{west},{south},{east},{north}'.format(**bounds)
        params.update(dict(bbox=bbox))
    map_name = self._send_map_template(layers, has_zoom=has_zoom)
    api_url = utils.join_url(self.creds.base_url(), 'api/v1/map')
    static_url = ('{url}.png?{params}').format(url=utils.join_url(api_url, 'static/named', map_name, size[0], size[1]), params=urlencode(params))
    html = '<img src="{url}" />'.format(url=static_url)
    self._debug_print(static_url=static_url)
    # TODO: write this as a private method
    if interactive:
        netloc = urlparse(self.creds.base_url()).netloc
        domain = 'carto.com' if netloc.endswith('.carto.com') else netloc
        config = {'user_name': self.creds.username(), 'maps_api_template': self.creds.base_url(), 'sql_api_template': self.creds.base_url(), 'tiler_protocol': 'https', 'tiler_domain': domain, 'tiler_port': '80', 'type': 'torque' if time_layer else 'namedmap', 'named_map': {'name': map_name, 'params': {k: utils.safe_quotes(v, escape_single_quotes=True) for k, v in utils.dict_items(options)}, }, }
        map_options = {'filter': ['mapnik', 'torque', ], 'https': True, }
        if time_layer:
            # get turbo-carto processed cartocss
            resp = self._auth_send('api/v1/map/named/{}'.format(map_name), 'POST', data=params['config'], headers={'Content-Type': 'application/json'})
            # check if errors in cartocss (already turbo-carto processed)
            if 'errors' not in resp:
                # replace previous cartocss with turbo-carto processed version
                layer.cartocss = (resp['metadata']['layers'][1]['meta']['cartocss'])
            config.update({'order': 1, 'options': {'query': time_layer.query, 'user_name': self.creds.username(), 'tile_style': layer.cartocss}})
            config['named_map'].update({'layers': [{'layer_name': 't', }], })
            map_options.update({'time_slider': True, 'loop': True, })
        bounds = [] if has_zoom else [[options['north'], options['east']], [options['south'], options['west']]]
        content = self._get_iframe_srcdoc(config=config, bounds=bounds, options=options, map_options=map_options, top_layer_url=top_basemap_layer_url(layers))
        img_html = html
        html = ('<iframe srcdoc="{content}" width="{width}" height="{height}">' ' Preview image: {img_html}' '</iframe>').format(content=utils.safe_quotes(content), width=size[0], height=size[1], img_html=img_html)
        return HTML(html)
    elif HAS_MATPLOTLIB:
        # static path: fetch the PNG and draw it on a matplotlib axis
        raw_data = mpi.imread(static_url, format='png')
        if ax is None:
            dpi = mpi.rcParams['figure.dpi']
            mpl_size = (size[0] / dpi, size[1] / dpi)
            fig = plt.figure(figsize=mpl_size, dpi=dpi, frameon=False)
            fig.subplots_adjust(left=0, right=1, top=1, bottom=0)
            ax = plt.gca()
        ax.imshow(raw_data)
        ax.axis('off')
        return ax
    else:
        # no matplotlib: return the static image directly
        return Image(url=static_url, embed=True, format='png', width=size[0], height=size[1], metadata=dict(origin_url=static_url))
|
def build_if_needed(self):
    """Rebuild the shader source when a rebuild has been requested.

    Runs ``_build()`` once if the ``_need_build`` flag is set, clears the
    flag, and always refreshes the bound variables afterwards.
    """
    needs_rebuild = self._need_build
    if needs_rebuild:
        self._build()
        self._need_build = False
    # Variables are (re)synchronised on every call, rebuilt or not.
    self.update_variables()
|
async def SwitchBlockOff(self, message, type_):
    '''Issue a "SwitchBlockOff" RPC on the Block facade (version 2).

    message : str
    type_ : str
    Returns -> Error
    '''
    # Assemble the request; parameter keys mirror the wire format.
    request = dict(
        type='Block',
        request='SwitchBlockOff',
        version=2,
        params=dict(message=message, type=type_),
    )
    # Await and hand back the facade's reply verbatim.
    return await self.rpc(request)
|
def getCfgFilesInDirForTask(aDir, aTask, recurse=False):
    """This is a specialized function which is meant only to keep the
    same code from needlessly being much repeated throughout this
    application.  This must be kept as fast and as light as possible.
    This checks a given directory for .cfg files matching a given
    task.  If recurse is True, it will check subdirectories.
    If aTask is None, it returns all files and ignores aTask."""
    # Gather candidate .cfg files, optionally walking subdirectories.
    if recurse:
        candidates = irafutils.rglob(aDir, '*.cfg')
    else:
        candidates = glob.glob(aDir + os.sep + '*.cfg')
    # No task filter requested - hand back everything found.
    if not aTask:
        return candidates
    matches = []
    for fname in candidates:
        try:
            # Keep only files whose embedded task name matches.
            if getEmbeddedKeyVal(fname, TASK_NAME_KEY, '') == aTask:
                matches.append(fname)
        except Exception as e:
            print('Warning: ' + str(e))
    return matches
|
def cmd_genobstacles(self, args):
    '''genobstacles command parser'''
    # Bug fix: the usage string previously omitted several subcommands
    # (remove, dropcloud, dropeagle, dropbird, dropplane) that are handled
    # below, so the help text now lists every supported branch.
    usage = "usage: genobstacles <start|stop|restart|clearall|status|set|remove|dropcloud|dropeagle|dropbird|dropplane>"
    if len(args) == 0:
        print(usage)
        return
    if args[0] == "set":
        # Forward remaining tokens to the generator settings parser.
        gen_settings.command(args[1:])
    elif args[0] == "start":
        if self.have_home:
            self.start()
        else:
            # No home position yet - defer the start until one arrives.
            self.pending_start = True
    elif args[0] == "stop":
        self.stop()
        self.pending_start = False
    elif args[0] == "restart":
        self.stop()
        self.start()
    elif args[0] == "status":
        print(self.status())
    elif args[0] == "remove":
        # Remove the obstacle closest to the last map click (within 1 km).
        latlon = self.module('map').click_position
        if self.last_click is not None and self.last_click == latlon:
            # Same click as last time - ignore duplicate events.
            return
        self.last_click = latlon
        if latlon is not None:
            closest = None
            closest_distance = 1000
            for a in self.aircraft:
                dist = a.distance_from(latlon[0], latlon[1])
                if dist < closest_distance:
                    closest_distance = dist
                    closest = a
            if closest is not None:
                self.aircraft.remove(closest)
            else:
                print("No obstacle found at click point")
    elif args[0] == "dropcloud":
        self.cmd_dropobject(Weather())
    elif args[0] == "dropeagle":
        self.cmd_dropobject(BirdOfPrey())
    elif args[0] == "dropbird":
        self.cmd_dropobject(BirdMigrating())
    elif args[0] == "dropplane":
        self.cmd_dropobject(Aircraft())
    elif args[0] == "clearall":
        self.clearall()
    else:
        print(usage)
|
def get_friends(self, limit=50, cacheable=False):
    """Returns a list of the user's friends."""
    # Collect friend nodes page by page and wrap each one as a User.
    return [
        User(_extract(node, "name"), self.network)
        for node in _collect_nodes(limit, self,
                                   self.ws_prefix + ".getFriends",
                                   cacheable)
    ]
|
def set_value(self, value):
    """Assign *value* both to this wrapper and to the wrapped target.

    :param obj value: The value to set.
    """
    # Keep a local copy so the wrapper can report what was last written.
    self._value = value
    # Propagate the value to the named attribute on the target object.
    target, attr = self.target, self._name
    setattr(target, attr, value)
|
def countedArray(expr, intExpr=None):
    """Helper to define a counted list of expressions.

    This helper defines a pattern of the form::

        integer expr expr expr ...

    where the leading integer tells how many expr expressions follow.
    The matched tokens returns the array of expr tokens as a list - the
    leading count token is suppressed.

    :param expr: the element expression to repeat
    :param intExpr: optional expression for the leading count; defaults to
        a run of digits converted to int
    """
    # Forward placeholder; its contents are bound at parse time, once the
    # actual count has been read.
    arrayExpr = Forward()
    def countFieldParseAction(s, l, t):
        # The count token has already been converted to int (see below).
        n = t[0]
        # Rebind the forward to exactly n copies of expr; a count of zero
        # matches the empty expression instead.
        arrayExpr << (n and Group(And([expr] * n)) or Group(empty))
        # Returning [] suppresses the leading count token from the results.
        return []
    if intExpr is None:
        # Default count expression: digits -> int.
        intExpr = Word(nums).setParseAction(lambda t: int(t[0]))
    else:
        # Copy so the caller's expression object is not mutated below.
        intExpr = intExpr.copy()
    intExpr.setName("arrayLen")
    # callDuringTry ensures the forward is rebound even inside lookaheads.
    intExpr.addParseAction(countFieldParseAction, callDuringTry=True)
    return (intExpr + arrayExpr)
|
def is_url(str_):
    """Heuristic check if str is url formatted."""
    # A handful of cheap substring tests; any single hit counts as a URL.
    scheme_prefixes = ('http://', 'https://', 'www.')
    return (str_.startswith(scheme_prefixes)
            or '.org/' in str_
            or '.com/' in str_)
|
def cases(arg, case_result_pairs, default=None):
    """Create a case expression in one shot.

    Returns
    -------
    case_expr : SimpleCase
    """
    builder = arg.case()
    # Chain one WHEN clause per (condition, result) pair, in order.
    for condition, outcome in case_result_pairs:
        builder = builder.when(condition, outcome)
    # Attach an ELSE clause only when a default was supplied.
    if default is not None:
        builder = builder.else_(default)
    return builder.end()
|
def search_games(self, query, live=True):
    """Search for games that are similar to the query.

    :param query: the query string
    :type query: :class:`str`
    :param live: If true, only returns games that are live on at least one
        channel
    :type live: :class:`bool`
    :returns: A list of games
    :rtype: :class:`list` of :class:`models.Game` instances
    :raises: None
    """
    response = self.kraken_request(
        'GET', 'search/games',
        params={'query': query, 'type': 'suggest', 'live': live})
    found = models.Game.wrap_search(response)
    # Attach current viewer counts to every returned game.
    for game in found:
        self.fetch_viewers(game)
    return found
|
def translations(self):
    """Yield all six translations of a nucleotide sequence.

    @return: A generator that produces six L{TranslatedRead} instances.
    """
    rc_sequence = self.reverseComplement().sequence
    for reverseComplemented in (False, True):
        for frame in (0, 1, 2):
            strand = rc_sequence if reverseComplemented else self.sequence
            # Skip 0, 1 or 2 leading bases according to the frame. Slicing
            # copies the sequence, so padding below is safe.
            suffix = strand[frame:]
            remainder = len(suffix) % 3
            if remainder:
                # Pad with 'N's so the length becomes a multiple of three.
                suffix += 'NN' if remainder == 1 else 'N'
            yield TranslatedRead(self, translate(suffix), frame,
                                 reverseComplemented)
|
def template_ellipsoid(shape):
    r"""Returns an ellipsoid binary structure of a of the supplied radius that can be used as
    template input to the generalized hough transform.

    Parameters
    ----------
    shape : tuple of integers
        The main axes of the ellipsoid in voxel units.

    Returns
    -------
    template_sphere : ndarray
        A boolean array containing an ellipsoid.
    """
    # Prepare template array covering one "octant" of the ellipsoid.
    # Bug fix: `numpy.bool` was removed in NumPy 1.24; use builtin `bool`.
    template = numpy.zeros([int(x // 2 + (x % 2)) for x in shape], dtype=bool)
    # in odd shape cases, this will include the ellipses middle line, otherwise not
    # get real world offset to compute the ellipsoid membership
    rw_offset = []
    for s in shape:
        if int(s) % 2 == 0:  # number before point is even
            rw_offset.append(0.5 - (s % 2) / 2.)
        else:  # number before point is odd
            rw_offset.append(-1 * (s % int(s)) / 2.)
    # squares of the half axes, precomputed to avoid work inside the loop
    shape_pow = numpy.power(numpy.asarray(shape) / 2., 2)
    # we use the ellipse normal form to find all point in its surface as well as volume
    # e.g. for 2D, all voxels inside the ellipse (or on its surface) with half-axes a and b
    # follow x^2/a^2 + y^2/b^2 <= 1; for higher dimensions accordingly
    # to not have to iterate over each voxel, we make use of the ellipsoids symmetry
    # and construct just a part of the whole ellipse here
    for idx in numpy.ndindex(template.shape):
        distance = sum((math.pow(coordinate + rwo, 2) / axes_pow
                        for axes_pow, coordinate, rwo
                        in zip(shape_pow, idx, rw_offset)))
        if distance <= 1:
            template[idx] = True
    # we take now our ellipse part and flip it once along each dimension, concatenating it in each step
    # the slicers are constructed to flip in each step the current dimension i.e. to behave like arr[..., ::-1, ...]
    for i in range(template.ndim):
        # Bug fix: NumPy requires a *tuple* of slices for multi-dimensional
        # indexing; indexing with a plain list is deprecated and rejected
        # by newer releases.
        slicers = tuple(slice(None, None, -1) if i == j else slice(None)
                        for j in range(template.ndim))
        if 0 == int(shape[i]) % 2:  # even case
            template = numpy.concatenate((template[slicers], template), i)
        else:  # odd case, in which an overlap has to be created
            slicers_truncate = tuple(slice(None, -1) if i == j else slice(None)
                                     for j in range(template.ndim))
            template = numpy.concatenate(
                (template[slicers][slicers_truncate], template), i)
    return template
|
def get(self):
    """Return a Deferred that fires with a SourceStamp instance."""
    # Chain the patch lookup and the completion handler onto the
    # base-revision Deferred, then hand that Deferred back to the caller.
    deferred = self.getBaseRevision()
    for callback in (self.getPatch, self.done):
        deferred.addCallback(callback)
    return deferred
|
def VariantDir(self, variant_dir, src_dir, duplicate=1):
    """Link the supplied variant directory to the source directory
    for purposes of building files."""
    # Normalise both arguments into SCons Node objects.
    if not isinstance(src_dir, SCons.Node.Node):
        src_dir = self.Dir(src_dir)
    if not isinstance(variant_dir, SCons.Node.Node):
        variant_dir = self.Dir(variant_dir)
    # A variant directory may not contain its own source directory.
    if src_dir.is_under(variant_dir):
        raise SCons.Errors.UserError("Source directory cannot be under variant directory.")
    existing = variant_dir.srcdir
    if existing:
        if existing == src_dir:
            # Already linked to this exact source directory; nothing to do.
            return
        raise SCons.Errors.UserError("'%s' already has a source directory: '%s'." % (variant_dir, variant_dir.srcdir))
    variant_dir.link(src_dir, duplicate)
|
def geocode(address):
    '''Query function to obtain a latitude and longitude from a location
    string such as `Houston, TX` or `Colombia`. This uses an online lookup,
    currently wrapping the `geopy` library, and providing an on-disk cache
    of queries.

    Parameters
    ----------
    address : str
        Search string to retrieve the location, [-]

    Returns
    -------
    latitude : float
        Latitude of address, [degrees]
    longitude : float
        Longitude of address, [degrees]

    Notes
    -----
    If a query has been retrieved before, this function will take under 1 ms;
    it takes several seconds otherwise.

    Examples
    --------
    >>> geocode('Fredericton, NB')
    (45.966425, -66.645813)
    '''
    loc_tuple = None
    cache = None
    try:
        # Handle bugs in the cache, i.e. if there is no space on disk to
        # create the database, by ignoring them.
        # Bug fix: the bare ``except:`` clauses were narrowed to
        # ``except Exception:`` so KeyboardInterrupt and SystemExit are no
        # longer swallowed.
        cache = geopy_cache()
        loc_tuple = cache.cached_address(address)
    except Exception:
        pass
    if loc_tuple is not None:
        return loc_tuple
    geocoder = geopy_geolocator()
    if geocoder is None:
        return geopy_missing_msg
    location = geocoder.geocode(address)
    try:
        # Best-effort write-through caching; failures must not break lookups.
        cache.cache_address(address, location.latitude, location.longitude)
    except Exception:
        pass
    return (location.latitude, location.longitude)
|
def lengthen(self):
    '''Returns a new Vowel with its Length lengthened,
    and ":" appended to its IPA symbol.'''
    # Work on a deep copy so the original vowel is left untouched.
    longer = deepcopy(self)
    current = longer[Length]
    if current == Length.short:
        longer[Length] = Length.long
    elif current == Length.long:
        longer[Length] = Length.overlong
    # The IPA length mark is appended regardless of the starting length.
    longer.ipa += ':'
    return longer
|
def crop_3dimage(img, beg_coords, end_coords):
    """Crops a 3d image to the bounding box specified."""
    # Build one slice per axis from the paired begin/end coordinates.
    box = tuple(slice(b, e) for b, e in zip(beg_coords, end_coords))
    return img[box]
|
def report_accounts(self, path, per_region=True, per_capita=False,
                    pic_size=1000, format='rst', ffname=None, **kwargs):
    """Writes a report to the given path for the regional accounts.

    The report consists of a text file and a folder with the pics
    (both names following parameter name).

    Notes
    -----
    This looks prettier with the seaborn module
    (import seaborn before calling this method).

    Parameters
    ----------
    path : pathlib.Path or string
        Root path for the report
    per_region : boolean, optional
        If true, reports the accounts per region
    per_capita : boolean, optional
        If true, reports the accounts per capita
        If per_capita and per_region are False, nothing will be done
    pic_size : int, optional
        size for the figures in px, 1000 by default
    format : string, optional
        file format of the report: 'rst' (default), 'html', 'latex', ...
        except for rst all depend on the module docutils (all writer_name
        from docutils can be used as format)
    ffname : string, optional
        root file name (without extension, per_capita or per_region will
        be attached) and folder names. If None gets passed (default),
        self.name will be modified to get a valid name for the operation
        system without blanks
    **kwargs : key word arguments, optional
        This will be passed directly to the pd.DataFrame.plot method
        (through the self.plot_account method)
    """
    if not per_region and not per_capita:
        return
    # Remember interactive state and rc settings so they can be restored.
    _plt = plt.isinteractive()
    _rcParams = mpl.rcParams.copy()
    # NOTE(review): these values were never applied to matplotlib in the
    # original code; kept unchanged for reference.
    rcParams = {'figure.figsize': (10, 5), 'figure.dpi': 350,
                'axes.titlesize': 20, 'axes.labelsize': 20, }
    plt.ioff()
    if isinstance(path, str):
        path = path.rstrip('\\')
    path = Path(path)
    path.mkdir(parents=True, exist_ok=True)
    if ffname is None:
        # Derive a filesystem-safe name from self.name.
        valid_char = string.ascii_letters + string.digits + '_'
        ffname = self.name.replace(' ', '_')
        ffname = "".join([r for r in ffname if r in valid_char])
    rep_spec = collections.namedtuple(
        'rep_spec', ['make', 'spec_string', 'is_per_capita'])
    reports_to_write = {
        'per region accounts': rep_spec(per_region, '_per_region', False),
        'per capita accounts': rep_spec(per_capita, '_per_capita', True)}
    logging.info('Write report for {}'.format(self.name))
    fig_name_list = []
    for arep in reports_to_write:
        if not reports_to_write[arep].make:
            continue
        report_txt = []
        report_txt.append('###########')
        report_txt.append('MRIO report')
        report_txt.append('###########')
        report_txt.append('\n')
        _ext = 'Extension: ' + self.name + ' - ' + str(arep)
        report_txt.append(_ext)
        report_txt.append('=' * len(_ext))
        report_txt.append('.. contents::\n\n')
        curr_ffname = ffname + reports_to_write[arep].spec_string
        subfolder = path / curr_ffname
        subfolder.mkdir(parents=True, exist_ok=True)
        for row in self.get_rows():
            name_row = (str(row).replace('(', '').replace(')', '')
                        .replace("'", "").replace(' ', '_')
                        .replace(', ', '_').replace('__', '_'))
            graph_name = (self.name + ' - ' + str(row).replace('(', '')
                          .replace(')', '').replace("'", ""))

            # get valid file name
            def clean(varStr):
                # Bug fix: raw string so '\W' is a regex class, not an
                # invalid string escape.
                return re.sub(r'\W|^(?=\d)', '_', varStr)
            file_name = (clean(name_row + reports_to_write[arep].spec_string))
            # possibility of still having __ in there:
            file_name = re.sub('_+', '_', file_name)
            # restrict file length
            file_name = file_name[:50]

            def file_name_nr(a, c):
                return a + '_' + str(c)
            _loopco = 0
            while file_name_nr(file_name, _loopco) in fig_name_list:
                _loopco += 1
            file_name = file_name_nr(file_name, _loopco)
            fig_name_list.append(file_name)
            file_name = file_name + '.png'
            file_name = subfolder / file_name
            file_name_rel = file_name.relative_to(path)
            self.plot_account(
                row, file_name=file_name,
                per_capita=reports_to_write[arep].is_per_capita, **kwargs)
            plt.close()
            report_txt.append(graph_name)
            report_txt.append('-' * len(graph_name) + '\n\n')
            # Bug fix: file_name_rel is a Path; str + Path raises TypeError.
            report_txt.append('.. image:: ' + str(file_name_rel))
            report_txt.append(' :width: {} \n'.format(int(pic_size)))
        # write report file and convert to given format
        report_txt.append('\nReport written on ' + time.strftime("%Y%m%d %H%M%S"))
        fin_txt = '\n'.join(report_txt)
        # Bug fix: was "format is not 'rst'" - identity comparison on a
        # string; use inequality.
        if format != 'rst':
            try:
                import docutils.core as dc
                if format == 'tex':
                    # Bug fix: this was "format == 'latex'", a no-op
                    # comparison instead of the intended assignment.
                    format = 'latex'
                fin_txt = dc.publish_string(
                    fin_txt, writer_name=format,
                    settings_overrides={'output_encoding': 'unicode'})
            except Exception:
                # logging.warn is deprecated; use logging.warning.
                logging.warning('Module docutils not available - write rst instead')
                format = 'rst'
        format_str = {'latex': 'tex', 'tex': 'tex', 'rst': 'txt',
                      'txt': 'txt', 'html': 'html'}
        _repfile = curr_ffname + '.' + format_str.get(format, str(format))
        with open(path / _repfile, 'w') as out_file:
            out_file.write(fin_txt)
        logging.info('Report for {what} written to {file_where}'.format(
            what=arep, file_where=str(_repfile)))
    # restore plot status
    mpl.rcParams.update(_rcParams)
    if _plt:
        plt.ion()
|
def create_from_hdulist(cls, hdulist, **kwargs):
    """Creates and returns an HpxMap object from a FITS HDUList.

    extname : The name of the HDU with the map data
    ebounds : The name of the HDU with the energy bin data
    """
    # Default to the first extension HDU when no 'hdu' kwarg is given.
    extname = kwargs.get('hdu', hdulist[1].name)
    # Energy binning is located automatically within the HDU list.
    energy_bins = fits_utils.find_and_read_ebins(hdulist)
    return cls.create_from_hdu(hdulist[extname], energy_bins)
|
def get_system_uptime_output_show_system_uptime_rbridge_id(self, **kwargs):
    """Auto Generated Code

    Builds the request tree
    ``get_system_uptime/output/show-system-uptime/rbridge-id`` and passes
    it to the callback (``self._callback`` unless overridden via kwargs).
    """
    # NOTE: the original created an unused ET.Element("config") before
    # rebinding the name to the request root; only the root is sent.
    root = ET.Element("get_system_uptime")
    output_el = ET.SubElement(root, "output")
    uptime_el = ET.SubElement(output_el, "show-system-uptime")
    rbridge_el = ET.SubElement(uptime_el, "rbridge-id")
    rbridge_el.text = kwargs.pop('rbridge_id')
    callback = kwargs.pop('callback', self._callback)
    return callback(root)
|
def create_connection(self, from_obj, to_obj):
    """Creates and returns a connection between the given objects. If a
    connection already exists, that connection will be returned instead."""
    # Reject object pairs whose content types don't fit this relationship.
    self._validate_ctypes(from_obj, to_obj)
    connection, _created = Connection.objects.get_or_create(
        relationship_name=self.name,
        from_pk=from_obj.pk,
        to_pk=to_obj.pk,
    )
    return connection
|
def plot_map(x, y, z, ax=None, cmap=None, ncontours=100, vmin=None, vmax=None,
             levels=None, cbar=True, cax=None, cbar_label=None,
             cbar_orientation='vertical', norm=None, **kwargs):
    """Plot a two-dimensional map from data on a grid.

    Parameters
    ----------
    x, y, z : ndarray(T)
        Binned x-, y- and z-coordinates.
    ax : matplotlib.Axes object, optional, default=None
        The ax to plot to; if ax=None, a new ax (and fig) is created.
    cmap : matplotlib colormap, optional, default=None
        The color map to use.
    ncontours : int, optional, default=100
        Number of contour levels.
    vmin, vmax : float, optional, default=None
        Lowest / highest z-value to be plotted.
    levels : iterable of float, optional, default=None
        Contour levels to plot.
    cbar : boolean, optional, default=True
        Plot a color bar.
    cax : matplotlib.Axes object, optional, default=None
        Plot the colorbar into a custom axes object instead of stealing
        space from ax.
    cbar_label : str, optional, default=None
        Colorbar label string; use None to suppress it.
    cbar_orientation : str, optional, default='vertical'
        Colorbar orientation; choose 'vertical' or 'horizontal'.
    norm : matplotlib norm, optional, default=None
        Use a norm when coloring the contour plot.
    **kwargs
        Forwarded (after pruning) to matplotlib's contourf; see the
        matplotlib documentation for the accepted options (corner_mask,
        alpha, locator, extend, xunits/yunits, antialiased, nchunk,
        hatches, zorder, ...).

    Returns
    -------
    fig : matplotlib.Figure object
        The figure in which the used ax resides.
    ax : matplotlib.Axes object
        The ax in which the map was plotted.
    misc : dict
        Contains a matplotlib.contour.QuadContourSet 'mappable' and,
        if requested, a matplotlib.Colorbar object 'cbar'.
    """
    import matplotlib.pyplot as _plt
    # Create a fresh figure unless the caller provided an axes object.
    if ax is None:
        fig, ax = _plt.subplots()
    else:
        fig = ax.get_figure()
    mappable = ax.contourf(
        x, y, z, ncontours, norm=norm, vmin=vmin, vmax=vmax, cmap=cmap,
        levels=levels, **_prune_kwargs(kwargs))
    misc = dict(mappable=mappable)
    if cbar_orientation not in ('horizontal', 'vertical'):
        raise ValueError('cbar_orientation must be "horizontal" or "vertical"')
    if cbar:
        # Either steal space from ax or draw into the user-supplied cax.
        target = dict(cax=cax) if cax is not None else dict(ax=ax)
        cbar_ = fig.colorbar(mappable, orientation=cbar_orientation, **target)
        if cbar_label is not None:
            cbar_.set_label(cbar_label)
        misc.update(cbar=cbar_)
    return fig, ax, misc
|
def convert_pdf_to_txt(pdf, startpage=None):
    """Convert a pdf file to text and return the text.

    This method requires pdftotext to be installed.

    Parameters
    ----------
    pdf : str
        path to pdf file
    startpage : int, optional
        the first page we try to convert

    Returns
    -------
    str
        the converted text
    """
    # Optional "-f <page>" flag selects the first page to convert.
    page_args = [] if startpage is None else ['-f', str(startpage)]
    command = ["pdftotext", "-q"] + page_args + [pdf, "-"]
    stdout = subprocess.Popen(command, stdout=subprocess.PIPE).communicate()[0]
    # communicate() yields bytes on Python 3; decode to str if needed.
    if not isinstance(stdout, str):
        stdout = stdout.decode()
    return stdout
|
def get_function(self):
    """Return function object for my function.

    The dotted path in ``self.function`` is resolved lazily and the result
    is cached on the instance.

    raise ProcessorConfigurationError when function could not be resolved.
    """
    if not hasattr(self, '_function'):
        try:
            modname, funcname = self.function.rsplit('.', 1)
            mod = import_module(modname)
            self._function = getattr(mod, funcname)
        except (ImportError, AttributeError, ValueError) as err:
            # Bug fix: the Python 2-only "except (...), err" syntax (a
            # SyntaxError on Python 3) was replaced with "as err".
            raise ProcessorConfigurationError(err)
    return self._function
|
def bigram(self, items):
    """generate bigrams of either items="tokens" or "stems" """
    def join_pairs(token_list):
        # Pair consecutive tokens and join each pair with a dot.
        return [first + '.' + second
                for first, second in nltk.bigrams(token_list)]
    if items == "tokens":
        self.bigrams = [join_pairs(doc) for doc in self.tokens]
    elif items == "stems":
        self.bigrams = [join_pairs(doc) for doc in self.stems]
    else:
        raise ValueError("Items must be either \'tokens\' or \'stems\'.")
|
def distance(self, xyz=(0.00, 0.00, 0.00), records=('ATOM', 'HETATM')):
    """Computes Euclidean distance between atoms and a 3D point.

    Parameters
    ----------
    xyz : tuple, default: (0.00, 0.00, 0.00)
        X, Y, and Z coordinate of the reference center for the distance
        computation.
    records : iterable, default: ('ATOM', 'HETATM')
        Specify which record sections to consider. For example, to consider
        both protein and ligand atoms, set `records=('ATOM', 'HETATM')`.
        For downward compatibility, a string argument is still supported
        but deprecated and will be removed in future versions.

    Returns
    -------
    pandas.Series : Pandas Series object containing the Euclidean
        distance between the atoms in the record section and `xyz`.
    """
    if isinstance(records, str):
        warnings.warn('Using a string as `records` argument is '
                      'deprecated and will not be supported in future'
                      ' versions. Please use a tuple or'
                      ' other iterable instead', DeprecationWarning)
        records = (records,)
    # Stack the requested record sections into one frame.
    frame = pd.concat(objs=[self.df[section] for section in records])
    deltas = frame[['x_coord', 'y_coord', 'z_coord']].subtract(xyz, axis=1)
    return np.sqrt((deltas ** 2).sum(axis=1))
|
def get_repository_hierarchy_design_session(self):
    """Gets the repository hierarchy design session.

    return: (osid.repository.RepositoryHierarchyDesignSession) - a
        ``RepostoryHierarchyDesignSession``
    raise: OperationFailed - unable to complete request
    raise: Unimplemented - ``supports_repository_hierarchy_design()``
        is ``false``
    *compliance: optional - This method must be implemented if
    ``supports_repository_hierarchy_design()`` is ``true``.*
    """
    if self.supports_repository_hierarchy_design():
        # pylint: disable=no-member
        return sessions.RepositoryHierarchyDesignSession(runtime=self._runtime)
    raise errors.Unimplemented()
|
def setEditorData(self, editor, index):
    """Provides the widget with data to manipulate.

    Calls the setEditorValue of the config tree item at the index.
    :type editor: QWidget
    :type index: QModelIndex
    Reimplemented from QStyledItemDelegate.
    """
    # Fetch the edit-role value through the model so this mirrors the
    # data path used by setModelData, then push it into the editor.
    model = index.model()
    editor.setData(model.data(index, Qt.EditRole))
|
def parseFile(self, fil):
    """Opens a file and parses it.

    Bug fix: the file handle is now managed with a ``with`` block so it is
    closed even when ``parseStr`` raises (the original leaked the handle
    on error).

    :param fil: path of the file to read and parse
    """
    with open(fil) as f:
        self.parseStr(f.read())
|
def dumpgrants(destination, as_json=None, setspec=None):
    """Harvest grants from OpenAIRE and store them locally.

    :param destination: path of the local dump database
    :param as_json: forwarded to the dumper's ``dump`` call
    :param setspec: OAI set specification passed to the dumper
    """
    if os.path.isfile(destination):
        # Bug fix: the two concatenated message fragments previously ran
        # together ("exists.Do you ..."); add the missing space.
        click.confirm("Database '{0}' already exists. "
                      "Do you want to write to it?".format(destination),
                      abort=True)
    # no cover
    dumper = OAIREDumper(destination, setspec=setspec)
    dumper.dump(as_json=as_json)
|
def parse_environment_file_list(names, world_size=(60, 60)):
    """Extract information about spatial resources from all environment files
    in a list.

    Arguments:
    names - a list of strings representing the paths to the environment
            files. A single path may also be passed as a plain string.
    world_size - a tuple representing the x and y coordinates of the world.
            (default: 60x60)

    Returns a dictionary in which the keys are filenames and the values are
    list of lists of sets indicating the set of resources
    available at each x, y location in the Avida grid for that environment.
    """
    # Bug fix: the old probe (`names[0] = names[0]` under a bare except)
    # wrapped *any* sequence that rejects item assignment - including a
    # tuple of file names - into a single element. Test for a lone string
    # explicitly instead.
    if isinstance(names, str):
        names = [names]
    return [parse_environment_file(name, world_size) for name in names]
|
def Copy(self, name=None):
    """Returns a copy of this Cdf.

    Args:
        name: string name for the new Cdf
    """
    # Fall back to this Cdf's own name when none is supplied.
    new_name = self.name if name is None else name
    # Copy the underlying sequences so the new Cdf is independent.
    return Cdf(list(self.xs), list(self.ps), new_name)
|
def contributors(lancet, output):
    """List all contributors visible in the git history."""
    # Walk commits oldest-first so the earliest author entry wins in the
    # OrderedDict (later duplicates of the same name are ignored).
    walk_order = pygit2.GIT_SORT_TIME | pygit2.GIT_SORT_REVERSE
    history = lancet.repo.walk(lancet.repo.head.target, walk_order)
    authors = OrderedDict((c.author.name, c.author.email) for c in history)
    template_path = lancet.config.get('packaging', 'contributors_template')
    template = Template(content_from_path(template_path))
    output.write(template.render(contributors=authors).encode('utf-8'))
|
def touch_import(package, name, node):
    """Works like `does_tree_import` but adds an import statement
    if it was not imported.

    :param package: dotted package name, or None for a plain ``import name``
    :param name: the name to import
    :param node: any node inside the tree that should receive the import
    """
    def is_import_stmt(node):
        # A simple statement whose first child is an import node.
        return (node.type == syms.simple_stmt and node.children
                and is_import(node.children[0]))
    root = find_root(node)
    if does_tree_import(package, name, root):
        # Already imported - nothing to do.
        return
    # figure out where to insert the new import.  First try to find
    # the first import and then skip to the last one.
    insert_pos = offset = 0
    for idx, node in enumerate(root.children):
        if not is_import_stmt(node):
            continue
        # Found the first import; advance past the contiguous import run.
        for offset, node2 in enumerate(root.children[idx:]):
            if not is_import_stmt(node2):
                break
        insert_pos = idx + offset
        break
    # if there are no imports where we can insert, find the docstring.
    # if that also fails, we stick to the beginning of the file
    if insert_pos == 0:
        for idx, node in enumerate(root.children):
            if (node.type == syms.simple_stmt and node.children
                    and node.children[0].type == token.STRING):
                insert_pos = idx + 1
                break
    if package is None:
        # Plain "import name" statement.
        import_ = Node(syms.import_name, [
            Leaf(token.NAME, u"import"),
            Leaf(token.NAME, name, prefix=u" ")])
    else:
        # "from package import name" statement.
        import_ = FromImport(package, [Leaf(token.NAME, name, prefix=u" ")])
    children = [import_, Newline()]
    root.insert_child(insert_pos, Node(syms.simple_stmt, children))
|
async def on_message(message):
    """The on_message event handler for this module.

    Args:
        message (discord.Message): Input message
    """
    # Simplify message info
    server = message.server
    author = message.author
    channel = message.channel
    content = message.content
    data = datatools.get_data()
    # Bail out early when this module is deactivated for the server.
    if not data["discord"]["servers"][server.id][_data.modulename]["activated"]:
        return
    # Only reply to server messages and don't reply to myself
    if server is not None and author != channel.server.me:
        # Commands section
        prefix = data["discord"]["servers"][server.id]["prefix"]
        if content.startswith(prefix):
            # Parse message: first token minus the prefix is the command.
            package = content.split(" ")
            command = package[0][len(prefix):]
            # Commands
            if command == 'gamedeals':
                await client.send_typing(channel)
                # Get posts from Reddit API
                posts = api_reddit.get_top10()
                if posts:
                    for post in posts:
                        # Create embed UI for each fetched post.
                        embed = ui_embed.success(channel, post)
                        await embed.send()
                else:
                    # No posts available - show a "no results" embed.
                    embed = ui_embed.no_results(channel)
                    await embed.send()
|
def random_draw(self, size=None):
    """Draw random samples of the hyperparameters.

    Parameters
    ----------
    size : None, int or array-like, optional
        The number/shape of samples to draw. If None, only one sample is
        returned. Default is None.
    """
    # Bug fix: `scipy.asarray` was removed from SciPy's top-level
    # namespace; use NumPy directly (the returned arrays were always
    # NumPy arrays anyway). Local import keeps this chunk self-contained.
    import numpy
    return numpy.asarray([
        scipy.stats.lognorm.rvs(s, loc=0, scale=em, size=size)
        for s, em in zip(self.sigma, self.emu)
    ])
|
def evaluate(self, m):
    """Search for comments.

    Dispatches on which named group of the scanning regex matched and
    keeps ``self.line_num`` in sync by counting newlines in the consumed
    text.

    :param m: regex match object providing the groups 'strings', 'code',
        'comments', 'block' and 'start'
    """
    g = m.groupdict()
    if g["strings"]:
        # String literal matched - handled by the string evaluator.
        self.evaluate_strings(g)
        self.line_num += g['strings'].count('\n')
    elif g["code"]:
        # Plain code - nothing to extract, just track line numbers.
        self.line_num += g["code"].count('\n')
    else:
        # A comment matched; decide which flavour it is.
        if g['block']:
            self.evaluate_block(g)
        elif g['start'] is None:
            # No 'start' group captured - treated as an inline tail.
            self.evaluate_inline_tail(g)
        else:
            self.evaluate_inline(g)
        self.line_num += g['comments'].count('\n')
|
def _load_state(self, context):
    """Load state from cookie to the context.

    :type context: satosa.context.Context
    :param context: Session context
    """
    try:
        state = cookie_to_state(
            context.cookie,
            self.config["COOKIE_STATE_NAME"],
            self.config["STATE_ENCRYPTION_KEY"],
        )
    except SATOSAStateError as err:
        # Decryption failed: log it and continue with a fresh, empty state.
        msg = 'Failed to decrypt state {state} with {error}'.format(
            state=context.cookie, error=str(err))
        satosa_logging(logger, logging.WARNING, msg, None)
        state = State()
    finally:
        context.state = state
|
def future(self, rev=None):
    """Return a Mapping of items after the given revision.

    Default revision is the last one looked up.
    """
    # An explicit revision repositions the window before the view is built.
    if rev is not None:
        self.seek(rev)
    return WindowDictFutureView(self._future)
|
def get_items_by_ids(self, item_ids, item_type=None):
    """Given a list of item ids, return all the Item objects.

    Args:
        item_ids (obj): List of item IDs to query
        item_type (str): (optional) Item type to filter results with

    Returns:
        List of `Item` objects for given item IDs and given item type
    """
    endpoint_urls = [urljoin(self.item_url, f"{item_id}.json") for item_id in item_ids]
    responses = self._run_async(urls=endpoint_urls)
    # Drop empty responses before wrapping each payload in an Item.
    items = [Item(resp) for resp in responses if resp]
    if not item_type:
        return items
    return [itm for itm in items if itm.item_type == item_type]
|
def __sendCommand(self, cmd):
    """send specific command to reference unit over serial port

    Args:
        cmd: OpenThread_WpanCtl command string

    Returns:
        Fail: Failed to send the command to reference unit and parse it
        Value: successfully retrieve the desired value from reference unit
        Error: some errors occur, indicates by the followed specific error number
    """
    logging.info('%s: sendCommand[%s]', self.port, cmd)
    if self.logThreadStatus == self.logStatus['running']:
        # Ask the log-reader thread to pause so it does not consume the
        # response lines, then busy-wait until it acknowledges.
        self.logThreadStatus = self.logStatus['pauseReq']
        while self.logThreadStatus != self.logStatus['paused'] and self.logThreadStatus != self.logStatus['stop']:
            pass
    ssh_stdin = None
    ssh_stdout = None
    ssh_stderr = None
    try:
        # command retransmit times
        retry_times = 3
        while retry_times > 0:
            retry_times -= 1
            try:
                if self._is_net:
                    # Networked device: run the command over SSH.
                    ssh_stdin, ssh_stdout, ssh_stderr = self.handle.exec_command(cmd)
                else:
                    # Serial device: write the line and echo-match it.
                    self._sendline(cmd)
                    self._expect(cmd)
            except Exception as e:
                logging.exception('%s: failed to send command[%s]: %s', self.port, cmd, str(e))
                if retry_times == 0:
                    raise
            else:
                # Sent successfully; stop retrying.
                break
        line = None
        response = []
        retry_times = 20
        stdout_lines = []
        stderr_lines = []
        if self._is_net:
            stdout_lines = ssh_stdout.readlines()
            stderr_lines = ssh_stderr.readlines()
            # stderr output takes priority; any "Not Found"/"failed with
            # error" line aborts with the sentinel string 'Fail'.
            if stderr_lines:
                for stderr_line in stderr_lines:
                    if re.search(r'Not\s+Found|failed\s+with\s+error', stderr_line.strip(), re.M | re.I):
                        print "Command failed:" + stderr_line
                        return 'Fail'
                    print "Got line: " + stderr_line
                    logging.info('%s: the read line is[%s]', self.port, stderr_line)
                    response.append(str(stderr_line.strip()))
            elif stdout_lines:
                for stdout_line in stdout_lines:
                    logging.info('%s: the read line is[%s]', self.port, stdout_line)
                    if re.search(r'Not\s+Found|failed\s+with\s+error', stdout_line.strip(), re.M | re.I):
                        print "Command failed"
                        return 'Fail'
                    print "Got line: " + stdout_line
                    logging.info('%s: send command[%s] done!', self.port, cmd)
                    response.append(str(stdout_line.strip()))
            # Terminate the response with the prompt marker so callers can
            # treat SSH output like serial output.
            response.append(WPAN_CARRIER_PROMPT)
            return response
        else:
            # Serial path: poll up to 20 times (0.1 s apart) for lines until
            # the prompt is seen.
            while retry_times > 0:
                line = self._readline()
                print "read line: %s" % line
                logging.info('%s: the read line is[%s]', self.port, line)
                if line:
                    response.append(line)
                    if re.match(WPAN_CARRIER_PROMPT, line):
                        break
                    elif re.search(r'Not\s+Found|failed\s+with\s+error', line, re.M | re.I):
                        print "Command failed"
                        return 'Fail'
                retry_times -= 1
                time.sleep(0.1)
            if retry_times == 0:
                raise Exception('%s: failed to find end of response' % self.port)
            logging.info('%s: send command[%s] done!', self.port, cmd)
            return response
    except Exception, e:
        ModuleHelper.WriteIntoDebugLogger('sendCommand() Error: ' + str(e))
        raise
|
def close(self):
    """Close the connection to the server.

    Idempotent: the protocol reference is cleared (set to ``None``) after the
    first call, so calling ``close()`` again is a no-op.  The previous
    implementation used ``del self._protocol``, which removed the attribute
    entirely and made a second call raise ``AttributeError``.
    """
    if self._protocol is not None:
        self._protocol.processor.close()
        # Clear instead of deleting so the guard above keeps working.
        self._protocol = None
|
def _get_current_userprofile():
    """Get current user profile.

    .. note:: If the user is anonymous, then a
       :class:`invenio_userprofiles.models.AnonymousUserProfile` instance is
       returned.

    :returns: The :class:`invenio_userprofiles.models.UserProfile` instance.
    """
    if current_user.is_anonymous:
        return AnonymousUserProfile()
    # Prefer a profile cached on the application context; otherwise look it
    # up in the database by the current user's id.
    profile = g.get('userprofile',
                    UserProfile.get_by_userid(current_user.get_id()))
    if profile is None:
        # No stored profile yet: create a transient one for this user.
        profile = UserProfile(user_id=int(current_user.get_id()))
    # Cache (or refresh) the profile on the application context.
    g.userprofile = profile
    return profile
|
def list_tables(self, like=None, database=None):
    """List tables in the current (or indicated) database. Like the SHOW
    TABLES command in the impala-shell.

    Parameters
    like : string, default None
        e.g. 'foo*' to match all tables starting with 'foo'
    database : string, default None
        If not passed, uses the current/default database

    Returns
    tables : list of strings
    """
    statement = 'SHOW TABLES'
    if database:
        statement += ' IN {0}'.format(database)
    if like:
        match = ddl.fully_qualified_re.match(like)
        if match:
            # 'db.pattern' form: split the database out and re-dispatch.
            database, quoted, unquoted = match.groups()
            return self.list_tables(like=quoted or unquoted, database=database)
        statement += " LIKE '{0}'".format(like)
    with self._execute(statement, results=True) as cur:
        return self._get_list(cur)
|
def get_percentage_volume_change(self):
    """Returns the percentage volume change.

    Returns:
        Volume change in percentage, e.g., 0.055 implies a 5.5% increase.
    """
    # Fractional change: (final / initial) - 1.
    ratio = self.final.lattice.volume / self.initial.lattice.volume
    return ratio - 1
|
def get_photo_url(photo_id):
    """Request the photo download url with the photo id.

    :param photo_id: The photo id of flickr
    :type photo_id: str
    :return: Photo download url
    :rtype: str
    """
    global image_size_mode
    request_args = _get_request_args('flickr.photos.getSizes', photo_id=photo_id)
    resp = requests.post(API_URL, data=request_args)
    resp_json = json.loads(resp.text.encode('utf-8'))
    logger.debug(json.dumps(resp_json, indent=2))
    sizes = resp_json['sizes']['size']
    # Shrink the shared size mode if this photo offers fewer sizes than the
    # currently selected index.
    if len(sizes) < image_size_mode:
        image_size_mode = len(sizes)
    # Index from the end: larger image_size_mode selects a smaller size.
    return sizes[-image_size_mode]['source']
|
def replace(key, value, host=DEFAULT_HOST, port=DEFAULT_PORT, time=DEFAULT_TIME, min_compress_len=DEFAULT_MIN_COMPRESS_LEN):
    '''
    Replace a key on the memcached server. This only succeeds if the key
    already exists. This is the opposite of :mod:`memcached.add
    <salt.modules.memcached.add>`

    CLI Example:

    .. code-block:: bash

        salt '*' memcached.replace <key> <value>
    '''
    if not isinstance(time, six.integer_types):
        raise SaltInvocationError('\'time\' must be an integer')
    if not isinstance(min_compress_len, six.integer_types):
        raise SaltInvocationError('\'min_compress_len\' must be an integer')
    conn = _connect(host, port)
    # The call is kept (presumably as a connectivity probe before the
    # replace — TODO confirm against sibling functions), but the previously
    # unused `stats` local has been dropped.
    conn.get_stats()
    return conn.replace(key, value, time=time, min_compress_len=min_compress_len)
|
def read(self, num_bytes):
    """Read `num_bytes` from the compressed data chunks.

    Data is returned as `bytes` of length `num_bytes`.
    Will raise an EOFError if data is unavailable.
    Note: Will always return `num_bytes` of data (unlike the file read method).
    """
    # Keep inflating IDAT chunks until enough decoded bytes are buffered.
    while len(self.decoded) < num_bytes:
        try:
            tag, payload = next(self.chunks)
        except StopIteration:
            raise EOFError()
        # Non-IDAT chunks carry no pixel data and are skipped.
        if tag == b'IDAT':
            self.decoded += self.decompressor.decompress(payload)
    result, self.decoded = self.decoded[:num_bytes], self.decoded[num_bytes:]
    return result
|
def get_sequence_str(self):
    ''':return: string representation of the sequence'''
    # Join the destination-node names with arrows, e.g. "a->b->c".
    names = (edge.dst.name for edge in self.get_sequence())
    return '->'.join(names)
|
def _val_to_store_info(self, val, min_compress_len):
    """Transform val to a storable representation, returning a tuple of the
    flags, the length of the new value, and the new value itself."""
    flags = 0
    if isinstance(val, str):
        # Plain strings are stored as-is, with no type flag.
        pass
    elif isinstance(val, int):
        flags |= Client._FLAG_INTEGER
        val = "%d" % val
        # force no attempt to compress this silly string.
        min_compress_len = 0
    elif isinstance(val, long):  # Python 2 `long`
        flags |= Client._FLAG_LONG
        val = "%d" % val
        # force no attempt to compress this silly string.
        min_compress_len = 0
    else:
        # Any other type is pickled into a string.
        flags |= Client._FLAG_PICKLE
        file = StringIO()
        if self.picklerIsKeyword:
            pickler = self.pickler(file, protocol=self.pickleProtocol)
        else:
            pickler = self.pickler(file, self.pickleProtocol)
        if self.persistent_id:
            pickler.persistent_id = self.persistent_id
        pickler.dump(val)
        val = file.getvalue()
    lv = len(val)
    # We should try to compress if min_compress_len > 0 and we could
    # import zlib and this string is longer than our min threshold.
    if min_compress_len and _supports_compress and lv > min_compress_len:
        comp_val = compress(val)
        # Only retain the result if the compression result is smaller
        # than the original.
        if len(comp_val) < lv:
            flags |= Client._FLAG_COMPRESSED
            val = comp_val
    # silently do not store if value length exceeds maximum
    if self.server_max_value_length != 0 and len(val) > self.server_max_value_length:
        # NOTE(review): `(0)` is the int 0, not a 1-tuple — callers
        # presumably only test the result's truthiness; confirm before
        # changing this to a tuple.
        return (0)
    return (flags, len(val), val)
|
def _normalize(self, metric_name, submit_method, prefix):
    """Replace case-sensitive metric name characters, normalize the metric
    name, prefix and suffix according to its type."""
    prefix_part = "mongodb." if not prefix else "mongodb.{0}.".format(prefix)
    # Rates get a "ps" (per second) suffix; everything else gets none.
    suffix_part = "ps" if submit_method == RATE else ""
    # Replace case-sensitive metric name characters
    for pattern, repl in iteritems(self.CASE_SENSITIVE_METRIC_NAME_SUFFIXES):
        metric_name = re.compile(pattern).sub(repl, metric_name)
    # Normalize, and wrap
    return u"{metric_prefix}{normalized_metric_name}{metric_suffix}".format(
        normalized_metric_name=self.normalize(metric_name.lower()),
        metric_prefix=prefix_part,
        metric_suffix=suffix_part,
    )
|
def interp_w_v1(self):
    """Calculate the actual water stage based on linear interpolation.

    Required control parameters:
      |llake_control.V|
      |llake_control.W|

    Required state sequence:
      |llake_states.V|

    Calculated state sequence:
      |llake_states.W|

    Examples:

        Prepare a model object:

        >>> from hydpy.models.llake import *
        >>> parameterstep('1d')
        >>> simulationstep('12h')

        For the sake of brevity, define a test function:

        >>> def test(*vs):
        ...     for v in vs:
        ...         states.v.new = v
        ...         model.interp_w_v1()
        ...         print(repr(states.v), repr(states.w))

        Define a simple `w`-`v` relationship consisting of three nodes and
        calculate the water stages for different volumes:

        >>> n(3)
        >>> v(0., 2., 4.)
        >>> w(-1., 1., 2.)

        Perform the interpolation for a few test points:

        >>> test(0., .5, 2., 3., 4., 5.)
        v(0.0) w(-1.0)
        v(0.5) w(-0.5)
        v(2.0) w(1.0)
        v(3.0) w(1.5)
        v(4.0) w(2.0)
        v(5.0) w(2.5)

        The reference water stage of the relationship can be selected
        arbitrarily.  Even negative water stages are returned, as is
        demonstrated by the first two calculations.  For volumes outside
        the range of the (`v`, `w`) pairs, the outer two highest pairs are
        used for linear extrapolation.
    """
    con = self.parameters.control.fastaccess
    new = self.sequences.states.fastaccess_new
    # Find the first supporting point whose volume is >= the current volume.
    # If none qualifies, `jdx` deliberately keeps its final loop value
    # (con.n - 1), which makes the formula below extrapolate linearly from
    # the two highest (v, w) pairs.
    for jdx in range(1, con.n):
        if con.v[jdx] >= new.v:
            break
    # Linear interpolation between the (jdx-1)-th and jdx-th (v, w) pair.
    new.w = ((new.v - con.v[jdx - 1]) * (con.w[jdx] - con.w[jdx - 1]) / (con.v[jdx] - con.v[jdx - 1]) + con.w[jdx - 1])
|
def qteInsertKey(self, keysequence: QtmacsKeysequence, macroName: str):
    """Insert a new key into the key map and associate it with a macro.

    If the key sequence is already associated with a macro then it
    will be overwritten.

    |Args|

    * ``keysequence`` (**QtmacsKeysequence**): associate a macro with
      a key sequence in this key map.
    * ``macroName`` (**str**): macro name.

    |Returns|

    * **None**

    |Raises|

    * **QtmacsArgumentError** if at least one argument has an invalid type.
    """
    # Convert the key sequence into a list of (modifier, keycode) tuples,
    # both expressed as Qt constants.
    keys = keysequence.toQtKeylist()

    # Walk down the key-map tree, creating (or resetting) intermediate
    # nodes so that every prefix of the sequence maps to a nested dict.
    node = self
    for prefix_key in keys[:-1]:
        if prefix_key not in node or not isinstance(node[prefix_key], dict):
            # Entry is missing, or is a leaf (e.g. a previously installed
            # QtmacsMacro reference) where a subtree is needed: replace it
            # with an empty dictionary.
            node[prefix_key] = {}
        node = node[prefix_key]

    # The final key maps directly to the macro name.
    node[keys[-1]] = macroName
|
def basis_state(i, n):
    """``n x 1`` `sympy.Matrix` representing the `i`'th eigenstate of an
    `n`-dimensional Hilbert space (`i` >= 0)."""
    # Column vector of zeros with a single 1 at row `i`.
    state = sympy.zeros(n, 1)
    state[i, 0] = 1
    return state
|
def notify(title, message, api_key=NTFY_API_KEY, provider_key=None, priority=0, url=None, retcode=None):
    """Optional parameters:

    * ``api_key`` - use your own application token
    * ``provider_key`` - if you are whitelisted
    * ``priority``
    * ``url``
    """
    # Validate the priority before anything is sent.
    if not (MIN_PRIORITY <= priority <= MAX_PRIORITY):
        raise ValueError('priority must be an integer from {:d} to {:d}'.format(MIN_PRIORITY, MAX_PRIORITY))
    payload = {
        'apikey': api_key,
        'application': 'ntfy',
        'event': title,
        'description': message,
        'priority': priority,
    }
    if url is not None:
        payload['url'] = url
    if provider_key is not None:
        payload['providerkey'] = provider_key
    resp = requests.post(API_URL, data=payload, headers={'User-Agent': USER_AGENT, })
    resp.raise_for_status()
|
def get_warcinfo(self):
    '''Returns WARCINFO record from the archieve as a single string including
    WARC header. Expects the record to be in the beginning of the archieve,
    otherwise it will be not found.'''
    # The archive is scanned at most once; later calls return the cached
    # result (which may be None).
    if self.searched_for_warcinfo:
        return self.warcinfo
    prev_line = None
    in_warcinfo_record = False
    self.searched_for_warcinfo = True
    for line in self.file_object:
        if not in_warcinfo_record:
            # Look for the record-type header of the first record.
            if line[:11] == b'WARC-Type: ':
                if line[:19] == b'WARC-Type: warcinfo':
                    in_warcinfo_record = True
                    # Include the line preceding the type header (presumably
                    # the WARC/x.y version line — confirm against writer).
                    warcinfo_lines = [prev_line, line]
                else:
                    # The first typed record is not warcinfo: give up rather
                    # than scanning the whole archive (see docstring).
                    self.warcinfo = None
                    break
        else:
            # The start of the next record header terminates the warcinfo
            # record; everything collected so far is joined and cached.
            if line[0:1] == self.w_letter and self.warc_header_re.match(line):
                self.warcinfo = b''.join(warcinfo_lines)
                break
            warcinfo_lines.append(line)
        prev_line = line
    # Rewind so subsequent readers see the archive from the beginning.
    self.file_object.seek(0)
    return self.warcinfo
|
def get_operator(self, operator):
    """|coro|

    Checks the players stats for this operator, only loading them if they
    haven't already been found.

    Parameters
    operator : str
        the name of the operator

    Returns
    :class:`Operator`
        the operator object found
    """
    # Serve from the cache when the operator was already loaded.
    if operator not in self.operators:
        loaded = yield from self.load_operator(operator)
        return loaded
    return self.operators[operator]
|
def recurse(self, value, max_depth=6, _depth=0, **kwargs):
    """Given ``value``, recurse (using the parent serializer) to handle
    coercing of newly defined values."""
    string_max_length = kwargs.get('string_max_length', None)
    _depth += 1
    if _depth >= max_depth:
        # Recursion budget exhausted: degrade to a (possibly truncated)
        # repr of the value instead of descending further.
        try:
            value = text_type(repr(value))[:string_max_length]
        except Exception as e:
            import traceback
            traceback.print_exc()
            self.manager.logger.exception(e)
            # repr() itself failed; fall back to the type name.
            return text_type(type(value))
    # NOTE(review): when the repr succeeded above, the repr string (not the
    # original value) is handed to the manager for one more transform pass.
    return self.manager.transform(value, max_depth=max_depth, _depth=_depth, **kwargs)
|
def q(line, cell=None, _ns=None):
    """Run q code.

    Options:
        -l (dir|script) - pre-load database or script
        -h host:port - execute on the given host
        -o var - send output to a variable named var.
        -i var1,..,varN - input variables
        -1/-2 - redirect stdout/stderr
    """
    if cell is None:
        # Line magic: evaluate the single expression locally and return it.
        return pyq.q(line)
    if _ns is None:
        # Default to the interactive (__main__) namespace.
        _ns = vars(sys.modules['__main__'])
    input = output = None
    preload = []
    outs = {}
    try:
        # Execution handle; presumably '0i' yields the local handle 0 —
        # replaced below when -h selects a remote host.
        h = pyq.q('0i')
        if line:
            # Parse the magic's option string.
            for opt, value in getopt(line.split(), "h:l:o:i:12")[0]:
                if opt == '-l':
                    preload.append(value)
                elif opt == '-h':
                    h = pyq.K(str(':' + value))
                elif opt == '-o':
                    output = str(value)
                    # (see #673)
                elif opt == '-i':
                    input = str(value).split(',')
                elif opt in ('-1', '-2'):
                    outs[int(opt[1])] = None
        if outs:
            # Std-stream redirection only works for local execution.
            if int(h) != 0:
                raise ValueError("Cannot redirect remote std stream")
            for fd in outs:
                # Point the q std stream at a fresh temp file; the name is
                # unlinked immediately so the open descriptor is the only
                # reference left.
                tmpfd, tmpfile = mkstemp()
                try:
                    pyq.q(r'\%d %s' % (fd, tmpfile))
                finally:
                    os.unlink(tmpfile)
                    os.close(tmpfd)
        r = None
        # Pre-load any databases/scripts requested with -l.
        for script in preload:
            h(pyq.kp(r"\l " + script))
        if input is not None:
            # Wrap each chunk in a q lambda taking the -i variables and call
            # it with their current Python values.
            for chunk in logical_lines(cell):
                func = "{[%s]%s}" % (';'.join(input), chunk)
                args = tuple(_ns[i] for i in input)
                if r != Q_NONE:
                    r.show()
                r = h((pyq.kp(func),) + args)
            if outs:
                _forward_outputs(outs)
        else:
            # No input variables: execute each chunk directly.
            for chunk in logical_lines(cell):
                if r != Q_NONE:
                    r.show()
                r = h(pyq.kp(chunk))
            if outs:
                _forward_outputs(outs)
    except pyq.kerr as e:
        # q errors are reported in q's own leading-quote style.
        print("'%s" % e)
    else:
        if output is not None:
            if output.startswith('q.'):
                # 'q.name' targets a q-side global instead of Python.
                pyq.q('@[`.;;:;]', output[2:], r)
            else:
                _ns[output] = r
        else:
            if r != Q_NONE:
                return r
|
def discover_engines(self, executor=None):
    """Discover configured engines.

    :param executor: Optional executor module override
    """
    # Resolve the executor name from settings unless explicitly overridden.
    if executor is None:
        executor = getattr(settings, 'FLOW_EXECUTOR', {}).get('NAME', 'resolwe.flow.executors.local')
    self.executor = self.load_executor(executor)
    logger.info(__(
        "Loaded '{}' executor.",
        str(self.executor.__class__.__module__).replace('.prepare', '')
    ))

    expression_names = getattr(settings, 'FLOW_EXPRESSION_ENGINES',
                               ['resolwe.flow.expression_engines.jinja'])
    self.expression_engines = self.load_expression_engines(expression_names)
    logger.info(__(
        "Found {} expression engines: {}",
        len(self.expression_engines),
        ', '.join(self.expression_engines.keys())
    ))

    execution_names = getattr(settings, 'FLOW_EXECUTION_ENGINES',
                              ['resolwe.flow.execution_engines.bash'])
    self.execution_engines = self.load_execution_engines(execution_names)
    logger.info(__(
        "Found {} execution engines: {}",
        len(self.execution_engines),
        ', '.join(self.execution_engines.keys())
    ))
|
def read_config(desired_type: Type[ConfigParser], file_object: TextIOBase,
                logger: Logger, *args, **kwargs) -> ConfigParser:
    """Helper method to read a configuration file according to the
    'configparser' format, and return it as a dictionary of dictionaries
    (section > [property > value]).

    :param file_object: an open text stream containing the INI-style content
    :return: the populated ConfigParser instance
    """
    # see https://docs.python.org/3/library/configparser.html for details
    parser = ConfigParser()
    parser.read_file(file_object)
    return parser
|
def draw(self, img, pixmapper, bounds):
    '''draw the thumbnail on the image'''
    if self.hidden:
        return
    thumb = self.img()
    (center_x, center_y) = pixmapper(self.latlon)
    (w, h) = image_shape(thumb)
    # Shift from the centre point to the top-left corner.
    left = center_x - w // 2
    top = center_y - h // 2
    # Clip the thumbnail rectangle against the target image.
    (left, top, sx, sy, w, h) = self.clip(left, top, w, h, img)
    img[top:top + h, left:left + w] = thumb[sy:sy + h, sx:sx + w]
    # remember where we placed it for clicked()
    self.posx = left + w // 2
    self.posy = top + h // 2
|
def run(self) -> Generator[Tuple[int, int, str, type], None, None]:
    """Yields:
        tuple (line_number: int, offset: int, text: str, check: type)
    """
    # Non-test files are skipped entirely.
    if not is_test_file(self.filename):
        return
    self.load()
    for func in self.all_funcs():
        try:
            for error in func.check_all():
                yield (error.line_number, error.offset, error.text, Checker)
        except ValidationError as error:
            yield error.to_flake8(Checker)
|
def split_pow_tgh(self, text):
    """Split a power/toughness string on the correct slash.

    Correctly accounts for curly braces to denote fractions.
    E.g., '2/2' --> ['2', '2']
    '3{1/2}/3{1/2}' --> ['3{1/2}', '3{1/2}']
    """
    # Split only on slashes followed by brace-balanced text.  re.split also
    # emits the lookahead's capture group matches, hence the None filter.
    pieces = re.split(r"/(?=([^{}]*{[^{}]*})*[^{}]*$)", text)
    return [piece for piece in pieces if piece is not None][:2]
|
def _set_circuit_type(self, v, load=False):
    """Setter method for circuit_type, mapped from YANG variable
    /routing_system/interface/ve/intf_isis/interface_isis/circuit_type
    (enumeration).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_circuit_type is considered as a private method. Backends
    looking to populate this variable should do so via calling
    thisObj._set_circuit_type() directly.
    """
    # Auto-generated (pyangbind-style) setter: coerce, validate, store.
    if hasattr(v, "_utype"):
        # Unwrap values that carry their own YANG type converter.
        v = v._utype(v)
    try:
        # Validate against the enumeration {level-1, level-2, level-1-2}.
        t = YANGDynClass(v, base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'level-1-2': {'value': 3}, u'level-2': {'value': 2}, u'level-1': {'value': 1}},), is_leaf=True, yang_name="circuit-type", rest_name="circuit-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Define inter-area/intra area operation mode', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='enumeration', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({'error-string': """circuit_type must be of a type compatible with enumeration""", 'defined-type': "brocade-isis:enumeration", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'level-1-2': {'value': 3}, u'level-2': {'value': 2}, u'level-1': {'value': 1}},), is_leaf=True, yang_name="circuit-type", rest_name="circuit-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Define inter-area/intra area operation mode', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='enumeration', is_config=True)""", })
    self.__circuit_type = t
    if hasattr(self, '_set'):
        # Notify the generated container that a child value changed.
        self._set()
|
def safe_load(fname):
    """Load the file fname and make sure it can be done in parallel.

    An inter-process lock on ``fname + '.lck'`` guards the read (presumably
    paired with a writer taking the same lock — confirm at the write site).

    Parameters
    fname : str
        The path name

    Returns
    The document parsed by ``ordered_yaml_load``.
    """
    lock = fasteners.InterProcessLock(fname + '.lck')
    lock.acquire()
    try:
        with open(fname) as f:
            return ordered_yaml_load(f)
    finally:
        # Always release the lock, whether the read succeeded or raised.
        # (A bare `except: raise` here would be a no-op; `finally` alone
        # already guarantees the release.)
        lock.release()
|
def _fetch_dataframe ( self ) :
"""Return a pandas dataframe with all the training jobs , along with their
hyperparameters , results , and metadata . This also includes a column to indicate
if a training job was the best seen so far ."""
|
def reshape ( training_summary ) : # Helper method to reshape a single training job summary into a dataframe record
out = { }
for k , v in training_summary [ 'TunedHyperParameters' ] . items ( ) : # Something ( bokeh ? ) gets confused with ints so convert to float
try :
v = float ( v )
except ( TypeError , ValueError ) :
pass
out [ k ] = v
out [ 'TrainingJobName' ] = training_summary [ 'TrainingJobName' ]
out [ 'TrainingJobStatus' ] = training_summary [ 'TrainingJobStatus' ]
out [ 'FinalObjectiveValue' ] = training_summary . get ( 'FinalHyperParameterTuningJobObjectiveMetric' , { } ) . get ( 'Value' )
start_time = training_summary . get ( 'TrainingStartTime' , None )
end_time = training_summary . get ( 'TrainingEndTime' , None )
out [ 'TrainingStartTime' ] = start_time
out [ 'TrainingEndTime' ] = end_time
if start_time and end_time :
out [ 'TrainingElapsedTimeSeconds' ] = ( end_time - start_time ) . total_seconds ( )
return out
# Run that helper over all the summaries .
df = pd . DataFrame ( [ reshape ( tjs ) for tjs in self . training_job_summaries ( ) ] )
return df
|
def get_theme(self):
    """Gets theme settings from settings service. Falls back to default (LMS)
    theme if settings service is not available, xblock theme settings are not
    set or does contain mentoring theme settings."""
    settings_dict = self.get_xblock_settings(default={})
    has_theme = bool(settings_dict) and self.theme_key in settings_dict
    if not has_theme:
        # No usable xblock settings: fall back to the default theme.
        return self.default_theme_config
    return settings_dict[self.theme_key]
|
def wait_for_port_open(self, postmaster, timeout) -> bool:
    """Waits until PostgreSQL opens ports.

    Returns True when PostgreSQL responds (or its status cannot be
    determined), False on cancellation, postmaster death, or timeout.
    """
    for _ in polling_loop(timeout):
        # Abort early if another thread requested cancellation.
        with self._cancellable_lock:
            if self._is_cancelled:
                return False
        # Give up if the postmaster process died while we were waiting.
        if not postmaster.is_running():
            logger.error('postmaster is not running')
            self.set_state('start failed')
            return False
        isready = self.pg_isready()
        if isready != STATE_NO_RESPONSE:
            if isready not in [STATE_REJECT, STATE_RUNNING]:
                # Unexpected pg_isready status: assume PostgreSQL is up
                # rather than blocking startup forever.
                logger.warning("Can't determine PostgreSQL startup status, assuming running")
            return True
    logger.warning("Timed out waiting for PostgreSQL to start")
    return False
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.