signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def _add_reference(self, obj, ident=0):
    """Add a read reference to the marshaler storage.

    :param obj: Reference to add
    :param ident: Log indentation level
    """
    # The handle is the index the object will occupy, offset by the base.
    handle = len(self.references) + self.BASE_REFERENCE_IDX
    log_debug(
        "## New reference handle 0x{0:X}: {1} -> {2}".format(
            handle, type(obj).__name__, repr(obj),
        ),
        ident,
    )
    self.references.append(obj)
|
def get_activity_lookup_session(self, proxy):
    """Get the ``OsidSession`` associated with the activity lookup service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.learning.ActivityLookupSession) - an
            ``ActivityLookupSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_activity_lookup()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_activity_lookup()`` is ``true``.*
    """
    if self.supports_activity_lookup():
        # pylint: disable=no-member
        return sessions.ActivityLookupSession(proxy=proxy, runtime=self._runtime)
    raise errors.Unimplemented()
|
def _evaluate(self, *args, **kwargs):
    """Evaluate the actions (jr, lz, jz).

    NAME:
        __call__ (_evaluate)
    PURPOSE:
        evaluate the actions (jr, lz, jz)
    INPUT:
        Either:
            a) R, vR, vT, z, vz[, phi]:
                1) floats: phase-space value for single object (phi is optional) (each can be a Quantity)
                2) numpy.ndarray: [N] phase-space values for N objects (each can be a Quantity)
            b) Orbit instance: initial condition used if that's it, orbit(t) if there is a time given as well as the second argument
        delta= (object-wide default) can be used to override the object-wide focal length; can also be an array with length N to allow different delta for different phase-space points
        u0= (None) if object-wide option useu0 is set, u0 to use (if useu0 and u0 is None, a good value will be computed)
        c= (object-wide default, bool) True/False to override the object-wide setting for whether or not to use the C implementation
        order= (object-wide default, int) number of points to use in the Gauss-Legendre numerical integration of the relevant action integrals
        When not using C:
            fixed_quad= (False) if True, use Gaussian quadrature (scipy.integrate.fixed_quad instead of scipy.integrate.quad)
            scipy.integrate.fixed_quad or .quad keywords
    OUTPUT:
        (jr, lz, jz)
    HISTORY:
        2012-11-27 - Written - Bovy (IAS)
        2017-12-27 - Allowed individual delta for each point - Bovy (UofT)
    """
    delta = kwargs.pop('delta', self._delta)
    order = kwargs.get('order', self._order)
    # Use the C implementation when (a) it is enabled object-wide and not
    # explicitly disabled with c=False, or (b) explicitly requested with
    # c=True and the extension is loaded -- and only if the potential has
    # a C implementation (_check_c).
    if ((self._c and not ('c' in kwargs and not kwargs['c'])) or (ext_loaded and (('c' in kwargs and kwargs['c'])))) and _check_c(self._pot):
        if len(args) == 5:  # R, vR, vT, z, vz
            R, vR, vT, z, vz = args
        elif len(args) == 6:  # R, vR, vT, z, vz, phi
            R, vR, vT, z, vz, phi = args
        else:
            # Orbit-instance input: _parse_eval_args caches the coordinates
            # on self._eval_* attributes.
            self._parse_eval_args(*args)
            R = self._eval_R
            vR = self._eval_vR
            vT = self._eval_vT
            z = self._eval_z
            vz = self._eval_vz
        if isinstance(R, float):
            # Promote scalar input to length-1 arrays for the C interface.
            R = nu.array([R])
            vR = nu.array([vR])
            vT = nu.array([vT])
            z = nu.array([z])
            vz = nu.array([vz])
        Lz = R * vT
        if self._useu0:
            # First calculate u0
            if 'u0' in kwargs:
                u0 = nu.asarray(kwargs['u0'])
            else:
                # Energy per point, used to compute a good u0.
                E = nu.array([_evaluatePotentials(self._pot, R[ii], z[ii]) + vR[ii] ** 2. / 2. + vz[ii] ** 2. / 2. + vT[ii] ** 2. / 2. for ii in range(len(R))])
                u0 = actionAngleStaeckel_c.actionAngleStaeckel_calcu0(E, Lz, self._pot, delta)[0]
            # u0 must not be forwarded twice to the C routine.
            kwargs.pop('u0', None)
        else:
            u0 = None
        jr, jz, err = actionAngleStaeckel_c.actionAngleStaeckel_c(self._pot, delta, R, vR, vT, z, vz, u0=u0, order=order)
        if err == 0:
            return (jr, Lz, jz)
        else:  # pragma: no cover
            raise RuntimeError("C-code for calculation actions failed; try with c=False")
    else:
        if 'c' in kwargs and kwargs['c'] and not self._c:  # pragma: no cover
            warnings.warn("C module not used because potential does not have a C implementation", galpyWarning)
        kwargs.pop('c', None)
        if (len(args) == 5 or len(args) == 6) and isinstance(args[0], nu.ndarray):
            # Array input without C: evaluate one phase-space point at a
            # time by recursing through __call__.
            ojr = nu.zeros((len(args[0])))
            olz = nu.zeros((len(args[0])))
            ojz = nu.zeros((len(args[0])))
            for ii in range(len(args[0])):
                if len(args) == 5:
                    targs = (args[0][ii], args[1][ii], args[2][ii], args[3][ii], args[4][ii])
                elif len(args) == 6:
                    targs = (args[0][ii], args[1][ii], args[2][ii], args[3][ii], args[4][ii], args[5][ii])
                tkwargs = copy.copy(kwargs)
                try:
                    # delta may be an array with one focal length per point.
                    tkwargs['delta'] = delta[ii]
                except TypeError:
                    tkwargs['delta'] = delta
                tjr, tlz, tjz = self(*targs, **tkwargs)
                ojr[ii] = tjr
                ojz[ii] = tjz
                olz[ii] = tlz
            return (ojr, olz, ojz)
        else:
            # Set up the actionAngleStaeckelSingle object
            aASingle = actionAngleStaeckelSingle(*args, pot=self._pot, delta=delta)
            return (aASingle.JR(**copy.copy(kwargs)), aASingle._R * aASingle._vT, aASingle.Jz(**copy.copy(kwargs)))
|
def _pick_level ( cls , btc_amount ) :
"""Choose between small , medium , large , . . . depending on the
amount specified ."""
|
for size , level in cls . TICKER_LEVEL :
if btc_amount < size :
return level
return cls . TICKER_LEVEL [ - 1 ] [ 1 ]
|
def mktk03(terms, seed, G2, G3):
    """Generate a list of Gauss coefficients drawn from the TK03 distribution.

    :param terms: maximum spherical harmonic degree l to generate
    :param seed: unused; kept for backward compatibility (the original
        ``random.seed`` call was commented out) -- TODO confirm
    :param G2: ratio g20/g10 for the axial quadrupole offset
    :param G3: ratio g30/g10 for the axial octupole offset
    :return: list of coefficients ordered [g10, g11, h11, g20, g21, h21, ...]
    """
    gh = []
    # TK03 model constants: mean axial dipole, odd-term variance factor,
    # and the alpha scaling of the dipole.
    g10, sfact, afact = -18e3, 3.8, 2.4
    g20 = G2 * g10
    g30 = G3 * g10
    alpha = g10 / afact
    s1 = s_l(1, alpha)
    s10 = sfact * s1
    # Degree 1: g10 drawn about the mean dipole; g11 and h11 about zero.
    # (Draw order must be preserved -- it fixes the random sequence.)
    gh.append(random.normal(g10, s10))
    gh.append(random.normal(0, s1))
    gh.append(random.normal(0, s1))
    for l in range(2, terms + 1):
        for m in range(l + 1):
            # Non-zero means only for the axial quadrupole and octupole.
            OFF = 0.0
            if l == 2 and m == 0:
                OFF = g20
            if l == 3 and m == 0:
                OFF = g30
            s = s_l(l, alpha)
            # Terms with odd (l - m) get the extra variance factor.
            if (l - m) % 2 == 1:
                s = s * sfact
            gh.append(random.normal(OFF, s))
            if m != 0:
                # h coefficient only exists for m > 0.
                gh.append(random.normal(0, s))
    return gh
|
def wait(self, timeout=None):
    """An implementation of the wait method which doesn't involve polling
    but instead utilizes a "real" synchronization scheme.

    Returns immediately when the result is no longer pending.
    """
    import threading
    if self._state != self.PENDING:
        return
    fired = threading.Event()
    # Either outcome (success or failure) releases the waiter.
    self.addCallback(lambda v: fired.set())
    self.addErrback(lambda r: fired.set())
    fired.wait(timeout)
|
def _run_python(work_bam_a, work_bam_b, out_dir, aligner, prefix, items):
    """Run python version of disambiguation"""
    # Build a namespace object mimicking the CLI arguments expected by
    # disambiguate_main. Field order: A B output_dir intermediate_dir
    # no_sort prefix aligner. out_dir doubles as the intermediate dir.
    Args = collections.namedtuple("Args", "A B output_dir intermediate_dir " "no_sort prefix aligner")
    # NOTE(review): the ``prefix`` and ``items`` parameters are not used;
    # an empty string is passed as the namedtuple's prefix field -- confirm
    # this is intentional.
    args = Args(work_bam_a, work_bam_b, out_dir, out_dir, True, "", aligner)
    disambiguate_main(args)
|
def loadScopeGroupbyName(self, name, service_group_id, callback=None, errback=None):
    """Load an existing Scope Group by name and service group id into a
    high level Scope Group object.

    :param str name: Name of an existing Scope Group
    :param int service_group_id: id of the service group the Scope Group
        is associated with
    """
    import ns1.ipam
    group = ns1.ipam.Scopegroup(
        self.config, name=name, service_group_id=service_group_id
    )
    return group.load(callback=callback, errback=errback)
|
def _params_extend ( params , _ignore_name = False , ** kwargs ) :
'''Extends the params dictionary by values from keyword arguments .
. . versionadded : : 2016.3.0
: param params : Dictionary with parameters for zabbix API .
: param _ ignore _ name : Salt State module is passing first line as ' name ' parameter . If API uses optional parameter
' name ' ( for ex . host _ create , user _ create method ) , please use ' visible _ name ' or ' firstname ' instead of ' name ' to
not mess these values .
: param _ connection _ user : Optional - zabbix user ( can also be set in opts or pillar , see module ' s docstring )
: param _ connection _ password : Optional - zabbix password ( can also be set in opts or pillar , see module ' s docstring )
: param _ connection _ url : Optional - url of zabbix frontend ( can also be set in opts , pillar , see module ' s docstring )
: return : Extended params dictionary with parameters .'''
|
# extend params value by optional zabbix API parameters
for key in kwargs :
if not key . startswith ( '_' ) :
params . setdefault ( key , kwargs [ key ] )
# ignore name parameter passed from Salt state module , use firstname or visible _ name instead
if _ignore_name :
params . pop ( 'name' , None )
if 'firstname' in params :
params [ 'name' ] = params . pop ( 'firstname' )
elif 'visible_name' in params :
params [ 'name' ] = params . pop ( 'visible_name' )
return params
|
def alocar(self, nome, id_tipo_rede, id_ambiente, descricao, id_ambiente_vip=None, vrf=None):
    """Insert a new VLAN.

    :param nome: Name of Vlan. String with a maximum of 50 characters.
    :param id_tipo_rede: Identifier of the Network Type. Integer value and greater than zero.
    :param id_ambiente: Identifier of the Environment. Integer value and greater than zero.
    :param descricao: Description of Vlan. String with a maximum of 200 characters.
    :param id_ambiente_vip: Identifier of the Environment Vip. Integer value and greater than zero.
    :param vrf: Optional VRF identifier.

    :return: Dictionary under the 'vlan' key containing id, nome, num_vlan,
        id_tipo_rede, id_ambiente, network octets/mask, broadcast, descricao,
        acl_file_name, acl_valida and ativada.

    :raise VlanError: VLAN name already exists, DC division of the environment invalid or no VLAN number available.
    :raise VlanNaoExisteError: VLAN not found.
    :raise TipoRedeNaoExisteError: Network Type not registered.
    :raise AmbienteNaoExisteError: Environment not registered.
    :raise EnvironmentVipNotFoundError: Environment VIP not registered.
    :raise InvalidParameterError: Name of Vlan and/or the identifier of the Environment is null or invalid.
    :raise IPNaoDisponivelError: There is no network address available to create the VLAN.
    :raise ConfigEnvironmentInvalidError: Invalid Environment Configuration or not registered.
    :raise DataBaseError: Networkapi failed to access the database.
    :raise XMLError: Networkapi failed to generate the XML response.
    """
    vlan_map = {
        'nome': nome,
        'id_tipo_rede': id_tipo_rede,
        'id_ambiente': id_ambiente,
        'descricao': descricao,
        'id_ambiente_vip': id_ambiente_vip,
        'vrf': vrf,
    }
    code, xml = self.submit({'vlan': vlan_map}, 'POST', 'vlan/')
    return self.response(code, xml)
|
def update(self, title, key):
    """Update this key.

    :param str title: (required), title of the key
    :param str key: (required), text of the key file
    :returns: bool
    """
    # Both fields are required; bail out early without touching the API.
    if not (title and key):
        return False
    payload = self._json(
        self._patch(self._api, data=dumps({'title': title, 'key': key})), 200
    )
    if payload:
        self._update_(payload)
        return True
    return False
|
def Vdiff(D1, D2):
    """Find the vector difference between two directions D1, D2."""
    # Convert both directions to unit cartesian vectors, subtract
    # component-wise, and convert the result back to a direction.
    A = dir2cart([D1[0], D1[1], 1.])
    B = dir2cart([D2[0], D2[1], 1.])
    diff = [a - b for a, b in zip(A, B)]
    return cart2dir(diff)
|
def copy(self):
    """Deeply copy everything in the query object except the connection
    object, which is shared between the original and the copy.

    The connection is detached before ``deepcopy`` so it is never cloned;
    a try/finally guarantees it is re-attached to ``self`` even if the
    deep copy raises (the original code would leave ``self`` without its
    connection in that case).
    """
    connection = self.connection
    del self.connection
    try:
        copied_query = deepcopy(self)
    finally:
        # Always restore the shared connection on the original object.
        self.connection = connection
    copied_query.connection = connection
    return copied_query
|
def to_pb(self):
    """Convert the column family to a protobuf.

    :rtype: :class:`.table_v2_pb2.ColumnFamily`
    :returns: The converted current object.
    """
    gc_rule = self.gc_rule
    # An absent GC rule maps to an empty ColumnFamily message.
    if gc_rule is None:
        return table_v2_pb2.ColumnFamily()
    return table_v2_pb2.ColumnFamily(gc_rule=gc_rule.to_pb())
|
def get_possible_importers(file_uris, current_doc=None):
    """Return all the importer objects that can handle the specified files.

    Possible imports may vary depending on the currently active document.
    """
    return [
        importer for importer in IMPORTERS
        if importer.can_import(file_uris, current_doc)
    ]
|
def run_with_reloader(main_func, extra_files=None, interval=1):
    """Run the given function in an independent python interpreter."""
    import signal
    # Make SIGTERM exit cleanly so the reloader can restart the child.
    signal.signal(signal.SIGTERM, lambda *args: sys.exit(0))
    if os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
        # We are the child process: run the app in a background thread and
        # watch files in the foreground.
        thread.start_new_thread(main_func, ())
        try:
            reloader_loop(extra_files, interval)
        except KeyboardInterrupt:
            return
    # We are the parent: spawn children until one exits abnormally.
    try:
        sys.exit(restart_with_reloader())
    except KeyboardInterrupt:
        pass
|
def to_latlon(array, domain, axis='lon'):
    """Broadcast a 1D axis dependent array across another axis.

    :param array: the 1D array used for broadcasting
    :param domain: the domain associated with that array
    :param axis: the axis that the input array will be broadcasted across
        [default: 'lon']
    :return: Field with the same shape as the domain

    Example (illustrative, requires climlab)::

        state = climlab.surface_state(num_lat=3, num_lon=4)
        m = climlab.EBM_annual(state=state)
        insolation = np.array([237., 417., 237.])
        insolation = to_latlon(insolation, domain=m.domains['Ts'])
        insolation.shape  # (3, 4, 1)
    """
    # Remember the name of the axis the input depends on BEFORE it is
    # reused below. The original code rebound ``axis`` to a meshgrid
    # output, so the subsequent ``axis == 'lat'`` test compared an
    # ndarray against a string and never fired.
    axis_name = axis
    axis_pts, array, depth = np.meshgrid(
        domain.axes[axis_name].points, array, domain.axes['depth'].points)
    if axis_name == 'lat':
        # np.swapaxes returns a view; the original discarded the result,
        # so latitude-dependent input was never actually reordered.
        array = np.swapaxes(array, 1, 0)
    return Field(array, domain=domain)
|
def _map_agent_mod(self, agent, mod_condition):
    """Map a single modification condition on an agent.

    Parameters
    ----------
    agent : :py:class:`indra.statements.Agent`
        Agent to check for invalid modification sites.
    mod_condition : :py:class:`indra.statements.ModCondition`
        Modification to check for validity and map.

    Returns
    -------
    protmapper.MappedSite or None
        A MappedSite object is returned if a UniProt ID was found for the
        agent, and if both the position and residue for the modification
        condition were available. Otherwise None is returned.
    """
    # Without a UniProt ID there is nothing to map against.
    up_id = _get_uniprot_id(agent)
    if not up_id:
        logger.debug("No uniprot ID for %s" % agent.name)
        return None
    # Incomplete site information: skip.
    if mod_condition.position is None or mod_condition.residue is None:
        return None
    # Try to map the site and return the result.
    return self.map_to_human_ref(
        up_id, 'uniprot',
        mod_condition.residue, mod_condition.position,
        do_methionine_offset=self.do_methionine_offset,
        do_orthology_mapping=self.do_orthology_mapping,
        do_isoform_mapping=self.do_isoform_mapping)
|
def duplicate_items(*collections):
    """Search for duplicate items in all collections.

    Examples
    --------
    >>> duplicate_items([1, 2], [3])
    set()
    >>> duplicate_items({1: 'a', 2: 'a'})
    set()
    >>> duplicate_items(['a', 'b', 'a'])
    {'a'}
    >>> duplicate_items([1, 2], {3: 'hi', 4: 'ha'}, (2, 3))
    {2, 3}
    """
    # Original docstring's third example was missing its expected output
    # ({'a'}), which would make the doctest fail; fixed above.
    duplicates = set()
    seen = set()
    for item in flatten(collections):
        if item in seen:
            duplicates.add(item)
        else:
            seen.add(item)
    return duplicates
|
def ParseDestList(self, parser_mediator, olecf_item):
    """Parses the DestList OLECF item.

    Args:
        parser_mediator (ParserMediator): mediates interactions between parsers
            and other components, such as storage and dfvfs.
        olecf_item (pyolecf.item): OLECF item.

    Raises:
        UnableToParseFile: if the DestList cannot be parsed.
    """
    header_map = self._GetDataTypeMap('dest_list_header')
    try:
        header, entry_offset = self._ReadStructureFromFileObject(olecf_item, 0, header_map)
    except (ValueError, errors.ParseError) as exception:
        raise errors.UnableToParseFile('Unable to parse DestList header with error: {0!s}'.format(exception))
    # The entry structure differs between format versions.
    if header.format_version == 1:
        entry_map = self._GetDataTypeMap('dest_list_entry_v1')
    elif header.format_version in (3, 4):
        entry_map = self._GetDataTypeMap('dest_list_entry_v3')
    else:
        parser_mediator.ProduceExtractionWarning('unsupported format version: {0:d}.'.format(header.format_version))
        return
    while entry_offset < olecf_item.size:
        try:
            entry, entry_data_size = self._ReadStructureFromFileObject(olecf_item, entry_offset, entry_map)
        except (ValueError, errors.ParseError) as exception:
            raise errors.UnableToParseFile('Unable to parse DestList entry with error: {0!s}'.format(exception))
        display_name = 'DestList entry at offset: 0x{0:08x}'.format(entry_offset)
        # The droid identifiers are best-effort: on failure produce a
        # warning and fall back to an empty string.
        try:
            droid_volume_identifier = self._ParseDistributedTrackingIdentifier(parser_mediator, entry.droid_volume_identifier, display_name)
        except (TypeError, ValueError) as exception:
            droid_volume_identifier = ''
            parser_mediator.ProduceExtractionWarning('unable to read droid volume identifier with error: {0!s}'.format(exception))
        try:
            droid_file_identifier = self._ParseDistributedTrackingIdentifier(parser_mediator, entry.droid_file_identifier, display_name)
        except (TypeError, ValueError) as exception:
            droid_file_identifier = ''
            parser_mediator.ProduceExtractionWarning('unable to read droid file identifier with error: {0!s}'.format(exception))
        try:
            birth_droid_volume_identifier = (self._ParseDistributedTrackingIdentifier(parser_mediator, entry.birth_droid_volume_identifier, display_name))
        except (TypeError, ValueError) as exception:
            birth_droid_volume_identifier = ''
            # Fixed: the original used '{0:s}' which raises TypeError when
            # formatting an exception; '!s' converts via str() first.
            parser_mediator.ProduceExtractionWarning(('unable to read birth droid volume identifier with error: ' '{0!s}').format(exception))
        try:
            birth_droid_file_identifier = self._ParseDistributedTrackingIdentifier(parser_mediator, entry.birth_droid_file_identifier, display_name)
        except (TypeError, ValueError) as exception:
            birth_droid_file_identifier = ''
            # Fixed: '{0:s}' -> '{0!s}' (see above).
            parser_mediator.ProduceExtractionWarning(('unable to read birth droid file identifier with error: ' '{0!s}').format(exception))
        # A zero FILETIME means "not set" rather than the epoch.
        if entry.last_modification_time == 0:
            date_time = dfdatetime_semantic_time.SemanticTime('Not set')
        else:
            date_time = dfdatetime_filetime.Filetime(timestamp=entry.last_modification_time)
        event_data = AutomaticDestinationsDestListEntryEventData()
        event_data.birth_droid_file_identifier = birth_droid_file_identifier
        event_data.birth_droid_volume_identifier = birth_droid_volume_identifier
        event_data.droid_file_identifier = droid_file_identifier
        event_data.droid_volume_identifier = droid_volume_identifier
        event_data.entry_number = entry.entry_number
        event_data.hostname = entry.hostname.rstrip('\x00')
        event_data.offset = entry_offset
        event_data.path = entry.path.rstrip('\x00')
        event_data.pin_status = entry.pin_status
        event = time_events.DateTimeValuesEvent(date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)
        entry_offset += entry_data_size
|
def start(self):
    """Confirm that we may access the target cluster."""
    # NOTE: Python 2 syntax (``except X, e``); this block cannot run on
    # Python 3 as written. Looks like a Twisted inlineCallbacks-style
    # generator (uses ``yield`` on requests) -- TODO confirm.
    version = yield self.request("get", "/version")
    # Only RAPI protocol version 2 is supported.
    if version != 2:
        raise GanetiApiError("Can't work with Ganeti RAPI version %d" % version)
    log.msg("Accessing Ganeti RAPI, version %d" % version, system="Gentleman")
    self.version = version
    try:
        features = yield self.request("get", "/2/features")
    except NotOkayError, noe:
        if noe.code == 404:
            # Okay, let's calm down, this is totally reasonable. Certain
            # older Ganeti RAPIs don't have a list of features.
            features = []
        else:
            # No, wait, panic was the correct thing to do.
            raise
    log.msg("RAPI features: %r" % (features,), system="Gentleman")
    self.features = features
|
def display(self, ret, indent, prefix, out):
    '''Recursively iterate down through data structures to determine output'''
    if isinstance(ret, six.string_types):
        # Leaf value: emit each line in red at the current indent.
        for line in ret.split('\n'):
            out += '{0}{1}{2}{3}{4}\n'.format(
                self.colors['RED'], ' ' * indent, prefix, line,
                self.colors['ENDC'])
    elif isinstance(ret, dict):
        # Mapping: emit each key in cyan, then recurse into its value
        # with a deeper indent and no prefix.
        for key in sorted(ret):
            out += '{0}{1}{2}{3}{4}:\n'.format(
                self.colors['CYAN'], ' ' * indent, prefix, key,
                self.colors['ENDC'])
            out = self.display(ret[key], indent + 4, '', out)
    return out
|
def set_review_solution(self, during_attempt=None, after_attempt=None, before_deadline=None, after_deadline=None):
    """stub

    Set the review-solution flags on the form's underlying map; only the
    arguments that are not None are written, coerced to bool.
    """
    solution = self.my_osid_object_form._my_map['reviewOptions']['solution']
    updates = (
        ('duringAttempt', during_attempt),
        ('afterAttempt', after_attempt),
        ('beforeDeadline', before_deadline),
        ('afterDeadline', after_deadline),
    )
    for field, flag in updates:
        if flag is not None:
            solution[field] = bool(flag)
|
def cipher_block(self, state):
    """Perform the AES block cipher on the input block."""
    # PKCS7 padding: append N bytes each of value N, where N = 16 - len.
    # Built as a new list -- augmented assignment would mutate the
    # caller's input.
    pad = 16 - len(state)
    state = state + [pad] * pad
    self._add_round_key(state, 0)
    for round_num in range(1, self._Nr):
        self._sub_bytes(state)
        self._shift_rows(state)
        self._mix_columns(state, False)
        self._add_round_key(state, round_num)
    # Final round omits MixColumns.
    self._sub_bytes(state)
    self._shift_rows(state)
    self._add_round_key(state, self._Nr)
    return state
|
def _gegetate_args ( self , options ) :
"""Generator of args parts based on options specification ."""
|
for optkey , optval in self . _normalize_options ( options ) :
yield optkey
if isinstance ( optval , ( list , tuple ) ) :
assert len ( optval ) == 2 and optval [ 0 ] and optval [ 1 ] , 'Option value can only be either a string or a (tuple, list) of 2 items'
yield optval [ 0 ]
yield optval [ 1 ]
else :
yield optval
|
def update(self, **kwargs):
    """Fetch all changes for this remote, including new branches which will
    be forced in (in case your local remote branch is not part of the new
    remote branches ancestry anymore).

    :param kwargs: Additional arguments passed to git-remote update
    :return: self
    """
    subcommand = 'update'
    # Tell the git command layer where to splice the keyword options.
    kwargs['insert_kwargs_after'] = subcommand
    self.repo.git.remote(subcommand, self.name, **kwargs)
    return self
|
def _cursor_down ( self , count = 1 ) :
"""Moves cursor down count lines in same column . Cursor stops at bottom
margin ."""
|
self . y = min ( self . size [ 0 ] - 1 , self . y + count )
|
def save(self, data, *args, **kwargs):
    """inserts data (dict or list of dicts)

    expected kwargs:
        collection_name: by default uses MONGODB_DEFAULT_COLLECTION
        w: by default set to 0 to disable write acknowledgement

    assumes that data has been verified/validated
    """
    # NOTE: Python 2 ``except ..., e`` syntax -- this block targets Python 2.
    try:
        collection_name = kwargs.get('collection_name', MONGODB_DEFAULT_COLLECTION)
        w = kwargs.get('w', 0)
        # Lazily bind the collection on first use.
        if not self.collection:
            self.set_collection(collection_name)
        # NOTE(review): ``w`` is passed positionally as insert()'s second
        # argument -- confirm this matches the driver's signature.
        self.collection.insert(data, w)
    except (ConnectionFailure, AutoReconnect, InvalidURI), e:
        # fail silently - just log and die...
        if ADD_LOG_FAILURES:
            logging.exception('Error connection to %s, unable to insert %s' % (MONGODB_URI, data))
|
def _base_signup_form_class():
    """Resolve the base class for signup forms.

    Currently, we inherit from the custom form, if any. This is all not
    very elegant, though it serves a purpose:

    - There are two signup forms: one for local accounts, and one for
      social accounts
    - Both share a common base (BaseSignupForm)
    - Given the above, how to put in a custom signup form? Which form
      would your custom form derive from, the local or the social one?
    """
    form_path = app_settings.SIGNUP_FORM_CLASS
    if not form_path:
        return _DummyCustomSignupForm
    try:
        module_name, class_name = form_path.rsplit('.', 1)
    except ValueError:
        raise exceptions.ImproperlyConfigured('%s does not point to a form class' % form_path)
    try:
        module = import_module(module_name)
    except ImportError as e:
        raise exceptions.ImproperlyConfigured('Error importing form class %s: "%s"' % (module_name, e))
    try:
        form_class = getattr(module, class_name)
    except AttributeError:
        raise exceptions.ImproperlyConfigured('Module "%s" does not define a "%s" class' % (module_name, class_name))
    # A 'signup' method is required; a legacy 'save' method is tolerated
    # with a deprecation warning.
    if not hasattr(form_class, 'signup'):
        if hasattr(form_class, 'save'):
            warnings.warn("The custom signup form must offer a `def signup(self, request, user)` method", DeprecationWarning)
        else:
            raise exceptions.ImproperlyConfigured('The custom signup form must implement a "signup" method')
    return form_class
|
def publish(self, topic, options=None, args=None, kwargs=None):
    """Publish a message to the server.

    When the options request acknowledgement (the default), waits for and
    returns the server response; otherwise fires and forgets, returning
    the request id.
    """
    topic = self.get_full_uri(topic)
    if options is None:
        options = {'acknowledge': True}
    request = PUBLISH(
        options=options or {}, topic=topic,
        args=args or [], kwargs=kwargs or {})
    if options.get('acknowledge'):
        return self.send_and_await_response(request)
    self.send_message(request)
    return request.request_id
|
def assign(self, role):
    '''Assign :class:`Role` ``role`` to this :class:`Subject`. If this
    :class:`Subject` is the :attr:`Role.owner`, this method does nothing.'''
    if role.owner_id == self.id:
        # The owner implicitly holds the role; nothing to record.
        return
    return self.roles.add(role)
|
def wrap(self, text):
    '''Wraps the text object to width, breaking at whitespaces. Runs of
    whitespace characters are preserved, provided they do not fall at a
    line boundary. The implementation is based on that of textwrap from the
    standard library, but we can cope with StringWithFormatting objects.

    :returns: a list of string-like objects.
    '''
    result = []
    chunks = self._chunk(text)
    while chunks:
        # Drop leading whitespace at the start of each new line.
        self._lstrip(chunks)
        current_line = []
        current_line_length = 0
        current_chunk_length = 0
        # Greedily take whole chunks while they fit within self.width.
        while chunks:
            current_chunk_length = len(chunks[0])
            if current_line_length + current_chunk_length <= self.width:
                current_line.append(chunks.pop(0))
                current_line_length += current_chunk_length
            else:
                # Line is full
                break
        # Handle case where chunk is bigger than an entire line: split it,
        # taking what fits and leaving the remainder at the queue head.
        if current_chunk_length > self.width:
            space_left = self.width - current_line_length
            current_line.append(chunks[0][:space_left])
            chunks[0] = chunks[0][space_left:]
        self._rstrip(current_line)
        if current_line:
            # Concatenate the chunks; reduce preserves any formatting-aware
            # __add__ of StringWithFormatting objects.
            result.append(reduce(lambda x, y: x + y, current_line[1:], current_line[0]))
        else:
            # FIXME: should this line go? Removing it makes at least simple
            # cases like wrap(' ', 10) actually behave like textwrap.wrap...
            result.append('')
    return result
|
def key_from_keybase(username, fingerprint=None):
    """Look up a public key from a username.

    Queries the Keybase user-lookup endpoint; returns the massaged key on
    success, or None when the user or key cannot be resolved.
    """
    url = keybase_lookup_url(username)
    resp = requests.get(url)
    if resp.status_code == 200:
        j_resp = json.loads(polite_string(resp.content))
        # Expect exactly one matching user in the 'them' list.
        if 'them' in j_resp and len(j_resp['them']) == 1:
            kb_obj = j_resp['them'][0]
            if fingerprint:
                return fingerprint_from_keybase(fingerprint, kb_obj)
            else:
                if 'public_keys' in kb_obj and 'pgp_public_keys' in kb_obj['public_keys']:
                    # NOTE(review): the guard checks 'pgp_public_keys' but
                    # the lookup reads 'primary' -- confirm this is the
                    # intended key.
                    key = kb_obj['public_keys']['primary']
                    return massage_key(key)
    return None
|
def notification_selected_sm_changed(self, model, prop_name, info):
    """If a new state machine is selected, make sure expansion state is
    stored and the tree updated."""
    if self.model.selected_state_machine_id is None:
        # Nothing selected; leave the tree untouched.
        return
    self.update()
|
def quantile_for_single_value(self, **kwargs):
    """Return the quantile of each column or row.

    Returns:
        A new QueryCompiler object containing the quantile of each column
        or row.
    """
    if self._is_transposed:
        # Flip the axis and delegate to the non-transposed compiler.
        kwargs["axis"] = kwargs.get("axis", 0) ^ 1
        return self.transpose().quantile_for_single_value(**kwargs)
    axis = kwargs.get("axis", 0)
    q = kwargs.get("q", 0.5)
    assert type(q) is float

    def quantile_builder(df, **inner_kwargs):
        # Empty or non-numeric frames make pandas raise ValueError;
        # represent those as an empty Series.
        try:
            return pandas.DataFrame.quantile(df, **inner_kwargs)
        except ValueError:
            return pandas.Series()

    reduce_func = self._build_mapreduce_func(quantile_builder, **kwargs)
    result = self._full_axis_reduce(axis, reduce_func)
    # Label the single produced row/column with the quantile value.
    if axis == 0:
        result.index = [q]
    else:
        result.columns = [q]
    return result
|
def columns_classes(self):
    '''Return the grid column counts (md, sm, xs) for a 12-column layout.'''
    per_row = self.objects_per_row
    md = 12 / per_row
    # Small screens show half as many objects per row, but only when more
    # than two fit on a row to begin with.
    sm = 12 / (per_row / 2) if per_row > 2 else None
    return md, (sm or md), 12
|
def get_fld2val(self, name, vals):
    """Describe summary statistics for a list of numbers."""
    # Empty input gets the null placeholder instead of statistics.
    if not vals:
        return self._init_fld2val_null(name)
    return self._init_fld2val_stats(name, vals)
|
def merge_versioned(releases, schema=None, merge_rules=None):
    """Merge a list of releases into a versionedRelease."""
    if not merge_rules:
        merge_rules = get_merge_rules(schema)
    merged = OrderedDict()
    for release in sorted(releases, key=lambda r: r['date']):
        release = release.copy()
        # Don't version the OCID.
        merged[('ocid',)] = release.pop('ocid')
        release_id = release['id']
        release_date = release['date']
        # Prior to OCDS 1.1.4, `tag` didn't set "omitWhenMerged": true.
        release_tag = release.pop('tag', None)
        processed = process_flattened(flatten(release, merge_rules))
        for key, value in processed.items():
            history = merged.setdefault(key, [])
            # If value is unchanged from the latest entry, don't add to
            # history.
            if history and value == history[-1]['value']:
                continue
            history.append(OrderedDict([
                ('releaseID', release_id),
                ('releaseDate', release_date),
                ('releaseTag', release_tag),
                ('value', value),
            ]))
    return unflatten(merged, merge_rules)
|
def appraise_source_model(self):
    """Identify parameters defined in NRML source model file, so that
    shapefile contains only source model specific fields."""
    def _track_distributions(src, src_taglist):
        # Shared by area and point sources: record the largest nodal-plane
        # and hypocentral-depth distribution sizes seen so far.
        npd_size = len(src.nodes[src_taglist.index("nodalPlaneDist")])
        hdd_size = len(src.nodes[src_taglist.index("hypoDepthDist")])
        self.num_np = max(npd_size, self.num_np)
        self.num_hd = max(hdd_size, self.num_hd)

    for src in self.sources:
        # Source-geometry parameters
        src_taglist = get_taglist(src)
        if "areaSource" in src.tag:
            self.has_area_source = True
            _track_distributions(src, src_taglist)
        elif "pointSource" in src.tag:
            self.has_point_source = True
            _track_distributions(src, src_taglist)
        elif "simpleFaultSource" in src.tag:
            self.has_simple_fault_geometry = True
        elif "complexFaultSource" in src.tag:
            self.has_complex_fault_geometry = True
        elif "characteristicFaultSource" in src.tag:
            # Characteristic sources carry their geometry in a nested
            # surface node which may mix several geometry types.
            surface_node = src.nodes[src_taglist.index("surface")]
            p_size = 0
            for surface in surface_node.nodes:
                if "simpleFaultGeometry" in surface.tag:
                    self.has_simple_fault_geometry = True
                elif "complexFaultGeometry" in surface.tag:
                    self.has_complex_fault_geometry = True
                elif "planarSurface" in surface.tag:
                    self.has_planar_geometry = True
                    p_size += 1
            self.num_p = max(p_size, self.num_p)
        # MFD parameters
        if "truncGutenbergRichterMFD" in src_taglist:
            self.has_mfd_gr = True
        elif "incrementalMFD" in src_taglist:
            self.has_mfd_incremental = True
            # Track the longest occurrence-rate list across sources.
            mfd_node = src.nodes[src_taglist.index("incrementalMFD")]
            self.num_r = max(len(mfd_node.nodes[0].text), self.num_r)
|
def deletelogicalnetwork(check_processor=default_logicalnetwork_delete_check, reorder_dict=default_iterate_dict):
    """:param check_processor: check_processor(logicalnetwork, logicalnetworkmap,
                               physicalnetwork, physicalnetworkmap,
                               walk, write, \\*, parameters)"""
    def walker(walk, write, timestamp, parameters_dict):
        for key, parameters in reorder_dict(parameters_dict):
            # Resolve the logical network, its map, and the owning physical
            # network; skip this entry entirely if any lookup misses.
            try:
                lognet = walk(key)
                logmap = walk(LogicalNetworkMap._network.leftkey(key))
                phynet = walk(lognet.physicalnetwork.getkey())
                phymap = walk(PhysicalNetworkMap._network.leftkey(phynet))
            except KeyError:
                continue
            check_processor(lognet, logmap, phynet, phymap, walk, write, parameters=parameters)
            # Detach from the physical network, then delete the logical
            # network and its map.
            phymap.logicnetworks.dataset().discard(lognet.create_weakreference())
            write(phymap.getkey(), phymap)
            write(key, None)
            write(logmap.getkey(), None)
            # Best effort: drop the entry from the global set if present.
            try:
                logicalnetworkset = walk(LogicalNetworkSet.default_key())
            except KeyError:
                continue
            logicalnetworkset.set.dataset().discard(lognet.create_weakreference())
            write(logicalnetworkset.getkey(), logicalnetworkset)
    return walker
|
def MobileDevice(self, data=None, subset=None):
    """{dynamic_docstring}"""
    # Delegate construction to the shared JSS object factory.
    factory = self.factory
    return factory.get_object(jssobjects.MobileDevice, data, subset)
|
def children_after_parents(self, piper1, piper2):
    """Custom compare function. Returns ``1`` if the first ``Piper`` instance
    is upstream of the second ``Piper`` instance, ``-1`` if the first
    ``Piper`` is downstream of the second ``Piper`` and ``0`` if the two
    ``Pipers`` are independent.

    Arguments:
      - piper1 (``Piper``) ``Piper`` instance.
      - piper2 (``Piper``) ``Piper`` instance.
    """
    if piper1 in self[piper2].deep_nodes():
        return 1
    if piper2 in self[piper1].deep_nodes():
        return -1
    # Neither reaches the other: the pipers are independent.
    return 0
|
def _can_construct_from_str ( strict_mode : bool , from_type : Type , to_type : Type ) -> bool :
"""Returns true if the provided types are valid for constructor _ with _ str _ arg conversion
Explicitly declare that we are not able to convert primitive types ( they already have their own converters )
: param strict _ mode :
: param from _ type :
: param to _ type :
: return :"""
|
return to_type not in { int , float , bool }
|
def extract(pattern, string, *, assert_equal=False, one=False, condense=False, default=None, default_if_multiple=True, default_if_none=True):
    """Used to extract a given regex pattern from a string, given several options."""
    if isinstance(pattern, str):
        matches = get_content(pattern, string)
    else:
        # Any non-string pattern is treated as a linear container of patterns.
        matches = []
        for single_pattern in pattern:
            matches.extend(get_content(single_pattern, string))
    matches = process_output(matches, one=one, condense=condense, default=default, default_if_multiple=default_if_multiple, default_if_none=default_if_none)
    if assert_equal:
        # NOTE(review): in assertion mode nothing is returned (implicitly
        # None) -- presumably callers use this purely as a check; confirm.
        assert_output(matches, assert_equal)
    else:
        return matches
|
def _gssapi_login(self):
    """Authenticate to the /ssllogin endpoint with GSSAPI authentication.

    :returns: deferred that when fired returns a dict from sslLogin
    """
    # Preemptive auth sends the Kerberos header without waiting for a 401.
    kerberos_auth = treq_kerberos.TreqKerberosAuth(force_preemptive=True)
    return self._request_login(treq_kerberos.post, auth=kerberos_auth)
|
def get_registry(entry, runtime):
    """Returns a record registry given an entry and runtime."""
    try:
        param_id = Id('parameter:recordsRegistry@mongo')
        configuration = runtime.get_configuration()
        registry_path = configuration.get_value_by_parameter(param_id).get_string_value()
        # Missing module, missing attribute, or unset parameter all fall
        # through to an empty registry.
        return import_module(registry_path).__dict__.get(entry, {})
    except (ImportError, AttributeError, KeyError, NotFound):
        return {}
|
def _read_config_file(cf, _globals=globals(), _locals=locals(), interactive=True):  # noqa: E501
    """Read a config file: execute a python file while loading scapy, that may
    contain some pre-configured values.

    If _globals or _locals are specified, they will be updated with the loaded
    vars. This allows an external program to use the function. Otherwise, vars
    are only available from inside the scapy console.

    params:
    - _globals: the globals() vars
    - _locals: the locals() vars
    - interactive: specified whether or not errors should be printed
      using the scapy console or raised.

    ex, content of a config.py file:
        'conf.verb = 42\\n'
    Manual loading:
        >>> _read_config_file("./config.py")
        >>> conf.verb
        42
    """
    log_loading.debug("Loading config file [%s]", cf)
    try:
        # BUG FIX: use a context manager so the file handle is closed even if
        # reading or compilation raises (previously open(cf).read() leaked it).
        with open(cf) as config_file:
            code = compile(config_file.read(), cf, 'exec')
        # SECURITY NOTE: exec of arbitrary file contents -- config files must
        # be trusted; this mirrors the established scapy behaviour.
        exec(code, _globals, _locals)
    except IOError as e:
        if interactive:
            raise
        log_loading.warning("Cannot read config file [%s] [%s]", cf, e)
    except Exception:
        if interactive:
            raise
        log_loading.exception("Error during evaluation of config file [%s]", cf)
|
def image(self, raw_url, title='', alt=''):
    '''extract the images'''
    limit = self._config.get('count')
    # Bail out early once the configured image budget is exhausted.
    if limit is not None and len(self._out.images) >= limit:
        return ' '
    image_specs = raw_url
    if title:
        image_specs += ' "{}"'.format(title)
    alt, container_args = image.parse_alt_text(alt)
    spec_list, _ = image.get_spec_list(image_specs, container_args)
    for spec in spec_list:
        if not spec:
            continue
        self._out.images.append(self._render_image(spec, alt))
        if limit is not None and len(self._out.images) >= limit:
            break
    # Markdown renderer contract: emit a placeholder, not the image itself.
    return ' '
|
def visit(spht, node):
    """Append opening tags to document body list.

    :param sphinx.writers.html.SmartyPantsHTMLTranslator spht: Object to modify.
    :param sphinxcontrib.imgur.nodes.ImgurImageNode node: This class' instance.
    """
    target = node.options['target']
    if target:
        # Wrap the image in an anchor when a link target is configured.
        anchor_attrs = dict(CLASS='reference external image-reference', href=target)
        spht.body.append(spht.starttag(node, 'a', '', **anchor_attrs))
    img_attrs = dict(src=node.src, alt=node.options['alt'])
    alignment = node.options['align']
    if alignment:
        img_attrs['CLASS'] = 'align-{}'.format(alignment)
    if node.style:
        img_attrs['style'] = node.style
    img_suffix = '' if target else '\n'
    spht.body.append(spht.starttag(node, 'img', img_suffix, **img_attrs))
|
def update(records, column, values):
    """Update the column of records.

    :param records: a list of dictionaries
    :param column: a string
    :param values: a callable applied to each existing value, or an iterable
        of replacement values (indexable, one per record)
    :returns: new records with the column updated; the input is not modified
    :raises ValueError: if *values* is neither callable nor iterable

    >>> movies = [
    ...     {'title': 'The Holy Grail', 'year': 1975, 'budget': 4E5, 'total_gross': 5E6},
    ...     {'title': 'Life of Brian', 'year': 1979, 'budget': 4E6, 'total_gross': 20E6},
    ...     {'title': 'The Meaning of Life', 'year': 1983, 'budget': 9E6, 'total_gross': 14.9E6}]
    >>> new_movies = update(movies, 'budget', lambda x: 2 * x)
    >>> [new_movies[i]['budget'] for i, _ in enumerate(movies)]
    [800000.0, 8000000.0, 18000000.0]
    >>> new_movies2 = update(movies, 'budget', (40, 400, 900))
    >>> [new_movies2[i]['budget'] for i, _ in enumerate(movies)]
    [40, 400, 900]
    """
    new_records = deepcopy(records)
    # BUG FIX / generalization: callable() also accepts lambdas, bound methods
    # and builtins, unlike the previous __class__.__name__ == 'function' check.
    if callable(values):
        for row in new_records:
            row[column] = values(row[column])
    elif isiterable(values):
        for i, row in enumerate(new_records):
            row[column] = values[i]
    else:
        raise ValueError("You must provide a function or an iterable.")
    return new_records
|
def fake_lens_path_set(lens_path, value, obj):
    """Simulates R.set with a lens_path since we don't have lens functions.

    :param lens_path: Array of string paths (numeric strings index into lists)
    :param value: The value to set at the lens path
    :param obj: Object containing the given path
    :return: A copy of *obj* with *value* set at the path
    """
    segment = head(lens_path)
    # Shallow copy: only the container at this level is replaced; deeper
    # containers are rebuilt by the recursive calls below.
    obj_copy = copy.copy(obj)
    def set_array_index(i, v, l):  # Fill the array with None up to the given index and set the index to v
        try:
            l[i] = v
        except IndexError:
            # List too short: pad with None so index i exists, then set it.
            for _ in range(i - len(l) + 1):
                l.append(None)
            l[i] = v
    if not (length(lens_path) - 1):  # Done
        # Last path segment: write the value itself.
        new_value = value
    else:  # Find the value at the path or create a {} or [] at obj[segment]
        # A numeric next segment implies a list container, otherwise a dict.
        found_or_created = item_path_or(if_else(lambda segment: segment.isnumeric(), always([]), always({}))(head(tail(lens_path))), segment, obj)
        # Recurse on the rest of the path
        new_value = fake_lens_path_set(tail(lens_path), value, found_or_created)
    # Set or replace
    if segment.isnumeric():
        set_array_index(int(segment), new_value, obj_copy)
    else:
        obj_copy[segment] = new_value
    return obj_copy
|
def delete_doc(self, doc_id, revision):
    '''Imitates sending DELETE request to CouchDB server.

    Marks the document as deleted (keeping only _id/_rev/_deleted), drops
    its attachments, bumps the revision and fires the returned Deferred
    with the outcome.
    '''
    d = defer.Deferred()
    self.increase_stat('delete_doc')
    try:
        doc = self._get_doc(doc_id)
        if doc['_rev'] != revision:
            raise ConflictError("Document update conflict.")
        if doc.get('_deleted', None):
            raise NotFoundError('%s deleted' % doc_id)
        doc['_deleted'] = True
        self._expire_cache(doc['_id'])
        # BUG FIX: materialize the key list first -- deleting from a dict
        # while iterating its live key view raises RuntimeError on Python 3.
        for key in list(doc.keys()):
            if key in ['_rev', '_deleted', '_id']:
                continue
            del doc[key]
        self.log('Marking document %r as deleted', doc_id)
        del self._attachments[doc['_id']]
        self._update_rev(doc)
        self._analize_changes(doc)
        d.callback(Response(ok=True, id=doc_id, rev=doc['_rev']))
    except (ConflictError, NotFoundError) as e:
        # Expected CouchDB-style failures are surfaced via the errback chain.
        d.errback(e)
    return d
|
def patch_namespaced_role_binding(self, name, namespace, body, **kwargs):  # noqa: E501
    """patch_namespaced_role_binding  # noqa: E501

    partially update the specified RoleBinding  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.patch_namespaced_role_binding(name, namespace, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the RoleBinding (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param UNKNOWN_BASE_TYPE body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
    :return: V1RoleBinding
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # With async_req=True the *_with_http_info variant returns the request
    # thread; otherwise it is already the deserialized response data, so a
    # single call covers both cases.
    return self.patch_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs)  # noqa: E501
|
def _handle_unsubscribed(self, *args, chanId=None, **kwargs):
    """Handles responses to unsubscribe() commands - removes a channel id from
    the client.

    :param chanId: int, represent channel id as assigned by server
    """
    log.debug("_handle_unsubscribed: %s - %s", chanId, kwargs)
    try:
        self.channels.pop(chanId)
    except KeyError:
        # Unsubscribe for a channel that was never registered is an error.
        raise NotRegisteredError()
    # Heartbeat bookkeeping is best-effort: missing entries are simply ignored.
    self._heartbeats.pop(chanId, None)
    self._late_heartbeats.pop(chanId, None)
|
def _curve(x1, y1, x2, y2, hunit=HUNIT, vunit=VUNIT):
    """Return a PyX curved path from (x1, y1) to (x2, y2),
    such that the slope at either end is zero."""
    start_x = x1 * hunit
    end_x = x2 * hunit
    mid_x = (x1 + x2) * hunit / 2
    start_y = y1 * vunit
    end_y = y2 * vunit
    # Control points at the horizontal midpoint keep both end slopes flat.
    return pyx.path.curve(start_x, start_y, mid_x, start_y, mid_x, end_y, end_x, end_y)
|
def isochrone_to_aa(w, potential):
    """Transform the input cartesian position and velocity to action-angle
    coordinates in the Isochrone potential. See Section 3.5.2 in
    Binney & Tremaine (2008), and be aware of the errata entry for
    Eq. 3.225.

    This transformation is analytic and can be used as a "toy potential"
    in the Sanders & Binney (2014) formalism for computing action-angle
    coordinates in any potential.

    .. note::

        This function is included as a method of the
        :class:`~gala.potential.IsochronePotential` and it is recommended
        to call :meth:`~gala.potential.IsochronePotential.phase_space()`
        instead.

    Parameters
    ----------
    w : :class:`gala.dynamics.PhaseSpacePosition`, :class:`gala.dynamics.Orbit`
    potential : :class:`gala.potential.IsochronePotential`, dict
        An instance of the potential to use for computing the transformation
        to angle-action coordinates. Or, a dictionary of parameters used to
        define an :class:`gala.potential.IsochronePotential` instance.

    Returns
    -------
    actions : :class:`numpy.ndarray`
        An array of actions computed from the input positions and velocities.
    angles : :class:`numpy.ndarray`
        An array of angles computed from the input positions and velocities.
    freqs : :class:`numpy.ndarray`
        An array of frequencies computed from the input positions and velocities.
    """
    # Accept either a ready-made potential instance or a parameter dict.
    if not isinstance(potential, PotentialBase):
        potential = IsochronePotential(**potential)
    usys = potential.units
    GM = (G * potential.parameters['m']).decompose(usys).value
    b = potential.parameters['b'].decompose(usys).value
    E = w.energy(Hamiltonian(potential)).decompose(usys).value
    E = np.squeeze(E)
    # The analytic transformation is only defined for bound orbits (E < 0).
    if np.any(E > 0.):
        raise ValueError("Unbound particle. (E = {})".format(E))
    # convert position, velocity to spherical polar coordinates
    w_sph = w.represent_as(coord.PhysicsSphericalRepresentation)
    r, phi, theta = map(np.squeeze, [w_sph.r.decompose(usys).value, w_sph.phi.radian, w_sph.theta.radian])
    ang_unit = u.radian / usys['time']
    vr, phi_dot, theta_dot = map(np.squeeze, [w_sph.radial_velocity.decompose(usys).value, w_sph.pm_phi.to(ang_unit).value, w_sph.pm_theta.to(ang_unit).value])
    vphi = r * np.sin(theta) * phi_dot
    vtheta = r * theta_dot
    # Compute the actions
    L_vec = np.squeeze(w.angular_momentum().decompose(usys).value)
    Lz = L_vec[2]
    L = np.linalg.norm(L_vec, axis=0)
    # Radial action
    Jr = GM / np.sqrt(-2 * E) - 0.5 * (L + np.sqrt(L * L + 4 * GM * b))
    # compute the three action variables
    actions = np.array([Jr, Lz, L - np.abs(Lz)])  # Jr, Jphi, Jtheta
    # Angles: auxiliary orbital-geometry quantities
    c = GM / (-2 * E) - b
    e = np.sqrt(1 - L * L * (1 + b / c) / GM / c)
    # Compute theta_r using eta (the eccentric-anomaly-like variable)
    tmp1 = r * vr / np.sqrt(-2. * E)
    tmp2 = b + c - np.sqrt(b * b + r * r)
    eta = np.arctan2(tmp1, tmp2)
    thetar = eta - e * c * np.sin(eta) / (c + b)  # same as theta3
    # Compute theta_z
    psi = np.arctan2(np.cos(theta), -np.sin(theta) * r * vtheta / L)
    psi[np.abs(vtheta) <= 1e-10] = np.pi / 2.  # blows up for small vtheta
    omega_th = 0.5 * (1 + L / np.sqrt(L * L + 4 * GM * b))
    a = np.sqrt((1 + e) / (1 - e))
    ap = np.sqrt((1 + e + 2 * b / c) / (1 - e + 2 * b / c))
    def F(x, y):
        # Auxiliary angle integral; the three cases keep the arctan on the
        # correct branch of y.
        z = np.zeros_like(x)
        ix = y > np.pi / 2.
        z[ix] = np.pi / 2. - np.arctan(np.tan(np.pi / 2. - 0.5 * y[ix]) / x[ix])
        ix = y < -np.pi / 2.
        z[ix] = -np.pi / 2. + np.arctan(np.tan(np.pi / 2. + 0.5 * y[ix]) / x[ix])
        ix = (y <= np.pi / 2) & (y >= -np.pi / 2)
        z[ix] = np.arctan(x[ix] * np.tan(0.5 * y[ix]))
        return z
    A = omega_th * thetar - F(a, eta) - F(ap, eta) / np.sqrt(1 + 4 * GM * b / L / L)
    thetaz = psi + A
    LR = Lz / L
    sinu = (LR / np.sqrt(1. - LR * LR) / np.tan(theta))
    uu = np.arcsin(sinu)
    # Clamp arcsin arguments that overshoot [-1, 1] due to round-off.
    uu[sinu > 1.] = np.pi / 2.
    uu[sinu < -1.] = -np.pi / 2.
    uu[vtheta > 0.] = np.pi - uu[vtheta > 0.]
    thetap = phi - uu + np.sign(Lz) * thetaz
    angles = np.array([thetar, thetap, thetaz])
    # Wrap all angles into [0, 2*pi).
    angles = angles % (2 * np.pi)
    # Frequencies
    freqs = np.zeros_like(actions)
    omega_r = GM ** 2 / (Jr + 0.5 * (L + np.sqrt(L * L + 4 * GM * b))) ** 3
    freqs[0] = omega_r
    freqs[1] = np.sign(actions[1]) * omega_th * omega_r
    freqs[2] = omega_th * omega_r
    # Attach physical units before returning.
    a_unit = (1 * usys['angular momentum'] / usys['mass']).decompose(usys).unit
    f_unit = (1 * usys['frequency']).decompose(usys).unit
    return actions * a_unit, angles * u.radian, freqs * f_unit
|
def random_walk(self, path_length, alpha=0, rand=random.Random(), start=None):
    """Returns a truncated random walk.

    path_length: Length of the random walk.
    alpha: probability of restarts.
    rand: random number generator (the module-level default is shared).
    start: the start node of the random walk.
    """
    graph = self
    if start:
        walk_nodes = [start]
    else:
        # Sampling is uniform w.r.t V, and not w.r.t E
        walk_nodes = [rand.choice(list(graph.keys()))]
    while len(walk_nodes) < path_length:
        current = walk_nodes[-1]
        neighbors = graph[current]
        if not neighbors:
            # Dead end: truncate the walk.
            break
        if rand.random() >= alpha:
            walk_nodes.append(rand.choice(neighbors))
        else:
            # Restart at the origin of the walk.
            walk_nodes.append(walk_nodes[0])
    return [str(node) for node in walk_nodes]
|
def remove_listener(self, uid):
    """Remove listener with given uid."""
    # In-place slice assignment keeps external references to the list valid.
    self.listeners[:] = [entry for entry in self.listeners if entry['uid'] != uid]
|
def gotoItem(self, path):
    """Goes to a particular path within the XDK.

    :param path | <str>
    """
    if not path:
        return
    sections = nativestring(path).split('/')
    target = projex.text.underscore(sections[0])
    tree = self.uiContentsTREE
    for index in range(tree.topLevelItemCount()):
        item = tree.topLevelItem(index)
        # Match either the underscored label or the raw identifier column.
        if target in (projex.text.underscore(item.text(0)), item.text(1)):
            item.gotoItem('/'.join(sections[1:]))
            break
|
def _close ( self ) :
"""Close the TCP connection ."""
|
self . client . stop ( )
self . open = False
self . waiting = False
|
def handle(self, connection_id, message_content):
    """A connection must use one of the supported authorization types
    to prove their identity. If a requester deviates
    from the procedure in any way, the requester will be rejected and the
    connection will be closed. The same is true if the requester sends
    multiple ConnectionRequests or multiple of any authorization-type
    message. The validator receiving a new connection will receive a
    ConnectionRequest. The validator will respond with a ConnectionResponse
    message. The ConnectionResponse message will contain a list of
    RoleEntry messages and an AuthorizationType. Role entries are
    the accepted type of connections that are supported on the endpoint
    that the ConnectionRequest was sent to. AuthorizationType describes the
    procedure required to gain access to that role. If the validator is not
    accepting connections or does not support the listed authorization
    type, return an ConnectionResponse.ERROR and close the connection.
    """
    message = ConnectionRequest()
    message.ParseFromString(message_content)
    LOGGER.debug("got connect message from %s. sending ack", connection_id)
    # Need to use join here to get the string "0.0.0.0". Otherwise,
    # bandit thinks we are binding to all interfaces and returns a
    # Medium security risk.
    interfaces = ["*", ".".join(["0", "0", "0", "0"])]
    interfaces += netifaces.interfaces()
    # Reject peers advertising an endpoint we cannot validate.
    if self.is_valid_endpoint_host(interfaces, message.endpoint) is False:
        LOGGER.warning("Connecting peer provided an invalid endpoint: %s; "
                       "Ignoring connection request.", message.endpoint)
        connection_response = ConnectionResponse(status=ConnectionResponse.ERROR)
        return HandlerResult(
            HandlerStatus.RETURN_AND_CLOSE,
            message_out=connection_response,
            message_type=validator_pb2.Message.AUTHORIZATION_CONNECTION_RESPONSE)
    LOGGER.debug("Endpoint of connecting node is %s", message.endpoint)
    self._network.update_connection_endpoint(connection_id, message.endpoint)
    # Get what AuthorizationType the network role requires
    roles = self._network.roles
    auth_type = roles.get("network")
    if auth_type == AuthorizationType.TRUST:
        role_type = ConnectionResponse.RoleEntry(
            role=RoleType.Value("NETWORK"),
            auth_type=ConnectionResponse.TRUST)
        connection_response = ConnectionResponse(roles=[role_type])
    elif auth_type == AuthorizationType.CHALLENGE:
        role_type = ConnectionResponse.RoleEntry(
            role=RoleType.Value("NETWORK"),
            auth_type=ConnectionResponse.CHALLENGE)
        connection_response = ConnectionResponse(roles=[role_type])
    else:
        # BUG FIX: the adjacent string literals previously concatenated to
        # "...an unsupportedAuthorization Type" -- missing space added.
        LOGGER.warning("Network role is set to an unsupported "
                       "Authorization Type: %s", auth_type)
        connection_response = ConnectionResponse(status=ConnectionResponse.ERROR)
        return HandlerResult(
            HandlerStatus.RETURN_AND_CLOSE,
            message_out=connection_response,
            message_type=validator_pb2.Message.AUTHORIZATION_CONNECTION_RESPONSE)
    try:
        is_outbound_connection = self._network.is_outbound_connection(connection_id)
    except KeyError:
        # Connection has gone away, drop message
        return HandlerResult(HandlerStatus.DROP)
    if not is_outbound_connection:
        if self._network.allow_inbound_connection():
            LOGGER.debug("Allowing incoming connection: %s", connection_id)
            connection_response.status = connection_response.OK
        else:
            connection_response.status = connection_response.ERROR
            return HandlerResult(
                HandlerStatus.RETURN_AND_CLOSE,
                message_out=connection_response,
                message_type=validator_pb2.Message.AUTHORIZATION_CONNECTION_RESPONSE)
    # A second ConnectionRequest on the same connection is a protocol
    # violation: close the connection.
    if self._network.get_connection_status(connection_id) is not None:
        LOGGER.debug("Connection has already sent ConnectionRequest:"
                     " %s, Remove connection.", connection_id)
        connection_response.status = connection_response.ERROR
        return HandlerResult(
            HandlerStatus.RETURN_AND_CLOSE,
            message_out=connection_response,
            message_type=validator_pb2.Message.AUTHORIZATION_CONNECTION_RESPONSE)
    self._network.update_connection_status(connection_id, ConnectionStatus.CONNECTION_REQUEST)
    return HandlerResult(
        HandlerStatus.RETURN,
        message_out=connection_response,
        message_type=validator_pb2.Message.AUTHORIZATION_CONNECTION_RESPONSE)
|
def return_on_initial_capital(capital, period_pl, leverage=None):
    """Return the daily return series based on the capital.

    :param capital: positive starting capital
    :param period_pl: pandas Series of per-period P&L
    :param leverage: optional leverage multiplier (defaults to 1)
    :returns: Series of per-period returns
    :raises ValueError: if capital is not positive
    """
    if capital <= 0:
        # BUG FIX: the message previously said 'cost' although the parameter
        # is named capital.
        raise ValueError('capital must be a positive number not %s' % capital)
    leverage = leverage or 1.
    # End-of-period equity curve implied by the cumulative P&L.
    eod = capital + (leverage * period_pl.cumsum())
    ltd_rets = (eod / capital) - 1.
    # First period keeps the LTD return; subsequent periods chain off the
    # previous period's equity.
    dly_rets = ltd_rets.copy()
    dly_rets.iloc[1:] = (1. + ltd_rets).pct_change().iloc[1:]
    return dly_rets
|
def random_leaf(self):
    "Returns a random variable with the associated weight"
    attempts = self._number_tries_feasible_ind
    for _ in range(attempts):
        candidate_var = np.random.randint(self.nvar)
        leaf = self._random_leaf(candidate_var)
        # None signals an infeasible draw; retry with a fresh variable.
        if leaf is not None:
            return leaf
    raise RuntimeError("Could not find a suitable random leaf")
|
def formatTime(self, record, datefmt=None):
    """Format time, including milliseconds."""
    base = super(PalletFormatter, self).formatTime(record, datefmt=datefmt)
    # Append truncated milliseconds plus a literal 'Z' (UTC) suffix.
    return base + '.%03dZ' % record.msecs
|
def get_cpu_info(self) -> str:
    '''Show device CPU information.'''
    # Reads /proc/cpuinfo on the target device via adb shell.
    stdout, _stderr = self._execute('-s', self.device_sn, 'shell', 'cat', '/proc/cpuinfo')
    return stdout
|
def clear_time_value(self):
    """stub"""
    # Clearing is forbidden for read-only or required metadata.
    if (self.get_time_value_metadata().is_read_only()
            or self.get_time_value_metadata().is_required()):
        raise NoAccess()
    default_duration = self.get_time_value_metadata().get_default_duration_values()[0]
    self.my_osid_object_form._my_map['timeValue'] = dict(default_duration)
|
def join(self, other):
    """Join two headings into a new one.

    It assumes that self and other are headings that share no common dependent attributes.
    """
    # Primary key of the result: self's key first, then other's novel keys;
    # dependent attributes follow, excluding anything promoted to the key.
    attrs = [self.attributes[name].todict() for name in self.primary_key]
    attrs += [other.attributes[name].todict() for name in other.primary_key if name not in self.primary_key]
    attrs += [self.attributes[name].todict() for name in self.dependent_attributes if name not in other.primary_key]
    attrs += [other.attributes[name].todict() for name in other.dependent_attributes if name not in self.primary_key]
    return Heading(attrs)
|
def get_raw_data(self):
    """Get raw HID report based on internal report item settings,
    creates new c_ubytes storage."""
    if self.__report_kind not in (HidP_Output, HidP_Feature):
        raise HIDError("Only for output or feature reports")
    self.__prepare_raw_data()
    # return read-only object for internal storage
    return helpers.ReadOnlyList(self.__raw_data)
|
def default_font_toggled(self, settings, key, user_data):
    """If the gconf var use_default_font be changed, this method
    will be called and will change the font style to the gnome
    default or to the chosen font in style/font/style in all
    terminals open."""
    if settings.get_boolean(key):
        # Use the system-wide GNOME monospace font.
        gio_settings = Gio.Settings('org.gnome.desktop.interface')
        font_name = gio_settings.get_string('monospace-font-name')
    else:
        font_name = self.settings.styleFont.get_string('style')
    if not font_name:
        log.error("Error: unable to find font name (%s)", font_name)
        return
    font = Pango.FontDescription(font_name)
    if not font:
        log.error("Error: unable to load font (%s)", font_name)
        return
    # Apply the resolved font to every open terminal.
    for terminal in self.guake.notebook_manager.iter_terminals():
        terminal.set_font(font)
|
def typewrite(message, interval=0.0, pause=None, _pause=True):
    """Performs a keyboard key press down, followed by a release, for each of
    the characters in message.

    The message argument can also be list of strings, in which case any valid
    keyboard name can be used.

    Since this performs a sequence of keyboard presses and does not hold down
    keys, it cannot be used to perform keyboard shortcuts. Use the hotkey()
    function for that.

    Args:
      message (str, list): If a string, then the characters to be pressed. If a
        list, then the key names of the keys to press in order. The valid names
        are listed in KEYBOARD_KEYS.
      interval (float, optional): The number of seconds in between each press.
        0.0 by default, for no pause in between presses.

    Returns:
      None
    """
    delay = float(interval)
    _failSafeCheck()
    for key in message:
        # Multi-character entries are key names (e.g. 'enter'); normalize case.
        if len(key) > 1:
            key = key.lower()
        press(key, _pause=False)
        time.sleep(delay)
        _failSafeCheck()
    _autoPause(pause, _pause)
|
def sils(T, f, c, d, h):
    """sils -- LP lotsizing for the single item lot sizing problem

    Parameters:
        - T: number of periods
        - f[t]: set-up costs (on period t)
        - c[t]: variable costs
        - d[t]: demand values
        - h[t]: holding costs
    Returns a model, ready to be solved.
    """
    model = Model("single item lotsizing")
    periods = range(1, T + 1)
    # Big-M: total demand bounds any single period's production.
    M = sum(d[t] for t in periods)
    y, x, I = {}, {}, {}
    for t in periods:
        # y: set-up indicator, x: production amount, I: end-of-period stock.
        y[t] = model.addVar(vtype="I", ub=1, name="y(%s)" % t)
        x[t] = model.addVar(vtype="C", ub=M, name="x(%s)" % t)
        I[t] = model.addVar(vtype="C", name="I(%s)" % t)
    I[0] = 0
    for t in periods:
        model.addCons(x[t] <= M * y[t], "ConstrUB(%s)" % t)
        model.addCons(I[t - 1] + x[t] == I[t] + d[t], "FlowCons(%s)" % t)
    model.setObjective(quicksum(f[t] * y[t] + c[t] * x[t] + h[t] * I[t] for t in periods), "minimize")
    model.data = y, x, I
    return model
|
def retrieve_page(self, method, path, post_params=None, headers=None, status=200, username=None, password=None, *args, **kwargs):
    """Makes the actual request. This will also go through and generate the
    needed steps to make the request, i.e. basic auth.

    ``method``:
        Any supported HTTP methods defined in :rfc:`2616`.
    ``path``:
        Absolute or relative path. See :meth:`_prepare_uri` for more detail.
    ``post_params``:
        Dictionary of key/value pairs to be added as `POST` parameters.
    ``headers``:
        Dictionary of key/value pairs to be added to the HTTP headers.
    ``status``:
        Will error out if the HTTP status code does not match this value.
        Set this to `None` to disable checking.
    ``username``, ``password``:
        Username and password for basic auth; see
        :meth:`_prepare_basicauth` for more detail.

    An important note is that when ``post_params`` is specified, its
    behavior depends on the ``method``. That is, for `PUT` and `POST`
    requests, the dictionary is multipart encoded and put into the body of
    the request. For everything else, it is added as a query string to the
    URL.
    """
    # BUG FIX: the defaults were shared mutable dicts ({}); use None
    # sentinels instead. Passing {} explicitly behaves exactly as before.
    if post_params is None:
        post_params = {}
    # Copy headers so that making changes here won't affect the original
    headers = {} if headers is None else headers.copy()
    # Update basic auth information
    basicauth = self._prepare_basicauth(username, password)
    if basicauth:
        headers.update([basicauth])
    # If this is a POST or PUT, we can put the data into the body as
    # form-data encoded; otherwise, it should be part of the query string.
    if method in ["PUT", "POST"]:
        datagen, form_hdrs = poster.encode.multipart_encode(post_params)
        body = "".join(datagen)
        headers.update(form_hdrs)
        uri = self._prepare_uri(path)
    else:
        body = ""
        uri = self._prepare_uri(path, post_params)
    # Make the actual request
    response = self._make_request(uri, method, body, headers)
    # Assert that the status we received was expected.
    if status:
        real_status = int(response.status_int)
        assert real_status == int(status), "expected %s, received %s." % (status, real_status)
    return response
|
def verify(path):
    """Verify the folder file format.

    A series folder is valid only when it contains at least one file and
    every file shares the same format.
    """
    found = SeriesFolder._search_files(path)
    # Distinct file formats present in the folder (second tuple slot).
    formats = {entry[1] for entry in found}
    # Non-empty listing with exactly one distinct format is the only
    # valid configuration.
    return bool(found) and len(formats) == 1
|
def usearch_sort_by_abundance(fasta_filepath, output_filepath=None, sizein=True, sizeout=True, minsize=0, log_name="abundance_sort.log", usersort=False, HALT_EXEC=False, save_intermediate_files=False, remove_usearch_logs=False, working_dir=None):
    """Sort a fasta file by abundance via usearch --sortsize.

    fasta_filepath: input fasta file, generally a dereplicated fasta
    output_filepath: output abundance sorted fasta filepath; a temporary
        file is created when not supplied
    sizein / sizeout: pass the usearch --sizein / --sizeout flags
    minsize: minimum size of cluster to retain
    log_name: string to specify log filename
    usersort: use if not sorting by abundance or usearch will raise an error
    HALT_EXEC: used for debugging the app controller
    save_intermediate_files: preserve all intermediate files created
    remove_usearch_logs: when True, no usearch log file is requested
    working_dir: working directory for the usearch run and its log file

    Returns (app_result, output_filepath).
    """
    if not output_filepath:
        _, output_filepath = mkstemp(prefix='usearch_abundance_sorted', suffix='.fasta')
    params = {}
    app = Usearch(params, WorkingDir=working_dir, HALT_EXEC=HALT_EXEC)
    if usersort:
        app.Parameters['--usersort'].on()
    if minsize:
        app.Parameters['--minsize'].on(minsize)
    if sizein:
        app.Parameters['--sizein'].on()
    if sizeout:
        app.Parameters['--sizeout'].on()
    data = {'--sortsize': fasta_filepath, '--output': output_filepath}
    if not remove_usearch_logs:
        # Build the log path only when a log is actually requested.  The
        # original computed it unconditionally, which raised a TypeError
        # whenever working_dir was None -- even with logs disabled.
        data['--log'] = join(working_dir or '', "minsize_" + str(minsize) + "_" + log_name)
    # Can have no data following this filter step, which will raise an
    # application error; catch it here to raise a meaningful message.
    try:
        app_result = app(data)
    except ApplicationError:
        raise ValueError('No data following filter steps, please check ' + 'parameter settings for usearch_qf.')
    return app_result, output_filepath
|
def dispatch(self, *args, **kwargs):
    """Dispatch the request through the permission-restricting decorator.

    The decorator applied to this override (declared at the class level,
    outside this block) enforces restricted permissions before delegating
    to the normal ``StrainDelete`` dispatch.
    """
    return super(StrainDelete, self).dispatch(*args, **kwargs)
|
def check_input(self, token):
    """Validate the incoming token, raising if it cannot be processed.

    :param token: the token to check
    :type token: Token
    :raises Exception: if no token is supplied or its payload is not a str
    """
    if token is None:
        raise Exception(self.full_name + ": No token provided!")
    payload = token.payload
    # Only plain-string payloads are supported by this actor.
    if not isinstance(payload, str):
        raise Exception(self.full_name + ": Unhandled class: " + classes.get_classname(payload))
|
async def set_mode(self, target, *modes):
    """Set one or more modes on *target* (a channel or a user).

    Raises NotInChannel when *target* is a channel we have not joined,
    since the server would reject the MODE change anyway.

    Users should only rely on the mode actually being changed when
    receiving an on_{channel,user}_mode_change callback.
    """
    if self.is_channel(target) and not self.in_channel(target):
        raise NotInChannel(target)
    await self.rawmsg('MODE', target, *modes)
|
def perform_command(self):
    """Perform the command and return the appropriate exit code.

    This is a stub implementation; derived classes are expected to
    override it with real behaviour.

    :rtype: int
    """
    warning = u"This function should be overloaded in derived classes"
    self.log(warning)
    self.log([u"Invoked with %s", self.actual_arguments])
    return self.NO_ERROR_EXIT_CODE
|
def serve_coil_assets(path):
    """Serve a Coil asset file from the package data directory.

    This is meant to be used ONLY by the internal dev server.
    Please configure your web server to handle requests to this URL::

        /coil_assets/ => coil/data/coil_assets
    """
    asset_root = pkg_resources.resource_filename('coil', os.path.join('data', 'coil_assets'))
    return send_from_directory(asset_root, path)
|
def plot_sn_discovery_ratio_map(log, snSurveyDiscoveryTimes, redshifts, peakAppMagList, snCampaignLengthList, extraSurveyConstraints, pathToOutputPlotFolder):
    """*Plot the SN discoveries and non-discoveries in a polar plot as a function of redshift*

    **Key Arguments:**
        - ``log`` -- logger
        - ``snSurveyDiscoveryTimes`` -- per-SN dict of discovery epochs keyed by filter, plus an ``'any'`` flag
        - ``redshifts`` -- redshift of each simulated SN
        - ``peakAppMagList`` -- the list of peak magnitudes for each SN in each filter
        - ``snCampaignLengthList`` -- a list of campaign lengths in each filter (includes a ``'max'`` entry)
        - ``extraSurveyConstraints`` -- survey constraint thresholds (faint peak-magnitude limit, minimum campaign length)
        - ``pathToOutputPlotFolder`` -- path to add plots to

    **Return:**
        - ``imageLink`` -- link to the generated polar plot image
    """
    ################ > IMPORTS ################
    ## STANDARD LIB ##
    import sys
    ## THIRD PARTY ##
    import matplotlib.pyplot as plt
    import numpy as np
    ## LOCAL APPLICATION ##
    import dryxPython.plotting as dp
    filters = ['g', 'r', 'i', 'z']
    faintMagLimit = extraSurveyConstraints['Faint-Limit of Peak Magnitude']
    ################ > ACTION(S) ################
    # Discovery epochs and the matching redshifts for each outcome class.
    discovered = []
    tooFaint = []
    shortCampaign = []
    discoveredRedshift = []
    tooFaintRedshift = []
    notDiscoveredRedshift = []
    shortCampaignRedshift = []
    # log.info('len(redshifts) %s' % (len(redshifts),))
    dataDictionary = {}
    for item in range(len(redshifts)):
        if snSurveyDiscoveryTimes[item]['any'] is True:
            # Classify this SN per filter: genuinely discovered, detected
            # but too faint, or detected with too short a campaign.
            discoveryDayList = []
            faintDayList = []
            shortCampaignDayList = []
            for ffilter in filters:
                if snSurveyDiscoveryTimes[item][ffilter]:
                    # Lower magnitude = brighter; below the faint limit the
                    # detection counts towards discovery.
                    if peakAppMagList[item][ffilter] < faintMagLimit:
                        if snCampaignLengthList[item]['max'] < extraSurveyConstraints['Observable for at least ? number of days']:
                            shortCampaignDayList.append(snSurveyDiscoveryTimes[item][ffilter])
                        else:
                            discoveryDayList.append(snSurveyDiscoveryTimes[item][ffilter])
                    else:
                        faintDayList.append(snSurveyDiscoveryTimes[item][ffilter])
            # One qualifying filter is enough to count as discovered; the
            # earliest discovery epoch wins.
            if len(discoveryDayList) > 0:
                discovered.append(min(discoveryDayList))
                discoveredRedshift.append(redshifts[item])
            elif len(shortCampaignDayList) > 0:
                shortCampaign.append(min(shortCampaignDayList))
                shortCampaignRedshift.append(redshifts[item])
            else:
                tooFaint.append(min(faintDayList))
                tooFaintRedshift.append(redshifts[item])
        else:
            notDiscoveredRedshift.append(redshifts[item])
    # Only include non-empty outcome classes in the plot legend.
    if len(notDiscoveredRedshift) > 0:
        dataDictionary["Undiscovered"] = notDiscoveredRedshift
    if len(tooFaintRedshift) > 0:
        dataDictionary["Detected - too faint to constrain as transient"] = tooFaintRedshift
    if len(discoveredRedshift) > 0:
        dataDictionary["Discovered"] = discoveredRedshift
    if len(shortCampaignRedshift) > 0:
        dataDictionary["Detected - campaign to short to constrain as transient"] = shortCampaignRedshift
    # Pad the radial axis 10% beyond the highest simulated redshift.
    maxInList = max(redshifts) * 1.1
    ################ > ACTION(S) ################
    imageLink = plot_polar(log, title="Redshift Map of transients Simulated within the Survey Volume", dataDictionary=dataDictionary, pathToOutputPlotsFolder=pathToOutputPlotFolder, dataRange=False, ylabel=False, radius=maxInList, circumference=False, circleTicksRange=(0, 360, 60), circleTicksLabels=".", prependNum=False)
    return imageLink
|
def _calcOrbits ( self ) :
"""Prepares data structure for breaking data into orbits . Not intended
for end user ."""
|
# if the breaks between orbit have not been defined , define them
# also , store the data so that grabbing different orbits does not
# require reloads of whole dataset
if len ( self . _orbit_breaks ) == 0 : # determine orbit breaks
self . _detBreaks ( )
# store a copy of data
self . _fullDayData = self . sat . data . copy ( )
# set current orbit counter to zero ( default )
self . _current = 0
|
def send_audio(self, chat_id, audio, duration=None, performer=None, title=None, reply_to_message_id=None, reply_markup=None):
    """Use this method to send audio files, if you want Telegram clients to
    display them in the music player.

    Your audio must be in the .mp3 format. On success, the sent Message is
    returned. Bots can currently send audio files of up to 50 MB in size;
    this limit may be changed in the future.

    For backward compatibility, when the fields title and performer are both
    empty and the mime-type of the file to be sent is not audio/mpeg, the
    file will be sent as a playable voice message. For this to work, the
    audio must be in an .ogg file encoded with OPUS. This behavior will be
    phased out in the future. For sending voice messages, use the
    send_voice method instead.
    """
    self.logger.info('sending audio payload %s', audio)
    payload = dict(chat_id=chat_id, duration=duration, performer=performer, title=title, reply_to_message_id=reply_to_message_id, reply_markup=reply_markup)
    # Open the file in a context manager so the handle is closed once the
    # upload completes (the original leaked the open file object).
    with open(audio, 'rb') as audio_fh:
        files = dict(audio=audio_fh)
        return Message.from_api(self, **self._post('sendAudio', payload, files))
|
def get_post_data(self):
    '''Collect the POST arguments, keeping only the first value per key.'''
    # request.arguments may hold several values per key; mirror the
    # original behaviour of taking just the first one.
    return {key: self.get_arguments(key)[0] for key in self.request.arguments}
|
def is_none(self):
    """Ensures :attr:`subject` is ``None``.

    Delegates the check to :meth:`unittest.TestCase.assertIsNone` via the
    shared ``unittest_case`` instance, then returns a
    :class:`ChainInspector` wrapping the subject so further assertions can
    be chained fluently.
    """
    self._run(unittest_case.assertIsNone, (self._subject,))
    return ChainInspector(self._subject)
|
def get_share_dirname(url):
    '''Extract the current directory component from a share URL.

    Returns the value of the ``dir`` or ``path`` query parameter, or
    ``None`` when neither is present.
    '''
    decoded = encoder.decode_uri_component(url)
    match = re.search('(dir|path)=([^&]+)', decoded)
    return match.group(2) if match else None
|
def analyze(self, scratch, **kwargs):
    """Run and return the results from the DuplicateScripts plugin.

    Only scripts with more than 3 blocks are counted as duplicates;
    user-defined ("define %s") scripts are ignored entirely.
    """
    seen_signatures = set()
    for script in self.iter_scripts(scratch):
        # Ignore user-defined scripts.
        if script[0].type.text == 'define %s':
            continue
        signature = tuple(name for name, _, _ in self.iter_blocks(script.blocks))
        if signature not in seen_signatures:
            seen_signatures.add(signature)
        elif len(signature) > 3:
            # Repeat of a previously seen script that is long enough to
            # matter: record it as a duplicate.
            self.total_duplicate += 1
            self.list_duplicate.append(list(signature))
|
def find_range_in_section_list(start, end, section_list):
    """Return the start points of all sections overlapping the given range.

    The given list is assumed to contain start points of consecutive
    sections, except for the final point, assumed to be the end point of
    the last section. For example, the list [5, 8, 30, 31] is interpreted
    as the sections [5-8), [8-30), [30-31].

    Parameters
    ----------
    start : float
        The start of the desired range.
    end : float
        The end of the desired range.
    section_list : sortedcontainers.SortedList
        A list of start points of consecutive sections.

    Returns
    -------
    iterable
        The starting points of all sections belonging to the given range.

    Example
    -------
    >>> from sortedcontainers import SortedList
    >>> seclist = SortedList([5, 8, 30, 31])
    >>> find_range_in_section_list(3, 4, seclist)
    >>> find_range_in_section_list(6, 7, seclist)
    >>> find_range_in_section_list(7, 9, seclist)
    [5, 8]
    >>> find_range_in_section_list(7, 30, seclist)
    [5, 8, 30]
    >>> find_range_in_section_list(7, 321, seclist)
    [5, 8, 30]
    >>> find_range_in_section_list(4, 321, seclist)
    [5, 8, 30]
    """
    # Delegate the index arithmetic, then slice out the matching sections.
    first_ix, stop_ix = find_range_ix_in_section_list(start, end, section_list)
    return section_list[first_ix:stop_ix]
|
def import_(zone, path):
    '''Import the configuration to memory from stable storage.

    zone : string
        name of zone
    path : string
        path of file to import

    CLI Example:

    .. code-block:: bash

        salt '*' zonecfg.import epyon /zones/epyon.cfg
    '''
    # Log the file contents before handing them to zonecfg.
    _dump_cfg(path)
    res = __salt__['cmd.run_all']('zonecfg -z {zone} -f {path}'.format(zone=zone, path=path, ))
    succeeded = res['retcode'] == 0
    ret = {'status': succeeded}
    # Report stdout on success, stderr on failure; omit empty output.
    output = res['stdout'] if succeeded else res['stderr']
    if output != '':
        ret['message'] = _clean_message(output)
    return ret
|
def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
    """Open a tar archive for reading, writing or appending. Return
    an appropriate TarFile class.

    mode:
    'r' or 'r:*' open for reading with transparent compression
    'r:'         open for reading exclusively uncompressed
    'r:gz'       open for reading with gzip compression
    'r:bz2'      open for reading with bzip2 compression
    'a' or 'a:'  open for appending, creating the file if necessary
    'w' or 'w:'  open for writing without compression
    'w:gz'       open for writing with gzip compression
    'w:bz2'      open for writing with bzip2 compression

    'r|*'        open a stream of tar blocks with transparent compression
    'r|'         open an uncompressed stream of tar blocks for reading
    'r|gz'       open a gzip compressed stream of tar blocks
    'r|bz2'      open a bzip2 compressed stream of tar blocks
    'w|'         open an uncompressed stream for writing
    'w|gz'       open a gzip compressed stream for writing
    'w|bz2'      open a bzip2 compressed stream for writing
    """
    if not name and not fileobj:
        raise ValueError("nothing to open")
    if mode in ("r", "r:*"):
        # Find out which *open() is appropriate for opening the file.
        for comptype in cls.OPEN_METH:
            func = getattr(cls, cls.OPEN_METH[comptype])
            if fileobj is not None:
                # Remember the stream position so a failed attempt can be
                # rewound before the next compression type is tried.
                saved_pos = fileobj.tell()
            try:
                return func(name, "r", fileobj, **kwargs)
            except (ReadError, CompressionError) as e:
                if fileobj is not None:
                    fileobj.seek(saved_pos)
                continue
        # No opener succeeded for any known compression type.
        raise ReadError("file could not be opened successfully")
    elif ":" in mode:
        filemode, comptype = mode.split(":", 1)
        # Empty parts default to uncompressed reading.
        filemode = filemode or "r"
        comptype = comptype or "tar"
        # Select the *open() function according to
        # given compression.
        if comptype in cls.OPEN_METH:
            func = getattr(cls, cls.OPEN_METH[comptype])
        else:
            raise CompressionError("unknown compression type %r" % comptype)
        return func(name, filemode, fileobj, **kwargs)
    elif "|" in mode:
        filemode, comptype = mode.split("|", 1)
        filemode = filemode or "r"
        comptype = comptype or "tar"
        if filemode not in "rw":
            raise ValueError("mode must be 'r' or 'w'")
        # Stream mode: wrap the target in a _Stream that performs the
        # (de)compression on the fly; the TarFile then owns the stream.
        stream = _Stream(name, filemode, comptype, fileobj, bufsize)
        try:
            t = cls(name, filemode, stream, **kwargs)
        except:
            stream.close()
            raise
        # Mark the stream as internally created so close() also closes it.
        t._extfileobj = False
        return t
    elif mode in "aw":
        return cls.taropen(name, mode, fileobj, **kwargs)
    raise ValueError("undiscernible mode")
|
def updateDefinition(self, json_dict):
    """The updateDefinition operation supports updating a definition
    property in a hosted feature service. The result of this
    operation is a response indicating success or failure with error
    code and description.

    Input:
        json_dict - part to add to the hosted service. The part format can
                    be derived from the asDictionary property. For
                    layer level modifications, run updates on each
                    individual feature service layer object.
    Output:
        JSON message as dictionary
    """
    definition = None
    if json_dict is not None:
        if isinstance(json_dict, collections.OrderedDict) == True:
            # Already ordered; pass it through to the service unchanged.
            definition = json_dict
        else:
            # Rebuild as an OrderedDict so the service receives the known
            # keys in a stable order, then append any remaining keys.
            definition = collections.OrderedDict()
            if 'hasStaticData' in json_dict:
                definition['hasStaticData'] = json_dict['hasStaticData']
            if 'allowGeometryUpdates' in json_dict:
                definition['allowGeometryUpdates'] = json_dict['allowGeometryUpdates']
            if 'capabilities' in json_dict:
                definition['capabilities'] = json_dict['capabilities']
            if 'editorTrackingInfo' in json_dict:
                definition['editorTrackingInfo'] = collections.OrderedDict()
                if 'enableEditorTracking' in json_dict['editorTrackingInfo']:
                    definition['editorTrackingInfo']['enableEditorTracking'] = json_dict['editorTrackingInfo']['enableEditorTracking']
                if 'enableOwnershipAccessControl' in json_dict['editorTrackingInfo']:
                    definition['editorTrackingInfo']['enableOwnershipAccessControl'] = json_dict['editorTrackingInfo']['enableOwnershipAccessControl']
                if 'allowOthersToUpdate' in json_dict['editorTrackingInfo']:
                    definition['editorTrackingInfo']['allowOthersToUpdate'] = json_dict['editorTrackingInfo']['allowOthersToUpdate']
                if 'allowOthersToDelete' in json_dict['editorTrackingInfo']:
                    definition['editorTrackingInfo']['allowOthersToDelete'] = json_dict['editorTrackingInfo']['allowOthersToDelete']
                if 'allowOthersToQuery' in json_dict['editorTrackingInfo']:
                    definition['editorTrackingInfo']['allowOthersToQuery'] = json_dict['editorTrackingInfo']['allowOthersToQuery']
                if isinstance(json_dict['editorTrackingInfo'], dict):
                    # Copy through any editor-tracking keys not handled above.
                    for k, v in json_dict['editorTrackingInfo'].items():
                        if k not in definition['editorTrackingInfo']:
                            definition['editorTrackingInfo'][k] = v
            if isinstance(json_dict, dict):
                # Copy through any top-level keys not handled above.
                for k, v in json_dict.items():
                    if k not in definition:
                        definition[k] = v
    # The service expects compact JSON; run the update synchronously.
    params = {"f": "json", "updateDefinition": json.dumps(obj=definition, separators=(',', ':')), "async": False}
    uURL = self._url + "/updateDefinition"
    res = self._post(url=uURL, param_dict=params, securityHandler=self._securityHandler, proxy_port=self._proxy_port, proxy_url=self._proxy_url)
    # Refresh cached service metadata so subsequent reads see the change.
    self.refresh()
    return res
|
def crypto_validator(func):
    """Decorator for any method relying on the cryptography library.  # noqa: E501

    Its behaviour depends on the 'crypto_valid' attribute of the global
    'conf': when the installed cryptography version is missing or too old,
    calling the wrapped function raises ImportError instead of failing in
    an obscure way deeper in the call.
    """
    from functools import wraps

    # Preserve the wrapped function's name/docstring/signature metadata
    # for introspection and error messages (the original lost them).
    @wraps(func)
    def func_in(*args, **kwargs):
        if not conf.crypto_valid:
            raise ImportError("Cannot execute crypto-related method! " "Please install python-cryptography v1.7 or later.")  # noqa: E501
        return func(*args, **kwargs)
    return func_in
|
def set_pragmas(self, pragmas):
    """Apply *pragmas* to the current database connection.

    Parameters
    ----------
    pragmas : dict
        Dictionary of pragmas; see constants.default_pragmas for a
        template and http://www.sqlite.org/pragma.html for a full list.
    """
    self.pragmas = pragmas
    # Issue all PRAGMA statements in one script, then commit.
    statements = ['PRAGMA %s=%s' % item for item in self.pragmas.items()]
    cursor = self.conn.cursor()
    cursor.executescript(';\n'.join(statements))
    self.conn.commit()
|
def astra_projection_geometry(geometry):
    """Create an ASTRA projection geometry from an ODL geometry object.

    As of ASTRA version 1.7, the length values are not required any more to be
    rescaled for 3D geometries and non-unit (but isotropic) voxel sizes.

    Parameters
    ----------
    geometry : `Geometry`
        ODL projection geometry from which to create the ASTRA geometry.

    Returns
    -------
    proj_geom : dict
        Dictionary defining the ASTRA projection geometry.

    Raises
    ------
    TypeError
        If ``geometry`` is not a `Geometry` instance.
    ValueError
        If the detector sampling is not uniform.
    NotImplementedError
        If no matching ASTRA geometry type exists for ``geometry``.
    """
    if not isinstance(geometry, Geometry):
        raise TypeError('`geometry` {!r} is not a `Geometry` instance' ''.format(geometry))
    if 'astra' in geometry.implementation_cache:
        # Shortcut, reuse already computed value.
        return geometry.implementation_cache['astra']
    if not geometry.det_partition.is_uniform:
        raise ValueError('non-uniform detector sampling is not supported')
    # Dispatch on (beam type, detector type, dimensionality) to the
    # corresponding ASTRA geometry constructor.
    if (isinstance(geometry, ParallelBeamGeometry) and isinstance(geometry.detector, (Flat1dDetector, Flat2dDetector)) and geometry.ndim == 2):
        # TODO: change to parallel_vec when available
        det_width = geometry.det_partition.cell_sides[0]
        det_count = geometry.detector.size
        # Instead of rotating the data by 90 degrees counter-clockwise,
        # we subtract pi/2 from the geometry angles, thereby rotating the
        # geometry by 90 degrees clockwise
        angles = geometry.angles - np.pi / 2
        proj_geom = astra.create_proj_geom('parallel', det_width, det_count, angles)
    elif (isinstance(geometry, DivergentBeamGeometry) and isinstance(geometry.detector, (Flat1dDetector, Flat2dDetector)) and geometry.ndim == 2):
        det_count = geometry.detector.size
        vec = astra_conebeam_2d_geom_to_vec(geometry)
        proj_geom = astra.create_proj_geom('fanflat_vec', det_count, vec)
    elif (isinstance(geometry, ParallelBeamGeometry) and isinstance(geometry.detector, (Flat1dDetector, Flat2dDetector)) and geometry.ndim == 3):
        # Swap detector axes (see astra_*_3d_to_vec)
        det_row_count = geometry.det_partition.shape[0]
        det_col_count = geometry.det_partition.shape[1]
        vec = astra_parallel_3d_geom_to_vec(geometry)
        proj_geom = astra.create_proj_geom('parallel3d_vec', det_row_count, det_col_count, vec)
    elif (isinstance(geometry, DivergentBeamGeometry) and isinstance(geometry.detector, (Flat1dDetector, Flat2dDetector)) and geometry.ndim == 3):
        # Swap detector axes (see astra_*_3d_to_vec)
        det_row_count = geometry.det_partition.shape[0]
        det_col_count = geometry.det_partition.shape[1]
        vec = astra_conebeam_3d_geom_to_vec(geometry)
        proj_geom = astra.create_proj_geom('cone_vec', det_row_count, det_col_count, vec)
    else:
        raise NotImplementedError('unknown ASTRA geometry type {!r}' ''.format(geometry))
    if 'astra' not in geometry.implementation_cache:
        # Save computed value for later
        geometry.implementation_cache['astra'] = proj_geom
    return proj_geom
|
def compat_serializer_attr(serializer, obj):
    """Work around DRF 3.1 not exposing dynamically added attributes on *obj*.

    Required only for DRF 3.1, which does not make a dynamically added
    attribute available on ``obj`` in the serializer: the instance carrying
    the attribute must be looked up again in ``serializer.instance`` by id.
    On every other DRF version, ``obj`` already carries the attribute and is
    returned as-is. This is a quick solution but works without breaking
    anything.

    NOTE(review): on the DRF 3.1 path, if no instance with a matching id is
    found, control falls through and ``None`` is returned implicitly --
    confirm that callers tolerate that.
    """
    if DRFVLIST[0] == 3 and DRFVLIST[1] == 1:
        for i in serializer.instance:
            if i.id == obj.id:
                return i
    else:
        return obj
|
def get_bip32_address(self, ecdh=False):
    """Compute the BIP32 derivation address according to SLIP-0013/0017."""
    # SLIP-0017 (ECDH) uses purpose 17; SLIP-0013 (identity) uses 13.
    purpose = 17 if bool(ecdh) else 13
    index_bytes = struct.pack('<L', self.identity_dict.get('index', 0))
    addr = index_bytes + self.to_bytes()
    log.debug('bip32 address string: %r', addr)
    digest = hashlib.sha256(addr).digest()
    stream = io.BytesIO(bytearray(digest))
    # Path = purpose followed by four little-endian uint32s of the digest.
    address_n = [purpose] + list(util.recv(stream, '<LLLL'))
    # Harden every path component.
    hardened = 0x80000000
    return [hardened | value for value in address_n]
|
def get_hist(rfile, histname, get_overflow=False):
    """Read a 1D histogram and return its (bin values, bin edges)."""
    import root_numpy as rnp
    handle = open_rfile(rfile)
    hist = handle[histname]
    edges = np.array(list(hist.xedges()))
    values = rnp.hist2array(hist, include_overflow=get_overflow)
    # Close the ROOT file before returning the extracted arrays.
    handle.close()
    return values, edges
|
def plotPointing(self, maptype=None, colour='b', mod3='r', showOuts=True, **kwargs):
    """Plot the FOV footprint on the given map.

    maptype: map/projection object with a ``plot`` method; defaults to
        ``self.defaultMap``.
    colour: line colour for healthy channels.
    mod3: line colour used for channels listed in ``self.brokenChannels``.
    showOuts: when True, mark the origin of the col/row coordinates of
        each channel with a filled circle.
    Any extra keyword arguments are forwarded to ``maptype.plot``.
    """
    if maptype is None:
        maptype = self.defaultMap
    radec = self.currentRaDec
    for ch in radec[:, 2][::4]:
        # np.int was removed in NumPy 1.24; the builtin int is the
        # supported equivalent for the cast.
        idx = np.where(radec[:, 2].astype(int) == ch)[0]
        # Repeat the first corner so the plotted outline closes into a box.
        idx = np.append(idx, idx[0])
        c = colour
        if ch in self.brokenChannels:
            c = mod3
        maptype.plot(radec[idx, 3], radec[idx, 4], '-', color=c, **kwargs)
        # Show the origin of the col and row coords for this channel.
        if showOuts:
            maptype.plot(radec[idx[0], 3], radec[idx[0], 4], 'o', color=c)
|
def get_correctness_for_response(self, response):
    """Return the measure of correctness available for *response*.

    A matching right answer scores via ``get_score`` (default 100 when the
    answer has no score); a matching wrong answer scores via ``get_score``
    (default 0); anything unmatched scores 0.
    """
    _MISSING = object()

    def first_match_score(candidates, fallback):
        # Score of the first candidate matching the response, or the
        # sentinel when none match.
        for candidate in candidates:
            if self._is_match(response, candidate):
                try:
                    return candidate.get_score()
                except AttributeError:
                    return fallback
        return _MISSING

    score = first_match_score(self.my_osid_object.get_answers(), 100)
    if score is _MISSING:
        score = first_match_score(self.my_osid_object.get_wrong_answers(), 0)
    return 0 if score is _MISSING else score
|
async def open_clients_async(self):
    """Establish the connection to the event hub client.

    Throws EventHubsException, IOException, InterruptedException,
    ExecutionException.
    """
    # Resolve the checkpointed offset first so the receiver resumes from
    # where the previous host left off.
    await self.partition_context.get_initial_offset_async()
    # Create event hub client and receive handler and set options
    self.eh_client = EventHubClientAsync(self.host.eh_config.client_address, debug=self.host.eph_options.debug_trace, http_proxy=self.host.eph_options.http_proxy)
    self.partition_receive_handler = self.eh_client.add_async_receiver(self.partition_context.consumer_group_name, self.partition_context.partition_id, Offset(self.partition_context.offset), prefetch=self.host.eph_options.prefetch_count, keep_alive=self.host.eph_options.keep_alive_interval, auto_reconnect=self.host.eph_options.auto_reconnect_on_error, loop=self.loop)
    self.partition_receiver = PartitionReceiver(self)
|
def colon_subscripts(u):
    """Disambiguate array colon subscripts from colon range expressions.

    Array colon subscripts ``foo(1:10)`` and colon expressions ``1:10``
    parse alike; inside array/cell-array references, rewrite the ``:``
    operator as ``::`` to mark it as a subscript colon.
    """
    if u.__class__ not in (node.arrayref, node.cellarrayref):
        return
    for arg in u.args:
        if arg.__class__ is node.expr and arg.op == ":":
            arg._replace(op="::")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.